diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 806949d81e..81996d5432 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -231,6 +231,8 @@ jobs: with: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Install Rust toolchain" run: | rustup component add clippy @@ -251,20 +253,23 @@ jobs: with: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + shared-key: ruff-linux-debug + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Install Rust toolchain" run: rustup show - name: "Install mold" uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1 - name: "Install cargo nextest" - uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21 + uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49 with: tool: cargo-nextest - name: "Install cargo insta" - uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21 + uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49 with: tool: cargo-insta - name: "Install uv" - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 with: enable-cache: "true" - name: ty mdtests (GitHub annotations) @@ -277,8 +282,8 @@ jobs: run: cargo test -p ty_python_semantic --test mdtest || true - name: "Run tests" run: cargo insta test --all-features --unreferenced reject --test-runner nextest - # Dogfood ty on py-fuzzer - - run: uv run --project=./python/py-fuzzer cargo run -p ty check --project=./python/py-fuzzer + - name: Dogfood ty on py-fuzzer + run: uv run --project=./python/py-fuzzer cargo run -p ty check --project=./python/py-fuzzer # Check for broken links in the documentation. 
- run: cargo doc --all --no-deps env: @@ -291,14 +296,6 @@ jobs: env: # Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025). RUSTDOCFLAGS: "-D warnings" - - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 - with: - name: ruff - path: target/debug/ruff - - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 - with: - name: ty - path: target/debug/ty cargo-test-linux-release: name: "cargo test (linux, release)" @@ -315,20 +312,22 @@ jobs: with: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Install Rust toolchain" run: rustup show - name: "Install mold" uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1 - name: "Install cargo nextest" - uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21 + uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49 with: tool: cargo-nextest - name: "Install cargo insta" - uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21 + uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49 with: tool: cargo-insta - name: "Install uv" - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 with: enable-cache: "true" - name: "Run tests" @@ -350,14 +349,16 @@ jobs: with: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Install Rust toolchain" run: rustup show - name: "Install cargo nextest" - uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21 + uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49 with: 
tool: cargo-nextest - name: "Install uv" - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 with: enable-cache: "true" - name: "Run tests" @@ -376,9 +377,11 @@ jobs: with: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Install Rust toolchain" run: rustup target add wasm32-unknown-unknown - - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 + - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 with: node-version: 22 cache: "npm" @@ -411,6 +414,8 @@ jobs: file: "Cargo.toml" field: "workspace.package.rust-version" - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Install Rust toolchain" env: MSRV: ${{ steps.msrv.outputs.value }} @@ -435,10 +440,13 @@ jobs: - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 with: workspaces: "fuzz -> target" + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Install Rust toolchain" run: rustup show + - name: "Install mold" + uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1 - name: "Install cargo-binstall" - uses: cargo-bins/cargo-binstall@afcf9780305558bcc9e4bc94b7589ab2bb8b6106 # v1.15.9 + uses: cargo-bins/cargo-binstall@ae04fb5e853ae6cd3ad7de4a1d554a8b646d12aa # v1.15.11 - name: "Install cargo-fuzz" # Download the latest version from quick install and not the github releases because github releases only has MUSL targets. 
run: cargo binstall cargo-fuzz --force --disable-strategies crate-meta-data --no-confirm @@ -447,9 +455,7 @@ jobs: fuzz-parser: name: "fuzz parser" runs-on: ubuntu-latest - needs: - - cargo-test-linux - - determine_changes + needs: determine_changes if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.parser == 'true' || needs.determine_changes.outputs.py-fuzzer == 'true') }} timeout-minutes: 20 env: @@ -458,27 +464,24 @@ jobs: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: persist-credentials: false - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 - - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 - name: Download Ruff binary to test - id: download-cached-binary + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 + - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 with: - name: ruff - path: ruff-to-test + shared-key: ruff-linux-debug + save-if: false + - name: "Install Rust toolchain" + run: rustup show + - name: Build Ruff binary + run: cargo build --bin ruff - name: Fuzz - env: - DOWNLOAD_PATH: ${{ steps.download-cached-binary.outputs.download-path }} run: | - # Make executable, since artifact download doesn't preserve this - chmod +x "${DOWNLOAD_PATH}/ruff" - ( uv run \ --python="${PYTHON_VERSION}" \ --project=./python/py-fuzzer \ --locked \ fuzz \ - --test-executable="${DOWNLOAD_PATH}/ruff" \ + --test-executable=target/debug/ruff \ --bin=ruff \ 0-500 ) @@ -494,7 +497,9 @@ jobs: with: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - name: "Install Rust toolchain" run: rustup component add 
rustfmt # Run all code generation scripts, and verify that the current output is @@ -518,9 +523,7 @@ jobs: ecosystem: name: "ecosystem" runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-latest-8' || 'ubuntu-latest' }} - needs: - - cargo-test-linux - - determine_changes + needs: determine_changes # Only runs on pull requests, since that is the only we way we can find the base version for comparison. # Ecosystem check needs linter and/or formatter changes. if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }} @@ -528,26 +531,37 @@ jobs: steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: + ref: ${{ github.event.pull_request.base.ref }} persist-credentials: false - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 with: python-version: ${{ env.PYTHON_VERSION }} activate-environment: true - - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 - name: Download comparison Ruff binary - id: ruff-target - with: - name: ruff - path: target/debug + - name: "Install Rust toolchain" + run: rustup show - - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 - name: Download baseline Ruff binary + - name: "Install mold" + uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1 + + - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 with: - name: ruff - branch: ${{ github.event.pull_request.base.ref }} - workflow: "ci.yaml" - check_artifacts: true + shared-key: ruff-linux-debug + save-if: false + + - name: Build baseline version + run: | + cargo build --bin ruff + mv target/debug/ruff target/debug/ruff-baseline + + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + 
persist-credentials: false + clean: false + + - name: Build comparison version + run: cargo build --bin ruff - name: Install ruff-ecosystem run: | @@ -555,16 +569,11 @@ jobs: - name: Run `ruff check` stable ecosystem check if: ${{ needs.determine_changes.outputs.linter == 'true' }} - env: - DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }} run: | - # Make executable, since artifact download doesn't preserve this - chmod +x ./ruff "${DOWNLOAD_PATH}/ruff" - # Set pipefail to avoid hiding errors with tee set -eo pipefail - ruff-ecosystem check ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable + ruff-ecosystem check ./target/debug/ruff-baseline ./target/debug/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable cat ecosystem-result-check-stable > "$GITHUB_STEP_SUMMARY" echo "### Linter (stable)" > ecosystem-result @@ -573,16 +582,11 @@ jobs: - name: Run `ruff check` preview ecosystem check if: ${{ needs.determine_changes.outputs.linter == 'true' }} - env: - DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }} run: | - # Make executable, since artifact download doesn't preserve this - chmod +x ./ruff "${DOWNLOAD_PATH}/ruff" - # Set pipefail to avoid hiding errors with tee set -eo pipefail - ruff-ecosystem check ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview + ruff-ecosystem check ./target/debug/ruff-baseline ./target/debug/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview cat ecosystem-result-check-preview > "$GITHUB_STEP_SUMMARY" echo "### Linter (preview)" >> ecosystem-result @@ -591,16 +595,11 @@ jobs: - name: Run `ruff format` stable ecosystem check if: ${{ needs.determine_changes.outputs.formatter == 'true' }} - env: - DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }} run: | - # Make executable, since 
artifact download doesn't preserve this - chmod +x ./ruff "${DOWNLOAD_PATH}/ruff" - # Set pipefail to avoid hiding errors with tee set -eo pipefail - ruff-ecosystem format ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable + ruff-ecosystem format ./target/debug/ruff-baseline ./target/debug/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable cat ecosystem-result-format-stable > "$GITHUB_STEP_SUMMARY" echo "### Formatter (stable)" >> ecosystem-result @@ -609,32 +608,19 @@ jobs: - name: Run `ruff format` preview ecosystem check if: ${{ needs.determine_changes.outputs.formatter == 'true' }} - env: - DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }} run: | - # Make executable, since artifact download doesn't preserve this - chmod +x ./ruff "${DOWNLOAD_PATH}/ruff" - # Set pipefail to avoid hiding errors with tee set -eo pipefail - ruff-ecosystem format ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview + ruff-ecosystem format ./target/debug/ruff-baseline ./target/debug/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview cat ecosystem-result-format-preview > "$GITHUB_STEP_SUMMARY" echo "### Formatter (preview)" >> ecosystem-result cat ecosystem-result-format-preview >> ecosystem-result echo "" >> ecosystem-result - - name: Export pull request number - run: | - echo ${{ github.event.number }} > pr-number - - - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 - name: Upload PR Number - with: - name: pr-number - path: pr-number - + # NOTE: astral-sh-bot uses this artifact to post comments on PRs. + # Make sure to update the bot if you rename the artifact. 
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 name: Upload Results with: @@ -645,36 +631,38 @@ jobs: name: "Fuzz for new ty panics" runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-16' || 'ubuntu-latest' }} needs: - - cargo-test-linux - determine_changes # Only runs on pull requests, since that is the only we way we can find the base version for comparison. if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && (needs.determine_changes.outputs.ty == 'true' || needs.determine_changes.outputs.py-fuzzer == 'true') }} - timeout-minutes: ${{ github.repository == 'astral-sh/ruff' && 5 || 20 }} + timeout-minutes: ${{ github.repository == 'astral-sh/ruff' && 10 || 20 }} steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: + fetch-depth: 0 persist-credentials: false - - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 - name: Download new ty binary - id: ty-new + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 + - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 with: - name: ty - path: target/debug - - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 - name: Download baseline ty binary - with: - name: ty - branch: ${{ github.event.pull_request.base.ref }} - workflow: "ci.yaml" - check_artifacts: true - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + save-if: ${{ github.ref == 'refs/heads/main' }} + - name: "Install Rust toolchain" + run: rustup show + - name: "Install mold" + uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1 - name: Fuzz env: FORCE_COLOR: 1 - NEW_TY: ${{ steps.ty-new.outputs.download-path }} run: | - # Make executable, since artifact download doesn't preserve this - chmod +x "${PWD}/ty" "${NEW_TY}/ty" + echo "new commit" + git 
rev-list --format=%s --max-count=1 "$GITHUB_SHA" + cargo build --profile=profiling --bin=ty + mv target/profiling/ty ty-new + + MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")" + git checkout -b old_commit "$MERGE_BASE" + echo "old commit (merge base)" + git rev-list --format=%s --max-count=1 old_commit + cargo build --profile=profiling --bin=ty + mv target/profiling/ty ty-old ( uv run \ @@ -682,8 +670,8 @@ jobs: --project=./python/py-fuzzer \ --locked \ fuzz \ - --test-executable="${NEW_TY}/ty" \ - --baseline-executable="${PWD}/ty" \ + --test-executable=ty-new \ + --baseline-executable=ty-old \ --only-new-bugs \ --bin=ty \ 0-1000 @@ -698,7 +686,7 @@ jobs: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: persist-credentials: false - - uses: cargo-bins/cargo-binstall@afcf9780305558bcc9e4bc94b7589ab2bb8b6106 # v1.15.9 + - uses: cargo-bins/cargo-binstall@ae04fb5e853ae6cd3ad7de4a1d554a8b646d12aa # v1.15.11 - run: cargo binstall --no-confirm cargo-shear - run: cargo shear @@ -711,12 +699,16 @@ jobs: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: persist-credentials: false - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Install Rust toolchain" run: rustup show + - name: "Install mold" + uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1 - name: "Run ty completion evaluation" - run: cargo run --release --package ty_completion_eval -- all --threshold 0.4 --tasks /tmp/completion-evaluation-tasks.csv + run: cargo run --profile profiling --package ty_completion_eval -- all --threshold 0.4 --tasks /tmp/completion-evaluation-tasks.csv - name: "Ensure there are no changes" run: diff 
./crates/ty_completion_eval/completion-evaluation-tasks.csv /tmp/completion-evaluation-tasks.csv @@ -734,6 +726,8 @@ jobs: python-version: ${{ env.PYTHON_VERSION }} architecture: x64 - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Prep README.md" run: python scripts/transform_readme.py --target pypi - name: "Build wheels" @@ -756,9 +750,11 @@ jobs: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: persist-credentials: false - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 - - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} + - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 with: node-version: 22 - name: "Cache pre-commit" @@ -788,6 +784,8 @@ jobs: with: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Add SSH key" if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }} uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1 @@ -796,7 +794,7 @@ jobs: - name: "Install Rust toolchain" run: rustup show - name: Install uv - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 with: python-version: 3.13 activate-environment: true @@ -830,6 +828,8 @@ jobs: with: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Install Rust toolchain" run: rustup show - name: "Run checks" @@ -843,9 +843,7 @@ 
jobs: name: "test ruff-lsp" runs-on: ubuntu-latest timeout-minutes: 5 - needs: - - cargo-test-linux - - determine_changes + needs: determine_changes if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }} steps: - uses: extractions/setup-just@e33e0265a09d6d736e2ee1e0eb685ef1de4669ff # v3.0.0 @@ -853,37 +851,46 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - name: "Download ruff-lsp source" + name: "Checkout ruff source" + with: + persist-credentials: false + + - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + shared-key: ruff-linux-debug + save-if: false + + - name: "Install Rust toolchain" + run: rustup show + + - name: Build Ruff binary + run: cargo build -p ruff --bin ruff + + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + name: "Checkout ruff-lsp source" with: persist-credentials: false repository: "astral-sh/ruff-lsp" + path: ruff-lsp - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: # installation fails on 3.13 and newer python-version: "3.12" - - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 - name: Download development ruff binary - id: ruff-target - with: - name: ruff - path: target/debug - - name: Install ruff-lsp dependencies run: | + cd ruff-lsp just install - name: Run ruff-lsp tests - env: - DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }} run: | # Setup development binary pip uninstall --yes ruff - chmod +x "${DOWNLOAD_PATH}/ruff" - export PATH="${DOWNLOAD_PATH}:${PATH}" + export PATH="${PWD}/target/debug:${PATH}" ruff version + cd ruff-lsp just test check-playground: @@ -900,7 +907,9 @@ jobs: - name: "Install Rust toolchain" run: rustup target add wasm32-unknown-unknown - uses: 
Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 - - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} + - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 with: node-version: 22 cache: "npm" @@ -938,21 +947,23 @@ jobs: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - name: "Install Rust toolchain" run: rustup show - name: "Install codspeed" - uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21 + uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49 with: tool: cargo-codspeed - name: "Build benchmarks" - run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark --bench formatter --bench lexer --bench linter --bench parser + run: cargo codspeed build --features "codspeed,instrumented" --profile profiling --no-default-features -p ruff_benchmark --bench formatter --bench lexer --bench linter --bench parser - name: "Run benchmarks" - uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1 + uses: CodSpeedHQ/action@bb005fe1c1eea036d3894f02c049cb6b154a1c27 # v4.3.3 with: mode: instrumentation run: cargo codspeed run @@ -976,21 +987,23 @@ jobs: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - name: "Install Rust toolchain" run: rustup show - name: "Install codspeed" - uses: 
taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21 + uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49 with: tool: cargo-codspeed - name: "Build benchmarks" - run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark --bench ty + run: cargo codspeed build --features "codspeed,instrumented" --profile profiling --no-default-features -p ruff_benchmark --bench ty - name: "Run benchmarks" - uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1 + uses: CodSpeedHQ/action@bb005fe1c1eea036d3894f02c049cb6b154a1c27 # v4.3.3 with: mode: instrumentation run: cargo codspeed run @@ -1014,21 +1027,23 @@ jobs: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - name: "Install Rust toolchain" run: rustup show - name: "Install codspeed" - uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21 + uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49 with: tool: cargo-codspeed - name: "Build benchmarks" - run: cargo codspeed build --features "codspeed,walltime" --no-default-features -p ruff_benchmark + run: cargo codspeed build --features "codspeed,walltime" --profile profiling --no-default-features -p ruff_benchmark - name: "Run benchmarks" - uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1 + uses: CodSpeedHQ/action@bb005fe1c1eea036d3894f02c049cb6b154a1c27 # v4.3.3 env: # enabling walltime flamegraphs adds ~6 minutes to the CI time, and they don't # appear to provide much useful insight for our walltime benchmarks right now diff --git a/.github/workflows/daily_fuzz.yaml b/.github/workflows/daily_fuzz.yaml index 
c299e00fda..171f0c481a 100644 --- a/.github/workflows/daily_fuzz.yaml +++ b/.github/workflows/daily_fuzz.yaml @@ -34,7 +34,7 @@ jobs: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: persist-credentials: false - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - name: "Install Rust toolchain" run: rustup show - name: "Install mold" diff --git a/.github/workflows/mypy_primer.yaml b/.github/workflows/mypy_primer.yaml index 672a038537..4e292823e4 100644 --- a/.github/workflows/mypy_primer.yaml +++ b/.github/workflows/mypy_primer.yaml @@ -43,7 +43,7 @@ jobs: persist-credentials: false - name: Install the latest version of uv - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 with: @@ -59,20 +59,15 @@ jobs: run: | cd ruff scripts/mypy_primer.sh - echo ${{ github.event.number }} > ../pr-number + # NOTE: astral-sh-bot uses this artifact to post comments on PRs. + # Make sure to update the bot if you rename the artifact. 
- name: Upload diff uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: mypy_primer_diff path: mypy_primer.diff - - name: Upload pr-number - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 - with: - name: pr-number - path: pr-number - memory_usage: name: Run memory statistics runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }} @@ -85,7 +80,7 @@ jobs: persist-credentials: false - name: Install the latest version of uv - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 with: diff --git a/.github/workflows/mypy_primer_comment.yaml b/.github/workflows/mypy_primer_comment.yaml deleted file mode 100644 index 895956e766..0000000000 --- a/.github/workflows/mypy_primer_comment.yaml +++ /dev/null @@ -1,122 +0,0 @@ -name: PR comment (mypy_primer) - -on: # zizmor: ignore[dangerous-triggers] - workflow_run: - workflows: [Run mypy_primer] - types: [completed] - workflow_dispatch: - inputs: - workflow_run_id: - description: The mypy_primer workflow that triggers the workflow run - required: true - -jobs: - comment: - runs-on: ubuntu-24.04 - permissions: - pull-requests: write - steps: - - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 - name: Download PR number - with: - name: pr-number - run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }} - if_no_artifact_found: ignore - allow_forks: true - - - name: Parse pull request number - id: pr-number - run: | - if [[ -f pr-number ]] - then - echo "pr-number=$(> "$GITHUB_OUTPUT" - fi - - - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 - name: "Download mypy_primer results" - id: download-mypy_primer_diff - if: 
steps.pr-number.outputs.pr-number - with: - name: mypy_primer_diff - workflow: mypy_primer.yaml - pr: ${{ steps.pr-number.outputs.pr-number }} - path: pr/mypy_primer_diff - workflow_conclusion: completed - if_no_artifact_found: ignore - allow_forks: true - - - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 - name: "Download mypy_primer memory results" - id: download-mypy_primer_memory_diff - if: steps.pr-number.outputs.pr-number - with: - name: mypy_primer_memory_diff - workflow: mypy_primer.yaml - pr: ${{ steps.pr-number.outputs.pr-number }} - path: pr/mypy_primer_memory_diff - workflow_conclusion: completed - if_no_artifact_found: ignore - allow_forks: true - - - name: Generate comment content - id: generate-comment - if: ${{ steps.download-mypy_primer_diff.outputs.found_artifact == 'true' && steps.download-mypy_primer_memory_diff.outputs.found_artifact == 'true' }} - run: | - # Guard against malicious mypy_primer results that symlink to a secret - # file on this runner - if [[ -L pr/mypy_primer_diff/mypy_primer.diff ]] || [[ -L pr/mypy_primer_memory_diff/mypy_primer_memory.diff ]] - then - echo "Error: mypy_primer.diff and mypy_primer_memory.diff cannot be a symlink" - exit 1 - fi - - # Note this identifier is used to find the comment to update on - # subsequent runs - echo '' >> comment.txt - - echo '## `mypy_primer` results' >> comment.txt - if [ -s "pr/mypy_primer_diff/mypy_primer.diff" ]; then - echo '
' >> comment.txt - echo 'Changes were detected when running on open source projects' >> comment.txt - echo '' >> comment.txt - echo '```diff' >> comment.txt - cat pr/mypy_primer_diff/mypy_primer.diff >> comment.txt - echo '```' >> comment.txt - echo '
' >> comment.txt - else - echo 'No ecosystem changes detected ✅' >> comment.txt - fi - - if [ -s "pr/mypy_primer_memory_diff/mypy_primer_memory.diff" ]; then - echo '
' >> comment.txt - echo 'Memory usage changes were detected when running on open source projects' >> comment.txt - echo '' >> comment.txt - echo '```diff' >> comment.txt - cat pr/mypy_primer_memory_diff/mypy_primer_memory.diff >> comment.txt - echo '```' >> comment.txt - echo '
' >> comment.txt - else - echo 'No memory usage changes detected ✅' >> comment.txt - fi - - echo 'comment<> "$GITHUB_OUTPUT" - cat comment.txt >> "$GITHUB_OUTPUT" - echo 'EOF' >> "$GITHUB_OUTPUT" - - - name: Find existing comment - uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0 - if: steps.generate-comment.outcome == 'success' - id: find-comment - with: - issue-number: ${{ steps.pr-number.outputs.pr-number }} - comment-author: "github-actions[bot]" - body-includes: "" - - - name: Create or update comment - if: steps.find-comment.outcome == 'success' - uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 - with: - comment-id: ${{ steps.find-comment.outputs.comment-id }} - issue-number: ${{ steps.pr-number.outputs.pr-number }} - body-path: comment.txt - edit-mode: replace diff --git a/.github/workflows/pr-comment.yaml b/.github/workflows/pr-comment.yaml deleted file mode 100644 index 0ef00644c1..0000000000 --- a/.github/workflows/pr-comment.yaml +++ /dev/null @@ -1,88 +0,0 @@ -name: Ecosystem check comment - -on: - workflow_run: - workflows: [CI] - types: [completed] - workflow_dispatch: - inputs: - workflow_run_id: - description: The ecosystem workflow that triggers the workflow run - required: true - -jobs: - comment: - runs-on: ubuntu-latest - permissions: - pull-requests: write - steps: - - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 - name: Download pull request number - with: - name: pr-number - run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }} - if_no_artifact_found: ignore - allow_forks: true - - - name: Parse pull request number - id: pr-number - run: | - if [[ -f pr-number ]] - then - echo "pr-number=$(> "$GITHUB_OUTPUT" - fi - - - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 - name: "Download ecosystem results" - id: download-ecosystem-result - if: 
steps.pr-number.outputs.pr-number - with: - name: ecosystem-result - workflow: ci.yaml - pr: ${{ steps.pr-number.outputs.pr-number }} - path: pr/ecosystem - workflow_conclusion: completed - if_no_artifact_found: ignore - allow_forks: true - - - name: Generate comment content - id: generate-comment - if: steps.download-ecosystem-result.outputs.found_artifact == 'true' - run: | - # Guard against malicious ecosystem results that symlink to a secret - # file on this runner - if [[ -L pr/ecosystem/ecosystem-result ]] - then - echo "Error: ecosystem-result cannot be a symlink" - exit 1 - fi - - # Note this identifier is used to find the comment to update on - # subsequent runs - echo '' >> comment.txt - - echo '## `ruff-ecosystem` results' >> comment.txt - cat pr/ecosystem/ecosystem-result >> comment.txt - echo "" >> comment.txt - - echo 'comment<> "$GITHUB_OUTPUT" - cat comment.txt >> "$GITHUB_OUTPUT" - echo 'EOF' >> "$GITHUB_OUTPUT" - - - name: Find existing comment - uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0 - if: steps.generate-comment.outcome == 'success' - id: find-comment - with: - issue-number: ${{ steps.pr-number.outputs.pr-number }} - comment-author: "github-actions[bot]" - body-includes: "" - - - name: Create or update comment - if: steps.find-comment.outcome == 'success' - uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 - with: - comment-id: ${{ steps.find-comment.outputs.comment-id }} - issue-number: ${{ steps.pr-number.outputs.pr-number }} - body-path: comment.txt - edit-mode: replace diff --git a/.github/workflows/publish-playground.yml b/.github/workflows/publish-playground.yml index 24bf4b4fef..8986a6d130 100644 --- a/.github/workflows/publish-playground.yml +++ b/.github/workflows/publish-playground.yml @@ -31,7 +31,7 @@ jobs: persist-credentials: false - name: "Install Rust toolchain" run: rustup target add wasm32-unknown-unknown - - uses: 
actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 + - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 with: node-version: 22 package-manager-cache: false diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml index e5473f80a3..5bfeee7f5b 100644 --- a/.github/workflows/publish-pypi.yml +++ b/.github/workflows/publish-pypi.yml @@ -22,7 +22,7 @@ jobs: id-token: write steps: - name: "Install uv" - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 with: pattern: wheels-* diff --git a/.github/workflows/publish-ty-playground.yml b/.github/workflows/publish-ty-playground.yml index f28086517c..a745e80794 100644 --- a/.github/workflows/publish-ty-playground.yml +++ b/.github/workflows/publish-ty-playground.yml @@ -35,7 +35,7 @@ jobs: persist-credentials: false - name: "Install Rust toolchain" run: rustup target add wasm32-unknown-unknown - - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 + - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 with: node-version: 22 package-manager-cache: false diff --git a/.github/workflows/publish-wasm.yml b/.github/workflows/publish-wasm.yml index a51c888286..a0c6226406 100644 --- a/.github/workflows/publish-wasm.yml +++ b/.github/workflows/publish-wasm.yml @@ -45,7 +45,7 @@ jobs: jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json mv /tmp/package.json crates/ruff_wasm/pkg - run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg - - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 + - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 with: node-version: 22 registry-url: 
"https://registry.npmjs.org" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e2a385715f..10730750a5 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -68,7 +68,7 @@ jobs: # we specify bash to get pipefail; it guards against the `curl` command # failing. otherwise `sh` won't catch that `curl` returned non-0 shell: bash - run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.0/cargo-dist-installer.sh | sh" + run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.2/cargo-dist-installer.sh | sh" - name: Cache dist uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: @@ -166,8 +166,8 @@ jobs: - custom-build-binaries - custom-build-docker - build-global-artifacts - # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine) - if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }} + # Only run if we're "publishing", and only if plan, local and global didn't fail (skipped is fine) + if: ${{ always() && needs.plan.result == 'success' && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }} env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} runs-on: "depot-ubuntu-latest-4" diff --git a/.github/workflows/sync_typeshed.yaml 
b/.github/workflows/sync_typeshed.yaml index f7bb4c5426..cadb98cc0b 100644 --- a/.github/workflows/sync_typeshed.yaml +++ b/.github/workflows/sync_typeshed.yaml @@ -77,7 +77,7 @@ jobs: run: | git config --global user.name typeshedbot git config --global user.email '<>' - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - name: Sync typeshed stubs run: | rm -rf "ruff/${VENDORED_TYPESHED}" @@ -131,7 +131,7 @@ jobs: with: persist-credentials: true ref: ${{ env.UPSTREAM_BRANCH}} - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - name: Setup git run: | git config --global user.name typeshedbot @@ -170,7 +170,7 @@ jobs: with: persist-credentials: true ref: ${{ env.UPSTREAM_BRANCH}} - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - name: Setup git run: | git config --global user.name typeshedbot @@ -207,12 +207,12 @@ jobs: uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1 - name: "Install cargo nextest" if: ${{ success() }} - uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21 + uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49 with: tool: cargo-nextest - name: "Install cargo insta" if: ${{ success() }} - uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21 + uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49 with: tool: cargo-insta - name: Update snapshots diff --git a/.github/workflows/ty-ecosystem-analyzer.yaml b/.github/workflows/ty-ecosystem-analyzer.yaml index a59cc6c947..417cf7d1f5 100644 --- a/.github/workflows/ty-ecosystem-analyzer.yaml +++ b/.github/workflows/ty-ecosystem-analyzer.yaml @@ -33,7 +33,7 @@ 
jobs: persist-credentials: false - name: Install the latest version of uv - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 with: enable-cache: true # zizmor: ignore[cache-poisoning] acceptable risk for CloudFlare pages artifact @@ -112,8 +112,6 @@ jobs: cat diff-statistics.md >> "$GITHUB_STEP_SUMMARY" - echo ${{ github.event.number }} > pr-number - - name: "Deploy to Cloudflare Pages" if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }} id: deploy @@ -131,18 +129,14 @@ jobs: echo >> comment.md echo "**[Full report with detailed diff]($DEPLOYMENT_URL/diff)** ([timing results]($DEPLOYMENT_URL/timing))" >> comment.md + # NOTE: astral-sh-bot uses this artifact to post comments on PRs. + # Make sure to update the bot if you rename the artifact. - name: Upload comment uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: comment.md path: comment.md - - name: Upload pr-number - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 - with: - name: pr-number - path: pr-number - - name: Upload diagnostics diff uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: diff --git a/.github/workflows/ty-ecosystem-analyzer_comment.yaml b/.github/workflows/ty-ecosystem-analyzer_comment.yaml deleted file mode 100644 index f237f45e1e..0000000000 --- a/.github/workflows/ty-ecosystem-analyzer_comment.yaml +++ /dev/null @@ -1,85 +0,0 @@ -name: PR comment (ty ecosystem-analyzer) - -on: # zizmor: ignore[dangerous-triggers] - workflow_run: - workflows: [ty ecosystem-analyzer] - types: [completed] - workflow_dispatch: - inputs: - workflow_run_id: - description: The ty ecosystem-analyzer workflow that triggers the workflow run - required: true - -jobs: - comment: - runs-on: ubuntu-24.04 - permissions: - pull-requests: write - steps: - - uses: 
dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 - name: Download PR number - with: - name: pr-number - run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }} - if_no_artifact_found: ignore - allow_forks: true - - - name: Parse pull request number - id: pr-number - run: | - if [[ -f pr-number ]] - then - echo "pr-number=$(> "$GITHUB_OUTPUT" - fi - - - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 - name: "Download comment.md" - id: download-comment - if: steps.pr-number.outputs.pr-number - with: - name: comment.md - workflow: ty-ecosystem-analyzer.yaml - pr: ${{ steps.pr-number.outputs.pr-number }} - path: pr/comment - workflow_conclusion: completed - if_no_artifact_found: ignore - allow_forks: true - - - name: Generate comment content - id: generate-comment - if: ${{ steps.download-comment.outputs.found_artifact == 'true' }} - run: | - # Guard against malicious ty ecosystem-analyzer results that symlink to a secret - # file on this runner - if [[ -L pr/comment/comment.md ]] - then - echo "Error: comment.md cannot be a symlink" - exit 1 - fi - - # Note: this identifier is used to find the comment to update on subsequent runs - echo '' > comment.md - echo >> comment.md - cat pr/comment/comment.md >> comment.md - - echo 'comment<> "$GITHUB_OUTPUT" - cat comment.md >> "$GITHUB_OUTPUT" - echo 'EOF' >> "$GITHUB_OUTPUT" - - - name: Find existing comment - uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0 - if: steps.generate-comment.outcome == 'success' - id: find-comment - with: - issue-number: ${{ steps.pr-number.outputs.pr-number }} - comment-author: "github-actions[bot]" - body-includes: "" - - - name: Create or update comment - if: steps.find-comment.outcome == 'success' - uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 - with: - comment-id: ${{ steps.find-comment.outputs.comment-id }} - 
issue-number: ${{ steps.pr-number.outputs.pr-number }} - body-path: comment.md - edit-mode: replace diff --git a/.github/workflows/ty-ecosystem-report.yaml b/.github/workflows/ty-ecosystem-report.yaml index 30b3bc93ab..2078478505 100644 --- a/.github/workflows/ty-ecosystem-report.yaml +++ b/.github/workflows/ty-ecosystem-report.yaml @@ -29,7 +29,7 @@ jobs: persist-credentials: false - name: Install the latest version of uv - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 with: enable-cache: true # zizmor: ignore[cache-poisoning] acceptable risk for CloudFlare pages artifact diff --git a/.github/workflows/typing_conformance.yaml b/.github/workflows/typing_conformance.yaml index ed23e6c084..aefe0b6c40 100644 --- a/.github/workflows/typing_conformance.yaml +++ b/.github/workflows/typing_conformance.yaml @@ -24,7 +24,7 @@ env: CARGO_TERM_COLOR: always RUSTUP_MAX_RETRIES: 10 RUST_BACKTRACE: 1 - CONFORMANCE_SUITE_COMMIT: d4f39b27a4a47aac8b6d4019e1b0b5b3156fabdc + CONFORMANCE_SUITE_COMMIT: 9f6d8ced7cd1c8d92687a4e9c96d7716452e471e jobs: typing_conformance: @@ -94,21 +94,18 @@ jobs: touch typing_conformance_diagnostics.diff fi - echo ${{ github.event.number }} > pr-number echo "${CONFORMANCE_SUITE_COMMIT}" > conformance-suite-commit + # NOTE: astral-sh-bot uses this artifact to post comments on PRs. + # Make sure to update the bot if you rename the artifact. - name: Upload diff uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: typing_conformance_diagnostics_diff path: typing_conformance_diagnostics.diff - - name: Upload pr-number - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 - with: - name: pr-number - path: pr-number - + # NOTE: astral-sh-bot uses this artifact to post comments on PRs. + # Make sure to update the bot if you rename the artifact. 
- name: Upload conformance suite commit uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: diff --git a/.github/workflows/typing_conformance_comment.yaml b/.github/workflows/typing_conformance_comment.yaml deleted file mode 100644 index f596507448..0000000000 --- a/.github/workflows/typing_conformance_comment.yaml +++ /dev/null @@ -1,112 +0,0 @@ -name: PR comment (typing_conformance) - -on: # zizmor: ignore[dangerous-triggers] - workflow_run: - workflows: [Run typing conformance] - types: [completed] - workflow_dispatch: - inputs: - workflow_run_id: - description: The typing_conformance workflow that triggers the workflow run - required: true - -jobs: - comment: - runs-on: ubuntu-24.04 - permissions: - pull-requests: write - steps: - - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 - name: Download PR number - with: - name: pr-number - run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }} - if_no_artifact_found: ignore - allow_forks: true - - - name: Parse pull request number - id: pr-number - run: | - if [[ -f pr-number ]] - then - echo "pr-number=$(> "$GITHUB_OUTPUT" - fi - - - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 - name: Download typing conformance suite commit - with: - name: conformance-suite-commit - run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }} - if_no_artifact_found: ignore - allow_forks: true - - - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 - name: "Download typing_conformance results" - id: download-typing_conformance_diff - if: steps.pr-number.outputs.pr-number - with: - name: typing_conformance_diagnostics_diff - workflow: typing_conformance.yaml - pr: ${{ steps.pr-number.outputs.pr-number }} - path: pr/typing_conformance_diagnostics_diff - workflow_conclusion: completed - if_no_artifact_found: ignore - allow_forks: true - - - 
name: Generate comment content - id: generate-comment - if: ${{ steps.download-typing_conformance_diff.outputs.found_artifact == 'true' }} - run: | - # Guard against malicious typing_conformance results that symlink to a secret - # file on this runner - if [[ -L pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff ]] - then - echo "Error: typing_conformance_diagnostics.diff cannot be a symlink" - exit 1 - fi - - # Note this identifier is used to find the comment to update on - # subsequent runs - echo '' >> comment.txt - - if [[ -f conformance-suite-commit ]] - then - echo "## Diagnostic diff on [typing conformance tests](https://github.com/python/typing/tree/$(> comment.txt - else - echo "conformance-suite-commit file not found" - echo "## Diagnostic diff on typing conformance tests" >> comment.txt - fi - - if [ -s "pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff" ]; then - echo '
' >> comment.txt - echo 'Changes were detected when running ty on typing conformance tests' >> comment.txt - echo '' >> comment.txt - echo '```diff' >> comment.txt - cat pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff >> comment.txt - echo '```' >> comment.txt - echo '
' >> comment.txt - else - echo 'No changes detected when running ty on typing conformance tests ✅' >> comment.txt - fi - - echo 'comment<> "$GITHUB_OUTPUT" - cat comment.txt >> "$GITHUB_OUTPUT" - echo 'EOF' >> "$GITHUB_OUTPUT" - - - name: Find existing comment - uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0 - if: steps.generate-comment.outcome == 'success' - id: find-comment - with: - issue-number: ${{ steps.pr-number.outputs.pr-number }} - comment-author: "github-actions[bot]" - body-includes: "" - - - name: Create or update comment - if: steps.find-comment.outcome == 'success' - uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 - with: - comment-id: ${{ steps.find-comment.outputs.comment-id }} - issue-number: ${{ steps.pr-number.outputs.pr-number }} - body-path: comment.txt - edit-mode: replace diff --git a/.github/zizmor.yml b/.github/zizmor.yml index 237af95e7b..28ec2a61ef 100644 --- a/.github/zizmor.yml +++ b/.github/zizmor.yml @@ -3,9 +3,6 @@ # # TODO: can we remove the ignores here so that our workflows are more secure? rules: - dangerous-triggers: - ignore: - - pr-comment.yaml cache-poisoning: ignore: - build-docker.yml diff --git a/CHANGELOG.md b/CHANGELOG.md index 4689757c34..07e8d25786 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,105 @@ # Changelog +## 0.14.4 + +Released on 2025-11-06. 
+ +### Preview features + +- [formatter] Allow newlines after function headers without docstrings ([#21110](https://github.com/astral-sh/ruff/pull/21110)) +- [formatter] Avoid extra parentheses for long `match` patterns with `as` captures ([#21176](https://github.com/astral-sh/ruff/pull/21176)) +- \[`refurb`\] Expand fix safety for keyword arguments and `Decimal`s (`FURB164`) ([#21259](https://github.com/astral-sh/ruff/pull/21259)) +- \[`refurb`\] Preserve argument ordering in autofix (`FURB103`) ([#20790](https://github.com/astral-sh/ruff/pull/20790)) + +### Bug fixes + +- [server] Fix missing diagnostics for notebooks ([#21156](https://github.com/astral-sh/ruff/pull/21156)) +- \[`flake8-bugbear`\] Ignore non-NFKC attribute names in `B009` and `B010` ([#21131](https://github.com/astral-sh/ruff/pull/21131)) +- \[`refurb`\] Fix false negative for underscores before sign in `Decimal` constructor (`FURB157`) ([#21190](https://github.com/astral-sh/ruff/pull/21190)) +- \[`ruff`\] Fix false positives on starred arguments (`RUF057`) ([#21256](https://github.com/astral-sh/ruff/pull/21256)) + +### Rule changes + +- \[`airflow`\] extend deprecated argument `concurrency` in `airflow..DAG` (`AIR301`) ([#21220](https://github.com/astral-sh/ruff/pull/21220)) + +### Documentation + +- Improve `extend` docs ([#21135](https://github.com/astral-sh/ruff/pull/21135)) +- \[`flake8-comprehensions`\] Fix typo in `C416` documentation ([#21184](https://github.com/astral-sh/ruff/pull/21184)) +- Revise Ruff setup instructions for Zed editor ([#20935](https://github.com/astral-sh/ruff/pull/20935)) + +### Other changes + +- Make `ruff analyze graph` work with jupyter notebooks ([#21161](https://github.com/astral-sh/ruff/pull/21161)) + +### Contributors + +- [@chirizxc](https://github.com/chirizxc) +- [@Lee-W](https://github.com/Lee-W) +- [@musicinmybrain](https://github.com/musicinmybrain) +- [@MichaReiser](https://github.com/MichaReiser) +- [@tjkuson](https://github.com/tjkuson) +- 
[@danparizher](https://github.com/danparizher) +- [@renovate](https://github.com/renovate) +- [@ntBre](https://github.com/ntBre) +- [@gauthsvenkat](https://github.com/gauthsvenkat) +- [@LoicRiegel](https://github.com/LoicRiegel) + +## 0.14.3 + +Released on 2025-10-30. + +### Preview features + +- Respect `--output-format` with `--watch` ([#21097](https://github.com/astral-sh/ruff/pull/21097)) +- \[`pydoclint`\] Fix false positive on explicit exception re-raising (`DOC501`, `DOC502`) ([#21011](https://github.com/astral-sh/ruff/pull/21011)) +- \[`pyflakes`\] Revert to stable behavior if imports for module lie in alternate branches for `F401` ([#20878](https://github.com/astral-sh/ruff/pull/20878)) +- \[`pylint`\] Implement `stop-iteration-return` (`PLR1708`) ([#20733](https://github.com/astral-sh/ruff/pull/20733)) +- \[`ruff`\] Add support for additional eager conversion patterns (`RUF065`) ([#20657](https://github.com/astral-sh/ruff/pull/20657)) + +### Bug fixes + +- Fix finding keyword range for clause header after statement ending with semicolon ([#21067](https://github.com/astral-sh/ruff/pull/21067)) +- Fix syntax error false positive on nested alternative patterns ([#21104](https://github.com/astral-sh/ruff/pull/21104)) +- \[`ISC001`\] Fix panic when string literals are unclosed ([#21034](https://github.com/astral-sh/ruff/pull/21034)) +- \[`flake8-django`\] Apply `DJ001` to annotated fields ([#20907](https://github.com/astral-sh/ruff/pull/20907)) +- \[`flake8-pyi`\] Fix `PYI034` to not trigger on metaclasses (`PYI034`) ([#20881](https://github.com/astral-sh/ruff/pull/20881)) +- \[`flake8-type-checking`\] Fix `TC003` false positive with `future-annotations` ([#21125](https://github.com/astral-sh/ruff/pull/21125)) +- \[`pyflakes`\] Fix false positive for `__class__` in lambda expressions within class definitions (`F821`) ([#20564](https://github.com/astral-sh/ruff/pull/20564)) +- \[`pyupgrade`\] Fix false positive for `TypeVar` with default on Python \<3.13 
(`UP046`,`UP047`) ([#21045](https://github.com/astral-sh/ruff/pull/21045)) + +### Rule changes + +- Add missing docstring sections to the numpy list ([#20931](https://github.com/astral-sh/ruff/pull/20931)) +- \[`airflow`\] Extend `airflow.models..Param` check (`AIR311`) ([#21043](https://github.com/astral-sh/ruff/pull/21043)) +- \[`airflow`\] Warn that `airflow....DAG.create_dagrun` has been removed (`AIR301`) ([#21093](https://github.com/astral-sh/ruff/pull/21093)) +- \[`refurb`\] Preserve digit separators in `Decimal` constructor (`FURB157`) ([#20588](https://github.com/astral-sh/ruff/pull/20588)) + +### Server + +- Avoid sending an unnecessary "clear diagnostics" message for clients supporting pull diagnostics ([#21105](https://github.com/astral-sh/ruff/pull/21105)) + +### Documentation + +- \[`flake8-bandit`\] Fix correct example for `S308` ([#21128](https://github.com/astral-sh/ruff/pull/21128)) + +### Other changes + +- Clearer error message when `line-length` goes beyond threshold ([#21072](https://github.com/astral-sh/ruff/pull/21072)) + +### Contributors + +- [@danparizher](https://github.com/danparizher) +- [@jvacek](https://github.com/jvacek) +- [@ntBre](https://github.com/ntBre) +- [@augustelalande](https://github.com/augustelalande) +- [@prakhar1144](https://github.com/prakhar1144) +- [@TaKO8Ki](https://github.com/TaKO8Ki) +- [@dylwil3](https://github.com/dylwil3) +- [@fatelei](https://github.com/fatelei) +- [@ShaharNaveh](https://github.com/ShaharNaveh) +- [@Lee-W](https://github.com/Lee-W) + ## 0.14.2 Released on 2025-10-23. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 6c9a99aed0..bb6758451f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -280,6 +280,55 @@ Note that plugin-specific configuration options are defined in their own modules Finally, regenerate the documentation and generated code with `cargo dev generate-all`. +### Opening a PR + +After you finish your changes, the next step is to open a PR. 
By default, two +sections will be filled into the PR body: the summary and the test plan. + +#### The summary + +The summary is intended to give us as maintainers information about your PR. +This should typically include a link to the relevant issue(s) you're addressing +in your PR, as well as a summary of the issue and your approach to fixing it. If +you have any questions about your approach or design, or if you considered +alternative approaches, that can also be helpful to include. + +AI can be helpful in generating both the code and summary of your PR, but a +successful contribution should still be carefully reviewed by you and the +summary editorialized before submitting a PR. A great summary is thorough but +also succinct and gives us the context we need to review your PR. + +You can find examples of excellent issues and PRs by searching for the +[`great writeup`](https://github.com/astral-sh/ruff/issues?q=label%3A%22great%20writeup%22) +label. + +#### The test plan + +The test plan is likely to be shorter than the summary and can be as simple as +"Added new snapshot tests for `RUF123`," at least for rule bugs. For LSP or some +types of CLI changes, in particular, it can also be helpful to include +screenshots or recordings of your change in action. + +#### Ecosystem report + +After opening the PR, an ecosystem report will be run as part of CI. This shows +a diff of linter and formatter behavior before and after the changes in your PR. +Going through these changes and reporting your findings in the PR summary or an +additional comment help us to review your PR more efficiently. It's also a great +way to find new test cases to incorporate into your PR if you identify any +issues. + +#### PR status + +To help us know when your PR is ready for review again, please either move your +PR back to a draft while working on it (marking it ready for review afterwards +will ping the previous reviewers) or explicitly re-request a review. 
This helps +us to avoid re-reviewing a PR while you're still working on it and also to +prioritize PRs that are definitely ready for review. + +You can also thumbs-up or mark as resolved any comments we leave to let us know +you addressed them. + ## MkDocs > [!NOTE] diff --git a/Cargo.lock b/Cargo.lock index 070f471e4e..d9f831125d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10,9 +10,9 @@ checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" [[package]] name = "aho-corasick" -version = "1.1.3" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" dependencies = [ "memchr", ] @@ -45,7 +45,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "710e8eae58854cdc1790fcb56cca04d712a17be849eeb81da2a724bf4bae2bc4" dependencies = [ "anstyle", - "unicode-width 0.2.1", + "unicode-width", ] [[package]] @@ -106,7 +106,7 @@ dependencies = [ "anstyle-lossy", "anstyle-parse", "html-escape", - "unicode-width 0.2.1", + "unicode-width", ] [[package]] @@ -240,10 +240,10 @@ version = "0.72.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "993776b509cfb49c750f11b8f07a46fa23e0a1386ffc01fb1e7d343efc387895" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "cexpr", "clang-sys", - "itertools 0.10.5", + "itertools 0.13.0", "log", "prettyplease", "proc-macro2", @@ -262,9 +262,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.9.4" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394" +checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" [[package]] name = "bitvec" @@ -287,6 +287,15 @@ dependencies = [ 
"generic-array", ] +[[package]] +name = "block2" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdeb9d870516001442e364c5220d3574d2da8dc765554b4a617230d33fa58ef5" +dependencies = [ + "objc2", +] + [[package]] name = "boxcar" version = "0.2.14" @@ -433,9 +442,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.50" +version = "4.5.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c2cfd7bf8a6017ddaa4e32ffe7403d547790db06bd171c1c53926faab501623" +checksum = "4c26d721170e0295f191a69bd9a1f93efcdb0aff38684b61ab5750468972e5f5" dependencies = [ "clap_builder", "clap_derive", @@ -443,9 +452,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.50" +version = "4.5.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a4c05b9e80c5ccd3a7ef080ad7b6ba7d6fc00a985b8b157197075677c82c7a0" +checksum = "75835f0c7bf681bfd05abe44e965760fea999a5286c6eb2d59883634fd02011a" dependencies = [ "anstream", "anstyle", @@ -510,7 +519,7 @@ checksum = "85a8ab73a1c02b0c15597b22e09c7dc36e63b2f601f9d1e83ac0c3decd38b1ae" dependencies = [ "nix 0.29.0", "terminfo", - "thiserror 2.0.16", + "thiserror 2.0.17", "which", "windows-sys 0.59.0", ] @@ -687,7 +696,7 @@ dependencies = [ "encode_unicode", "libc", "once_cell", - "unicode-width 0.2.1", + "unicode-width", "windows-sys 0.61.0", ] @@ -856,14 +865,14 @@ dependencies = [ [[package]] name = "csv" -version = "1.3.1" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acdc4883a9c96732e4733212c01447ebd805833b7275a73ca3ee080fd77afdaf" +checksum = "52cd9d68cf7efc6ddfaaee42e7288d3a99d613d4b50f76ce9827ae0c6e14f938" dependencies = [ "csv-core", "itoa", "ryu", - "serde", + "serde_core", ] [[package]] @@ -877,11 +886,11 @@ dependencies = [ [[package]] name = "ctrlc" -version = "3.5.0" +version = "3.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"881c5d0a13b2f1498e2306e82cbada78390e152d4b1378fb28a84f4dcd0dc4f3" +checksum = "73736a89c4aff73035ba2ed2e565061954da00d4970fc9ac25dcc85a2a20d790" dependencies = [ - "dispatch", + "dispatch2", "nix 0.30.1", "windows-sys 0.61.0", ] @@ -1011,10 +1020,16 @@ dependencies = [ ] [[package]] -name = "dispatch" -version = "0.2.0" +name = "dispatch2" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd0c93bb4b0c6d9b77f4435b0ae98c24d17f1c45b2ff844c6151a07256ca923b" +checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec" +dependencies = [ + "bitflags 2.10.0", + "block2", + "libc", + "objc2", +] [[package]] name = "displaydoc" @@ -1115,13 +1130,12 @@ dependencies = [ [[package]] name = "etcetera" -version = "0.10.0" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26c7b13d0780cb82722fd59f6f57f925e143427e4a75313a6c77243bf5326ae6" +checksum = "de48cc4d1c1d97a20fd819def54b890cadde72ed3ad0c614822a0a433361be96" dependencies = [ "cfg-if", - "home", - "windows-sys 0.59.0", + "windows-sys 0.61.0", ] [[package]] @@ -1251,7 +1265,7 @@ version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfe4fbac503b8d1f88e6676011885f34b7174f46e59956bba534ba83abded4df" dependencies = [ - "unicode-width 0.2.1", + "unicode-width", ] [[package]] @@ -1287,9 +1301,9 @@ checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" [[package]] name = "globset" -version = "0.4.17" +version = "0.4.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eab69130804d941f8075cfd713bf8848a2c3b3f201a9457a11e6f87e1ab62305" +checksum = "52dfc19153a48bde0cbd630453615c8151bce3a5adfac7a0aebfbf0a1e1f57e3" dependencies = [ "aho-corasick", "bstr", @@ -1304,7 +1318,7 @@ version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bf760ebf69878d9fd8f110c89703d90ce35095324d1f1edcb595c63945ee757" 
dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "ignore", "walkdir", ] @@ -1366,15 +1380,6 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" -[[package]] -name = "home" -version = "0.5.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" -dependencies = [ - "windows-sys 0.59.0", -] - [[package]] name = "html-escape" version = "0.2.13" @@ -1523,9 +1528,9 @@ dependencies = [ [[package]] name = "ignore" -version = "0.4.24" +version = "0.4.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81776e6f9464432afcc28d03e52eb101c93b6f0566f52aef2427663e700f0403" +checksum = "d3d782a365a015e0f5c04902246139249abf769125006fbe7649e2ee88169b4a" dependencies = [ "crossbeam-deque", "globset", @@ -1558,25 +1563,25 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.11.4" +version = "2.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" +checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f" dependencies = [ "equivalent", - "hashbrown 0.15.5", + "hashbrown 0.16.0", "serde", "serde_core", ] [[package]] name = "indicatif" -version = "0.18.0" +version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70a646d946d06bedbbc4cac4c218acf4bbf2d87757a784857025f4d447e4e1cd" +checksum = "ade6dfcba0dfb62ad59e59e7241ec8912af34fd29e0e743e3db992bd278e8b65" dependencies = [ "console 0.16.1", "portable-atomic", - "unicode-width 0.2.1", + "unicode-width", "unit-prefix", "vt100", "web-time", @@ -1584,9 +1589,12 @@ dependencies = [ [[package]] name = "indoc" -version = "2.0.6" +version = "2.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd" +checksum = "79cf5c93f93228cf8efb3ba362535fb11199ac548a09ce117c9b1adc3030d706" +dependencies = [ + "rustversion", +] [[package]] name = "inotify" @@ -1594,7 +1602,7 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "inotify-sys", "libc", ] @@ -1744,24 +1752,24 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "jiff" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be1f93b8b1eb69c77f24bbb0afdf66f54b632ee39af40ca21c4365a1d7347e49" +checksum = "49cce2b81f2098e7e3efc35bc2e0a6b7abec9d34128283d7a26fa8f32a6dbb35" dependencies = [ "jiff-static", "jiff-tzdb-platform", "log", "portable-atomic", "portable-atomic-util", - "serde", + "serde_core", "windows-sys 0.52.0", ] [[package]] name = "jiff-static" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03343451ff899767262ec32146f6d559dd759fdadf42ff0e227c7c48f72594b4" +checksum = "980af8b43c3ad5d8d349ace167ec8170839f753a42d233ba19e08afe1850fa69" dependencies = [ "proc-macro2", "quote", @@ -1801,9 +1809,9 @@ checksum = "a037eddb7d28de1d0fc42411f501b53b75838d313908078d6698d064f3029b24" [[package]] name = "js-sys" -version = "0.3.80" +version = "0.3.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "852f13bec5eba4ba9afbeb93fd7c13fe56147f055939ae21c43a29a0ecb2702e" +checksum = "b011eec8cc36da2aab2d5cff675ec18454fad408585853910a202391cf9f8e65" dependencies = [ "once_cell", "wasm-bindgen", @@ -1843,9 +1851,9 @@ checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976" [[package]] name = "libcst" -version = "1.8.5" +version = "1.8.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d56bcd52d9b5e5f43e7fba20eb1f423ccb18c84cdf1cb506b8c1b95776b0b49" +checksum = "6aea7143e4a0ed59b87a1ee71e198500889f8b005311136be15e84c97a6fcd8d" dependencies = [ "annotate-snippets", "libcst_derive", @@ -1853,14 +1861,14 @@ dependencies = [ "paste", "peg", "regex", - "thiserror 2.0.16", + "thiserror 2.0.17", ] [[package]] name = "libcst_derive" -version = "1.8.5" +version = "1.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fcf5a725c4db703660124fe0edb98285f1605d0b87b7ee8684b699764a4f01a" +checksum = "0903173ea316c34a44d0497161e04d9210af44f5f5e89bf2f55d9a254c9a0e8d" dependencies = [ "quote", "syn", @@ -1892,7 +1900,7 @@ version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "libc", "redox_syscall", ] @@ -2011,9 +2019,9 @@ checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" [[package]] name = "matchit" -version = "0.8.6" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f926ade0c4e170215ae43342bf13b9310a437609c81f29f86c5df6657582ef9" +checksum = "9ea5f97102eb9e54ab99fb70bb175589073f554bdadfb74d9bd656482ea73e2a" [[package]] name = "memchr" @@ -2097,7 +2105,7 @@ version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "cfg-if", "cfg_aliases", "libc", @@ -2109,7 +2117,7 @@ version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "cfg-if", "cfg_aliases", "libc", @@ -2137,7 +2145,7 @@ version = "8.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "4d3d07927151ff8575b7087f245456e549fea62edf0ec4e565a5ee50c8402bc3" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "fsevent-sys", "inotify", "kqueue", @@ -2183,6 +2191,21 @@ dependencies = [ "libc", ] +[[package]] +name = "objc2" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c2599ce0ec54857b29ce62166b0ed9b4f6f1a70ccc9a71165b6154caca8c05" +dependencies = [ + "objc2-encode", +] + +[[package]] +name = "objc2-encode" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef25abbcd74fb2609453eb695bd2f860d389e457f67dc17cafc8b8cbc89d0c33" + [[package]] name = "once_cell" version = "1.21.3" @@ -2331,7 +2354,7 @@ checksum = "31095ca1f396e3de32745f42b20deef7bc09077f918b085307e8eab6ddd8fb9c" dependencies = [ "once_cell", "serde", - "unicode-width 0.2.1", + "unicode-width", "unscanny", "version-ranges", ] @@ -2352,7 +2375,7 @@ dependencies = [ "serde", "smallvec", "thiserror 1.0.69", - "unicode-width 0.2.1", + "unicode-width", "url", "urlencoding", "version-ranges", @@ -2371,7 +2394,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21e0a3a33733faeaf8651dfee72dd0f388f0c8e5ad496a3478fa5a922f49cfa8" dependencies = [ "memchr", - "thiserror 2.0.16", + "thiserror 2.0.17", "ucd-trie", ] @@ -2560,9 +2583,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.101" +version = "1.0.103" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" +checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8" dependencies = [ "unicode-ident", ] @@ -2577,7 +2600,7 @@ dependencies = [ "pep440_rs", "pep508_rs", "serde", - "thiserror 2.0.16", + "thiserror 2.0.17", "toml", ] @@ -2592,7 +2615,7 @@ dependencies = [ "newtype-uuid", "quick-xml", "strip-ansi-escapes", - "thiserror 
2.0.16", + "thiserror 2.0.17", "uuid", ] @@ -2627,9 +2650,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.41" +version = "1.0.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce25767e7b499d1b604768e7cde645d14cc8584231ea6b295e9c9eb22c02e1d1" +checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f" dependencies = [ "proc-macro2", ] @@ -2753,7 +2776,7 @@ version = "0.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", ] [[package]] @@ -2764,7 +2787,7 @@ checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" dependencies = [ "getrandom 0.2.16", "libredox", - "thiserror 2.0.16", + "thiserror 2.0.17", ] [[package]] @@ -2789,9 +2812,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.11.3" +version = "1.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b5288124840bee7b386bc413c487869b360b2b4ec421ea56425128692f2a82c" +checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" dependencies = [ "aho-corasick", "memchr", @@ -2835,13 +2858,13 @@ dependencies = [ [[package]] name = "ruff" -version = "0.14.2" +version = "0.14.4" dependencies = [ "anyhow", "argfile", "assert_fs", "bincode", - "bitflags 2.9.4", + "bitflags 2.10.0", "cachedir", "clap", "clap_complete_command", @@ -2886,7 +2909,7 @@ dependencies = [ "strum", "tempfile", "test-case", - "thiserror 2.0.16", + "thiserror 2.0.17", "tikv-jemallocator", "toml", "tracing", @@ -2906,7 +2929,7 @@ dependencies = [ "snapbox", "toml", "tryfn", - "unicode-width 0.2.1", + "unicode-width", ] [[package]] @@ -2982,7 +3005,7 @@ dependencies = [ "serde_json", "similar", "tempfile", - "thiserror 2.0.16", + "thiserror 2.0.17", "tracing", "tracing-subscriber", "ty_static", @@ -3057,7 +3080,7 @@ dependencies = [ "serde", 
"static_assertions", "tracing", - "unicode-width 0.2.1", + "unicode-width", ] [[package]] @@ -3092,11 +3115,11 @@ dependencies = [ [[package]] name = "ruff_linter" -version = "0.14.2" +version = "0.14.4" dependencies = [ "aho-corasick", "anyhow", - "bitflags 2.9.4", + "bitflags 2.10.0", "clap", "colored 3.0.0", "fern", @@ -3143,11 +3166,11 @@ dependencies = [ "strum_macros", "tempfile", "test-case", - "thiserror 2.0.16", + "thiserror 2.0.17", "toml", "typed-arena", "unicode-normalization", - "unicode-width 0.2.1", + "unicode-width", "unicode_names2", "url", ] @@ -3186,7 +3209,7 @@ dependencies = [ "serde_json", "serde_with", "test-case", - "thiserror 2.0.16", + "thiserror 2.0.17", "uuid", ] @@ -3202,7 +3225,7 @@ name = "ruff_python_ast" version = "0.0.0" dependencies = [ "aho-corasick", - "bitflags 2.9.4", + "bitflags 2.10.0", "compact_str", "get-size2", "is-macro", @@ -3218,7 +3241,7 @@ dependencies = [ "schemars", "serde", "serde_json", - "thiserror 2.0.16", + "thiserror 2.0.17", ] [[package]] @@ -3272,7 +3295,7 @@ dependencies = [ "similar", "smallvec", "static_assertions", - "thiserror 2.0.16", + "thiserror 2.0.17", "tracing", ] @@ -3306,7 +3329,7 @@ dependencies = [ name = "ruff_python_literal" version = "0.0.0" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "itertools 0.14.0", "ruff_python_ast", "unic-ucd-category", @@ -3317,7 +3340,7 @@ name = "ruff_python_parser" version = "0.0.0" dependencies = [ "anyhow", - "bitflags 2.9.4", + "bitflags 2.10.0", "bstr", "compact_str", "get-size2", @@ -3342,7 +3365,7 @@ dependencies = [ name = "ruff_python_semantic" version = "0.0.0" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "insta", "is-macro", "ruff_cache", @@ -3363,7 +3386,7 @@ dependencies = [ name = "ruff_python_stdlib" version = "0.0.0" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "unicode-ident", ] @@ -3417,7 +3440,7 @@ dependencies = [ "serde", "serde_json", "shellexpand", - "thiserror 2.0.16", + "thiserror 2.0.17", "toml", 
"tracing", "tracing-log", @@ -3447,7 +3470,7 @@ dependencies = [ [[package]] name = "ruff_wasm" -version = "0.14.2" +version = "0.14.4" dependencies = [ "console_error_panic_hook", "console_log", @@ -3541,7 +3564,7 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "errno", "libc", "linux-raw-sys", @@ -3563,7 +3586,7 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" [[package]] name = "salsa" version = "0.24.0" -source = "git+https://github.com/salsa-rs/salsa.git?rev=cdd0b85516a52c18b8a6d17a2279a96ed6c3e198#cdd0b85516a52c18b8a6d17a2279a96ed6c3e198" +source = "git+https://github.com/salsa-rs/salsa.git?rev=05a9af7f554b64b8aadc2eeb6f2caf73d0408d09#05a9af7f554b64b8aadc2eeb6f2caf73d0408d09" dependencies = [ "boxcar", "compact_str", @@ -3587,12 +3610,12 @@ dependencies = [ [[package]] name = "salsa-macro-rules" version = "0.24.0" -source = "git+https://github.com/salsa-rs/salsa.git?rev=cdd0b85516a52c18b8a6d17a2279a96ed6c3e198#cdd0b85516a52c18b8a6d17a2279a96ed6c3e198" +source = "git+https://github.com/salsa-rs/salsa.git?rev=05a9af7f554b64b8aadc2eeb6f2caf73d0408d09#05a9af7f554b64b8aadc2eeb6f2caf73d0408d09" [[package]] name = "salsa-macros" version = "0.24.0" -source = "git+https://github.com/salsa-rs/salsa.git?rev=cdd0b85516a52c18b8a6d17a2279a96ed6c3e198#cdd0b85516a52c18b8a6d17a2279a96ed6c3e198" +source = "git+https://github.com/salsa-rs/salsa.git?rev=05a9af7f554b64b8aadc2eeb6f2caf73d0408d09#05a9af7f554b64b8aadc2eeb6f2caf73d0408d09" dependencies = [ "proc-macro2", "quote", @@ -3611,9 +3634,9 @@ dependencies = [ [[package]] name = "schemars" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0" +checksum = 
"1317c3bf3e7df961da95b0a56a172a02abead31276215a0497241a7624b487ce" dependencies = [ "dyn-clone", "ref-cast", @@ -3624,9 +3647,9 @@ dependencies = [ [[package]] name = "schemars_derive" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33d020396d1d138dc19f1165df7545479dcd58d93810dc5d646a16e55abefa80" +checksum = "5f760a6150d45dd66ec044983c124595ae76912e77ed0b44124cb3e415cce5d9" dependencies = [ "proc-macro2", "quote", @@ -3724,9 +3747,9 @@ dependencies = [ [[package]] name = "serde_spanned" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5417783452c2be558477e104686f7de5dae53dba813c28435e0e70f82d9b04ee" +checksum = "e24345aa0fe688594e73770a5f6d1b216508b4f93484c0026d521acd30134392" dependencies = [ "serde_core", ] @@ -3742,20 +3765,19 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.14.1" +version = "3.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c522100790450cf78eeac1507263d0a350d4d5b30df0c8e1fe051a10c22b376e" +checksum = "aa66c845eee442168b2c8134fec70ac50dc20e760769c8ba0ad1319ca1959b04" dependencies = [ - "serde", - "serde_derive", + "serde_core", "serde_with_macros", ] [[package]] name = "serde_with_macros" -version = "3.14.1" +version = "3.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327ada00f7d64abaac1e55a6911e90cf665aa051b9a561c7006c157f4633135e" +checksum = "b91a903660542fced4e99881aa481bdbaec1634568ee02e0b8bd57c64cb38955" dependencies = [ "darling", "proc-macro2", @@ -3818,9 +3840,9 @@ checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" [[package]] name = "snapbox" -version = "0.6.22" +version = "0.6.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "805d09a74586d9b17061e5be6ee5f8cc37e5982c349948114ffc5f68093fe5ec" +checksum = 
"96fa1ce81be900d083b30ec2d481e6658c2acfaa2cfc7be45ccc2cc1b820edb3" dependencies = [ "anstream", "anstyle", @@ -3838,9 +3860,9 @@ dependencies = [ [[package]] name = "snapbox-macros" -version = "0.3.10" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16569f53ca23a41bb6f62e0a5084aa1661f4814a67fa33696a79073e03a664af" +checksum = "3b750c344002d7cc69afb9da00ebd9b5c0f8ac2eb7d115d9d45d5b5f47718d74" dependencies = [ "anstream", ] @@ -3905,9 +3927,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.106" +version = "2.0.110" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" +checksum = "a99801b5bd34ede4cf3fc688c5919368fea4e4814a4664359503e6015b280aea" dependencies = [ "proc-macro2", "quote", @@ -3933,9 +3955,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.22.0" +version = "3.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84fa4d11fadde498443cca10fd3ac23c951f0dc59e080e9f4b93d4df4e4eea53" +checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16" dependencies = [ "fastrand", "getrandom 0.3.4", @@ -4031,11 +4053,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.16" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0" +checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" dependencies = [ - "thiserror-impl 2.0.16", + "thiserror-impl 2.0.17", ] [[package]] @@ -4051,9 +4073,9 @@ dependencies = [ [[package]] name = "thiserror-impl" -version = "2.0.16" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960" +checksum = 
"3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" dependencies = [ "proc-macro2", "quote", @@ -4080,9 +4102,9 @@ dependencies = [ [[package]] name = "tikv-jemalloc-sys" -version = "0.6.0+5.3.0-1-ge13ca993e8ccb9ba9847cc330696e02839f328f7" +version = "0.6.1+5.3.0-1-ge13ca993e8ccb9ba9847cc330696e02839f328f7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd3c60906412afa9c2b5b5a48ca6a5abe5736aec9eb48ad05037a677e52e4e2d" +checksum = "cd8aa5b2ab86a2cefa406d889139c162cbb230092f7d1d7cbc1716405d852a3b" dependencies = [ "cc", "libc", @@ -4090,9 +4112,9 @@ dependencies = [ [[package]] name = "tikv-jemallocator" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cec5ff18518d81584f477e9bfdf957f5bb0979b0bac3af4ca30b5b3ae2d2865" +checksum = "0359b4327f954e0567e69fb191cf1436617748813819c94b8cd4a431422d053a" dependencies = [ "libc", "tikv-jemalloc-sys", @@ -4135,9 +4157,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "toml" -version = "0.9.7" +version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00e5e5d9bf2475ac9d4f0d9edab68cc573dc2fd644b0dba36b0c30a92dd9eaa0" +checksum = "f0dc8b1fb61449e27716ec0e1bdf0f6b8f3e8f6b05391e8497b8b6d7804ea6d8" dependencies = [ "indexmap", "serde_core", @@ -4150,9 +4172,9 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32f1085dec27c2b6632b04c80b3bb1b4300d6495d1e129693bdda7d91e72eec1" +checksum = "f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533" dependencies = [ "serde_core", ] @@ -4171,18 +4193,18 @@ dependencies = [ [[package]] name = "toml_parser" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4cf893c33be71572e0e9aa6dd15e6677937abd686b066eac3f8cd3531688a627" +checksum = "c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e" dependencies = [ "winnow", ] [[package]] name = "toml_writer" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d163a63c116ce562a22cda521fcc4d79152e7aba014456fb5eb442f6d6a10109" +checksum = "df8b2b54733674ad286d16267dcfc7a71ed5c776e4ac7aa3c3e2561f7c637bf2" [[package]] name = "tracing" @@ -4352,7 +4374,7 @@ dependencies = [ name = "ty_ide" version = "0.0.0" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "camino", "get-size2", "insta", @@ -4409,7 +4431,7 @@ dependencies = [ "schemars", "serde", "serde_json", - "thiserror 2.0.16", + "thiserror 2.0.17", "toml", "tracing", "ty_combine", @@ -4423,7 +4445,7 @@ name = "ty_python_semantic" version = "0.0.0" dependencies = [ "anyhow", - "bitflags 2.9.4", + "bitflags 2.10.0", "bitvec", "camino", "colored 3.0.0", @@ -4466,7 +4488,7 @@ dependencies = [ "strum_macros", "tempfile", "test-case", - "thiserror 2.0.16", + "thiserror 2.0.17", "tracing", "ty_python_semantic", "ty_static", @@ -4479,7 +4501,7 @@ name = "ty_server" version = "0.0.0" dependencies = [ "anyhow", - "bitflags 2.9.4", + "bitflags 2.10.0", "crossbeam", "dunce", "insta", @@ -4500,7 +4522,7 @@ dependencies = [ "serde_json", "shellexpand", "tempfile", - "thiserror 2.0.16", + "thiserror 2.0.17", "tracing", "tracing-subscriber", "ty_combine", @@ -4540,7 +4562,7 @@ dependencies = [ "serde", "smallvec", "tempfile", - "thiserror 2.0.16", + "thiserror 2.0.17", "toml", "tracing", "ty_python_semantic", @@ -4651,9 +4673,9 @@ checksum = "70ba288e709927c043cbe476718d37be306be53fb1fafecd0dbe36d072be2580" [[package]] name = "unicode-ident" -version = "1.0.19" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d" +checksum = 
"9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" [[package]] name = "unicode-normalization" @@ -4666,15 +4688,9 @@ dependencies = [ [[package]] name = "unicode-width" -version = "0.1.14" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" - -[[package]] -name = "unicode-width" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c" +checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" [[package]] name = "unicode_names2" @@ -4805,25 +4821,13 @@ checksum = "051eb1abcf10076295e815102942cc58f9d5e3b4560e46e53c21e8ff6f3af7b1" [[package]] name = "vt100" -version = "0.15.2" +version = "0.16.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84cd863bf0db7e392ba3bd04994be3473491b31e66340672af5d11943c6274de" +checksum = "054ff75fb8fa83e609e685106df4faeffdf3a735d3c74ebce97ec557d5d36fd9" dependencies = [ "itoa", - "log", - "unicode-width 0.1.14", - "vte 0.11.1", -] - -[[package]] -name = "vte" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5022b5fbf9407086c180e9557be968742d839e68346af7792b8592489732197" -dependencies = [ - "arrayvec", - "utf8parse", - "vte_generate_state_changes", + "unicode-width", + "vte 0.15.0", ] [[package]] @@ -4836,13 +4840,13 @@ dependencies = [ ] [[package]] -name = "vte_generate_state_changes" -version = "0.1.2" +name = "vte" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e369bee1b05d510a7b4ed645f5faa90619e05437111783ea5848f28d97d3c2e" +checksum = "a5924018406ce0063cd67f8e008104968b74b563ee1b85dde3ed1f7cb87d3dbd" dependencies = [ - "proc-macro2", - "quote", + "arrayvec", + "memchr", ] [[package]] @@ -4881,9 +4885,9 @@ dependencies = [ [[package]] name = 
"wasm-bindgen" -version = "0.2.103" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab10a69fbd0a177f5f649ad4d8d3305499c42bab9aef2f7ff592d0ec8f833819" +checksum = "da95793dfc411fbbd93f5be7715b0578ec61fe87cb1a42b12eb625caa5c5ea60" dependencies = [ "cfg-if", "once_cell", @@ -4892,25 +4896,11 @@ dependencies = [ "wasm-bindgen-shared", ] -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.103" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bb702423545a6007bbc368fde243ba47ca275e549c8a28617f56f6ba53b1d1c" -dependencies = [ - "bumpalo", - "log", - "proc-macro2", - "quote", - "syn", - "wasm-bindgen-shared", -] - [[package]] name = "wasm-bindgen-futures" -version = "0.4.53" +version = "0.4.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0b221ff421256839509adbb55998214a70d829d3a28c69b4a6672e9d2a42f67" +checksum = "551f88106c6d5e7ccc7cd9a16f312dd3b5d36ea8b4954304657d5dfba115d4a0" dependencies = [ "cfg-if", "js-sys", @@ -4921,9 +4911,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.103" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc65f4f411d91494355917b605e1480033152658d71f722a90647f56a70c88a0" +checksum = "04264334509e04a7bf8690f2384ef5265f05143a4bff3889ab7a3269adab59c2" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -4931,31 +4921,31 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.103" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffc003a991398a8ee604a401e194b6b3a39677b3173d6e74495eb51b82e99a32" +checksum = "420bc339d9f322e562942d52e115d57e950d12d88983a14c79b86859ee6c7ebc" dependencies = [ + "bumpalo", "proc-macro2", "quote", "syn", - "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.103" +version = "0.2.105" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "293c37f4efa430ca14db3721dfbe48d8c33308096bd44d80ebaa775ab71ba1cf" +checksum = "76f218a38c84bcb33c25ec7059b07847d465ce0e0a76b995e134a45adcb6af76" dependencies = [ "unicode-ident", ] [[package]] name = "wasm-bindgen-test" -version = "0.3.53" +version = "0.3.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aee0a0f5343de9221a0d233b04520ed8dc2e6728dce180b1dcd9288ec9d9fa3c" +checksum = "bfc379bfb624eb59050b509c13e77b4eb53150c350db69628141abce842f2373" dependencies = [ "js-sys", "minicov", @@ -4966,9 +4956,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-test-macro" -version = "0.3.53" +version = "0.3.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a369369e4360c2884c3168d22bded735c43cccae97bbc147586d4b480edd138d" +checksum = "085b2df989e1e6f9620c1311df6c996e83fe16f57792b272ce1e024ac16a90f1" dependencies = [ "proc-macro2", "quote", @@ -4977,9 +4967,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.80" +version = "0.3.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbe734895e869dc429d78c4b433f8d17d95f8d05317440b4fad5ab2d33e596dc" +checksum = "3a1f95c0d03a47f4ae1f7a64643a6bb97465d9b740f0fa8f90ea33915c99a9a1" dependencies = [ "js-sys", "wasm-bindgen", diff --git a/Cargo.toml b/Cargo.toml index 1cce423668..525366136c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,7 +5,7 @@ resolver = "2" [workspace.package] # Please update rustfmt.toml when bumping the Rust edition edition = "2024" -rust-version = "1.88" +rust-version = "1.89" homepage = "https://docs.astral.sh/ruff" documentation = "https://docs.astral.sh/ruff" repository = "https://github.com/astral-sh/ruff" @@ -84,7 +84,7 @@ dashmap = { version = "6.0.1" } dir-test = { version = "0.4.0" } dunce = { version = "1.0.5" } drop_bomb = { version = "0.1.5" } -etcetera = { version = "0.10.0" } +etcetera = { version = "0.11.0" } fern 
= { version = "0.7.0" } filetime = { version = "0.2.23" } getrandom = { version = "0.3.1" } @@ -103,7 +103,7 @@ hashbrown = { version = "0.16.0", default-features = false, features = [ "inline-more", ] } heck = "0.5.0" -ignore = { version = "0.4.22" } +ignore = { version = "0.4.24" } imara-diff = { version = "0.1.5" } imperative = { version = "1.0.4" } indexmap = { version = "2.6.0" } @@ -124,7 +124,7 @@ lsp-server = { version = "0.7.6" } lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [ "proposed", ] } -matchit = { version = "0.8.1" } +matchit = { version = "0.9.0" } memchr = { version = "2.7.1" } mimalloc = { version = "0.1.39" } natord = { version = "1.0.9" } @@ -146,7 +146,7 @@ regex-automata = { version = "0.4.9" } rustc-hash = { version = "2.0.0" } rustc-stable-hash = { version = "0.1.2" } # When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml` -salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "cdd0b85516a52c18b8a6d17a2279a96ed6c3e198", default-features = false, features = [ +salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "05a9af7f554b64b8aadc2eeb6f2caf73d0408d09", default-features = false, features = [ "compact_str", "macros", "salsa_unstable", diff --git a/README.md b/README.md index 92d707838a..dd86c69390 100644 --- a/README.md +++ b/README.md @@ -147,8 +147,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh powershell -c "irm https://astral.sh/ruff/install.ps1 | iex" # For a specific version. 
-curl -LsSf https://astral.sh/ruff/0.14.2/install.sh | sh -powershell -c "irm https://astral.sh/ruff/0.14.2/install.ps1 | iex" +curl -LsSf https://astral.sh/ruff/0.14.4/install.sh | sh +powershell -c "irm https://astral.sh/ruff/0.14.4/install.ps1 | iex" ``` You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff), @@ -181,7 +181,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff ```yaml - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.14.2 + rev: v0.14.4 hooks: # Run the linter. - id: ruff-check @@ -491,6 +491,7 @@ Ruff is used by a number of major open-source projects and companies, including: - [PyTorch](https://github.com/pytorch/pytorch) - [Pydantic](https://github.com/pydantic/pydantic) - [Pylint](https://github.com/PyCQA/pylint) +- [PyScripter](https://github.com/pyscripter/pyscripter) - [PyVista](https://github.com/pyvista/pyvista) - [Reflex](https://github.com/reflex-dev/reflex) - [River](https://github.com/online-ml/river) diff --git a/crates/ruff/Cargo.toml b/crates/ruff/Cargo.toml index c1511d805b..a0daa3486e 100644 --- a/crates/ruff/Cargo.toml +++ b/crates/ruff/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ruff" -version = "0.14.2" +version = "0.14.4" publish = true authors = { workspace = true } edition = { workspace = true } diff --git a/crates/ruff/src/args.rs b/crates/ruff/src/args.rs index eb4bcd0a92..f1d38336f2 100644 --- a/crates/ruff/src/args.rs +++ b/crates/ruff/src/args.rs @@ -7,6 +7,8 @@ use std::sync::Arc; use crate::commands::completions::config::{OptionString, OptionStringParser}; use anyhow::bail; +use clap::builder::Styles; +use clap::builder::styling::{AnsiColor, Effects}; use clap::builder::{TypedValueParser, ValueParserFactory}; use clap::{Parser, Subcommand, command}; use colored::Colorize; @@ -78,6 +80,13 @@ impl GlobalConfigArgs { } } +// Configures Clap v3-style help menu colors +const STYLES: 
Styles = Styles::styled() + .header(AnsiColor::Green.on_default().effects(Effects::BOLD)) + .usage(AnsiColor::Green.on_default().effects(Effects::BOLD)) + .literal(AnsiColor::Cyan.on_default().effects(Effects::BOLD)) + .placeholder(AnsiColor::Cyan.on_default()); + #[derive(Debug, Parser)] #[command( author, @@ -86,6 +95,7 @@ impl GlobalConfigArgs { after_help = "For help with a specific command, see: `ruff help `." )] #[command(version)] +#[command(styles = STYLES)] pub struct Args { #[command(subcommand)] pub(crate) command: Command, @@ -405,8 +415,13 @@ pub struct CheckCommand { )] pub statistics: bool, /// Enable automatic additions of `noqa` directives to failing lines. + /// Optionally provide a reason to append after the codes. #[arg( long, + value_name = "REASON", + default_missing_value = "", + num_args = 0..=1, + require_equals = true, // conflicts_with = "add_noqa", conflicts_with = "show_files", conflicts_with = "show_settings", @@ -418,7 +433,7 @@ pub struct CheckCommand { conflicts_with = "fix", conflicts_with = "diff", )] - pub add_noqa: bool, + pub add_noqa: Option, /// See the files Ruff will be run against with the current settings. #[arg( long, @@ -1047,7 +1062,7 @@ Possible choices: /// etc.). #[expect(clippy::struct_excessive_bools)] pub struct CheckArguments { - pub add_noqa: bool, + pub add_noqa: Option, pub diff: bool, pub exit_non_zero_on_fix: bool, pub exit_zero: bool, diff --git a/crates/ruff/src/commands/add_noqa.rs b/crates/ruff/src/commands/add_noqa.rs index d5eaeb0170..ff6a07c758 100644 --- a/crates/ruff/src/commands/add_noqa.rs +++ b/crates/ruff/src/commands/add_noqa.rs @@ -21,6 +21,7 @@ pub(crate) fn add_noqa( files: &[PathBuf], pyproject_config: &PyprojectConfig, config_arguments: &ConfigArguments, + reason: Option<&str>, ) -> Result { // Collect all the files to check. 
let start = Instant::now(); @@ -76,7 +77,14 @@ pub(crate) fn add_noqa( return None; } }; - match add_noqa_to_path(path, package, &source_kind, source_type, &settings.linter) { + match add_noqa_to_path( + path, + package, + &source_kind, + source_type, + &settings.linter, + reason, + ) { Ok(count) => Some(count), Err(e) => { error!("Failed to add noqa to {}: {e}", path.display()); diff --git a/crates/ruff/src/commands/analyze_graph.rs b/crates/ruff/src/commands/analyze_graph.rs index ffd7cc2d15..d4085e8ed0 100644 --- a/crates/ruff/src/commands/analyze_graph.rs +++ b/crates/ruff/src/commands/analyze_graph.rs @@ -7,6 +7,7 @@ use path_absolutize::CWD; use ruff_db::system::{SystemPath, SystemPathBuf}; use ruff_graph::{Direction, ImportMap, ModuleDb, ModuleImports}; use ruff_linter::package::PackageRoot; +use ruff_linter::source_kind::SourceKind; use ruff_linter::{warn_user, warn_user_once}; use ruff_python_ast::{PySourceType, SourceType}; use ruff_workspace::resolver::{ResolvedFile, match_exclusion, python_files_in_path}; @@ -127,10 +128,6 @@ pub(crate) fn analyze_graph( }, Some(language) => PySourceType::from(language), }; - if matches!(source_type, PySourceType::Ipynb) { - debug!("Ignoring Jupyter notebook: {}", path.display()); - continue; - } // Convert to system paths. let Ok(package) = package.map(SystemPathBuf::from_path_buf).transpose() else { @@ -147,13 +144,34 @@ pub(crate) fn analyze_graph( let root = root.clone(); let result = inner_result.clone(); scope.spawn(move |_| { + // Extract source code (handles both .py and .ipynb files) + let source_kind = match SourceKind::from_path(path.as_std_path(), source_type) { + Ok(Some(source_kind)) => source_kind, + Ok(None) => { + debug!("Skipping non-Python notebook: {path}"); + return; + } + Err(err) => { + warn!("Failed to read source for {path}: {err}"); + return; + } + }; + + let source_code = source_kind.source_code(); + // Identify any imports via static analysis. 
- let mut imports = - ModuleImports::detect(&db, &path, package.as_deref(), string_imports) - .unwrap_or_else(|err| { - warn!("Failed to generate import map for {path}: {err}"); - ModuleImports::default() - }); + let mut imports = ModuleImports::detect( + &db, + source_code, + source_type, + &path, + package.as_deref(), + string_imports, + ) + .unwrap_or_else(|err| { + warn!("Failed to generate import map for {path}: {err}"); + ModuleImports::default() + }); debug!("Discovered {} imports for {}", imports.len(), path); diff --git a/crates/ruff/src/commands/format.rs b/crates/ruff/src/commands/format.rs index 1f79e59339..0e245efa8c 100644 --- a/crates/ruff/src/commands/format.rs +++ b/crates/ruff/src/commands/format.rs @@ -370,7 +370,7 @@ pub(crate) fn format_source( let line_index = LineIndex::from_source_text(unformatted); let byte_range = range.to_text_range(unformatted, &line_index); format_range(unformatted, byte_range, options).map(|formatted_range| { - let mut formatted = unformatted.to_string(); + let mut formatted = unformatted.clone(); formatted.replace_range( std::ops::Range::::from(formatted_range.source_range()), formatted_range.as_code(), diff --git a/crates/ruff/src/commands/linter.rs b/crates/ruff/src/commands/linter.rs index 083102c09c..19e7d3df20 100644 --- a/crates/ruff/src/commands/linter.rs +++ b/crates/ruff/src/commands/linter.rs @@ -16,6 +16,8 @@ struct LinterInfo { prefix: &'static str, name: &'static str, #[serde(skip_serializing_if = "Option::is_none")] + url: Option<&'static str>, + #[serde(skip_serializing_if = "Option::is_none")] categories: Option>, } @@ -50,6 +52,7 @@ pub(crate) fn linter(format: HelpFormat) -> Result<()> { .map(|linter_info| LinterInfo { prefix: linter_info.common_prefix(), name: linter_info.name(), + url: linter_info.url(), categories: linter_info.upstream_categories().map(|cats| { cats.iter() .map(|c| LinterCategoryInfo { diff --git a/crates/ruff/src/lib.rs b/crates/ruff/src/lib.rs index 3bd457de8c..3ea0d94fad 100644 
--- a/crates/ruff/src/lib.rs +++ b/crates/ruff/src/lib.rs @@ -319,12 +319,20 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result cannot contain newline characters" + )); + } + + let reason_opt = (!reason.is_empty()).then_some(reason.as_str()); + let modifications = - commands::add_noqa::add_noqa(&files, &pyproject_config, &config_arguments)?; + commands::add_noqa::add_noqa(&files, &pyproject_config, &config_arguments, reason_opt)?; if modifications > 0 && config_arguments.log_level >= LogLevel::Default { let s = if modifications == 1 { "" } else { "s" }; #[expect(clippy::print_stderr)] diff --git a/crates/ruff/tests/analyze_graph.rs b/crates/ruff/tests/analyze_graph.rs index 2c300029ea..993ebf3b59 100644 --- a/crates/ruff/tests/analyze_graph.rs +++ b/crates/ruff/tests/analyze_graph.rs @@ -653,3 +653,133 @@ fn venv() -> Result<()> { Ok(()) } + +#[test] +fn notebook_basic() -> Result<()> { + let tempdir = TempDir::new()?; + let root = ChildPath::new(tempdir.path()); + + root.child("ruff").child("__init__.py").write_str("")?; + root.child("ruff") + .child("a.py") + .write_str(indoc::indoc! {r#" + def helper(): + pass + "#})?; + + // Create a basic notebook with a simple import + root.child("notebook.ipynb").write_str(indoc::indoc! 
{r#" + { + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from ruff.a import helper" + ] + } + ], + "metadata": { + "language_info": { + "name": "python", + "version": "3.12.0" + } + }, + "nbformat": 4, + "nbformat_minor": 5 + } + "#})?; + + insta::with_settings!({ + filters => INSTA_FILTERS.to_vec(), + }, { + assert_cmd_snapshot!(command().current_dir(&root), @r###" + success: true + exit_code: 0 + ----- stdout ----- + { + "notebook.ipynb": [ + "ruff/a.py" + ], + "ruff/__init__.py": [], + "ruff/a.py": [] + } + + ----- stderr ----- + "###); + }); + + Ok(()) +} + +#[test] +fn notebook_with_magic() -> Result<()> { + let tempdir = TempDir::new()?; + let root = ChildPath::new(tempdir.path()); + + root.child("ruff").child("__init__.py").write_str("")?; + root.child("ruff") + .child("a.py") + .write_str(indoc::indoc! {r#" + def helper(): + pass + "#})?; + + // Create a notebook with IPython magic commands and imports + root.child("notebook.ipynb").write_str(indoc::indoc! 
{r#" + { + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from ruff.a import helper" + ] + } + ], + "metadata": { + "language_info": { + "name": "python", + "version": "3.12.0" + } + }, + "nbformat": 4, + "nbformat_minor": 5 + } + "#})?; + + insta::with_settings!({ + filters => INSTA_FILTERS.to_vec(), + }, { + assert_cmd_snapshot!(command().current_dir(&root), @r###" + success: true + exit_code: 0 + ----- stdout ----- + { + "notebook.ipynb": [ + "ruff/a.py" + ], + "ruff/__init__.py": [], + "ruff/a.py": [] + } + + ----- stderr ----- + "###); + }); + + Ok(()) +} diff --git a/crates/ruff/tests/cli/lint.rs b/crates/ruff/tests/cli/lint.rs index ebd202b052..25500ed346 100644 --- a/crates/ruff/tests/cli/lint.rs +++ b/crates/ruff/tests/cli/lint.rs @@ -1760,6 +1760,64 @@ from foo import ( # noqa: F401 Ok(()) } +#[test] +fn add_noqa_with_reason() -> Result<()> { + let fixture = CliTest::new()?; + fixture.write_file( + "test.py", + r#"import os + +def foo(): + x = 1 +"#, + )?; + + assert_cmd_snapshot!(fixture + .check_command() + .arg("--add-noqa=TODO: fix") + .arg("--select=F401,F841") + .arg("test.py"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Added 2 noqa directives. 
+ "); + + let content = fs::read_to_string(fixture.root().join("test.py"))?; + insta::assert_snapshot!(content, @r" +import os # noqa: F401 TODO: fix + +def foo(): + x = 1 # noqa: F841 TODO: fix +"); + + Ok(()) +} + +#[test] +fn add_noqa_with_newline_in_reason() -> Result<()> { + let fixture = CliTest::new()?; + fixture.write_file("test.py", "import os\n")?; + + assert_cmd_snapshot!(fixture + .check_command() + .arg("--add-noqa=line1\nline2") + .arg("--select=F401") + .arg("test.py"), @r###" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + ruff failed + Cause: --add-noqa cannot contain newline characters + "###); + + Ok(()) +} + /// Infer `3.11` from `requires-python` in `pyproject.toml`. #[test] fn requires_python() -> Result<()> { diff --git a/crates/ruff_benchmark/benches/ty_walltime.rs b/crates/ruff_benchmark/benches/ty_walltime.rs index 47bff641d7..697a0c989d 100644 --- a/crates/ruff_benchmark/benches/ty_walltime.rs +++ b/crates/ruff_benchmark/benches/ty_walltime.rs @@ -71,16 +71,13 @@ impl Display for Benchmark<'_> { } } -fn check_project(db: &ProjectDatabase, max_diagnostics: usize) { +fn check_project(db: &ProjectDatabase, project_name: &str, max_diagnostics: usize) { let result = db.check(); let diagnostics = result.len(); assert!( diagnostics > 1 && diagnostics <= max_diagnostics, - "Expected between {} and {} diagnostics but got {}", - 1, - max_diagnostics, - diagnostics + "Expected between 1 and {max_diagnostics} diagnostics on project '{project_name}' but got {diagnostics}", ); } @@ -146,7 +143,7 @@ static FREQTRADE: Benchmark = Benchmark::new( max_dep_date: "2025-06-17", python_version: PythonVersion::PY312, }, - 400, + 525, ); static PANDAS: Benchmark = Benchmark::new( @@ -184,7 +181,7 @@ static PYDANTIC: Benchmark = Benchmark::new( max_dep_date: "2025-06-17", python_version: PythonVersion::PY39, }, - 1000, + 5000, ); static SYMPY: Benchmark = Benchmark::new( @@ -226,7 +223,7 @@ static STATIC_FRAME: Benchmark = 
Benchmark::new( max_dep_date: "2025-08-09", python_version: PythonVersion::PY311, }, - 800, + 900, ); #[track_caller] @@ -234,11 +231,11 @@ fn run_single_threaded(bencher: Bencher, benchmark: &Benchmark) { bencher .with_inputs(|| benchmark.setup_iteration()) .bench_local_refs(|db| { - check_project(db, benchmark.max_diagnostics); + check_project(db, benchmark.project.name, benchmark.max_diagnostics); }); } -#[bench(args=[&ALTAIR, &FREQTRADE, &PYDANTIC, &TANJUN], sample_size=2, sample_count=3)] +#[bench(args=[&ALTAIR, &FREQTRADE, &TANJUN], sample_size=2, sample_count=3)] fn small(bencher: Bencher, benchmark: &Benchmark) { run_single_threaded(bencher, benchmark); } @@ -248,12 +245,12 @@ fn medium(bencher: Bencher, benchmark: &Benchmark) { run_single_threaded(bencher, benchmark); } -#[bench(args=[&SYMPY], sample_size=1, sample_count=2)] +#[bench(args=[&SYMPY, &PYDANTIC], sample_size=1, sample_count=2)] fn large(bencher: Bencher, benchmark: &Benchmark) { run_single_threaded(bencher, benchmark); } -#[bench(args=[&PYDANTIC], sample_size=3, sample_count=8)] +#[bench(args=[&ALTAIR], sample_size=3, sample_count=8)] fn multithreaded(bencher: Bencher, benchmark: &Benchmark) { let thread_pool = ThreadPoolBuilder::new().build().unwrap(); @@ -261,7 +258,7 @@ fn multithreaded(bencher: Bencher, benchmark: &Benchmark) { .with_inputs(|| benchmark.setup_iteration()) .bench_local_values(|db| { thread_pool.install(|| { - check_project(&db, benchmark.max_diagnostics); + check_project(&db, benchmark.project.name, benchmark.max_diagnostics); db }) }); @@ -285,7 +282,7 @@ fn main() { // branch when looking up the ingredient index. 
{ let db = TANJUN.setup_iteration(); - check_project(&db, TANJUN.max_diagnostics); + check_project(&db, TANJUN.project.name, TANJUN.max_diagnostics); } divan::main(); diff --git a/crates/ruff_db/src/diagnostic/render/full.rs b/crates/ruff_db/src/diagnostic/render/full.rs index c87413a84e..0784297755 100644 --- a/crates/ruff_db/src/diagnostic/render/full.rs +++ b/crates/ruff_db/src/diagnostic/render/full.rs @@ -112,16 +112,16 @@ impl std::fmt::Display for Diff<'_> { // `None`, indicating a regular script file, all the lines will be in one "cell" under the // `None` key. let cells = if let Some(notebook_index) = &self.notebook_index { - let mut last_cell = OneIndexed::MIN; + let mut last_cell_index = OneIndexed::MIN; let mut cells: Vec<(Option, TextSize)> = Vec::new(); - for (row, cell) in notebook_index.iter() { - if cell != last_cell { - let offset = source_code.line_start(row); - cells.push((Some(last_cell), offset)); - last_cell = cell; + for cell in notebook_index.iter() { + if cell.cell_index() != last_cell_index { + let offset = source_code.line_start(cell.start_row()); + cells.push((Some(last_cell_index), offset)); + last_cell_index = cell.cell_index(); } } - cells.push((Some(last_cell), source_text.text_len())); + cells.push((Some(last_cell_index), source_text.text_len())); cells } else { vec![(None, source_text.text_len())] diff --git a/crates/ruff_db/src/files.rs b/crates/ruff_db/src/files.rs index 754b65642a..1c322419e0 100644 --- a/crates/ruff_db/src/files.rs +++ b/crates/ruff_db/src/files.rs @@ -470,6 +470,17 @@ impl File { self.source_type(db).is_stub() } + /// Returns `true` if the file is an `__init__.pyi` + pub fn is_package_stub(self, db: &dyn Db) -> bool { + self.path(db).as_str().ends_with("__init__.pyi") + } + + /// Returns `true` if the file is an `__init__.pyi` + pub fn is_package(self, db: &dyn Db) -> bool { + let path = self.path(db).as_str(); + path.ends_with("__init__.pyi") || path.ends_with("__init__.py") + } + pub fn source_type(self, 
db: &dyn Db) -> PySourceType { match self.path(db) { FilePath::System(path) => path diff --git a/crates/ruff_db/src/system/path.rs b/crates/ruff_db/src/system/path.rs index 71a92fb4c8..a387ae54f6 100644 --- a/crates/ruff_db/src/system/path.rs +++ b/crates/ruff_db/src/system/path.rs @@ -723,10 +723,11 @@ impl ruff_cache::CacheKey for SystemPathBuf { /// A slice of a virtual path on [`System`](super::System) (akin to [`str`]). #[repr(transparent)] +#[derive(Eq, PartialEq, Hash, PartialOrd, Ord)] pub struct SystemVirtualPath(str); impl SystemVirtualPath { - pub fn new(path: &str) -> &SystemVirtualPath { + pub const fn new(path: &str) -> &SystemVirtualPath { // SAFETY: SystemVirtualPath is marked as #[repr(transparent)] so the conversion from a // *const str to a *const SystemVirtualPath is valid. unsafe { &*(path as *const str as *const SystemVirtualPath) } @@ -767,8 +768,8 @@ pub struct SystemVirtualPathBuf(String); impl SystemVirtualPathBuf { #[inline] - pub fn as_path(&self) -> &SystemVirtualPath { - SystemVirtualPath::new(&self.0) + pub const fn as_path(&self) -> &SystemVirtualPath { + SystemVirtualPath::new(self.0.as_str()) } } @@ -852,6 +853,12 @@ impl ruff_cache::CacheKey for SystemVirtualPathBuf { } } +impl Borrow for SystemVirtualPathBuf { + fn borrow(&self) -> &SystemVirtualPath { + self.as_path() + } +} + /// Deduplicates identical paths and removes nested paths. 
/// /// # Examples diff --git a/crates/ruff_dev/src/generate_options.rs b/crates/ruff_dev/src/generate_options.rs index 8b8579d730..49a898d6fe 100644 --- a/crates/ruff_dev/src/generate_options.rs +++ b/crates/ruff_dev/src/generate_options.rs @@ -62,7 +62,7 @@ fn generate_set(output: &mut String, set: Set, parents: &mut Vec) { generate_set( output, Set::Named { - name: set_name.to_string(), + name: set_name.clone(), set: *sub_set, }, parents, diff --git a/crates/ruff_dev/src/generate_ty_options.rs b/crates/ruff_dev/src/generate_ty_options.rs index af7794a0b2..4e4ab0a949 100644 --- a/crates/ruff_dev/src/generate_ty_options.rs +++ b/crates/ruff_dev/src/generate_ty_options.rs @@ -104,7 +104,7 @@ fn generate_set(output: &mut String, set: Set, parents: &mut Vec) { generate_set( output, Set::Named { - name: set_name.to_string(), + name: set_name.clone(), set: *sub_set, }, parents, diff --git a/crates/ruff_formatter/src/builders.rs b/crates/ruff_formatter/src/builders.rs index 14da643355..ab60103d99 100644 --- a/crates/ruff_formatter/src/builders.rs +++ b/crates/ruff_formatter/src/builders.rs @@ -1006,7 +1006,7 @@ impl std::fmt::Debug for Align<'_, Context> { /// Block indents indent a block of code, such as in a function body, and therefore insert a line /// break before and after the content. /// -/// Doesn't create an indentation if the passed in content is [`FormatElement.is_empty`]. +/// Doesn't create an indentation if the passed in content is empty. /// /// # Examples /// diff --git a/crates/ruff_formatter/src/format_element.rs b/crates/ruff_formatter/src/format_element.rs index 529992c642..715eeb3cfd 100644 --- a/crates/ruff_formatter/src/format_element.rs +++ b/crates/ruff_formatter/src/format_element.rs @@ -487,7 +487,7 @@ pub trait FormatElements { /// Represents the width by adding 1 to the actual width so that the width can be represented by a [`NonZeroU32`], /// allowing [`TextWidth`] or [`Option`] fit in 4 bytes rather than 8. 
/// -/// This means that 2^32 can not be precisely represented and instead has the same value as 2^32-1. +/// This means that 2^32 cannot be precisely represented and instead has the same value as 2^32-1. /// This imprecision shouldn't matter in practice because either text are longer than any configured line width /// and thus, the text should break. #[derive(Copy, Clone, Debug, Eq, PartialEq)] diff --git a/crates/ruff_graph/src/lib.rs b/crates/ruff_graph/src/lib.rs index eaf307018d..377f1e89e9 100644 --- a/crates/ruff_graph/src/lib.rs +++ b/crates/ruff_graph/src/lib.rs @@ -3,8 +3,9 @@ use std::collections::{BTreeMap, BTreeSet}; use anyhow::Result; use ruff_db::system::{SystemPath, SystemPathBuf}; +use ruff_python_ast::PySourceType; use ruff_python_ast::helpers::to_module_path; -use ruff_python_parser::{Mode, ParseOptions, parse}; +use ruff_python_parser::{ParseOptions, parse}; use crate::collector::Collector; pub use crate::db::ModuleDb; @@ -24,13 +25,14 @@ impl ModuleImports { /// Detect the [`ModuleImports`] for a given Python file. pub fn detect( db: &ModuleDb, + source: &str, + source_type: PySourceType, path: &SystemPath, package: Option<&SystemPath>, string_imports: StringImports, ) -> Result { - // Read and parse the source code. - let source = std::fs::read_to_string(path)?; - let parsed = parse(&source, ParseOptions::from(Mode::Module))?; + // Parse the source code. 
+ let parsed = parse(source, ParseOptions::from(source_type))?; let module_path = package.and_then(|package| to_module_path(package.as_std_path(), path.as_std_path())); diff --git a/crates/ruff_linter/Cargo.toml b/crates/ruff_linter/Cargo.toml index bc25d4574f..d1f48c4276 100644 --- a/crates/ruff_linter/Cargo.toml +++ b/crates/ruff_linter/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ruff_linter" -version = "0.14.2" +version = "0.14.4" publish = false authors = { workspace = true } edition = { workspace = true } diff --git a/crates/ruff_linter/resources/test/fixtures/airflow/AIR301_args.py b/crates/ruff_linter/resources/test/fixtures/airflow/AIR301_args.py index ce35d79338..e275a54bcd 100644 --- a/crates/ruff_linter/resources/test/fixtures/airflow/AIR301_args.py +++ b/crates/ruff_linter/resources/test/fixtures/airflow/AIR301_args.py @@ -22,6 +22,7 @@ DAG(dag_id="class_schedule_interval", schedule_interval="@hourly") DAG(dag_id="class_timetable", timetable=NullTimetable()) +DAG(dag_id="class_concurrency", concurrency=12) DAG(dag_id="class_fail_stop", fail_stop=True) diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_bugbear/B009_B010.py b/crates/ruff_linter/resources/test/fixtures/flake8_bugbear/B009_B010.py index ce6e5c291e..3562a5a989 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_bugbear/B009_B010.py +++ b/crates/ruff_linter/resources/test/fixtures/flake8_bugbear/B009_B010.py @@ -70,3 +70,12 @@ builtins.getattr(foo, "bar") # Regression test for: https://github.com/astral-sh/ruff/issues/18353 setattr(foo, "__debug__", 0) + +# Regression test for: https://github.com/astral-sh/ruff/issues/21126 +# Non-NFKC attribute names should be marked as unsafe. Python normalizes identifiers in +# attribute access (obj.attr) using NFKC, but does not normalize string +# arguments passed to getattr/setattr. Rewriting `getattr(ns, "ſ")` to +# `ns.ſ` would be interpreted as `ns.s` at runtime, changing behavior. 
+# Example: the long s character "ſ" normalizes to "s" under NFKC. +getattr(foo, "ſ") +setattr(foo, "ſ", 1) diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_simplify/SIM113.py b/crates/ruff_linter/resources/test/fixtures/flake8_simplify/SIM113.py index e0ac4190ed..00771b5f70 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_simplify/SIM113.py +++ b/crates/ruff_linter/resources/test/fixtures/flake8_simplify/SIM113.py @@ -46,7 +46,8 @@ def func(): def func(): - # OK (index doesn't start at 0 + # SIM113 + # https://github.com/astral-sh/ruff/pull/21395 idx = 10 for x in range(5): g(x, idx) diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_simplify/SIM222.py b/crates/ruff_linter/resources/test/fixtures/flake8_simplify/SIM222.py index e1e299c98e..71fc606386 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_simplify/SIM222.py +++ b/crates/ruff_linter/resources/test/fixtures/flake8_simplify/SIM222.py @@ -204,3 +204,15 @@ x = 1 print(f"{x=}" or "bar") # SIM222 (lambda: 1) or True # SIM222 (i for i in range(1)) or "bar" # SIM222 + +# https://github.com/astral-sh/ruff/issues/21136 +def get_items(): + return tuple(item for item in Item.objects.all()) or None # OK + + +def get_items_list(): + return tuple([item for item in items]) or None # OK + + +def get_items_set(): + return tuple({item for item in items}) or None # OK diff --git a/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC102_numpy.py b/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC102_numpy.py index ea93e16cc1..9bdac6788f 100644 --- a/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC102_numpy.py +++ b/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC102_numpy.py @@ -371,6 +371,61 @@ class Foo: """ return +# DOC102 - Test case from issue #20959: comma-separated parameters +def leq(x: object, y: object) -> bool: + """Compare two objects for loose equality. + + Parameters + ---------- + x1, x2 : object + Objects. 
+ + Returns + ------- + bool + Whether the objects are identical or equal. + """ + return x is y or x == y + + +# OK - comma-separated parameters that match function signature +def compare_values(x1: int, x2: int) -> bool: + """Compare two integer values. + + Parameters + ---------- + x1, x2 : int + Values to compare. + + Returns + ------- + bool + True if values are equal. + """ + return x1 == x2 + + +# DOC102 - mixed comma-separated and regular parameters +def process_data(data, x1: str, x2: str) -> str: + """Process data with multiple string parameters. + + Parameters + ---------- + data : list + Input data to process. + x1, x2 : str + String parameters for processing. + extra_param : str + Extra parameter not in signature. + + Returns + ------- + str + Processed result. + """ + return f"{x1}{x2}{len(data)}" + + # OK def baz(x: int) -> int: """ @@ -389,3 +444,21 @@ def baz(x: int) -> int: int """ return x + + +# OK - comma-separated parameters without type annotations +def add_numbers(a, b): + """ + Adds two numbers and returns the result. + + Parameters + ---------- + a, b + The numbers to add. + + Returns + ------- + int + The sum of the two numbers. + """ + return a + b diff --git a/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC502_google.py b/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC502_google.py index 9709d9ff53..916b11c634 100644 --- a/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC502_google.py +++ b/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC502_google.py @@ -83,6 +83,37 @@ def calculate_speed(distance: float, time: float) -> float: raise +# DOC502 regression for Sphinx directive after Raises (issue #18959) +def foo(): + """First line. + + Raises: + ValueError: + some text + + .. versionadded:: 0.7.0 + The ``init_kwargs`` argument. + """ + raise ValueError + + +# DOC502 regression for following section with colons +def example_with_following_section(): + """Summary. 
+ + Returns: + str: The resulting expression. + + Raises: + ValueError: If the unit is not valid. + + Relation to `time_range_lookup`: + - Handles the "start of" modifier. + - Example: "start of month" → `DATETRUNC()`. + """ + raise ValueError + + # This should NOT trigger DOC502 because OSError is explicitly re-raised def f(): """Do nothing. diff --git a/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC502_numpy.py b/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC502_numpy.py index 5e8bf5f36e..c32814597d 100644 --- a/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC502_numpy.py +++ b/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC502_numpy.py @@ -117,3 +117,33 @@ def calculate_speed(distance: float, time: float) -> float: except TypeError: print("Not a number? Shame on you!") raise + + +# DOC502 regression for Sphinx directive after Raises (issue #18959) +def foo(): + """First line. + + Raises + ------ + ValueError + some text + + .. versionadded:: 0.7.0 + The ``init_kwargs`` argument. + """ + raise ValueError + +# Make sure we don't bail out on a Sphinx directive in the description of one +# of the exceptions +def foo(): + """First line. + + Raises + ------ + ValueError + some text + .. 
math:: e^{xception} + ZeroDivisionError + Will not be raised, DOC502 + """ + raise ValueError diff --git a/crates/ruff_linter/resources/test/fixtures/pyupgrade/UP029_2.py b/crates/ruff_linter/resources/test/fixtures/pyupgrade/UP029_2.py new file mode 100644 index 0000000000..34ff94fa90 --- /dev/null +++ b/crates/ruff_linter/resources/test/fixtures/pyupgrade/UP029_2.py @@ -0,0 +1,5 @@ +from .builtins import next +from ..builtins import str +from ...builtins import int +from .builtins import next as _next + diff --git a/crates/ruff_linter/resources/test/fixtures/refurb/FURB101.py b/crates/ruff_linter/resources/test/fixtures/refurb/FURB101.py index 31b1ccd341..77306cfe18 100644 --- a/crates/ruff_linter/resources/test/fixtures/refurb/FURB101.py +++ b/crates/ruff_linter/resources/test/fixtures/refurb/FURB101.py @@ -125,3 +125,18 @@ with open(*filename, mode="r") as f: # `buffering`. with open(*filename, file="file.txt", mode="r") as f: x = f.read() + +# FURB101 +with open("file.txt", encoding="utf-8") as f: + contents: str = f.read() + +# FURB101 but no fix because it would remove the assignment to `x` +with open("file.txt", encoding="utf-8") as f: + contents, x = f.read(), 2 + +# FURB101 but no fix because it would remove the `process_contents` call +with open("file.txt", encoding="utf-8") as f: + contents = process_contents(f.read()) + +with open("file.txt", encoding="utf-8") as f: + contents: str = process_contents(f.read()) diff --git a/crates/ruff_linter/resources/test/fixtures/refurb/FURB103.py b/crates/ruff_linter/resources/test/fixtures/refurb/FURB103.py index b6d8e1d034..35d9600d41 100644 --- a/crates/ruff_linter/resources/test/fixtures/refurb/FURB103.py +++ b/crates/ruff_linter/resources/test/fixtures/refurb/FURB103.py @@ -145,3 +145,11 @@ with open("file.txt", "w") as f: with open("file.txt", "w") as f: for line in text: f.write(line) + +# See: https://github.com/astral-sh/ruff/issues/20785 +import json + +data = {"price": 100} + +with open("test.json", "wb") 
as f: + f.write(json.dumps(data, indent=4).encode("utf-8")) \ No newline at end of file diff --git a/crates/ruff_linter/resources/test/fixtures/refurb/FURB105.py b/crates/ruff_linter/resources/test/fixtures/refurb/FURB105.py index 86a6584f3a..aca23813b3 100644 --- a/crates/ruff_linter/resources/test/fixtures/refurb/FURB105.py +++ b/crates/ruff_linter/resources/test/fixtures/refurb/FURB105.py @@ -19,6 +19,9 @@ print("", *args, sep="") print("", **kwargs) print(sep="\t") print(sep=print(1)) +print(f"") +print(f"", sep=",") +print(f"", end="bar") # OK. @@ -33,3 +36,4 @@ print("foo", "", sep=",") print("foo", "", "bar", "", sep=",") print("", "", **kwargs) print(*args, sep=",") +print(f"foo") diff --git a/crates/ruff_linter/resources/test/fixtures/refurb/FURB157.py b/crates/ruff_linter/resources/test/fixtures/refurb/FURB157.py index d795fd1941..db49315f54 100644 --- a/crates/ruff_linter/resources/test/fixtures/refurb/FURB157.py +++ b/crates/ruff_linter/resources/test/fixtures/refurb/FURB157.py @@ -85,3 +85,9 @@ Decimal("1234_5678") # Safe fix: preserves non-thousands separators Decimal("0001_2345") Decimal("000_1_2345") Decimal("000_000") + +# Test cases for underscores before sign +# https://github.com/astral-sh/ruff/issues/21186 +Decimal("_-1") # Should flag as verbose +Decimal("_+1") # Should flag as verbose +Decimal("_-1_000") # Should flag as verbose diff --git a/crates/ruff_linter/resources/test/fixtures/refurb/FURB164.py b/crates/ruff_linter/resources/test/fixtures/refurb/FURB164.py index 9a03919ca9..81422d2cf8 100644 --- a/crates/ruff_linter/resources/test/fixtures/refurb/FURB164.py +++ b/crates/ruff_linter/resources/test/fixtures/refurb/FURB164.py @@ -64,3 +64,8 @@ _ = Decimal.from_float(True) _ = Decimal.from_float(float("-nan")) _ = Decimal.from_float(float("\x2dnan")) _ = Decimal.from_float(float("\N{HYPHEN-MINUS}nan")) + +# See: https://github.com/astral-sh/ruff/issues/21257 +# fixes must be safe +_ = Fraction.from_float(f=4.2) +_ = 
Fraction.from_decimal(dec=4) \ No newline at end of file diff --git a/crates/ruff_linter/resources/test/fixtures/ruff/RUF057.py b/crates/ruff_linter/resources/test/fixtures/ruff/RUF057.py index 2db91ac322..bb43b6d1d4 100644 --- a/crates/ruff_linter/resources/test/fixtures/ruff/RUF057.py +++ b/crates/ruff_linter/resources/test/fixtures/ruff/RUF057.py @@ -81,3 +81,7 @@ round(# a comment round( 17 # a comment ) + +# See: https://github.com/astral-sh/ruff/issues/21209 +print(round(125, **{"ndigits": -2})) +print(round(125, *[-2])) \ No newline at end of file diff --git a/crates/ruff_linter/resources/test/fixtures/ruff/RUF065.py b/crates/ruff_linter/resources/test/fixtures/ruff/RUF065_0.py similarity index 100% rename from crates/ruff_linter/resources/test/fixtures/ruff/RUF065.py rename to crates/ruff_linter/resources/test/fixtures/ruff/RUF065_0.py diff --git a/crates/ruff_linter/resources/test/fixtures/ruff/RUF065_1.py b/crates/ruff_linter/resources/test/fixtures/ruff/RUF065_1.py new file mode 100644 index 0000000000..048a5be48c --- /dev/null +++ b/crates/ruff_linter/resources/test/fixtures/ruff/RUF065_1.py @@ -0,0 +1,18 @@ +import logging + +# Test cases for str() that should NOT be flagged (issue #21315) +# str() with no arguments - should not be flagged +logging.warning("%s", str()) + +# str() with multiple arguments - should not be flagged +logging.warning("%s", str(b"\xe2\x9a\xa0", "utf-8")) + +# str() with starred arguments - should not be flagged +logging.warning("%s", str(*(b"\xf0\x9f\x9a\xa7", "utf-8"))) + +# str() with keyword unpacking - should not be flagged +logging.warning("%s", str(**{"object": b"\xf0\x9f\x9a\xa8", "encoding": "utf-8"})) + +# str() with single keyword argument - should be flagged (equivalent to str("!")) +logging.warning("%s", str(object="!")) + diff --git a/crates/ruff_linter/src/checkers/ast/analyze/statement.rs b/crates/ruff_linter/src/checkers/ast/analyze/statement.rs index 7c0037e10d..1232dc52a9 100644 --- 
a/crates/ruff_linter/src/checkers/ast/analyze/statement.rs +++ b/crates/ruff_linter/src/checkers/ast/analyze/statement.rs @@ -43,9 +43,6 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { pycodestyle::rules::ambiguous_variable_name(checker, name, name.range()); } } - if checker.is_rule_enabled(Rule::NonlocalWithoutBinding) { - pylint::rules::nonlocal_without_binding(checker, nonlocal); - } if checker.is_rule_enabled(Rule::NonlocalAndGlobal) { pylint::rules::nonlocal_and_global(checker, nonlocal); } @@ -720,7 +717,9 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { } if checker.is_rule_enabled(Rule::UnnecessaryBuiltinImport) { if let Some(module) = module { - pyupgrade::rules::unnecessary_builtin_import(checker, stmt, module, names); + pyupgrade::rules::unnecessary_builtin_import( + checker, stmt, module, names, level, + ); } } if checker.any_rule_enabled(&[ diff --git a/crates/ruff_linter/src/checkers/ast/mod.rs b/crates/ruff_linter/src/checkers/ast/mod.rs index 2280dc33cb..f3315b3b47 100644 --- a/crates/ruff_linter/src/checkers/ast/mod.rs +++ b/crates/ruff_linter/src/checkers/ast/mod.rs @@ -73,7 +73,8 @@ use crate::rules::pyflakes::rules::{ UndefinedLocalWithNestedImportStarUsage, YieldOutsideFunction, }; use crate::rules::pylint::rules::{ - AwaitOutsideAsync, LoadBeforeGlobalDeclaration, YieldFromInAsyncFunction, + AwaitOutsideAsync, LoadBeforeGlobalDeclaration, NonlocalWithoutBinding, + YieldFromInAsyncFunction, }; use crate::rules::{flake8_pyi, flake8_type_checking, pyflakes, pyupgrade}; use crate::settings::rule_table::RuleTable; @@ -641,6 +642,10 @@ impl SemanticSyntaxContext for Checker<'_> { self.semantic.global(name) } + fn has_nonlocal_binding(&self, name: &str) -> bool { + self.semantic.nonlocal(name).is_some() + } + fn report_semantic_error(&self, error: SemanticSyntaxError) { match error.kind { SemanticSyntaxErrorKind::LateFutureImport => { @@ -717,6 +722,12 @@ impl SemanticSyntaxContext for Checker<'_> { 
self.report_diagnostic(pyflakes::rules::ContinueOutsideLoop, error.range); } } + SemanticSyntaxErrorKind::NonlocalWithoutBinding(name) => { + // PLE0117 + if self.is_rule_enabled(Rule::NonlocalWithoutBinding) { + self.report_diagnostic(NonlocalWithoutBinding { name }, error.range); + } + } SemanticSyntaxErrorKind::ReboundComprehensionVariable | SemanticSyntaxErrorKind::DuplicateTypeParameter | SemanticSyntaxErrorKind::MultipleCaseAssignment(_) diff --git a/crates/ruff_linter/src/line_width.rs b/crates/ruff_linter/src/line_width.rs index c8cf857621..80915c9f59 100644 --- a/crates/ruff_linter/src/line_width.rs +++ b/crates/ruff_linter/src/line_width.rs @@ -51,13 +51,17 @@ impl<'de> serde::Deserialize<'de> for LineLength { where D: serde::Deserializer<'de>, { - let value = u16::deserialize(deserializer)?; - Self::try_from(value).map_err(|_| { - serde::de::Error::custom(format!( - "line-length must be between 1 and {} (got {value})", - Self::MAX, - )) - }) + let value = i64::deserialize(deserializer)?; + + u16::try_from(value) + .ok() + .and_then(|u16_value| Self::try_from(u16_value).ok()) + .ok_or_else(|| { + serde::de::Error::custom(format!( + "line-length must be between 1 and {} (got {value})", + Self::MAX, + )) + }) } } diff --git a/crates/ruff_linter/src/linter.rs b/crates/ruff_linter/src/linter.rs index 2e4f284bee..3ec070dd26 100644 --- a/crates/ruff_linter/src/linter.rs +++ b/crates/ruff_linter/src/linter.rs @@ -377,6 +377,7 @@ pub fn add_noqa_to_path( source_kind: &SourceKind, source_type: PySourceType, settings: &LinterSettings, + reason: Option<&str>, ) -> Result { // Parse once. 
let target_version = settings.resolve_target_version(path); @@ -425,6 +426,7 @@ pub fn add_noqa_to_path( &settings.external, &directives.noqa_line_for, stylist.line_ending(), + reason, ) } diff --git a/crates/ruff_linter/src/message/snapshots/ruff_linter__message__grouped__tests__syntax_errors.snap b/crates/ruff_linter/src/message/snapshots/ruff_linter__message__grouped__tests__syntax_errors.snap index 1d077b7321..f22a079523 100644 --- a/crates/ruff_linter/src/message/snapshots/ruff_linter__message__grouped__tests__syntax_errors.snap +++ b/crates/ruff_linter/src/message/snapshots/ruff_linter__message__grouped__tests__syntax_errors.snap @@ -4,4 +4,4 @@ expression: content --- syntax_errors.py: 1:15 invalid-syntax: Expected one or more symbol names after import - 3:12 invalid-syntax: Expected ')', found newline + 3:12 invalid-syntax: Expected `)`, found newline diff --git a/crates/ruff_linter/src/noqa.rs b/crates/ruff_linter/src/noqa.rs index 606ac5ad3b..da9535817e 100644 --- a/crates/ruff_linter/src/noqa.rs +++ b/crates/ruff_linter/src/noqa.rs @@ -39,7 +39,7 @@ pub fn generate_noqa_edits( let exemption = FileExemption::from(&file_directives); let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator); let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for); - build_noqa_edits_by_diagnostic(comments, locator, line_ending) + build_noqa_edits_by_diagnostic(comments, locator, line_ending, None) } /// A directive to ignore a set of rules either for a given line of Python source code or an entire file (e.g., @@ -715,6 +715,7 @@ impl Display for LexicalError { impl Error for LexicalError {} /// Adds noqa comments to suppress all messages of a file. 
+#[expect(clippy::too_many_arguments)] pub(crate) fn add_noqa( path: &Path, diagnostics: &[Diagnostic], @@ -723,6 +724,7 @@ pub(crate) fn add_noqa( external: &[String], noqa_line_for: &NoqaMapping, line_ending: LineEnding, + reason: Option<&str>, ) -> Result { let (count, output) = add_noqa_inner( path, @@ -732,12 +734,14 @@ pub(crate) fn add_noqa( external, noqa_line_for, line_ending, + reason, ); fs::write(path, output)?; Ok(count) } +#[expect(clippy::too_many_arguments)] fn add_noqa_inner( path: &Path, diagnostics: &[Diagnostic], @@ -746,6 +750,7 @@ fn add_noqa_inner( external: &[String], noqa_line_for: &NoqaMapping, line_ending: LineEnding, + reason: Option<&str>, ) -> (usize, String) { let mut count = 0; @@ -757,7 +762,7 @@ fn add_noqa_inner( let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for); - let edits = build_noqa_edits_by_line(comments, locator, line_ending); + let edits = build_noqa_edits_by_line(comments, locator, line_ending, reason); let contents = locator.contents(); @@ -783,6 +788,7 @@ fn build_noqa_edits_by_diagnostic( comments: Vec>, locator: &Locator, line_ending: LineEnding, + reason: Option<&str>, ) -> Vec> { let mut edits = Vec::default(); for comment in comments { @@ -794,6 +800,7 @@ fn build_noqa_edits_by_diagnostic( FxHashSet::from_iter([comment.code]), locator, line_ending, + reason, ) { edits.push(Some(noqa_edit.into_edit())); } @@ -808,6 +815,7 @@ fn build_noqa_edits_by_line<'a>( comments: Vec>>, locator: &Locator, line_ending: LineEnding, + reason: Option<&'a str>, ) -> BTreeMap> { let mut comments_by_line = BTreeMap::default(); for comment in comments.into_iter().flatten() { @@ -831,6 +839,7 @@ fn build_noqa_edits_by_line<'a>( .collect(), locator, line_ending, + reason, ) { edits.insert(offset, edit); } @@ -927,6 +936,7 @@ struct NoqaEdit<'a> { noqa_codes: FxHashSet<&'a SecondaryCode>, codes: Option<&'a Codes<'a>>, line_ending: LineEnding, + reason: Option<&'a str>, } impl NoqaEdit<'_> { @@ 
-954,6 +964,9 @@ impl NoqaEdit<'_> { push_codes(writer, self.noqa_codes.iter().sorted_unstable()); } } + if let Some(reason) = self.reason { + write!(writer, " {reason}").unwrap(); + } write!(writer, "{}", self.line_ending.as_str()).unwrap(); } } @@ -970,6 +983,7 @@ fn generate_noqa_edit<'a>( noqa_codes: FxHashSet<&'a SecondaryCode>, locator: &Locator, line_ending: LineEnding, + reason: Option<&'a str>, ) -> Option> { let line_range = locator.full_line_range(offset); @@ -999,6 +1013,7 @@ fn generate_noqa_edit<'a>( noqa_codes, codes, line_ending, + reason, }) } @@ -2832,6 +2847,7 @@ mod tests { &[], &noqa_line_for, LineEnding::Lf, + None, ); assert_eq!(count, 0); assert_eq!(output, format!("{contents}")); @@ -2855,6 +2871,7 @@ mod tests { &[], &noqa_line_for, LineEnding::Lf, + None, ); assert_eq!(count, 1); assert_eq!(output, "x = 1 # noqa: F841\n"); @@ -2885,6 +2902,7 @@ mod tests { &[], &noqa_line_for, LineEnding::Lf, + None, ); assert_eq!(count, 1); assert_eq!(output, "x = 1 # noqa: E741, F841\n"); @@ -2915,6 +2933,7 @@ mod tests { &[], &noqa_line_for, LineEnding::Lf, + None, ); assert_eq!(count, 0); assert_eq!(output, "x = 1 # noqa"); diff --git a/crates/ruff_linter/src/preview.rs b/crates/ruff_linter/src/preview.rs index d84035c762..239466f599 100644 --- a/crates/ruff_linter/src/preview.rs +++ b/crates/ruff_linter/src/preview.rs @@ -261,16 +261,6 @@ pub(crate) const fn is_b006_unsafe_fix_preserve_assignment_expr_enabled( settings.preview.is_enabled() } -// https://github.com/astral-sh/ruff/pull/20520 -pub(crate) const fn is_fix_read_whole_file_enabled(settings: &LinterSettings) -> bool { - settings.preview.is_enabled() -} - -// https://github.com/astral-sh/ruff/pull/20520 -pub(crate) const fn is_fix_write_whole_file_enabled(settings: &LinterSettings) -> bool { - settings.preview.is_enabled() -} - pub(crate) const fn is_typing_extensions_str_alias_enabled(settings: &LinterSettings) -> bool { settings.preview.is_enabled() } @@ -279,3 +269,8 @@ pub(crate) const fn 
is_typing_extensions_str_alias_enabled(settings: &LinterSett pub(crate) const fn is_extended_i18n_function_matching_enabled(settings: &LinterSettings) -> bool { settings.preview.is_enabled() } + +// https://github.com/astral-sh/ruff/pull/21395 +pub(crate) const fn is_enumerate_for_loop_int_index_enabled(settings: &LinterSettings) -> bool { + settings.preview.is_enabled() +} diff --git a/crates/ruff_linter/src/rules/airflow/rules/removal_in_3.rs b/crates/ruff_linter/src/rules/airflow/rules/removal_in_3.rs index 78c89da0b4..562f37230d 100644 --- a/crates/ruff_linter/src/rules/airflow/rules/removal_in_3.rs +++ b/crates/ruff_linter/src/rules/airflow/rules/removal_in_3.rs @@ -196,6 +196,7 @@ fn check_call_arguments(checker: &Checker, qualified_name: &QualifiedName, argum match qualified_name.segments() { ["airflow", .., "DAG" | "dag"] => { // with replacement + diagnostic_for_argument(checker, arguments, "concurrency", Some("max_active_tasks")); diagnostic_for_argument(checker, arguments, "fail_stop", Some("fail_fast")); diagnostic_for_argument(checker, arguments, "schedule_interval", Some("schedule")); diagnostic_for_argument(checker, arguments, "timetable", Some("schedule")); diff --git a/crates/ruff_linter/src/rules/airflow/snapshots/ruff_linter__rules__airflow__tests__AIR301_AIR301_args.py.snap b/crates/ruff_linter/src/rules/airflow/snapshots/ruff_linter__rules__airflow__tests__AIR301_AIR301_args.py.snap index 6f783edc9f..e0daf99000 100644 --- a/crates/ruff_linter/src/rules/airflow/snapshots/ruff_linter__rules__airflow__tests__AIR301_AIR301_args.py.snap +++ b/crates/ruff_linter/src/rules/airflow/snapshots/ruff_linter__rules__airflow__tests__AIR301_AIR301_args.py.snap @@ -28,6 +28,8 @@ AIR301 [*] `timetable` is removed in Airflow 3.0 22 | 23 | DAG(dag_id="class_timetable", timetable=NullTimetable()) | ^^^^^^^^^ +24 | +25 | DAG(dag_id="class_concurrency", concurrency=12) | help: Use `schedule` instead 20 | @@ -36,249 +38,271 @@ help: Use `schedule` instead - 
DAG(dag_id="class_timetable", timetable=NullTimetable()) 23 + DAG(dag_id="class_timetable", schedule=NullTimetable()) 24 | -25 | -26 | DAG(dag_id="class_fail_stop", fail_stop=True) +25 | DAG(dag_id="class_concurrency", concurrency=12) +26 | -AIR301 [*] `fail_stop` is removed in Airflow 3.0 - --> AIR301_args.py:26:31 +AIR301 [*] `concurrency` is removed in Airflow 3.0 + --> AIR301_args.py:25:33 | -26 | DAG(dag_id="class_fail_stop", fail_stop=True) - | ^^^^^^^^^ -27 | -28 | DAG(dag_id="class_default_view", default_view="dag_default_view") +23 | DAG(dag_id="class_timetable", timetable=NullTimetable()) +24 | +25 | DAG(dag_id="class_concurrency", concurrency=12) + | ^^^^^^^^^^^ +26 | +27 | DAG(dag_id="class_fail_stop", fail_stop=True) | -help: Use `fail_fast` instead +help: Use `max_active_tasks` instead +22 | 23 | DAG(dag_id="class_timetable", timetable=NullTimetable()) 24 | -25 | + - DAG(dag_id="class_concurrency", concurrency=12) +25 + DAG(dag_id="class_concurrency", max_active_tasks=12) +26 | +27 | DAG(dag_id="class_fail_stop", fail_stop=True) +28 | + +AIR301 [*] `fail_stop` is removed in Airflow 3.0 + --> AIR301_args.py:27:31 + | +25 | DAG(dag_id="class_concurrency", concurrency=12) +26 | +27 | DAG(dag_id="class_fail_stop", fail_stop=True) + | ^^^^^^^^^ +28 | +29 | DAG(dag_id="class_default_view", default_view="dag_default_view") + | +help: Use `fail_fast` instead +24 | +25 | DAG(dag_id="class_concurrency", concurrency=12) +26 | - DAG(dag_id="class_fail_stop", fail_stop=True) -26 + DAG(dag_id="class_fail_stop", fail_fast=True) -27 | -28 | DAG(dag_id="class_default_view", default_view="dag_default_view") -29 | +27 + DAG(dag_id="class_fail_stop", fail_fast=True) +28 | +29 | DAG(dag_id="class_default_view", default_view="dag_default_view") +30 | AIR301 `default_view` is removed in Airflow 3.0 - --> AIR301_args.py:28:34 + --> AIR301_args.py:29:34 | -26 | DAG(dag_id="class_fail_stop", fail_stop=True) -27 | -28 | DAG(dag_id="class_default_view", 
default_view="dag_default_view") +27 | DAG(dag_id="class_fail_stop", fail_stop=True) +28 | +29 | DAG(dag_id="class_default_view", default_view="dag_default_view") | ^^^^^^^^^^^^ -29 | -30 | DAG(dag_id="class_orientation", orientation="BT") +30 | +31 | DAG(dag_id="class_orientation", orientation="BT") | AIR301 `orientation` is removed in Airflow 3.0 - --> AIR301_args.py:30:33 + --> AIR301_args.py:31:33 | -28 | DAG(dag_id="class_default_view", default_view="dag_default_view") -29 | -30 | DAG(dag_id="class_orientation", orientation="BT") +29 | DAG(dag_id="class_default_view", default_view="dag_default_view") +30 | +31 | DAG(dag_id="class_orientation", orientation="BT") | ^^^^^^^^^^^ -31 | -32 | allow_future_exec_dates_dag = DAG(dag_id="class_allow_future_exec_dates") +32 | +33 | allow_future_exec_dates_dag = DAG(dag_id="class_allow_future_exec_dates") | AIR301 [*] `schedule_interval` is removed in Airflow 3.0 - --> AIR301_args.py:41:6 + --> AIR301_args.py:42:6 | -41 | @dag(schedule_interval="0 * * * *") +42 | @dag(schedule_interval="0 * * * *") | ^^^^^^^^^^^^^^^^^ -42 | def decorator_schedule_interval(): -43 | pass +43 | def decorator_schedule_interval(): +44 | pass | help: Use `schedule` instead -38 | pass -39 | +39 | pass 40 | +41 | - @dag(schedule_interval="0 * * * *") -41 + @dag(schedule="0 * * * *") -42 | def decorator_schedule_interval(): -43 | pass -44 | +42 + @dag(schedule="0 * * * *") +43 | def decorator_schedule_interval(): +44 | pass +45 | AIR301 [*] `timetable` is removed in Airflow 3.0 - --> AIR301_args.py:46:6 + --> AIR301_args.py:47:6 | -46 | @dag(timetable=NullTimetable()) +47 | @dag(timetable=NullTimetable()) | ^^^^^^^^^ -47 | def decorator_timetable(): -48 | pass +48 | def decorator_timetable(): +49 | pass | help: Use `schedule` instead -43 | pass -44 | +44 | pass 45 | +46 | - @dag(timetable=NullTimetable()) -46 + @dag(schedule=NullTimetable()) -47 | def decorator_timetable(): -48 | pass -49 | +47 + @dag(schedule=NullTimetable()) +48 | def 
decorator_timetable(): +49 | pass +50 | AIR301 [*] `execution_date` is removed in Airflow 3.0 - --> AIR301_args.py:54:62 + --> AIR301_args.py:55:62 | -52 | def decorator_deprecated_operator_args(): -53 | trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator( -54 | task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04" +53 | def decorator_deprecated_operator_args(): +54 | trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator( +55 | task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04" | ^^^^^^^^^^^^^^ -55 | ) -56 | trigger_dagrun_op2 = TriggerDagRunOperator( +56 | ) +57 | trigger_dagrun_op2 = TriggerDagRunOperator( | help: Use `logical_date` instead -51 | @dag() -52 | def decorator_deprecated_operator_args(): -53 | trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator( +52 | @dag() +53 | def decorator_deprecated_operator_args(): +54 | trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator( - task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04" -54 + task_id="trigger_dagrun_op1", trigger_dag_id="test", logical_date="2024-12-04" -55 | ) -56 | trigger_dagrun_op2 = TriggerDagRunOperator( -57 | task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04" +55 + task_id="trigger_dagrun_op1", trigger_dag_id="test", logical_date="2024-12-04" +56 | ) +57 | trigger_dagrun_op2 = TriggerDagRunOperator( +58 | task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04" AIR301 [*] `execution_date` is removed in Airflow 3.0 - --> AIR301_args.py:57:62 + --> AIR301_args.py:58:62 | -55 | ) -56 | trigger_dagrun_op2 = TriggerDagRunOperator( -57 | task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04" +56 | ) +57 | trigger_dagrun_op2 = TriggerDagRunOperator( +58 | task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04" | ^^^^^^^^^^^^^^ -58 | ) +59 | ) | help: Use `logical_date` instead -54 | 
task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04" -55 | ) -56 | trigger_dagrun_op2 = TriggerDagRunOperator( +55 | task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04" +56 | ) +57 | trigger_dagrun_op2 = TriggerDagRunOperator( - task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04" -57 + task_id="trigger_dagrun_op2", trigger_dag_id="test", logical_date="2024-12-04" -58 | ) -59 | -60 | branch_dt_op = datetime.BranchDateTimeOperator( +58 + task_id="trigger_dagrun_op2", trigger_dag_id="test", logical_date="2024-12-04" +59 | ) +60 | +61 | branch_dt_op = datetime.BranchDateTimeOperator( AIR301 [*] `use_task_execution_day` is removed in Airflow 3.0 - --> AIR301_args.py:61:33 + --> AIR301_args.py:62:33 | -60 | branch_dt_op = datetime.BranchDateTimeOperator( -61 | task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5 +61 | branch_dt_op = datetime.BranchDateTimeOperator( +62 | task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5 | ^^^^^^^^^^^^^^^^^^^^^^ -62 | ) -63 | branch_dt_op2 = BranchDateTimeOperator( +63 | ) +64 | branch_dt_op2 = BranchDateTimeOperator( | help: Use `use_task_logical_date` instead -58 | ) -59 | -60 | branch_dt_op = datetime.BranchDateTimeOperator( +59 | ) +60 | +61 | branch_dt_op = datetime.BranchDateTimeOperator( - task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5 -61 + task_id="branch_dt_op", use_task_logical_date=True, task_concurrency=5 -62 | ) -63 | branch_dt_op2 = BranchDateTimeOperator( -64 | task_id="branch_dt_op2", +62 + task_id="branch_dt_op", use_task_logical_date=True, task_concurrency=5 +63 | ) +64 | branch_dt_op2 = BranchDateTimeOperator( +65 | task_id="branch_dt_op2", AIR301 [*] `task_concurrency` is removed in Airflow 3.0 - --> AIR301_args.py:61:62 + --> AIR301_args.py:62:62 | -60 | branch_dt_op = datetime.BranchDateTimeOperator( -61 | task_id="branch_dt_op", use_task_execution_day=True, 
task_concurrency=5 +61 | branch_dt_op = datetime.BranchDateTimeOperator( +62 | task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5 | ^^^^^^^^^^^^^^^^ -62 | ) -63 | branch_dt_op2 = BranchDateTimeOperator( +63 | ) +64 | branch_dt_op2 = BranchDateTimeOperator( | help: Use `max_active_tis_per_dag` instead -58 | ) -59 | -60 | branch_dt_op = datetime.BranchDateTimeOperator( +59 | ) +60 | +61 | branch_dt_op = datetime.BranchDateTimeOperator( - task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5 -61 + task_id="branch_dt_op", use_task_execution_day=True, max_active_tis_per_dag=5 -62 | ) -63 | branch_dt_op2 = BranchDateTimeOperator( -64 | task_id="branch_dt_op2", +62 + task_id="branch_dt_op", use_task_execution_day=True, max_active_tis_per_dag=5 +63 | ) +64 | branch_dt_op2 = BranchDateTimeOperator( +65 | task_id="branch_dt_op2", AIR301 [*] `use_task_execution_day` is removed in Airflow 3.0 - --> AIR301_args.py:65:9 + --> AIR301_args.py:66:9 | -63 | branch_dt_op2 = BranchDateTimeOperator( -64 | task_id="branch_dt_op2", -65 | use_task_execution_day=True, +64 | branch_dt_op2 = BranchDateTimeOperator( +65 | task_id="branch_dt_op2", +66 | use_task_execution_day=True, | ^^^^^^^^^^^^^^^^^^^^^^ -66 | sla=timedelta(seconds=10), -67 | ) +67 | sla=timedelta(seconds=10), +68 | ) | help: Use `use_task_logical_date` instead -62 | ) -63 | branch_dt_op2 = BranchDateTimeOperator( -64 | task_id="branch_dt_op2", +63 | ) +64 | branch_dt_op2 = BranchDateTimeOperator( +65 | task_id="branch_dt_op2", - use_task_execution_day=True, -65 + use_task_logical_date=True, -66 | sla=timedelta(seconds=10), -67 | ) -68 | +66 + use_task_logical_date=True, +67 | sla=timedelta(seconds=10), +68 | ) +69 | AIR301 [*] `use_task_execution_day` is removed in Airflow 3.0 - --> AIR301_args.py:92:9 + --> AIR301_args.py:93:9 | -90 | follow_task_ids_if_true=None, -91 | week_day=1, -92 | use_task_execution_day=True, +91 | follow_task_ids_if_true=None, +92 | week_day=1, +93 | 
use_task_execution_day=True, | ^^^^^^^^^^^^^^^^^^^^^^ -93 | ) +94 | ) | help: Use `use_task_logical_date` instead -89 | follow_task_ids_if_false=None, -90 | follow_task_ids_if_true=None, -91 | week_day=1, +90 | follow_task_ids_if_false=None, +91 | follow_task_ids_if_true=None, +92 | week_day=1, - use_task_execution_day=True, -92 + use_task_logical_date=True, -93 | ) -94 | -95 | trigger_dagrun_op >> trigger_dagrun_op2 +93 + use_task_logical_date=True, +94 | ) +95 | +96 | trigger_dagrun_op >> trigger_dagrun_op2 AIR301 `filename_template` is removed in Airflow 3.0 - --> AIR301_args.py:102:15 + --> AIR301_args.py:103:15 | -101 | # deprecated filename_template argument in FileTaskHandler -102 | S3TaskHandler(filename_template="/tmp/test") +102 | # deprecated filename_template argument in FileTaskHandler +103 | S3TaskHandler(filename_template="/tmp/test") | ^^^^^^^^^^^^^^^^^ -103 | HdfsTaskHandler(filename_template="/tmp/test") -104 | ElasticsearchTaskHandler(filename_template="/tmp/test") +104 | HdfsTaskHandler(filename_template="/tmp/test") +105 | ElasticsearchTaskHandler(filename_template="/tmp/test") | AIR301 `filename_template` is removed in Airflow 3.0 - --> AIR301_args.py:103:17 + --> AIR301_args.py:104:17 | -101 | # deprecated filename_template argument in FileTaskHandler -102 | S3TaskHandler(filename_template="/tmp/test") -103 | HdfsTaskHandler(filename_template="/tmp/test") +102 | # deprecated filename_template argument in FileTaskHandler +103 | S3TaskHandler(filename_template="/tmp/test") +104 | HdfsTaskHandler(filename_template="/tmp/test") | ^^^^^^^^^^^^^^^^^ -104 | ElasticsearchTaskHandler(filename_template="/tmp/test") -105 | GCSTaskHandler(filename_template="/tmp/test") +105 | ElasticsearchTaskHandler(filename_template="/tmp/test") +106 | GCSTaskHandler(filename_template="/tmp/test") | AIR301 `filename_template` is removed in Airflow 3.0 - --> AIR301_args.py:104:26 + --> AIR301_args.py:105:26 | -102 | S3TaskHandler(filename_template="/tmp/test") -103 | 
HdfsTaskHandler(filename_template="/tmp/test") -104 | ElasticsearchTaskHandler(filename_template="/tmp/test") +103 | S3TaskHandler(filename_template="/tmp/test") +104 | HdfsTaskHandler(filename_template="/tmp/test") +105 | ElasticsearchTaskHandler(filename_template="/tmp/test") | ^^^^^^^^^^^^^^^^^ -105 | GCSTaskHandler(filename_template="/tmp/test") +106 | GCSTaskHandler(filename_template="/tmp/test") | AIR301 `filename_template` is removed in Airflow 3.0 - --> AIR301_args.py:105:16 + --> AIR301_args.py:106:16 | -103 | HdfsTaskHandler(filename_template="/tmp/test") -104 | ElasticsearchTaskHandler(filename_template="/tmp/test") -105 | GCSTaskHandler(filename_template="/tmp/test") +104 | HdfsTaskHandler(filename_template="/tmp/test") +105 | ElasticsearchTaskHandler(filename_template="/tmp/test") +106 | GCSTaskHandler(filename_template="/tmp/test") | ^^^^^^^^^^^^^^^^^ -106 | -107 | FabAuthManager(None) +107 | +108 | FabAuthManager(None) | AIR301 `appbuilder` is removed in Airflow 3.0 - --> AIR301_args.py:107:15 + --> AIR301_args.py:108:15 | -105 | GCSTaskHandler(filename_template="/tmp/test") -106 | -107 | FabAuthManager(None) +106 | GCSTaskHandler(filename_template="/tmp/test") +107 | +108 | FabAuthManager(None) | ^^^^^^ | help: The constructor takes no parameter now diff --git a/crates/ruff_linter/src/rules/flake8_annotations/rules/definition.rs b/crates/ruff_linter/src/rules/flake8_annotations/rules/definition.rs index a70659f99e..d111bbf525 100644 --- a/crates/ruff_linter/src/rules/flake8_annotations/rules/definition.rs +++ b/crates/ruff_linter/src/rules/flake8_annotations/rules/definition.rs @@ -513,6 +513,9 @@ impl Violation for MissingReturnTypeClassMethod { /// def foo(x: MyAny): ... 
/// ``` /// +/// ## Options +/// - `lint.flake8-annotations.allow-star-arg-any` +/// /// ## References /// - [Typing spec: `Any`](https://typing.python.org/en/latest/spec/special-types.html#any) /// - [Python documentation: `typing.Any`](https://docs.python.org/3/library/typing.html#typing.Any) diff --git a/crates/ruff_linter/src/rules/flake8_bugbear/rules/getattr_with_constant.rs b/crates/ruff_linter/src/rules/flake8_bugbear/rules/getattr_with_constant.rs index 7905de14ca..9271d2b01a 100644 --- a/crates/ruff_linter/src/rules/flake8_bugbear/rules/getattr_with_constant.rs +++ b/crates/ruff_linter/src/rules/flake8_bugbear/rules/getattr_with_constant.rs @@ -3,6 +3,7 @@ use ruff_python_ast::{self as ast, Expr}; use ruff_python_stdlib::identifiers::{is_identifier, is_mangled_private}; use ruff_source_file::LineRanges; use ruff_text_size::Ranged; +use unicode_normalization::UnicodeNormalization; use crate::checkers::ast::Checker; use crate::fix::edits::pad; @@ -29,6 +30,21 @@ use crate::{AlwaysFixableViolation, Edit, Fix}; /// obj.foo /// ``` /// +/// ## Fix safety +/// The fix is marked as unsafe for attribute names that are not in NFKC (Normalization Form KC) +/// normalization. Python normalizes identifiers using NFKC when using attribute access syntax +/// (e.g., `obj.attr`), but does not normalize string arguments passed to `getattr`. Rewriting +/// `getattr(obj, "ſ")` to `obj.ſ` would be interpreted as `obj.s` at runtime, changing behavior. 
+/// +/// For example, the long s character `"ſ"` normalizes to `"s"` under NFKC, so: +/// ```python +/// # This accesses an attribute with the exact name "ſ" (if it exists) +/// value = getattr(obj, "ſ") +/// +/// # But this would normalize to "s" and access a different attribute +/// obj.ſ # This is interpreted as obj.s, not obj.ſ +/// ``` +/// /// ## References /// - [Python documentation: `getattr`](https://docs.python.org/3/library/functions.html#getattr) #[derive(ViolationMetadata)] @@ -69,8 +85,14 @@ pub(crate) fn getattr_with_constant(checker: &Checker, expr: &Expr, func: &Expr, return; } + // Mark fixes as unsafe for non-NFKC attribute names. Python normalizes identifiers using NFKC, so using + // attribute syntax (e.g., `obj.attr`) would normalize the name and potentially change + // program behavior. + let attr_name = value.to_str(); + let is_unsafe = attr_name.nfkc().collect::() != attr_name; + let mut diagnostic = checker.report_diagnostic(GetAttrWithConstant, expr.range()); - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( + let edit = Edit::range_replacement( pad( if matches!( obj, @@ -88,5 +110,11 @@ pub(crate) fn getattr_with_constant(checker: &Checker, expr: &Expr, func: &Expr, checker.locator(), ), expr.range(), - ))); + ); + let fix = if is_unsafe { + Fix::unsafe_edit(edit) + } else { + Fix::safe_edit(edit) + }; + diagnostic.set_fix(fix); } diff --git a/crates/ruff_linter/src/rules/flake8_bugbear/rules/setattr_with_constant.rs b/crates/ruff_linter/src/rules/flake8_bugbear/rules/setattr_with_constant.rs index d3ba5b953e..51fee45110 100644 --- a/crates/ruff_linter/src/rules/flake8_bugbear/rules/setattr_with_constant.rs +++ b/crates/ruff_linter/src/rules/flake8_bugbear/rules/setattr_with_constant.rs @@ -4,6 +4,7 @@ use ruff_text_size::{Ranged, TextRange}; use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_codegen::Generator; use ruff_python_stdlib::identifiers::{is_identifier, is_mangled_private}; +use 
unicode_normalization::UnicodeNormalization; use crate::checkers::ast::Checker; use crate::{AlwaysFixableViolation, Edit, Fix}; @@ -28,6 +29,23 @@ use crate::{AlwaysFixableViolation, Edit, Fix}; /// obj.foo = 42 /// ``` /// +/// ## Fix safety +/// The fix is marked as unsafe for attribute names that are not in NFKC (Normalization Form KC) +/// normalization. Python normalizes identifiers using NFKC when using attribute access syntax +/// (e.g., `obj.attr = value`), but does not normalize string arguments passed to `setattr`. +/// Rewriting `setattr(obj, "ſ", 1)` to `obj.ſ = 1` would be interpreted as `obj.s = 1` at +/// runtime, changing behavior. +/// +/// For example, the long s character `"ſ"` normalizes to `"s"` under NFKC, so: +/// ```python +/// # This creates an attribute with the exact name "ſ" +/// setattr(obj, "ſ", 1) +/// getattr(obj, "ſ") # Returns 1 +/// +/// # But this would normalize to "s" and set a different attribute +/// obj.ſ = 1 # This is interpreted as obj.s = 1, not obj.ſ = 1 +/// ``` +/// /// ## References /// - [Python documentation: `setattr`](https://docs.python.org/3/library/functions.html#setattr) #[derive(ViolationMetadata)] @@ -89,6 +107,12 @@ pub(crate) fn setattr_with_constant(checker: &Checker, expr: &Expr, func: &Expr, return; } + // Mark fixes as unsafe for non-NFKC attribute names. Python normalizes identifiers using NFKC, so using + // attribute syntax (e.g., `obj.attr = value`) would normalize the name and potentially change + // program behavior. + let attr_name = name.to_str(); + let is_unsafe = attr_name.nfkc().collect::() != attr_name; + // We can only replace a `setattr` call (which is an `Expr`) with an assignment // (which is a `Stmt`) if the `Expr` is already being used as a `Stmt` // (i.e., it's directly within an `Stmt::Expr`). 
@@ -100,10 +124,16 @@ pub(crate) fn setattr_with_constant(checker: &Checker, expr: &Expr, func: &Expr, { if expr == child.as_ref() { let mut diagnostic = checker.report_diagnostic(SetAttrWithConstant, expr.range()); - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( + let edit = Edit::range_replacement( assignment(obj, name.to_str(), value, checker.generator()), expr.range(), - ))); + ); + let fix = if is_unsafe { + Fix::unsafe_edit(edit) + } else { + Fix::safe_edit(edit) + }; + diagnostic.set_fix(fix); } } } diff --git a/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B009_B009_B010.py.snap b/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B009_B009_B010.py.snap index ab05bd0966..febc145dd7 100644 --- a/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B009_B009_B010.py.snap +++ b/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B009_B009_B010.py.snap @@ -360,3 +360,21 @@ help: Replace `getattr` with attribute access 70 | 71 | # Regression test for: https://github.com/astral-sh/ruff/issues/18353 72 | setattr(foo, "__debug__", 0) + +B009 [*] Do not call `getattr` with a constant attribute value. It is not any safer than normal property access. + --> B009_B010.py:80:1 + | +78 | # `ns.ſ` would be interpreted as `ns.s` at runtime, changing behavior. +79 | # Example: the long s character "ſ" normalizes to "s" under NFKC. +80 | getattr(foo, "ſ") + | ^^^^^^^^^^^^^^^^^ +81 | setattr(foo, "ſ", 1) + | +help: Replace `getattr` with attribute access +77 | # arguments passed to getattr/setattr. Rewriting `getattr(ns, "ſ")` to +78 | # `ns.ſ` would be interpreted as `ns.s` at runtime, changing behavior. +79 | # Example: the long s character "ſ" normalizes to "s" under NFKC. 
+ - getattr(foo, "ſ") +80 + foo.ſ +81 | setattr(foo, "ſ", 1) +note: This is an unsafe fix and may change runtime behavior diff --git a/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B010_B009_B010.py.snap b/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B010_B009_B010.py.snap index 87c2f01bfe..8dab4338a1 100644 --- a/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B010_B009_B010.py.snap +++ b/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B010_B009_B010.py.snap @@ -118,3 +118,19 @@ help: Replace `setattr` with assignment 56 | 57 | # Regression test for: https://github.com/astral-sh/ruff/issues/7455#issuecomment-1722458885 58 | assert getattr(func, '_rpc')is True + +B010 [*] Do not call `setattr` with a constant attribute value. It is not any safer than normal property access. + --> B009_B010.py:81:1 + | +79 | # Example: the long s character "ſ" normalizes to "s" under NFKC. +80 | getattr(foo, "ſ") +81 | setattr(foo, "ſ", 1) + | ^^^^^^^^^^^^^^^^^^^^ + | +help: Replace `setattr` with assignment +78 | # `ns.ſ` would be interpreted as `ns.s` at runtime, changing behavior. +79 | # Example: the long s character "ſ" normalizes to "s" under NFKC. 
+80 | getattr(foo, "ſ") + - setattr(foo, "ſ", 1) +81 + foo.ſ = 1 +note: This is an unsafe fix and may change runtime behavior diff --git a/crates/ruff_linter/src/rules/flake8_comprehensions/rules/unnecessary_comprehension.rs b/crates/ruff_linter/src/rules/flake8_comprehensions/rules/unnecessary_comprehension.rs index f384b32e6e..70108c1ba7 100644 --- a/crates/ruff_linter/src/rules/flake8_comprehensions/rules/unnecessary_comprehension.rs +++ b/crates/ruff_linter/src/rules/flake8_comprehensions/rules/unnecessary_comprehension.rs @@ -43,7 +43,7 @@ use crate::rules::flake8_comprehensions::fixes; /// >>> {x: y for x, y in d1} # Iterates over the keys of a mapping /// {1: 2, 4: 5} /// >>> dict(d1) # Ruff's incorrect suggested fix -/// (1, 2): 3, (4, 5): 6} +/// {(1, 2): 3, (4, 5): 6} /// >>> dict(d1.keys()) # Correct fix /// {1: 2, 4: 5} /// ``` diff --git a/crates/ruff_linter/src/rules/flake8_import_conventions/rules/unconventional_import_alias.rs b/crates/ruff_linter/src/rules/flake8_import_conventions/rules/unconventional_import_alias.rs index e0684056dc..6827e99b93 100644 --- a/crates/ruff_linter/src/rules/flake8_import_conventions/rules/unconventional_import_alias.rs +++ b/crates/ruff_linter/src/rules/flake8_import_conventions/rules/unconventional_import_alias.rs @@ -78,7 +78,7 @@ pub(crate) fn unconventional_import_alias( let mut diagnostic = checker.report_diagnostic( UnconventionalImportAlias { name: qualified_name, - asname: expected_alias.to_string(), + asname: expected_alias.clone(), }, binding.range(), ); diff --git a/crates/ruff_linter/src/rules/flake8_pytest_style/types.rs b/crates/ruff_linter/src/rules/flake8_pytest_style/types.rs index 0de6758635..bd57ad080c 100644 --- a/crates/ruff_linter/src/rules/flake8_pytest_style/types.rs +++ b/crates/ruff_linter/src/rules/flake8_pytest_style/types.rs @@ -6,21 +6,17 @@ use ruff_macros::CacheKey; #[derive(Clone, Copy, Debug, CacheKey, PartialEq, Eq, Serialize, Deserialize)] #[cfg_attr(feature = "schemars", 
derive(schemars::JsonSchema))] +#[derive(Default)] pub enum ParametrizeNameType { #[serde(rename = "csv")] Csv, #[serde(rename = "tuple")] + #[default] Tuple, #[serde(rename = "list")] List, } -impl Default for ParametrizeNameType { - fn default() -> Self { - Self::Tuple - } -} - impl Display for ParametrizeNameType { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { @@ -33,19 +29,15 @@ impl Display for ParametrizeNameType { #[derive(Clone, Copy, Debug, CacheKey, PartialEq, Eq, Serialize, Deserialize)] #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Default)] pub enum ParametrizeValuesType { #[serde(rename = "tuple")] Tuple, #[serde(rename = "list")] + #[default] List, } -impl Default for ParametrizeValuesType { - fn default() -> Self { - Self::List - } -} - impl Display for ParametrizeValuesType { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { @@ -57,19 +49,15 @@ impl Display for ParametrizeValuesType { #[derive(Clone, Copy, Debug, CacheKey, PartialEq, Eq, Serialize, Deserialize)] #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Default)] pub enum ParametrizeValuesRowType { #[serde(rename = "tuple")] + #[default] Tuple, #[serde(rename = "list")] List, } -impl Default for ParametrizeValuesRowType { - fn default() -> Self { - Self::Tuple - } -} - impl Display for ParametrizeValuesRowType { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { diff --git a/crates/ruff_linter/src/rules/flake8_quotes/settings.rs b/crates/ruff_linter/src/rules/flake8_quotes/settings.rs index b241e70b49..fe5129d6e3 100644 --- a/crates/ruff_linter/src/rules/flake8_quotes/settings.rs +++ b/crates/ruff_linter/src/rules/flake8_quotes/settings.rs @@ -9,19 +9,15 @@ use ruff_macros::CacheKey; #[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, CacheKey)] #[serde(deny_unknown_fields, rename_all = "kebab-case")] #[cfg_attr(feature = "schemars", 
derive(schemars::JsonSchema))] +#[derive(Default)] pub enum Quote { /// Use double quotes. + #[default] Double, /// Use single quotes. Single, } -impl Default for Quote { - fn default() -> Self { - Self::Double - } -} - impl From for Quote { fn from(value: ruff_python_ast::str::Quote) -> Self { match value { diff --git a/crates/ruff_linter/src/rules/flake8_simplify/mod.rs b/crates/ruff_linter/src/rules/flake8_simplify/mod.rs index 45233277e5..4546dd143a 100644 --- a/crates/ruff_linter/src/rules/flake8_simplify/mod.rs +++ b/crates/ruff_linter/src/rules/flake8_simplify/mod.rs @@ -61,6 +61,7 @@ mod tests { #[test_case(Rule::SplitStaticString, Path::new("SIM905.py"))] #[test_case(Rule::DictGetWithNoneDefault, Path::new("SIM910.py"))] + #[test_case(Rule::EnumerateForLoop, Path::new("SIM113.py"))] fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> { let snapshot = format!( "preview__{}_{}", diff --git a/crates/ruff_linter/src/rules/flake8_simplify/rules/enumerate_for_loop.rs b/crates/ruff_linter/src/rules/flake8_simplify/rules/enumerate_for_loop.rs index 6739fa3868..a35513de85 100644 --- a/crates/ruff_linter/src/rules/flake8_simplify/rules/enumerate_for_loop.rs +++ b/crates/ruff_linter/src/rules/flake8_simplify/rules/enumerate_for_loop.rs @@ -1,6 +1,8 @@ +use crate::preview::is_enumerate_for_loop_int_index_enabled; use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast::statement_visitor::{StatementVisitor, walk_stmt}; use ruff_python_ast::{self as ast, Expr, Int, Number, Operator, Stmt}; +use ruff_python_semantic::analyze::type_inference::{NumberLike, PythonType, ResolvedPythonType}; use ruff_python_semantic::analyze::typing; use ruff_text_size::Ranged; @@ -11,6 +13,9 @@ use crate::checkers::ast::Checker; /// Checks for `for` loops with explicit loop-index variables that can be replaced /// with `enumerate()`. 
/// +/// In [preview], this rule checks for index variables initialized with any integer rather than only +/// a literal zero. +/// /// ## Why is this bad? /// When iterating over a sequence, it's often desirable to keep track of the /// index of each element alongside the element itself. Prefer the `enumerate` @@ -35,6 +40,8 @@ use crate::checkers::ast::Checker; /// /// ## References /// - [Python documentation: `enumerate`](https://docs.python.org/3/library/functions.html#enumerate) +/// +/// [preview]: https://docs.astral.sh/ruff/preview/ #[derive(ViolationMetadata)] #[violation_metadata(stable_since = "v0.2.0")] pub(crate) struct EnumerateForLoop { @@ -82,17 +89,21 @@ pub(crate) fn enumerate_for_loop(checker: &Checker, for_stmt: &ast::StmtFor) { continue; } - // Ensure that the index variable was initialized to 0. + // Ensure that the index variable was initialized to 0 (or instance of `int` if preview is enabled). let Some(value) = typing::find_binding_value(binding, checker.semantic()) else { continue; }; - if !matches!( + if !(matches!( value, Expr::NumberLiteral(ast::ExprNumberLiteral { value: Number::Int(Int::ZERO), .. 
}) - ) { + ) || matches!( + ResolvedPythonType::from(value), + ResolvedPythonType::Atom(PythonType::Number(NumberLike::Integer)) + ) && is_enumerate_for_loop_int_index_enabled(checker.settings())) + { continue; } diff --git a/crates/ruff_linter/src/rules/flake8_simplify/rules/reimplemented_builtin.rs b/crates/ruff_linter/src/rules/flake8_simplify/rules/reimplemented_builtin.rs index 9c216311ed..4c858fb799 100644 --- a/crates/ruff_linter/src/rules/flake8_simplify/rules/reimplemented_builtin.rs +++ b/crates/ruff_linter/src/rules/flake8_simplify/rules/reimplemented_builtin.rs @@ -116,7 +116,7 @@ pub(crate) fn convert_for_loop_to_any_all(checker: &Checker, stmt: &Stmt) { let mut diagnostic = checker.report_diagnostic( ReimplementedBuiltin { - replacement: contents.to_string(), + replacement: contents.clone(), }, TextRange::new(stmt.start(), terminal.stmt.end()), ); @@ -212,7 +212,7 @@ pub(crate) fn convert_for_loop_to_any_all(checker: &Checker, stmt: &Stmt) { let mut diagnostic = checker.report_diagnostic( ReimplementedBuiltin { - replacement: contents.to_string(), + replacement: contents.clone(), }, TextRange::new(stmt.start(), terminal.stmt.end()), ); diff --git a/crates/ruff_linter/src/rules/flake8_simplify/snapshots/ruff_linter__rules__flake8_simplify__tests__SIM222_SIM222.py.snap b/crates/ruff_linter/src/rules/flake8_simplify/snapshots/ruff_linter__rules__flake8_simplify__tests__SIM222_SIM222.py.snap index f6c8bba110..0e65033b21 100644 --- a/crates/ruff_linter/src/rules/flake8_simplify/snapshots/ruff_linter__rules__flake8_simplify__tests__SIM222_SIM222.py.snap +++ b/crates/ruff_linter/src/rules/flake8_simplify/snapshots/ruff_linter__rules__flake8_simplify__tests__SIM222_SIM222.py.snap @@ -1101,6 +1101,7 @@ help: Replace with `f"{x=}"` 204 + print(f"{x=}") # SIM222 205 | (lambda: 1) or True # SIM222 206 | (i for i in range(1)) or "bar" # SIM222 +207 | note: This is an unsafe fix and may change runtime behavior SIM222 [*] Use `lambda: 1` instead of `lambda: 1 or 
...` @@ -1119,6 +1120,8 @@ help: Replace with `lambda: 1` - (lambda: 1) or True # SIM222 205 + lambda: 1 # SIM222 206 | (i for i in range(1)) or "bar" # SIM222 +207 | +208 | # https://github.com/astral-sh/ruff/issues/21136 note: This is an unsafe fix and may change runtime behavior SIM222 [*] Use `(i for i in range(1))` instead of `(i for i in range(1)) or ...` @@ -1128,6 +1131,8 @@ SIM222 [*] Use `(i for i in range(1))` instead of `(i for i in range(1)) or ...` 205 | (lambda: 1) or True # SIM222 206 | (i for i in range(1)) or "bar" # SIM222 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +207 | +208 | # https://github.com/astral-sh/ruff/issues/21136 | help: Replace with `(i for i in range(1))` 203 | x = 1 @@ -1135,4 +1140,7 @@ help: Replace with `(i for i in range(1))` 205 | (lambda: 1) or True # SIM222 - (i for i in range(1)) or "bar" # SIM222 206 + (i for i in range(1)) # SIM222 +207 | +208 | # https://github.com/astral-sh/ruff/issues/21136 +209 | def get_items(): note: This is an unsafe fix and may change runtime behavior diff --git a/crates/ruff_linter/src/rules/flake8_simplify/snapshots/ruff_linter__rules__flake8_simplify__tests__preview__SIM113_SIM113.py.snap b/crates/ruff_linter/src/rules/flake8_simplify/snapshots/ruff_linter__rules__flake8_simplify__tests__preview__SIM113_SIM113.py.snap new file mode 100644 index 0000000000..065ed20bb9 --- /dev/null +++ b/crates/ruff_linter/src/rules/flake8_simplify/snapshots/ruff_linter__rules__flake8_simplify__tests__preview__SIM113_SIM113.py.snap @@ -0,0 +1,60 @@ +--- +source: crates/ruff_linter/src/rules/flake8_simplify/mod.rs +--- +SIM113 Use `enumerate()` for index variable `idx` in `for` loop + --> SIM113.py:6:9 + | +4 | for x in range(5): +5 | g(x, idx) +6 | idx += 1 + | ^^^^^^^^ +7 | h(x) + | + +SIM113 Use `enumerate()` for index variable `idx` in `for` loop + --> SIM113.py:17:9 + | +15 | if g(x): +16 | break +17 | idx += 1 + | ^^^^^^^^ +18 | sum += h(x, idx) + | + +SIM113 Use `enumerate()` for index variable `idx` in `for` loop 
+ --> SIM113.py:27:9 + | +25 | g(x) +26 | h(x, y) +27 | idx += 1 + | ^^^^^^^^ + | + +SIM113 Use `enumerate()` for index variable `idx` in `for` loop + --> SIM113.py:36:9 + | +34 | for x in range(5): +35 | sum += h(x, idx) +36 | idx += 1 + | ^^^^^^^^ + | + +SIM113 Use `enumerate()` for index variable `idx` in `for` loop + --> SIM113.py:44:9 + | +42 | for x in range(5): +43 | g(x, idx) +44 | idx += 1 + | ^^^^^^^^ +45 | h(x) + | + +SIM113 Use `enumerate()` for index variable `idx` in `for` loop + --> SIM113.py:54:9 + | +52 | for x in range(5): +53 | g(x, idx) +54 | idx += 1 + | ^^^^^^^^ +55 | h(x) + | diff --git a/crates/ruff_linter/src/rules/flake8_tidy_imports/rules/banned_api.rs b/crates/ruff_linter/src/rules/flake8_tidy_imports/rules/banned_api.rs index 6ada015222..6379304d5c 100644 --- a/crates/ruff_linter/src/rules/flake8_tidy_imports/rules/banned_api.rs +++ b/crates/ruff_linter/src/rules/flake8_tidy_imports/rules/banned_api.rs @@ -47,7 +47,7 @@ pub(crate) fn banned_api(checker: &Checker, policy: &NameMatchPolicy, checker.report_diagnostic( BannedApi { name: banned_module, - message: reason.msg.to_string(), + message: reason.msg.clone(), }, node.range(), ); @@ -74,8 +74,8 @@ pub(crate) fn banned_attribute_access(checker: &Checker, expr: &Expr) { { checker.report_diagnostic( BannedApi { - name: banned_path.to_string(), - message: ban.msg.to_string(), + name: banned_path.clone(), + message: ban.msg.clone(), }, expr.range(), ); diff --git a/crates/ruff_linter/src/rules/isort/settings.rs b/crates/ruff_linter/src/rules/isort/settings.rs index 05a4dddf08..cab9ab35ed 100644 --- a/crates/ruff_linter/src/rules/isort/settings.rs +++ b/crates/ruff_linter/src/rules/isort/settings.rs @@ -20,21 +20,17 @@ use super::categorize::ImportSection; #[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, CacheKey)] #[serde(deny_unknown_fields, rename_all = "kebab-case")] #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Default)] pub enum 
RelativeImportsOrder { /// Place "closer" imports (fewer `.` characters, most local) before /// "further" imports (more `.` characters, least local). ClosestToFurthest, /// Place "further" imports (more `.` characters, least local) imports /// before "closer" imports (fewer `.` characters, most local). + #[default] FurthestToClosest, } -impl Default for RelativeImportsOrder { - fn default() -> Self { - Self::FurthestToClosest - } -} - impl Display for RelativeImportsOrder { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/literal_comparisons.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/literal_comparisons.rs index 6ae6cea817..a68e492846 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/literal_comparisons.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/literal_comparisons.rs @@ -427,7 +427,7 @@ pub(crate) fn literal_comparisons(checker: &Checker, compare: &ast::ExprCompare) for diagnostic in &mut diagnostics { diagnostic.set_fix(Fix::unsafe_edit(Edit::range_replacement( - content.to_string(), + content.clone(), compare.range(), ))); } diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E231_E23.py.snap b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E231_E23.py.snap index c210a6768b..d436a9826a 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E231_E23.py.snap +++ b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E231_E23.py.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/pycodestyle/mod.rs --- -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:2:7 | 1 | #: E231 @@ -18,7 +18,7 @@ help: Add missing whitespace 4 | a[b1,:] 5 | #: E231 -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:4:5 
| 2 | a = (1,2) @@ -38,7 +38,7 @@ help: Add missing whitespace 6 | a = [{'a':''}] 7 | #: Okay -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:6:10 | 4 | a[b1,:] @@ -58,7 +58,7 @@ help: Add missing whitespace 8 | a = (4,) 9 | b = (5, ) -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:19:10 | 17 | def foo() -> None: @@ -77,7 +77,7 @@ help: Add missing whitespace 21 | 22 | #: Okay -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:29:20 | 27 | mdtypes_template = { @@ -96,7 +96,7 @@ help: Add missing whitespace 31 | 32 | # E231 -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:33:6 | 32 | # E231 @@ -115,7 +115,7 @@ help: Add missing whitespace 35 | # Okay because it's hard to differentiate between the usages of a colon in a f-string 36 | f"{a:=1}" -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:47:37 | 46 | #: E231 @@ -134,7 +134,7 @@ help: Add missing whitespace 49 | #: Okay 50 | a = (1,) -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:60:13 | 58 | results = { @@ -154,7 +154,7 @@ help: Add missing whitespace 62 | results_in_tuple = ( 63 | { -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:65:17 | 63 | { @@ -174,7 +174,7 @@ help: Add missing whitespace 67 | ) 68 | results_in_list = [ -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:71:17 | 69 | { @@ -194,7 +194,7 @@ help: Add missing whitespace 73 | ] 74 | results_in_list_first = [ -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:76:17 | 74 | results_in_list_first = [ @@ -214,7 +214,7 @@ help: Add missing whitespace 78 | ] 79 | -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:82:13 | 80 | x = [ @@ -234,7 +234,7 @@ help: Add 
missing whitespace 84 | "k3":[2], # E231 85 | "k4": [2], -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:84:13 | 82 | "k1":[2], # E231 @@ -254,7 +254,7 @@ help: Add missing whitespace 86 | "k5": [2], 87 | "k6": [1, 2, 3, 4,5,6,7] # E231 -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:87:26 | 85 | "k4": [2], @@ -274,7 +274,7 @@ help: Add missing whitespace 89 | { 90 | "k1": [ -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:87:28 | 85 | "k4": [2], @@ -294,7 +294,7 @@ help: Add missing whitespace 89 | { 90 | "k1": [ -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:87:30 | 85 | "k4": [2], @@ -314,7 +314,7 @@ help: Add missing whitespace 89 | { 90 | "k1": [ -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:92:21 | 90 | "k1": [ @@ -334,7 +334,7 @@ help: Add missing whitespace 94 | { 95 | "kb": [2,3], # E231 -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:92:24 | 90 | "k1": [ @@ -354,7 +354,7 @@ help: Add missing whitespace 94 | { 95 | "kb": [2,3], # E231 -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:95:25 | 93 | }, @@ -374,7 +374,7 @@ help: Add missing whitespace 97 | { 98 | "ka":[2, 3], # E231 -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:98:21 | 96 | }, @@ -394,7 +394,7 @@ help: Add missing whitespace 100 | "kc": [2, 3], # Ok 101 | "kd": [2,3], # E231 -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:101:25 | 99 | "kb": [2, 3], # Ok @@ -414,7 +414,7 @@ help: Add missing whitespace 103 | }, 104 | ] -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:102:21 | 100 | "kc": [2, 3], # Ok @@ -434,7 +434,7 @@ help: Add missing whitespace 104 | ] 105 | } -E231 [*] Missing 
whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:102:24 | 100 | "kc": [2, 3], # Ok @@ -454,7 +454,7 @@ help: Add missing whitespace 104 | ] 105 | } -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:109:18 | 108 | # Should be E231 errors on all of these type parameters and function parameters, but not on their (strange) defaults @@ -473,7 +473,7 @@ help: Add missing whitespace 111 | y:B = [[["foo", "bar"]]], 112 | z:object = "fooo", -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:109:40 | 108 | # Should be E231 errors on all of these type parameters and function parameters, but not on their (strange) defaults @@ -492,7 +492,7 @@ help: Add missing whitespace 111 | y:B = [[["foo", "bar"]]], 112 | z:object = "fooo", -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:109:70 | 108 | # Should be E231 errors on all of these type parameters and function parameters, but not on their (strange) defaults @@ -511,7 +511,7 @@ help: Add missing whitespace 111 | y:B = [[["foo", "bar"]]], 112 | z:object = "fooo", -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:110:6 | 108 | # Should be E231 errors on all of these type parameters and function parameters, but not on their (strange) defaults @@ -531,7 +531,7 @@ help: Add missing whitespace 112 | z:object = "fooo", 113 | ): -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:111:6 | 109 | def pep_696_bad[A:object="foo"[::-1], B:object =[[["foo", "bar"]]], C:object= bytes]( @@ -551,7 +551,7 @@ help: Add missing whitespace 113 | ): 114 | pass -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:112:6 | 110 | x:A = "foo"[::-1], @@ -571,7 +571,7 @@ help: Add missing whitespace 114 | pass 115 | -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:116:18 | 
114 | pass @@ -591,7 +591,7 @@ help: Add missing whitespace 118 | self, 119 | x:A = "foo"[::-1], -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:116:40 | 114 | pass @@ -611,7 +611,7 @@ help: Add missing whitespace 118 | self, 119 | x:A = "foo"[::-1], -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:116:70 | 114 | pass @@ -631,7 +631,7 @@ help: Add missing whitespace 118 | self, 119 | x:A = "foo"[::-1], -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:117:29 | 116 | class PEP696Bad[A:object="foo"[::-1], B:object =[[["foo", "bar"]]], C:object= bytes]: @@ -650,7 +650,7 @@ help: Add missing whitespace 119 | x:A = "foo"[::-1], 120 | y:B = [[["foo", "bar"]]], -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:117:51 | 116 | class PEP696Bad[A:object="foo"[::-1], B:object =[[["foo", "bar"]]], C:object= bytes]: @@ -669,7 +669,7 @@ help: Add missing whitespace 119 | x:A = "foo"[::-1], 120 | y:B = [[["foo", "bar"]]], -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:117:81 | 116 | class PEP696Bad[A:object="foo"[::-1], B:object =[[["foo", "bar"]]], C:object= bytes]: @@ -688,7 +688,7 @@ help: Add missing whitespace 119 | x:A = "foo"[::-1], 120 | y:B = [[["foo", "bar"]]], -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:119:10 | 117 | def pep_696_bad_method[A:object="foo"[::-1], B:object =[[["foo", "bar"]]], C:object= bytes]( @@ -708,7 +708,7 @@ help: Add missing whitespace 121 | z:object = "fooo", 122 | ): -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:120:10 | 118 | self, @@ -728,7 +728,7 @@ help: Add missing whitespace 122 | ): 123 | pass -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:121:10 | 119 | x:A = "foo"[::-1], @@ -748,7 +748,7 @@ help: Add missing 
whitespace 123 | pass 124 | -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:125:32 | 123 | pass @@ -768,7 +768,7 @@ help: Add missing whitespace 127 | pass 128 | -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:125:54 | 123 | pass @@ -788,7 +788,7 @@ help: Add missing whitespace 127 | pass 128 | -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:125:84 | 123 | pass @@ -808,7 +808,7 @@ help: Add missing whitespace 127 | pass 128 | -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:126:47 | 125 | class PEP696BadWithEmptyBases[A:object="foo"[::-1], B:object =[[["foo", "bar"]]], C:object= bytes](): @@ -826,7 +826,7 @@ help: Add missing whitespace 128 | 129 | # Should be no E231 errors on any of these: -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:126:69 | 125 | class PEP696BadWithEmptyBases[A:object="foo"[::-1], B:object =[[["foo", "bar"]]], C:object= bytes](): @@ -844,7 +844,7 @@ help: Add missing whitespace 128 | 129 | # Should be no E231 errors on any of these: -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:126:99 | 125 | class PEP696BadWithEmptyBases[A:object="foo"[::-1], B:object =[[["foo", "bar"]]], C:object= bytes](): @@ -862,7 +862,7 @@ help: Add missing whitespace 128 | 129 | # Should be no E231 errors on any of these: -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:147:6 | 146 | # E231 @@ -881,7 +881,7 @@ help: Add missing whitespace 149 | # Okay because it's hard to differentiate between the usages of a colon in a t-string 150 | t"{a:=1}" -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:161:37 | 160 | #: E231 diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E301_E30_syntax_error.py.snap 
b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E301_E30_syntax_error.py.snap index 7cf04e5cc7..b8c6413c1d 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E301_E30_syntax_error.py.snap +++ b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E301_E30_syntax_error.py.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/pycodestyle/mod.rs --- -invalid-syntax: Expected ']', found '(' +invalid-syntax: Expected `]`, found `(` --> E30_syntax_error.py:4:15 | 2 | # parenthesis. @@ -11,7 +11,7 @@ invalid-syntax: Expected ']', found '(' 5 | pass | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:13:18 | 12 | class Foo: @@ -32,7 +32,7 @@ E301 Expected 1 blank line, found 0 | help: Add missing blank line -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:18:11 | 16 | pass @@ -41,7 +41,7 @@ invalid-syntax: Expected ')', found newline | ^ | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:21:9 | 21 | def top( diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E302_E30_syntax_error.py.snap b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E302_E30_syntax_error.py.snap index e28bb8562d..76c3d31211 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E302_E30_syntax_error.py.snap +++ b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E302_E30_syntax_error.py.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/pycodestyle/mod.rs --- -invalid-syntax: Expected ']', found '(' +invalid-syntax: Expected `]`, found `(` --> E30_syntax_error.py:4:15 | 2 | # 
parenthesis. @@ -22,7 +22,7 @@ E302 Expected 2 blank lines, found 1 | help: Add missing blank line(s) -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:13:18 | 12 | class Foo: @@ -32,7 +32,7 @@ invalid-syntax: Expected ')', found newline 15 | def method(): | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:18:11 | 16 | pass @@ -41,7 +41,7 @@ invalid-syntax: Expected ')', found newline | ^ | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:21:9 | 21 | def top( diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E303_E30_syntax_error.py.snap b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E303_E30_syntax_error.py.snap index c70c94baad..af23f16de9 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E303_E30_syntax_error.py.snap +++ b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E303_E30_syntax_error.py.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/pycodestyle/mod.rs --- -invalid-syntax: Expected ']', found '(' +invalid-syntax: Expected `]`, found `(` --> E30_syntax_error.py:4:15 | 2 | # parenthesis. 
@@ -21,7 +21,7 @@ E303 Too many blank lines (3) | help: Remove extraneous blank line(s) -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:13:18 | 12 | class Foo: @@ -31,7 +31,7 @@ invalid-syntax: Expected ')', found newline 15 | def method(): | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:18:11 | 16 | pass @@ -40,7 +40,7 @@ invalid-syntax: Expected ')', found newline | ^ | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:21:9 | 21 | def top( diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E305_E30_syntax_error.py.snap b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E305_E30_syntax_error.py.snap index dd97fe9010..f72c198e1e 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E305_E30_syntax_error.py.snap +++ b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E305_E30_syntax_error.py.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/pycodestyle/mod.rs --- -invalid-syntax: Expected ']', found '(' +invalid-syntax: Expected `]`, found `(` --> E30_syntax_error.py:4:15 | 2 | # parenthesis. 
@@ -11,7 +11,7 @@ invalid-syntax: Expected ']', found '(' 5 | pass | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:13:18 | 12 | class Foo: @@ -31,7 +31,7 @@ E305 Expected 2 blank lines after class or function definition, found (1) | help: Add missing blank line(s) -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:18:11 | 16 | pass @@ -40,7 +40,7 @@ invalid-syntax: Expected ')', found newline | ^ | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:21:9 | 21 | def top( diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E306_E30_syntax_error.py.snap b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E306_E30_syntax_error.py.snap index d3a6b15d4e..98d00f77af 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E306_E30_syntax_error.py.snap +++ b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E306_E30_syntax_error.py.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/pycodestyle/mod.rs --- -invalid-syntax: Expected ']', found '(' +invalid-syntax: Expected `]`, found `(` --> E30_syntax_error.py:4:15 | 2 | # parenthesis. 
@@ -11,7 +11,7 @@ invalid-syntax: Expected ']', found '(' 5 | pass | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:13:18 | 12 | class Foo: @@ -21,7 +21,7 @@ invalid-syntax: Expected ')', found newline 15 | def method(): | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:18:11 | 16 | pass @@ -30,7 +30,7 @@ invalid-syntax: Expected ')', found newline | ^ | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:21:9 | 21 | def top( diff --git a/crates/ruff_linter/src/rules/pydoclint/rules/check_docstring.rs b/crates/ruff_linter/src/rules/pydoclint/rules/check_docstring.rs index fda9ffda35..5064deb941 100644 --- a/crates/ruff_linter/src/rules/pydoclint/rules/check_docstring.rs +++ b/crates/ruff_linter/src/rules/pydoclint/rules/check_docstring.rs @@ -705,20 +705,31 @@ fn parse_parameters_numpy(content: &str, content_start: TextSize) -> Vec) -> Vec Vec> { let mut entries: Vec = Vec::new(); - for potential in content.lines() { - let Some(colon_idx) = potential.find(':') else { - continue; - }; - let entry = potential[..colon_idx].trim(); - entries.push(QualifiedName::user_defined(entry)); + let mut lines = content.lines().peekable(); + let Some(first) = lines.peek() else { + return entries; + }; + let indentation = &first[..first.len() - first.trim_start().len()]; + for potential in lines { + if let Some(entry) = potential.strip_prefix(indentation) { + if let Some(first_char) = entry.chars().next() { + if !first_char.is_whitespace() { + if let Some(colon_idx) = entry.find(':') { + let entry = entry[..colon_idx].trim(); + if !entry.is_empty() { + entries.push(QualifiedName::user_defined(entry)); + } + } + } + } + } else { + // If we can't strip the expected indentation, check if this is a dedented line + // (not blank) - if so, break early as we've reached the end of this section + if 
!potential.trim().is_empty() { + break; + } + } } entries } @@ -788,6 +817,12 @@ fn parse_raises_numpy(content: &str) -> Vec> { let indentation = &dashes_line[..dashes_line.len() - dashes.len()]; for potential in lines { if let Some(entry) = potential.strip_prefix(indentation) { + // Check for Sphinx directives (lines starting with ..) - these indicate the end of the + // section. In numpy-style, exceptions are dedented to the same level as sphinx + // directives. + if entry.starts_with("..") { + break; + } if let Some(first_char) = entry.chars().next() { if !first_char.is_whitespace() { entries.push(QualifiedName::user_defined(entry.trim_end())); diff --git a/crates/ruff_linter/src/rules/pydoclint/snapshots/ruff_linter__rules__pydoclint__tests__D417_sections.py.snap b/crates/ruff_linter/src/rules/pydoclint/snapshots/ruff_linter__rules__pydoclint__tests__D417_sections.py.snap index c6205ba71f..977f4e141a 100644 --- a/crates/ruff_linter/src/rules/pydoclint/snapshots/ruff_linter__rules__pydoclint__tests__D417_sections.py.snap +++ b/crates/ruff_linter/src/rules/pydoclint/snapshots/ruff_linter__rules__pydoclint__tests__D417_sections.py.snap @@ -71,17 +71,7 @@ D417 Missing argument description in the docstring for `test_missing_numpy_args` 399 | """Toggle the gizmo. | -D417 Missing argument descriptions in the docstring for `test_method`: `another_test`, `test`, `x`, `y` - --> sections.py:413:9 - | -411 | """Test class.""" -412 | -413 | def test_method(self, test, another_test, z, _, x=1, y=2, _private_arg=1): # noqa: D213, D407 - | ^^^^^^^^^^^ -414 | """Test a valid args section. 
- | - -D417 Missing argument descriptions in the docstring for `test_missing_args`: `t`, `test`, `x`, `y`, `z` +D417 Missing argument descriptions in the docstring for `test_missing_args`: `test`, `y`, `z` --> sections.py:434:9 | 432 | "(argument(s) test, y, z are missing descriptions in " @@ -91,7 +81,7 @@ D417 Missing argument descriptions in the docstring for `test_missing_args`: `t` 435 | """Test a valid args section. | -D417 Missing argument descriptions in the docstring for `test_missing_args_static_method`: `a`, `x`, `y`, `z` +D417 Missing argument descriptions in the docstring for `test_missing_args_static_method`: `a`, `z` --> sections.py:468:9 | 466 | "(argument(s) a, z are missing descriptions in " diff --git a/crates/ruff_linter/src/rules/pydoclint/snapshots/ruff_linter__rules__pydoclint__tests__docstring-extraneous-exception_DOC502_numpy.py.snap b/crates/ruff_linter/src/rules/pydoclint/snapshots/ruff_linter__rules__pydoclint__tests__docstring-extraneous-exception_DOC502_numpy.py.snap index fd28bded5d..2bb6d04b0b 100644 --- a/crates/ruff_linter/src/rules/pydoclint/snapshots/ruff_linter__rules__pydoclint__tests__docstring-extraneous-exception_DOC502_numpy.py.snap +++ b/crates/ruff_linter/src/rules/pydoclint/snapshots/ruff_linter__rules__pydoclint__tests__docstring-extraneous-exception_DOC502_numpy.py.snap @@ -95,3 +95,23 @@ DOC502 Raised exception is not explicitly raised: `DivisionByZero` 82 | return distance / time | help: Remove `DivisionByZero` from the docstring + +DOC502 Raised exception is not explicitly raised: `ZeroDivisionError` + --> DOC502_numpy.py:139:5 + | +137 | # of the exceptions +138 | def foo(): +139 | / """First line. +140 | | +141 | | Raises +142 | | ------ +143 | | ValueError +144 | | some text +145 | | .. 
math:: e^{xception} +146 | | ZeroDivisionError +147 | | Will not be raised, DOC502 +148 | | """ + | |_______^ +149 | raise ValueError + | +help: Remove `ZeroDivisionError` from the docstring diff --git a/crates/ruff_linter/src/rules/pydoclint/snapshots/ruff_linter__rules__pydoclint__tests__docstring-extraneous-parameter_DOC102_numpy.py.snap b/crates/ruff_linter/src/rules/pydoclint/snapshots/ruff_linter__rules__pydoclint__tests__docstring-extraneous-parameter_DOC102_numpy.py.snap index c4566953d0..6371878947 100644 --- a/crates/ruff_linter/src/rules/pydoclint/snapshots/ruff_linter__rules__pydoclint__tests__docstring-extraneous-parameter_DOC102_numpy.py.snap +++ b/crates/ruff_linter/src/rules/pydoclint/snapshots/ruff_linter__rules__pydoclint__tests__docstring-extraneous-parameter_DOC102_numpy.py.snap @@ -187,3 +187,36 @@ DOC102 Documented parameter `a` is not in the function's signature 302 | b | help: Remove the extraneous parameter from the docstring + +DOC102 Documented parameter `x1` is not in the function's signature + --> DOC102_numpy.py:380:5 + | +378 | Parameters +379 | ---------- +380 | x1, x2 : object + | ^^ +381 | Objects. + | +help: Remove the extraneous parameter from the docstring + +DOC102 Documented parameter `x2` is not in the function's signature + --> DOC102_numpy.py:380:9 + | +378 | Parameters +379 | ---------- +380 | x1, x2 : object + | ^^ +381 | Objects. + | +help: Remove the extraneous parameter from the docstring + +DOC102 Documented parameter `extra_param` is not in the function's signature + --> DOC102_numpy.py:418:5 + | +416 | x1, x2 : str +417 | String parameters for processing. +418 | extra_param : str + | ^^^^^^^^^^^ +419 | Extra parameter not in signature. 
+ | +help: Remove the extraneous parameter from the docstring diff --git a/crates/ruff_linter/src/rules/pydocstyle/rules/capitalized.rs b/crates/ruff_linter/src/rules/pydocstyle/rules/capitalized.rs index 32cfa89406..23faabc2ec 100644 --- a/crates/ruff_linter/src/rules/pydocstyle/rules/capitalized.rs +++ b/crates/ruff_linter/src/rules/pydocstyle/rules/capitalized.rs @@ -94,7 +94,7 @@ pub(crate) fn capitalized(checker: &Checker, docstring: &Docstring) { let mut diagnostic = checker.report_diagnostic( FirstWordUncapitalized { first_word: first_word.to_string(), - capitalized_word: capitalized_word.to_string(), + capitalized_word: capitalized_word.clone(), }, docstring.range(), ); diff --git a/crates/ruff_linter/src/rules/pylint/rules/nonlocal_without_binding.rs b/crates/ruff_linter/src/rules/pylint/rules/nonlocal_without_binding.rs index 90f5e0dde5..f71902cb32 100644 --- a/crates/ruff_linter/src/rules/pylint/rules/nonlocal_without_binding.rs +++ b/crates/ruff_linter/src/rules/pylint/rules/nonlocal_without_binding.rs @@ -1,9 +1,6 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; -use ruff_python_ast as ast; -use ruff_text_size::Ranged; use crate::Violation; -use crate::checkers::ast::Checker; /// ## What it does /// Checks for `nonlocal` names without bindings. 
@@ -46,19 +43,3 @@ impl Violation for NonlocalWithoutBinding { format!("Nonlocal name `{name}` found without binding") } } - -/// PLE0117 -pub(crate) fn nonlocal_without_binding(checker: &Checker, nonlocal: &ast::StmtNonlocal) { - if !checker.semantic().scope_id.is_global() { - for name in &nonlocal.names { - if checker.semantic().nonlocal(name).is_none() { - checker.report_diagnostic( - NonlocalWithoutBinding { - name: name.to_string(), - }, - name.range(), - ); - } - } - } -} diff --git a/crates/ruff_linter/src/rules/pyupgrade/mod.rs b/crates/ruff_linter/src/rules/pyupgrade/mod.rs index c933de5ee8..a4bd1c5ac6 100644 --- a/crates/ruff_linter/src/rules/pyupgrade/mod.rs +++ b/crates/ruff_linter/src/rules/pyupgrade/mod.rs @@ -99,6 +99,7 @@ mod tests { #[test_case(Rule::UTF8EncodingDeclaration, Path::new("UP009_many_empty_lines.py"))] #[test_case(Rule::UnicodeKindPrefix, Path::new("UP025.py"))] #[test_case(Rule::UnnecessaryBuiltinImport, Path::new("UP029_0.py"))] + #[test_case(Rule::UnnecessaryBuiltinImport, Path::new("UP029_2.py"))] #[test_case(Rule::UnnecessaryClassParentheses, Path::new("UP039.py"))] #[test_case(Rule::UnnecessaryDefaultTypeArgs, Path::new("UP043.py"))] #[test_case(Rule::UnnecessaryEncodeUTF8, Path::new("UP012.py"))] diff --git a/crates/ruff_linter/src/rules/pyupgrade/rules/deprecated_import.rs b/crates/ruff_linter/src/rules/pyupgrade/rules/deprecated_import.rs index 9b15ac0b5b..51c3147a50 100644 --- a/crates/ruff_linter/src/rules/pyupgrade/rules/deprecated_import.rs +++ b/crates/ruff_linter/src/rules/pyupgrade/rules/deprecated_import.rs @@ -766,11 +766,12 @@ pub(crate) fn deprecated_import(checker: &Checker, import_from_stmt: &StmtImport } for operation in fixer.with_renames() { - checker.report_diagnostic( + let mut diagnostic = checker.report_diagnostic( DeprecatedImport { deprecation: Deprecation::WithRename(operation), }, import_from_stmt.range(), ); + diagnostic.add_primary_tag(ruff_db::diagnostic::DiagnosticTag::Deprecated); } } diff --git 
a/crates/ruff_linter/src/rules/pyupgrade/rules/unnecessary_builtin_import.rs b/crates/ruff_linter/src/rules/pyupgrade/rules/unnecessary_builtin_import.rs index 41e73a3096..f15b2dfeb3 100644 --- a/crates/ruff_linter/src/rules/pyupgrade/rules/unnecessary_builtin_import.rs +++ b/crates/ruff_linter/src/rules/pyupgrade/rules/unnecessary_builtin_import.rs @@ -75,7 +75,13 @@ pub(crate) fn unnecessary_builtin_import( stmt: &Stmt, module: &str, names: &[Alias], + level: u32, ) { + // Ignore relative imports (they're importing from local modules, not Python's builtins). + if level > 0 { + return; + } + // Ignore irrelevant modules. if !matches!( module, diff --git a/crates/ruff_linter/src/rules/pyupgrade/snapshots/ruff_linter__rules__pyupgrade__tests__UP029_2.py.snap b/crates/ruff_linter/src/rules/pyupgrade/snapshots/ruff_linter__rules__pyupgrade__tests__UP029_2.py.snap new file mode 100644 index 0000000000..2bacb5d540 --- /dev/null +++ b/crates/ruff_linter/src/rules/pyupgrade/snapshots/ruff_linter__rules__pyupgrade__tests__UP029_2.py.snap @@ -0,0 +1,4 @@ +--- +source: crates/ruff_linter/src/rules/pyupgrade/mod.rs +--- + diff --git a/crates/ruff_linter/src/rules/refurb/mod.rs b/crates/ruff_linter/src/rules/refurb/mod.rs index 97d9fae7a6..9187853141 100644 --- a/crates/ruff_linter/src/rules/refurb/mod.rs +++ b/crates/ruff_linter/src/rules/refurb/mod.rs @@ -12,7 +12,6 @@ mod tests { use test_case::test_case; use crate::registry::Rule; - use crate::settings::types::PreviewMode; use crate::test::test_path; use crate::{assert_diagnostics, settings}; @@ -63,25 +62,6 @@ mod tests { Ok(()) } - #[test_case(Rule::ReadWholeFile, Path::new("FURB101.py"))] - #[test_case(Rule::WriteWholeFile, Path::new("FURB103.py"))] - fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> { - let snapshot = format!( - "preview_{}_{}", - rule_code.noqa_code(), - path.to_string_lossy() - ); - let diagnostics = test_path( - Path::new("refurb").join(path).as_path(), - &settings::LinterSettings { - 
preview: PreviewMode::Enabled, - ..settings::LinterSettings::for_rule(rule_code) - }, - )?; - assert_diagnostics!(snapshot, diagnostics); - Ok(()) - } - #[test] fn write_whole_file_python_39() -> Result<()> { let diagnostics = test_path( diff --git a/crates/ruff_linter/src/rules/refurb/rules/bit_count.rs b/crates/ruff_linter/src/rules/refurb/rules/bit_count.rs index b86c6e9d9e..0690ca5449 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/bit_count.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/bit_count.rs @@ -188,7 +188,7 @@ pub(crate) fn bit_count(checker: &Checker, call: &ExprCall) { let mut diagnostic = checker.report_diagnostic( BitCount { existing: SourceCodeSnippet::from_str(literal_text), - replacement: SourceCodeSnippet::new(replacement.to_string()), + replacement: SourceCodeSnippet::new(replacement.clone()), }, call.range(), ); diff --git a/crates/ruff_linter/src/rules/refurb/rules/hardcoded_string_charset.rs b/crates/ruff_linter/src/rules/refurb/rules/hardcoded_string_charset.rs index 151bdc3113..8194ac87d4 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/hardcoded_string_charset.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/hardcoded_string_charset.rs @@ -62,40 +62,11 @@ pub(crate) fn hardcoded_string_charset_literal(checker: &Checker, expr: &ExprStr struct NamedCharset { name: &'static str, bytes: &'static [u8], - ascii_char_set: AsciiCharSet, -} - -/// Represents the set of ascii characters in form of a bitset. -#[derive(Debug, Copy, Clone, Eq, PartialEq)] -struct AsciiCharSet(u128); - -impl AsciiCharSet { - /// Creates the set of ascii characters from `bytes`. - /// Returns None if there is non-ascii byte. 
- const fn from_bytes(bytes: &[u8]) -> Option { - // TODO: simplify implementation, when const-traits are supported - // https://github.com/rust-lang/rust-project-goals/issues/106 - let mut bitset = 0; - let mut i = 0; - while i < bytes.len() { - if !bytes[i].is_ascii() { - return None; - } - bitset |= 1 << bytes[i]; - i += 1; - } - Some(Self(bitset)) - } } impl NamedCharset { const fn new(name: &'static str, bytes: &'static [u8]) -> Self { - Self { - name, - bytes, - // SAFETY: The named charset is guaranteed to have only ascii bytes. - ascii_char_set: AsciiCharSet::from_bytes(bytes).unwrap(), - } + Self { name, bytes } } } diff --git a/crates/ruff_linter/src/rules/refurb/rules/print_empty_string.rs b/crates/ruff_linter/src/rules/refurb/rules/print_empty_string.rs index 2b8ecfab45..6c010da313 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/print_empty_string.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/print_empty_string.rs @@ -1,5 +1,5 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; -use ruff_python_ast::helpers::contains_effect; +use ruff_python_ast::helpers::{contains_effect, is_empty_f_string}; use ruff_python_ast::{self as ast, Expr}; use ruff_python_codegen::Generator; use ruff_python_semantic::SemanticModel; @@ -194,13 +194,11 @@ pub(crate) fn print_empty_string(checker: &Checker, call: &ast::ExprCall) { /// Check if an expression is a constant empty string. fn is_empty_string(expr: &Expr) -> bool { - matches!( - expr, - Expr::StringLiteral(ast::ExprStringLiteral { - value, - .. - }) if value.is_empty() - ) + match expr { + Expr::StringLiteral(ast::ExprStringLiteral { value, .. 
}) => value.is_empty(), + Expr::FString(f_string) => is_empty_f_string(f_string), + _ => false, + } } #[derive(Debug, Clone, Copy, PartialEq, Eq)] diff --git a/crates/ruff_linter/src/rules/refurb/rules/read_whole_file.rs b/crates/ruff_linter/src/rules/refurb/rules/read_whole_file.rs index b64f91829a..2b43af89a8 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/read_whole_file.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/read_whole_file.rs @@ -125,24 +125,8 @@ impl<'a> Visitor<'a> for ReadMatcher<'a, '_> { open.item.range(), ); - if !crate::preview::is_fix_read_whole_file_enabled(self.checker.settings()) { - return; - } - - let target = match self.with_stmt.body.first() { - Some(Stmt::Assign(assign)) - if assign.value.range().contains_range(expr.range()) => - { - match assign.targets.first() { - Some(Expr::Name(name)) => Some(name.id.as_str()), - _ => None, - } - } - _ => None, - }; - if let Some(fix) = - generate_fix(self.checker, &open, target, self.with_stmt, &suggestion) + generate_fix(self.checker, &open, expr, self.with_stmt, &suggestion) { diagnostic.set_fix(fix); } @@ -194,15 +178,16 @@ fn make_suggestion(open: &FileOpen<'_>, generator: Generator) -> String { fn generate_fix( checker: &Checker, open: &FileOpen, - target: Option<&str>, + expr: &Expr, with_stmt: &ast::StmtWith, suggestion: &str, ) -> Option { - if !(with_stmt.items.len() == 1 && matches!(with_stmt.body.as_slice(), [Stmt::Assign(_)])) { + if with_stmt.items.len() != 1 { return None; } let locator = checker.locator(); + let filename_code = locator.slice(open.filename.range()); let (import_edit, binding) = checker @@ -214,9 +199,39 @@ fn generate_fix( ) .ok()?; - let replacement = match target { - Some(var) => format!("{var} = {binding}({filename_code}).{suggestion}"), - None => format!("{binding}({filename_code}).{suggestion}"), + // Only replace context managers with a single assignment or annotated assignment in the body. 
+ // The assignment's RHS must also be the same as the `read` call in `expr`, otherwise this fix + // would remove the rest of the expression. + let replacement = match with_stmt.body.as_slice() { + [Stmt::Assign(ast::StmtAssign { targets, value, .. })] if value.range() == expr.range() => { + match targets.as_slice() { + [Expr::Name(name)] => { + format!( + "{name} = {binding}({filename_code}).{suggestion}", + name = name.id + ) + } + _ => return None, + } + } + [ + Stmt::AnnAssign(ast::StmtAnnAssign { + target, + annotation, + value: Some(value), + .. + }), + ] if value.range() == expr.range() => match target.as_ref() { + Expr::Name(name) => { + format!( + "{var}: {ann} = {binding}({filename_code}).{suggestion}", + var = name.id, + ann = locator.slice(annotation.range()) + ) + } + _ => return None, + }, + _ => return None, }; let applicability = if checker.comment_ranges().intersects(with_stmt.range()) { diff --git a/crates/ruff_linter/src/rules/refurb/rules/unnecessary_from_float.rs b/crates/ruff_linter/src/rules/refurb/rules/unnecessary_from_float.rs index e34357bd55..38184c8fd3 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/unnecessary_from_float.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/unnecessary_from_float.rs @@ -149,10 +149,9 @@ pub(crate) fn unnecessary_from_float(checker: &Checker, call: &ExprCall) { // Check if we should suppress the fix due to type validation concerns let is_type_safe = is_valid_argument_type(arg_value, method_name, constructor, checker); - let has_keywords = !call.arguments.keywords.is_empty(); // Determine fix safety - let applicability = if is_type_safe && !has_keywords { + let applicability = if is_type_safe { Applicability::Safe } else { Applicability::Unsafe @@ -210,21 +209,27 @@ fn is_valid_argument_type( _ => false, }, // Fraction.from_decimal accepts int, bool, Decimal - (MethodName::FromDecimal, Constructor::Fraction) => match resolved_type { - ResolvedPythonType::Atom(PythonType::Number( - 
NumberLike::Integer | NumberLike::Bool, - )) => true, - ResolvedPythonType::Unknown => is_int, - _ => { - // Check if it's a Decimal instance - arg_expr - .as_call_expr() - .and_then(|call| semantic.resolve_qualified_name(&call.func)) - .is_some_and(|qualified_name| { - matches!(qualified_name.segments(), ["decimal", "Decimal"]) - }) + (MethodName::FromDecimal, Constructor::Fraction) => { + // First check if it's a Decimal constructor call + let is_decimal_call = arg_expr + .as_call_expr() + .and_then(|call| semantic.resolve_qualified_name(&call.func)) + .is_some_and(|qualified_name| { + matches!(qualified_name.segments(), ["decimal", "Decimal"]) + }); + + if is_decimal_call { + return true; } - }, + + match resolved_type { + ResolvedPythonType::Atom(PythonType::Number( + NumberLike::Integer | NumberLike::Bool, + )) => true, + ResolvedPythonType::Unknown => is_int, + _ => false, + } + } _ => false, } } @@ -274,7 +279,7 @@ fn handle_non_finite_float_special_case( return None; } - let Expr::Call(ast::ExprCall { + let Expr::Call(ExprCall { func, arguments, .. }) = arg_value else { diff --git a/crates/ruff_linter/src/rules/refurb/rules/verbose_decimal_constructor.rs b/crates/ruff_linter/src/rules/refurb/rules/verbose_decimal_constructor.rs index 50c98026d5..28779b021a 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/verbose_decimal_constructor.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/verbose_decimal_constructor.rs @@ -93,16 +93,21 @@ pub(crate) fn verbose_decimal_constructor(checker: &Checker, call: &ast::ExprCal // https://github.com/python/cpython/blob/ac556a2ad1213b8bb81372fe6fb762f5fcb076de/Lib/_pydecimal.py#L6060-L6077 // _after_ trimming whitespace from the string and removing all occurrences of "_". let original_str = str_literal.to_str().trim_whitespace(); + // Strip leading underscores before extracting the sign, as Python's Decimal parser + // removes underscores before parsing the sign. 
+ let sign_check_str = original_str.trim_start_matches('_'); // Extract the unary sign, if any. - let (unary, original_str) = if let Some(trimmed) = original_str.strip_prefix('+') { + let (unary, sign_check_str) = if let Some(trimmed) = sign_check_str.strip_prefix('+') { ("+", trimmed) - } else if let Some(trimmed) = original_str.strip_prefix('-') { + } else if let Some(trimmed) = sign_check_str.strip_prefix('-') { ("-", trimmed) } else { - ("", original_str) + ("", sign_check_str) }; - let mut rest = Cow::from(original_str); - let has_digit_separators = memchr::memchr(b'_', rest.as_bytes()).is_some(); + // Save the string after the sign for normalization (before removing underscores) + let str_after_sign_for_normalization = sign_check_str; + let mut rest = Cow::from(sign_check_str); + let has_digit_separators = memchr::memchr(b'_', original_str.as_bytes()).is_some(); if has_digit_separators { rest = Cow::from(rest.replace('_', "")); } @@ -123,7 +128,7 @@ pub(crate) fn verbose_decimal_constructor(checker: &Checker, call: &ast::ExprCal // If the original string had digit separators, normalize them let rest = if has_digit_separators { - Cow::from(normalize_digit_separators(original_str)) + Cow::from(normalize_digit_separators(str_after_sign_for_normalization)) } else { Cow::from(rest) }; diff --git a/crates/ruff_linter/src/rules/refurb/rules/write_whole_file.rs b/crates/ruff_linter/src/rules/refurb/rules/write_whole_file.rs index bbee6dcb5a..f25faa3eb2 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/write_whole_file.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/write_whole_file.rs @@ -5,7 +5,6 @@ use ruff_python_ast::{ relocate::relocate_expr, visitor::{self, Visitor}, }; - use ruff_python_codegen::Generator; use ruff_text_size::{Ranged, TextRange}; @@ -142,10 +141,6 @@ impl<'a> Visitor<'a> for WriteMatcher<'a, '_> { open.item.range(), ); - if !crate::preview::is_fix_write_whole_file_enabled(self.checker.settings()) { - return; - } - if let Some(fix) = 
generate_fix(self.checker, &open, self.with_stmt, &suggestion) { diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB101_FURB101.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB101_FURB101.py.snap index 3f851c3f12..3fea418d76 100644 --- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB101_FURB101.py.snap +++ b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB101_FURB101.py.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/refurb/mod.rs --- -FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text()` +FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text()` --> FURB101.py:12:6 | 11 | # FURB101 @@ -10,8 +10,22 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text()` 13 | x = f.read() | help: Replace with `Path("file.txt").read_text()` +1 + import pathlib +2 | def foo(): +3 | ... +4 | +-------------------------------------------------------------------------------- +10 | # Errors. +11 | +12 | # FURB101 + - with open("file.txt") as f: + - x = f.read() +13 + x = pathlib.Path("file.txt").read_text() +14 | +15 | # FURB101 +16 | with open("file.txt", "rb") as f: -FURB101 `open` and `read` should be replaced by `Path("file.txt").read_bytes()` +FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_bytes()` --> FURB101.py:16:6 | 15 | # FURB101 @@ -20,8 +34,22 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_bytes()` 17 | x = f.read() | help: Replace with `Path("file.txt").read_bytes()` +1 + import pathlib +2 | def foo(): +3 | ... 
+4 | +-------------------------------------------------------------------------------- +14 | x = f.read() +15 | +16 | # FURB101 + - with open("file.txt", "rb") as f: + - x = f.read() +17 + x = pathlib.Path("file.txt").read_bytes() +18 | +19 | # FURB101 +20 | with open("file.txt", mode="rb") as f: -FURB101 `open` and `read` should be replaced by `Path("file.txt").read_bytes()` +FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_bytes()` --> FURB101.py:20:6 | 19 | # FURB101 @@ -30,8 +58,22 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_bytes()` 21 | x = f.read() | help: Replace with `Path("file.txt").read_bytes()` +1 + import pathlib +2 | def foo(): +3 | ... +4 | +-------------------------------------------------------------------------------- +18 | x = f.read() +19 | +20 | # FURB101 + - with open("file.txt", mode="rb") as f: + - x = f.read() +21 + x = pathlib.Path("file.txt").read_bytes() +22 | +23 | # FURB101 +24 | with open("file.txt", encoding="utf8") as f: -FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf8")` +FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf8")` --> FURB101.py:24:6 | 23 | # FURB101 @@ -40,8 +82,22 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(enco 25 | x = f.read() | help: Replace with `Path("file.txt").read_text(encoding="utf8")` +1 + import pathlib +2 | def foo(): +3 | ... 
+4 | +-------------------------------------------------------------------------------- +22 | x = f.read() +23 | +24 | # FURB101 + - with open("file.txt", encoding="utf8") as f: + - x = f.read() +25 + x = pathlib.Path("file.txt").read_text(encoding="utf8") +26 | +27 | # FURB101 +28 | with open("file.txt", errors="ignore") as f: -FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(errors="ignore")` +FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text(errors="ignore")` --> FURB101.py:28:6 | 27 | # FURB101 @@ -50,8 +106,22 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(erro 29 | x = f.read() | help: Replace with `Path("file.txt").read_text(errors="ignore")` +1 + import pathlib +2 | def foo(): +3 | ... +4 | +-------------------------------------------------------------------------------- +26 | x = f.read() +27 | +28 | # FURB101 + - with open("file.txt", errors="ignore") as f: + - x = f.read() +29 + x = pathlib.Path("file.txt").read_text(errors="ignore") +30 | +31 | # FURB101 +32 | with open("file.txt", mode="r") as f: # noqa: FURB120 -FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text()` +FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text()` --> FURB101.py:32:6 | 31 | # FURB101 @@ -60,6 +130,21 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text()` 33 | x = f.read() | help: Replace with `Path("file.txt").read_text()` +1 + import pathlib +2 | def foo(): +3 | ... 
+4 | +-------------------------------------------------------------------------------- +30 | x = f.read() +31 | +32 | # FURB101 + - with open("file.txt", mode="r") as f: # noqa: FURB120 + - x = f.read() +33 + x = pathlib.Path("file.txt").read_text() +34 | +35 | # FURB101 +36 | with open(foo(), "rb") as f: +note: This is an unsafe fix and may change runtime behavior FURB101 `open` and `read` should be replaced by `Path(foo()).read_bytes()` --> FURB101.py:36:6 @@ -104,3 +189,58 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text()` 51 | # the user reads the whole file and that bit they can replace. | help: Replace with `Path("file.txt").read_text()` + +FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf-8")` + --> FURB101.py:130:6 + | +129 | # FURB101 +130 | with open("file.txt", encoding="utf-8") as f: + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +131 | contents: str = f.read() + | +help: Replace with `Path("file.txt").read_text(encoding="utf-8")` +1 + import pathlib +2 | def foo(): +3 | ... 
+4 | +-------------------------------------------------------------------------------- +128 | x = f.read() +129 | +130 | # FURB101 + - with open("file.txt", encoding="utf-8") as f: + - contents: str = f.read() +131 + contents: str = pathlib.Path("file.txt").read_text(encoding="utf-8") +132 | +133 | # FURB101 but no fix because it would remove the assignment to `x` +134 | with open("file.txt", encoding="utf-8") as f: + +FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf-8")` + --> FURB101.py:134:6 + | +133 | # FURB101 but no fix because it would remove the assignment to `x` +134 | with open("file.txt", encoding="utf-8") as f: + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +135 | contents, x = f.read(), 2 + | +help: Replace with `Path("file.txt").read_text(encoding="utf-8")` + +FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf-8")` + --> FURB101.py:138:6 + | +137 | # FURB101 but no fix because it would remove the `process_contents` call +138 | with open("file.txt", encoding="utf-8") as f: + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +139 | contents = process_contents(f.read()) + | +help: Replace with `Path("file.txt").read_text(encoding="utf-8")` + +FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf-8")` + --> FURB101.py:141:6 + | +139 | contents = process_contents(f.read()) +140 | +141 | with open("file.txt", encoding="utf-8") as f: + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +142 | contents: str = process_contents(f.read()) + | +help: Replace with `Path("file.txt").read_text(encoding="utf-8")` diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB103_FURB103.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB103_FURB103.py.snap index dfb111341e..8148035435 100644 --- 
a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB103_FURB103.py.snap +++ b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB103_FURB103.py.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/refurb/mod.rs --- -FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text("test")` +FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text("test")` --> FURB103.py:12:6 | 11 | # FURB103 @@ -10,8 +10,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text("t 13 | f.write("test") | help: Replace with `Path("file.txt").write_text("test")` +1 + import pathlib +2 | def foo(): +3 | ... +4 | +-------------------------------------------------------------------------------- +10 | # Errors. +11 | +12 | # FURB103 + - with open("file.txt", "w") as f: + - f.write("test") +13 + pathlib.Path("file.txt").write_text("test") +14 | +15 | # FURB103 +16 | with open("file.txt", "wb") as f: -FURB103 `open` and `write` should be replaced by `Path("file.txt").write_bytes(foobar)` +FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(foobar)` --> FURB103.py:16:6 | 15 | # FURB103 @@ -20,8 +34,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_bytes(f 17 | f.write(foobar) | help: Replace with `Path("file.txt").write_bytes(foobar)` +1 + import pathlib +2 | def foo(): +3 | ... 
+4 | +-------------------------------------------------------------------------------- +14 | f.write("test") +15 | +16 | # FURB103 + - with open("file.txt", "wb") as f: + - f.write(foobar) +17 + pathlib.Path("file.txt").write_bytes(foobar) +18 | +19 | # FURB103 +20 | with open("file.txt", mode="wb") as f: -FURB103 `open` and `write` should be replaced by `Path("file.txt").write_bytes(b"abc")` +FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(b"abc")` --> FURB103.py:20:6 | 19 | # FURB103 @@ -30,8 +58,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_bytes(b 21 | f.write(b"abc") | help: Replace with `Path("file.txt").write_bytes(b"abc")` +1 + import pathlib +2 | def foo(): +3 | ... +4 | +-------------------------------------------------------------------------------- +18 | f.write(foobar) +19 | +20 | # FURB103 + - with open("file.txt", mode="wb") as f: + - f.write(b"abc") +21 + pathlib.Path("file.txt").write_bytes(b"abc") +22 | +23 | # FURB103 +24 | with open("file.txt", "w", encoding="utf8") as f: -FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, encoding="utf8")` +FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, encoding="utf8")` --> FURB103.py:24:6 | 23 | # FURB103 @@ -40,8 +82,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(fo 25 | f.write(foobar) | help: Replace with `Path("file.txt").write_text(foobar, encoding="utf8")` +1 + import pathlib +2 | def foo(): +3 | ... 
+4 | +-------------------------------------------------------------------------------- +22 | f.write(b"abc") +23 | +24 | # FURB103 + - with open("file.txt", "w", encoding="utf8") as f: + - f.write(foobar) +25 + pathlib.Path("file.txt").write_text(foobar, encoding="utf8") +26 | +27 | # FURB103 +28 | with open("file.txt", "w", errors="ignore") as f: -FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, errors="ignore")` +FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, errors="ignore")` --> FURB103.py:28:6 | 27 | # FURB103 @@ -50,8 +106,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(fo 29 | f.write(foobar) | help: Replace with `Path("file.txt").write_text(foobar, errors="ignore")` +1 + import pathlib +2 | def foo(): +3 | ... +4 | +-------------------------------------------------------------------------------- +26 | f.write(foobar) +27 | +28 | # FURB103 + - with open("file.txt", "w", errors="ignore") as f: + - f.write(foobar) +29 + pathlib.Path("file.txt").write_text(foobar, errors="ignore") +30 | +31 | # FURB103 +32 | with open("file.txt", mode="w") as f: -FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(foobar)` +FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar)` --> FURB103.py:32:6 | 31 | # FURB103 @@ -60,6 +130,20 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(fo 33 | f.write(foobar) | help: Replace with `Path("file.txt").write_text(foobar)` +1 + import pathlib +2 | def foo(): +3 | ... 
+4 | +-------------------------------------------------------------------------------- +30 | f.write(foobar) +31 | +32 | # FURB103 + - with open("file.txt", mode="w") as f: + - f.write(foobar) +33 + pathlib.Path("file.txt").write_text(foobar) +34 | +35 | # FURB103 +36 | with open(foo(), "wb") as f: FURB103 `open` and `write` should be replaced by `Path(foo()).write_bytes(bar())` --> FURB103.py:36:6 @@ -105,7 +189,7 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(ba | help: Replace with `Path("file.txt").write_text(bar(bar(a + x)))` -FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")` +FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")` --> FURB103.py:58:6 | 57 | # FURB103 @@ -114,8 +198,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(fo 59 | f.write(foobar) | help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")` +1 + import pathlib +2 | def foo(): +3 | ... 
+4 | +-------------------------------------------------------------------------------- +56 | +57 | +58 | # FURB103 + - with open("file.txt", "w", newline="\r\n") as f: + - f.write(foobar) +59 + pathlib.Path("file.txt").write_text(foobar, newline="\r\n") +60 | +61 | +62 | import builtins -FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")` +FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")` --> FURB103.py:66:6 | 65 | # FURB103 @@ -124,8 +222,21 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(fo 67 | f.write(foobar) | help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")` +60 | +61 | +62 | import builtins +63 + import pathlib +64 | +65 | +66 | # FURB103 + - with builtins.open("file.txt", "w", newline="\r\n") as f: + - f.write(foobar) +67 + pathlib.Path("file.txt").write_text(foobar, newline="\r\n") +68 | +69 | +70 | from builtins import open as o -FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")` +FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")` --> FURB103.py:74:6 | 73 | # FURB103 @@ -134,3 +245,37 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(fo 75 | f.write(foobar) | help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")` +68 | +69 | +70 | from builtins import open as o +71 + import pathlib +72 | +73 | +74 | # FURB103 + - with o("file.txt", "w", newline="\r\n") as f: + - f.write(foobar) +75 + pathlib.Path("file.txt").write_text(foobar, newline="\r\n") +76 | +77 | # Non-errors. 
+78 | + +FURB103 [*] `open` and `write` should be replaced by `Path("test.json")....` + --> FURB103.py:154:6 + | +152 | data = {"price": 100} +153 | +154 | with open("test.json", "wb") as f: + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +155 | f.write(json.dumps(data, indent=4).encode("utf-8")) + | +help: Replace with `Path("test.json")....` +148 | +149 | # See: https://github.com/astral-sh/ruff/issues/20785 +150 | import json +151 + import pathlib +152 | +153 | data = {"price": 100} +154 | + - with open("test.json", "wb") as f: + - f.write(json.dumps(data, indent=4).encode("utf-8")) +155 + pathlib.Path("test.json").write_bytes(json.dumps(data, indent=4).encode("utf-8")) diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB105_FURB105.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB105_FURB105.py.snap index e258e208cb..d875fcff56 100644 --- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB105_FURB105.py.snap +++ b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB105_FURB105.py.snap @@ -317,7 +317,7 @@ help: Remove empty string 19 + print(**kwargs) 20 | print(sep="\t") 21 | print(sep=print(1)) -22 | +22 | print(f"") FURB105 [*] Unnecessary separator passed to `print` --> FURB105.py:20:1 @@ -327,6 +327,7 @@ FURB105 [*] Unnecessary separator passed to `print` 20 | print(sep="\t") | ^^^^^^^^^^^^^^^ 21 | print(sep=print(1)) +22 | print(f"") | help: Remove separator 17 | print("", *args) @@ -335,8 +336,8 @@ help: Remove separator - print(sep="\t") 20 + print() 21 | print(sep=print(1)) -22 | -23 | # OK. +22 | print(f"") +23 | print(f"", sep=",") FURB105 [*] Unnecessary separator passed to `print` --> FURB105.py:21:1 @@ -345,8 +346,8 @@ FURB105 [*] Unnecessary separator passed to `print` 20 | print(sep="\t") 21 | print(sep=print(1)) | ^^^^^^^^^^^^^^^^^^^ -22 | -23 | # OK. 
+22 | print(f"") +23 | print(f"", sep=",") | help: Remove separator 18 | print("", *args, sep="") @@ -354,7 +355,66 @@ help: Remove separator 20 | print(sep="\t") - print(sep=print(1)) 21 + print() -22 | -23 | # OK. -24 | +22 | print(f"") +23 | print(f"", sep=",") +24 | print(f"", end="bar") note: This is an unsafe fix and may change runtime behavior + +FURB105 [*] Unnecessary empty string passed to `print` + --> FURB105.py:22:1 + | +20 | print(sep="\t") +21 | print(sep=print(1)) +22 | print(f"") + | ^^^^^^^^^^ +23 | print(f"", sep=",") +24 | print(f"", end="bar") + | +help: Remove empty string +19 | print("", **kwargs) +20 | print(sep="\t") +21 | print(sep=print(1)) + - print(f"") +22 + print() +23 | print(f"", sep=",") +24 | print(f"", end="bar") +25 | + +FURB105 [*] Unnecessary empty string and separator passed to `print` + --> FURB105.py:23:1 + | +21 | print(sep=print(1)) +22 | print(f"") +23 | print(f"", sep=",") + | ^^^^^^^^^^^^^^^^^^^ +24 | print(f"", end="bar") + | +help: Remove empty string and separator +20 | print(sep="\t") +21 | print(sep=print(1)) +22 | print(f"") + - print(f"", sep=",") +23 + print() +24 | print(f"", end="bar") +25 | +26 | # OK. + +FURB105 [*] Unnecessary empty string passed to `print` + --> FURB105.py:24:1 + | +22 | print(f"") +23 | print(f"", sep=",") +24 | print(f"", end="bar") + | ^^^^^^^^^^^^^^^^^^^^^ +25 | +26 | # OK. + | +help: Remove empty string +21 | print(sep=print(1)) +22 | print(f"") +23 | print(f"", sep=",") + - print(f"", end="bar") +24 + print(end="bar") +25 | +26 | # OK. 
+27 | diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB157_FURB157.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB157_FURB157.py.snap index 92e8057055..3f0a1c2cf6 100644 --- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB157_FURB157.py.snap +++ b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB157_FURB157.py.snap @@ -669,6 +669,7 @@ help: Replace with `1_2345` 85 + Decimal(1_2345) 86 | Decimal("000_1_2345") 87 | Decimal("000_000") +88 | FURB157 [*] Verbose expression in `Decimal` constructor --> FURB157.py:86:9 @@ -686,6 +687,8 @@ help: Replace with `1_2345` - Decimal("000_1_2345") 86 + Decimal(1_2345) 87 | Decimal("000_000") +88 | +89 | # Test cases for underscores before sign FURB157 [*] Verbose expression in `Decimal` constructor --> FURB157.py:87:9 @@ -694,6 +697,8 @@ FURB157 [*] Verbose expression in `Decimal` constructor 86 | Decimal("000_1_2345") 87 | Decimal("000_000") | ^^^^^^^^^ +88 | +89 | # Test cases for underscores before sign | help: Replace with `0` 84 | # Separators _and_ leading zeros @@ -701,3 +706,57 @@ help: Replace with `0` 86 | Decimal("000_1_2345") - Decimal("000_000") 87 + Decimal(0) +88 | +89 | # Test cases for underscores before sign +90 | # https://github.com/astral-sh/ruff/issues/21186 + +FURB157 [*] Verbose expression in `Decimal` constructor + --> FURB157.py:91:9 + | +89 | # Test cases for underscores before sign +90 | # https://github.com/astral-sh/ruff/issues/21186 +91 | Decimal("_-1") # Should flag as verbose + | ^^^^^ +92 | Decimal("_+1") # Should flag as verbose +93 | Decimal("_-1_000") # Should flag as verbose + | +help: Replace with `-1` +88 | +89 | # Test cases for underscores before sign +90 | # https://github.com/astral-sh/ruff/issues/21186 + - Decimal("_-1") # Should flag as verbose +91 + Decimal(-1) # Should flag as verbose +92 | Decimal("_+1") # 
Should flag as verbose +93 | Decimal("_-1_000") # Should flag as verbose + +FURB157 [*] Verbose expression in `Decimal` constructor + --> FURB157.py:92:9 + | +90 | # https://github.com/astral-sh/ruff/issues/21186 +91 | Decimal("_-1") # Should flag as verbose +92 | Decimal("_+1") # Should flag as verbose + | ^^^^^ +93 | Decimal("_-1_000") # Should flag as verbose + | +help: Replace with `+1` +89 | # Test cases for underscores before sign +90 | # https://github.com/astral-sh/ruff/issues/21186 +91 | Decimal("_-1") # Should flag as verbose + - Decimal("_+1") # Should flag as verbose +92 + Decimal(+1) # Should flag as verbose +93 | Decimal("_-1_000") # Should flag as verbose + +FURB157 [*] Verbose expression in `Decimal` constructor + --> FURB157.py:93:9 + | +91 | Decimal("_-1") # Should flag as verbose +92 | Decimal("_+1") # Should flag as verbose +93 | Decimal("_-1_000") # Should flag as verbose + | ^^^^^^^^^ + | +help: Replace with `-1_000` +90 | # https://github.com/astral-sh/ruff/issues/21186 +91 | Decimal("_-1") # Should flag as verbose +92 | Decimal("_+1") # Should flag as verbose + - Decimal("_-1_000") # Should flag as verbose +93 + Decimal(-1_000) # Should flag as verbose diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB164_FURB164.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB164_FURB164.py.snap index e917928a64..7bd2ce8225 100644 --- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB164_FURB164.py.snap +++ b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB164_FURB164.py.snap @@ -99,7 +99,6 @@ help: Replace with `Fraction` constructor 12 | _ = Fraction.from_decimal(Decimal("-4.2")) 13 | _ = Fraction.from_decimal(Decimal.from_float(4.2)) 14 | _ = Decimal.from_float(0.1) -note: This is an unsafe fix and may change runtime behavior FURB164 [*] Verbose method `from_decimal` in `Fraction` 
construction --> FURB164.py:12:5 @@ -120,7 +119,6 @@ help: Replace with `Fraction` constructor 13 | _ = Fraction.from_decimal(Decimal.from_float(4.2)) 14 | _ = Decimal.from_float(0.1) 15 | _ = Decimal.from_float(-0.5) -note: This is an unsafe fix and may change runtime behavior FURB164 [*] Verbose method `from_decimal` in `Fraction` construction --> FURB164.py:13:5 @@ -484,7 +482,6 @@ help: Replace with `Fraction` constructor 32 | _ = Decimal.from_float(f=4.2) 33 | 34 | # Cases with invalid argument counts - should not get fixes -note: This is an unsafe fix and may change runtime behavior FURB164 Verbose method `from_float` in `Decimal` construction --> FURB164.py:32:5 @@ -658,6 +655,7 @@ help: Replace with `Decimal` constructor 64 + _ = Decimal("nan") 65 | _ = Decimal.from_float(float("\x2dnan")) 66 | _ = Decimal.from_float(float("\N{HYPHEN-MINUS}nan")) +67 | FURB164 [*] Verbose method `from_float` in `Decimal` construction --> FURB164.py:65:5 @@ -675,6 +673,8 @@ help: Replace with `Decimal` constructor - _ = Decimal.from_float(float("\x2dnan")) 65 + _ = Decimal("nan") 66 | _ = Decimal.from_float(float("\N{HYPHEN-MINUS}nan")) +67 | +68 | # See: https://github.com/astral-sh/ruff/issues/21257 FURB164 [*] Verbose method `from_float` in `Decimal` construction --> FURB164.py:66:5 @@ -683,6 +683,8 @@ FURB164 [*] Verbose method `from_float` in `Decimal` construction 65 | _ = Decimal.from_float(float("\x2dnan")) 66 | _ = Decimal.from_float(float("\N{HYPHEN-MINUS}nan")) | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +67 | +68 | # See: https://github.com/astral-sh/ruff/issues/21257 | help: Replace with `Decimal` constructor 63 | # Cases with non-finite floats - should produce safe fixes @@ -690,3 +692,38 @@ help: Replace with `Decimal` constructor 65 | _ = Decimal.from_float(float("\x2dnan")) - _ = Decimal.from_float(float("\N{HYPHEN-MINUS}nan")) 66 + _ = Decimal("nan") +67 | +68 | # See: https://github.com/astral-sh/ruff/issues/21257 +69 | # fixes must be safe + 
+FURB164 [*] Verbose method `from_float` in `Fraction` construction + --> FURB164.py:70:5 + | +68 | # See: https://github.com/astral-sh/ruff/issues/21257 +69 | # fixes must be safe +70 | _ = Fraction.from_float(f=4.2) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ +71 | _ = Fraction.from_decimal(dec=4) + | +help: Replace with `Fraction` constructor +67 | +68 | # See: https://github.com/astral-sh/ruff/issues/21257 +69 | # fixes must be safe + - _ = Fraction.from_float(f=4.2) +70 + _ = Fraction(4.2) +71 | _ = Fraction.from_decimal(dec=4) + +FURB164 [*] Verbose method `from_decimal` in `Fraction` construction + --> FURB164.py:71:5 + | +69 | # fixes must be safe +70 | _ = Fraction.from_float(f=4.2) +71 | _ = Fraction.from_decimal(dec=4) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: Replace with `Fraction` constructor +68 | # See: https://github.com/astral-sh/ruff/issues/21257 +69 | # fixes must be safe +70 | _ = Fraction.from_float(f=4.2) + - _ = Fraction.from_decimal(dec=4) +71 + _ = Fraction(4) diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__preview_FURB101_FURB101.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__preview_FURB101_FURB101.py.snap deleted file mode 100644 index 4131499c0c..0000000000 --- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__preview_FURB101_FURB101.py.snap +++ /dev/null @@ -1,191 +0,0 @@ ---- -source: crates/ruff_linter/src/rules/refurb/mod.rs ---- -FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text()` - --> FURB101.py:12:6 - | -11 | # FURB101 -12 | with open("file.txt") as f: - | ^^^^^^^^^^^^^^^^^^^^^ -13 | x = f.read() - | -help: Replace with `Path("file.txt").read_text()` -1 + import pathlib -2 | def foo(): -3 | ... -4 | --------------------------------------------------------------------------------- -10 | # Errors. 
-11 | -12 | # FURB101 - - with open("file.txt") as f: - - x = f.read() -13 + x = pathlib.Path("file.txt").read_text() -14 | -15 | # FURB101 -16 | with open("file.txt", "rb") as f: - -FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_bytes()` - --> FURB101.py:16:6 - | -15 | # FURB101 -16 | with open("file.txt", "rb") as f: - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -17 | x = f.read() - | -help: Replace with `Path("file.txt").read_bytes()` -1 + import pathlib -2 | def foo(): -3 | ... -4 | --------------------------------------------------------------------------------- -14 | x = f.read() -15 | -16 | # FURB101 - - with open("file.txt", "rb") as f: - - x = f.read() -17 + x = pathlib.Path("file.txt").read_bytes() -18 | -19 | # FURB101 -20 | with open("file.txt", mode="rb") as f: - -FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_bytes()` - --> FURB101.py:20:6 - | -19 | # FURB101 -20 | with open("file.txt", mode="rb") as f: - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -21 | x = f.read() - | -help: Replace with `Path("file.txt").read_bytes()` -1 + import pathlib -2 | def foo(): -3 | ... -4 | --------------------------------------------------------------------------------- -18 | x = f.read() -19 | -20 | # FURB101 - - with open("file.txt", mode="rb") as f: - - x = f.read() -21 + x = pathlib.Path("file.txt").read_bytes() -22 | -23 | # FURB101 -24 | with open("file.txt", encoding="utf8") as f: - -FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf8")` - --> FURB101.py:24:6 - | -23 | # FURB101 -24 | with open("file.txt", encoding="utf8") as f: - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -25 | x = f.read() - | -help: Replace with `Path("file.txt").read_text(encoding="utf8")` -1 + import pathlib -2 | def foo(): -3 | ... 
-4 | --------------------------------------------------------------------------------- -22 | x = f.read() -23 | -24 | # FURB101 - - with open("file.txt", encoding="utf8") as f: - - x = f.read() -25 + x = pathlib.Path("file.txt").read_text(encoding="utf8") -26 | -27 | # FURB101 -28 | with open("file.txt", errors="ignore") as f: - -FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text(errors="ignore")` - --> FURB101.py:28:6 - | -27 | # FURB101 -28 | with open("file.txt", errors="ignore") as f: - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -29 | x = f.read() - | -help: Replace with `Path("file.txt").read_text(errors="ignore")` -1 + import pathlib -2 | def foo(): -3 | ... -4 | --------------------------------------------------------------------------------- -26 | x = f.read() -27 | -28 | # FURB101 - - with open("file.txt", errors="ignore") as f: - - x = f.read() -29 + x = pathlib.Path("file.txt").read_text(errors="ignore") -30 | -31 | # FURB101 -32 | with open("file.txt", mode="r") as f: # noqa: FURB120 - -FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text()` - --> FURB101.py:32:6 - | -31 | # FURB101 -32 | with open("file.txt", mode="r") as f: # noqa: FURB120 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -33 | x = f.read() - | -help: Replace with `Path("file.txt").read_text()` -1 + import pathlib -2 | def foo(): -3 | ... 
-4 | --------------------------------------------------------------------------------- -30 | x = f.read() -31 | -32 | # FURB101 - - with open("file.txt", mode="r") as f: # noqa: FURB120 - - x = f.read() -33 + x = pathlib.Path("file.txt").read_text() -34 | -35 | # FURB101 -36 | with open(foo(), "rb") as f: -note: This is an unsafe fix and may change runtime behavior - -FURB101 `open` and `read` should be replaced by `Path(foo()).read_bytes()` - --> FURB101.py:36:6 - | -35 | # FURB101 -36 | with open(foo(), "rb") as f: - | ^^^^^^^^^^^^^^^^^^^^^^ -37 | # The body of `with` is non-trivial, but the recommendation holds. -38 | bar("pre") - | -help: Replace with `Path(foo()).read_bytes()` - -FURB101 `open` and `read` should be replaced by `Path("a.txt").read_text()` - --> FURB101.py:44:6 - | -43 | # FURB101 -44 | with open("a.txt") as a, open("b.txt", "rb") as b: - | ^^^^^^^^^^^^^^^^^^ -45 | x = a.read() -46 | y = b.read() - | -help: Replace with `Path("a.txt").read_text()` - -FURB101 `open` and `read` should be replaced by `Path("b.txt").read_bytes()` - --> FURB101.py:44:26 - | -43 | # FURB101 -44 | with open("a.txt") as a, open("b.txt", "rb") as b: - | ^^^^^^^^^^^^^^^^^^^^^^^^ -45 | x = a.read() -46 | y = b.read() - | -help: Replace with `Path("b.txt").read_bytes()` - -FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text()` - --> FURB101.py:49:18 - | -48 | # FURB101 -49 | with foo() as a, open("file.txt") as b, foo() as c: - | ^^^^^^^^^^^^^^^^^^^^^ -50 | # We have other things in here, multiple with items, but -51 | # the user reads the whole file and that bit they can replace. 
- | -help: Replace with `Path("file.txt").read_text()` diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__preview_FURB103_FURB103.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__preview_FURB103_FURB103.py.snap deleted file mode 100644 index eef0992839..0000000000 --- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__preview_FURB103_FURB103.py.snap +++ /dev/null @@ -1,260 +0,0 @@ ---- -source: crates/ruff_linter/src/rules/refurb/mod.rs ---- -FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text("test")` - --> FURB103.py:12:6 - | -11 | # FURB103 -12 | with open("file.txt", "w") as f: - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ -13 | f.write("test") - | -help: Replace with `Path("file.txt").write_text("test")` -1 + import pathlib -2 | def foo(): -3 | ... -4 | --------------------------------------------------------------------------------- -10 | # Errors. -11 | -12 | # FURB103 - - with open("file.txt", "w") as f: - - f.write("test") -13 + pathlib.Path("file.txt").write_text("test") -14 | -15 | # FURB103 -16 | with open("file.txt", "wb") as f: - -FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(foobar)` - --> FURB103.py:16:6 - | -15 | # FURB103 -16 | with open("file.txt", "wb") as f: - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -17 | f.write(foobar) - | -help: Replace with `Path("file.txt").write_bytes(foobar)` -1 + import pathlib -2 | def foo(): -3 | ... 
-4 | --------------------------------------------------------------------------------- -14 | f.write("test") -15 | -16 | # FURB103 - - with open("file.txt", "wb") as f: - - f.write(foobar) -17 + pathlib.Path("file.txt").write_bytes(foobar) -18 | -19 | # FURB103 -20 | with open("file.txt", mode="wb") as f: - -FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(b"abc")` - --> FURB103.py:20:6 - | -19 | # FURB103 -20 | with open("file.txt", mode="wb") as f: - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -21 | f.write(b"abc") - | -help: Replace with `Path("file.txt").write_bytes(b"abc")` -1 + import pathlib -2 | def foo(): -3 | ... -4 | --------------------------------------------------------------------------------- -18 | f.write(foobar) -19 | -20 | # FURB103 - - with open("file.txt", mode="wb") as f: - - f.write(b"abc") -21 + pathlib.Path("file.txt").write_bytes(b"abc") -22 | -23 | # FURB103 -24 | with open("file.txt", "w", encoding="utf8") as f: - -FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, encoding="utf8")` - --> FURB103.py:24:6 - | -23 | # FURB103 -24 | with open("file.txt", "w", encoding="utf8") as f: - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -25 | f.write(foobar) - | -help: Replace with `Path("file.txt").write_text(foobar, encoding="utf8")` -1 + import pathlib -2 | def foo(): -3 | ... 
-4 | --------------------------------------------------------------------------------- -22 | f.write(b"abc") -23 | -24 | # FURB103 - - with open("file.txt", "w", encoding="utf8") as f: - - f.write(foobar) -25 + pathlib.Path("file.txt").write_text(foobar, encoding="utf8") -26 | -27 | # FURB103 -28 | with open("file.txt", "w", errors="ignore") as f: - -FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, errors="ignore")` - --> FURB103.py:28:6 - | -27 | # FURB103 -28 | with open("file.txt", "w", errors="ignore") as f: - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -29 | f.write(foobar) - | -help: Replace with `Path("file.txt").write_text(foobar, errors="ignore")` -1 + import pathlib -2 | def foo(): -3 | ... -4 | --------------------------------------------------------------------------------- -26 | f.write(foobar) -27 | -28 | # FURB103 - - with open("file.txt", "w", errors="ignore") as f: - - f.write(foobar) -29 + pathlib.Path("file.txt").write_text(foobar, errors="ignore") -30 | -31 | # FURB103 -32 | with open("file.txt", mode="w") as f: - -FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar)` - --> FURB103.py:32:6 - | -31 | # FURB103 -32 | with open("file.txt", mode="w") as f: - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -33 | f.write(foobar) - | -help: Replace with `Path("file.txt").write_text(foobar)` -1 + import pathlib -2 | def foo(): -3 | ... -4 | --------------------------------------------------------------------------------- -30 | f.write(foobar) -31 | -32 | # FURB103 - - with open("file.txt", mode="w") as f: - - f.write(foobar) -33 + pathlib.Path("file.txt").write_text(foobar) -34 | -35 | # FURB103 -36 | with open(foo(), "wb") as f: - -FURB103 `open` and `write` should be replaced by `Path(foo()).write_bytes(bar())` - --> FURB103.py:36:6 - | -35 | # FURB103 -36 | with open(foo(), "wb") as f: - | ^^^^^^^^^^^^^^^^^^^^^^ -37 | # The body of `with` is non-trivial, but the recommendation holds. 
-38 | bar("pre") - | -help: Replace with `Path(foo()).write_bytes(bar())` - -FURB103 `open` and `write` should be replaced by `Path("a.txt").write_text(x)` - --> FURB103.py:44:6 - | -43 | # FURB103 -44 | with open("a.txt", "w") as a, open("b.txt", "wb") as b: - | ^^^^^^^^^^^^^^^^^^^^^^^ -45 | a.write(x) -46 | b.write(y) - | -help: Replace with `Path("a.txt").write_text(x)` - -FURB103 `open` and `write` should be replaced by `Path("b.txt").write_bytes(y)` - --> FURB103.py:44:31 - | -43 | # FURB103 -44 | with open("a.txt", "w") as a, open("b.txt", "wb") as b: - | ^^^^^^^^^^^^^^^^^^^^^^^^ -45 | a.write(x) -46 | b.write(y) - | -help: Replace with `Path("b.txt").write_bytes(y)` - -FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(bar(bar(a + x)))` - --> FURB103.py:49:18 - | -48 | # FURB103 -49 | with foo() as a, open("file.txt", "w") as b, foo() as c: - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ -50 | # We have other things in here, multiple with items, but the user -51 | # writes a single time to file and that bit they can replace. - | -help: Replace with `Path("file.txt").write_text(bar(bar(a + x)))` - -FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")` - --> FURB103.py:58:6 - | -57 | # FURB103 -58 | with open("file.txt", "w", newline="\r\n") as f: - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -59 | f.write(foobar) - | -help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")` -1 + import pathlib -2 | def foo(): -3 | ... 
-4 | --------------------------------------------------------------------------------- -56 | -57 | -58 | # FURB103 - - with open("file.txt", "w", newline="\r\n") as f: - - f.write(foobar) -59 + pathlib.Path("file.txt").write_text(foobar, newline="\r\n") -60 | -61 | -62 | import builtins - -FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")` - --> FURB103.py:66:6 - | -65 | # FURB103 -66 | with builtins.open("file.txt", "w", newline="\r\n") as f: - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -67 | f.write(foobar) - | -help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")` -60 | -61 | -62 | import builtins -63 + import pathlib -64 | -65 | -66 | # FURB103 - - with builtins.open("file.txt", "w", newline="\r\n") as f: - - f.write(foobar) -67 + pathlib.Path("file.txt").write_text(foobar, newline="\r\n") -68 | -69 | -70 | from builtins import open as o - -FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")` - --> FURB103.py:74:6 - | -73 | # FURB103 -74 | with o("file.txt", "w", newline="\r\n") as f: - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -75 | f.write(foobar) - | -help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")` -68 | -69 | -70 | from builtins import open as o -71 + import pathlib -72 | -73 | -74 | # FURB103 - - with o("file.txt", "w", newline="\r\n") as f: - - f.write(foobar) -75 + pathlib.Path("file.txt").write_text(foobar, newline="\r\n") -76 | -77 | # Non-errors. 
-78 | diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__write_whole_file_python_39.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__write_whole_file_python_39.snap index 81eea0c159..3b68b110d5 100644 --- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__write_whole_file_python_39.snap +++ b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__write_whole_file_python_39.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/refurb/mod.rs --- -FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text("test")` +FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text("test")` --> FURB103.py:12:6 | 11 | # FURB103 @@ -10,8 +10,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text("t 13 | f.write("test") | help: Replace with `Path("file.txt").write_text("test")` +1 + import pathlib +2 | def foo(): +3 | ... +4 | +-------------------------------------------------------------------------------- +10 | # Errors. +11 | +12 | # FURB103 + - with open("file.txt", "w") as f: + - f.write("test") +13 + pathlib.Path("file.txt").write_text("test") +14 | +15 | # FURB103 +16 | with open("file.txt", "wb") as f: -FURB103 `open` and `write` should be replaced by `Path("file.txt").write_bytes(foobar)` +FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(foobar)` --> FURB103.py:16:6 | 15 | # FURB103 @@ -20,8 +34,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_bytes(f 17 | f.write(foobar) | help: Replace with `Path("file.txt").write_bytes(foobar)` +1 + import pathlib +2 | def foo(): +3 | ... 
+4 | +-------------------------------------------------------------------------------- +14 | f.write("test") +15 | +16 | # FURB103 + - with open("file.txt", "wb") as f: + - f.write(foobar) +17 + pathlib.Path("file.txt").write_bytes(foobar) +18 | +19 | # FURB103 +20 | with open("file.txt", mode="wb") as f: -FURB103 `open` and `write` should be replaced by `Path("file.txt").write_bytes(b"abc")` +FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(b"abc")` --> FURB103.py:20:6 | 19 | # FURB103 @@ -30,8 +58,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_bytes(b 21 | f.write(b"abc") | help: Replace with `Path("file.txt").write_bytes(b"abc")` +1 + import pathlib +2 | def foo(): +3 | ... +4 | +-------------------------------------------------------------------------------- +18 | f.write(foobar) +19 | +20 | # FURB103 + - with open("file.txt", mode="wb") as f: + - f.write(b"abc") +21 + pathlib.Path("file.txt").write_bytes(b"abc") +22 | +23 | # FURB103 +24 | with open("file.txt", "w", encoding="utf8") as f: -FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, encoding="utf8")` +FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, encoding="utf8")` --> FURB103.py:24:6 | 23 | # FURB103 @@ -40,8 +82,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(fo 25 | f.write(foobar) | help: Replace with `Path("file.txt").write_text(foobar, encoding="utf8")` +1 + import pathlib +2 | def foo(): +3 | ... 
+4 | +-------------------------------------------------------------------------------- +22 | f.write(b"abc") +23 | +24 | # FURB103 + - with open("file.txt", "w", encoding="utf8") as f: + - f.write(foobar) +25 + pathlib.Path("file.txt").write_text(foobar, encoding="utf8") +26 | +27 | # FURB103 +28 | with open("file.txt", "w", errors="ignore") as f: -FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, errors="ignore")` +FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, errors="ignore")` --> FURB103.py:28:6 | 27 | # FURB103 @@ -50,8 +106,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(fo 29 | f.write(foobar) | help: Replace with `Path("file.txt").write_text(foobar, errors="ignore")` +1 + import pathlib +2 | def foo(): +3 | ... +4 | +-------------------------------------------------------------------------------- +26 | f.write(foobar) +27 | +28 | # FURB103 + - with open("file.txt", "w", errors="ignore") as f: + - f.write(foobar) +29 + pathlib.Path("file.txt").write_text(foobar, errors="ignore") +30 | +31 | # FURB103 +32 | with open("file.txt", mode="w") as f: -FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(foobar)` +FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar)` --> FURB103.py:32:6 | 31 | # FURB103 @@ -60,6 +130,20 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(fo 33 | f.write(foobar) | help: Replace with `Path("file.txt").write_text(foobar)` +1 + import pathlib +2 | def foo(): +3 | ... 
+4 | +-------------------------------------------------------------------------------- +30 | f.write(foobar) +31 | +32 | # FURB103 + - with open("file.txt", mode="w") as f: + - f.write(foobar) +33 + pathlib.Path("file.txt").write_text(foobar) +34 | +35 | # FURB103 +36 | with open(foo(), "wb") as f: FURB103 `open` and `write` should be replaced by `Path(foo()).write_bytes(bar())` --> FURB103.py:36:6 @@ -104,3 +188,24 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(ba 51 | # writes a single time to file and that bit they can replace. | help: Replace with `Path("file.txt").write_text(bar(bar(a + x)))` + +FURB103 [*] `open` and `write` should be replaced by `Path("test.json")....` + --> FURB103.py:154:6 + | +152 | data = {"price": 100} +153 | +154 | with open("test.json", "wb") as f: + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +155 | f.write(json.dumps(data, indent=4).encode("utf-8")) + | +help: Replace with `Path("test.json")....` +148 | +149 | # See: https://github.com/astral-sh/ruff/issues/20785 +150 | import json +151 + import pathlib +152 | +153 | data = {"price": 100} +154 | + - with open("test.json", "wb") as f: + - f.write(json.dumps(data, indent=4).encode("utf-8")) +155 + pathlib.Path("test.json").write_bytes(json.dumps(data, indent=4).encode("utf-8")) diff --git a/crates/ruff_linter/src/rules/ruff/mod.rs b/crates/ruff_linter/src/rules/ruff/mod.rs index 7cdc557841..b07107587c 100644 --- a/crates/ruff_linter/src/rules/ruff/mod.rs +++ b/crates/ruff_linter/src/rules/ruff/mod.rs @@ -112,7 +112,8 @@ mod tests { #[test_case(Rule::LegacyFormPytestRaises, Path::new("RUF061_warns.py"))] #[test_case(Rule::LegacyFormPytestRaises, Path::new("RUF061_deprecated_call.py"))] #[test_case(Rule::NonOctalPermissions, Path::new("RUF064.py"))] - #[test_case(Rule::LoggingEagerConversion, Path::new("RUF065.py"))] + #[test_case(Rule::LoggingEagerConversion, Path::new("RUF065_0.py"))] + #[test_case(Rule::LoggingEagerConversion, Path::new("RUF065_1.py"))] 
#[test_case(Rule::RedirectedNOQA, Path::new("RUF101_0.py"))] #[test_case(Rule::RedirectedNOQA, Path::new("RUF101_1.py"))] #[test_case(Rule::InvalidRuleCode, Path::new("RUF102.py"))] diff --git a/crates/ruff_linter/src/rules/ruff/rules/logging_eager_conversion.rs b/crates/ruff_linter/src/rules/ruff/rules/logging_eager_conversion.rs index 2b09c8a1e0..32c4755229 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/logging_eager_conversion.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/logging_eager_conversion.rs @@ -138,7 +138,12 @@ pub(crate) fn logging_eager_conversion(checker: &Checker, call: &ast::ExprCall) .zip(call.arguments.args.iter().skip(msg_pos + 1)) { // Check if the argument is a call to eagerly format a value - if let Expr::Call(ast::ExprCall { func, .. }) = arg { + if let Expr::Call(ast::ExprCall { + func, + arguments: str_call_args, + .. + }) = arg + { let CFormatType::String(format_conversion) = spec.format_type else { continue; }; @@ -146,8 +151,13 @@ pub(crate) fn logging_eager_conversion(checker: &Checker, call: &ast::ExprCall) // Check for various eager conversion patterns match format_conversion { // %s with str() - remove str() call + // Only flag if str() has exactly one argument (positional or keyword) that is not unpacked FormatConversion::Str - if checker.semantic().match_builtin_expr(func.as_ref(), "str") => + if checker.semantic().match_builtin_expr(func.as_ref(), "str") + && str_call_args.len() == 1 + && str_call_args + .find_argument("object", 0) + .is_some_and(|arg| !arg.is_variadic()) => { checker.report_diagnostic( LoggingEagerConversion { diff --git a/crates/ruff_linter/src/rules/ruff/rules/unnecessary_round.rs b/crates/ruff_linter/src/rules/ruff/rules/unnecessary_round.rs index c7fe4687e8..e2ab51e1db 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/unnecessary_round.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/unnecessary_round.rs @@ -143,6 +143,15 @@ pub(super) fn rounded_and_ndigits<'a>( return None; } + // *args + if 
arguments.args.iter().any(Expr::is_starred_expr) { + return None; + } + // **kwargs + if arguments.keywords.iter().any(|kw| kw.arg.is_none()) { + return None; + } + let rounded = arguments.find_argument_value("number", 0)?; let ndigits = arguments.find_argument_value("ndigits", 1); diff --git a/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF057_RUF057.py.snap b/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF057_RUF057.py.snap index abace6c8b4..8c536b67a8 100644 --- a/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF057_RUF057.py.snap +++ b/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF057_RUF057.py.snap @@ -253,6 +253,8 @@ RUF057 [*] Value being rounded is already an integer 82 | | 17 # a comment 83 | | ) | |_^ +84 | +85 | # See: https://github.com/astral-sh/ruff/issues/21209 | help: Remove unnecessary `round` call 78 | round(# a comment @@ -262,4 +264,7 @@ help: Remove unnecessary `round` call - 17 # a comment - ) 81 + 17 +82 | +83 | # See: https://github.com/astral-sh/ruff/issues/21209 +84 | print(round(125, **{"ndigits": -2})) note: This is an unsafe fix and may change runtime behavior diff --git a/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF065_RUF065.py.snap b/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF065_RUF065_0.py.snap similarity index 91% rename from crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF065_RUF065.py.snap rename to crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF065_RUF065_0.py.snap index 9f96c36307..9ac438216a 100644 --- a/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF065_RUF065.py.snap +++ b/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF065_RUF065_0.py.snap @@ -2,7 +2,7 @@ source: 
crates/ruff_linter/src/rules/ruff/mod.rs --- RUF065 Unnecessary `str()` conversion when formatting with `%s` - --> RUF065.py:4:26 + --> RUF065_0.py:4:26 | 3 | # %s + str() 4 | logging.info("Hello %s", str("World!")) @@ -11,7 +11,7 @@ RUF065 Unnecessary `str()` conversion when formatting with `%s` | RUF065 Unnecessary `str()` conversion when formatting with `%s` - --> RUF065.py:5:39 + --> RUF065_0.py:5:39 | 3 | # %s + str() 4 | logging.info("Hello %s", str("World!")) @@ -22,7 +22,7 @@ RUF065 Unnecessary `str()` conversion when formatting with `%s` | RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` instead of `%s` - --> RUF065.py:8:26 + --> RUF065_0.py:8:26 | 7 | # %s + repr() 8 | logging.info("Hello %s", repr("World!")) @@ -31,7 +31,7 @@ RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` inste | RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` instead of `%s` - --> RUF065.py:9:39 + --> RUF065_0.py:9:39 | 7 | # %s + repr() 8 | logging.info("Hello %s", repr("World!")) @@ -42,7 +42,7 @@ RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` inste | RUF065 Unnecessary `str()` conversion when formatting with `%s` - --> RUF065.py:22:18 + --> RUF065_0.py:22:18 | 21 | # %s + str() 22 | info("Hello %s", str("World!")) @@ -51,7 +51,7 @@ RUF065 Unnecessary `str()` conversion when formatting with `%s` | RUF065 Unnecessary `str()` conversion when formatting with `%s` - --> RUF065.py:23:31 + --> RUF065_0.py:23:31 | 21 | # %s + str() 22 | info("Hello %s", str("World!")) @@ -62,7 +62,7 @@ RUF065 Unnecessary `str()` conversion when formatting with `%s` | RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` instead of `%s` - --> RUF065.py:26:18 + --> RUF065_0.py:26:18 | 25 | # %s + repr() 26 | info("Hello %s", repr("World!")) @@ -71,7 +71,7 @@ RUF065 Unnecessary `repr()` conversion when formatting with `%s`. 
Use `%r` inste | RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` instead of `%s` - --> RUF065.py:27:31 + --> RUF065_0.py:27:31 | 25 | # %s + repr() 26 | info("Hello %s", repr("World!")) @@ -82,7 +82,7 @@ RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` inste | RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` instead of `%s` - --> RUF065.py:44:32 + --> RUF065_0.py:44:32 | 42 | logging.warning("Value: %r", repr(42)) 43 | logging.error("Error: %r", repr([1, 2, 3])) @@ -92,7 +92,7 @@ RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` inste | RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` instead of `%s` - --> RUF065.py:45:30 + --> RUF065_0.py:45:30 | 43 | logging.error("Error: %r", repr([1, 2, 3])) 44 | logging.info("Debug info: %s", repr("test\nstring")) @@ -103,7 +103,7 @@ RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` inste | RUF065 Unnecessary `ascii()` conversion when formatting with `%s`. Use `%a` instead of `%s` - --> RUF065.py:48:27 + --> RUF065_0.py:48:27 | 47 | # %s + ascii() 48 | logging.info("ASCII: %s", ascii("Hello\nWorld")) @@ -112,7 +112,7 @@ RUF065 Unnecessary `ascii()` conversion when formatting with `%s`. Use `%a` inst | RUF065 Unnecessary `ascii()` conversion when formatting with `%s`. Use `%a` instead of `%s` - --> RUF065.py:49:30 + --> RUF065_0.py:49:30 | 47 | # %s + ascii() 48 | logging.info("ASCII: %s", ascii("Hello\nWorld")) @@ -123,7 +123,7 @@ RUF065 Unnecessary `ascii()` conversion when formatting with `%s`. Use `%a` inst | RUF065 Unnecessary `oct()` conversion when formatting with `%s`. Use `%#o` instead of `%s` - --> RUF065.py:52:27 + --> RUF065_0.py:52:27 | 51 | # %s + oct() 52 | logging.info("Octal: %s", oct(42)) @@ -132,7 +132,7 @@ RUF065 Unnecessary `oct()` conversion when formatting with `%s`. Use `%#o` inste | RUF065 Unnecessary `oct()` conversion when formatting with `%s`. 
Use `%#o` instead of `%s` - --> RUF065.py:53:30 + --> RUF065_0.py:53:30 | 51 | # %s + oct() 52 | logging.info("Octal: %s", oct(42)) @@ -143,7 +143,7 @@ RUF065 Unnecessary `oct()` conversion when formatting with `%s`. Use `%#o` inste | RUF065 Unnecessary `hex()` conversion when formatting with `%s`. Use `%#x` instead of `%s` - --> RUF065.py:56:25 + --> RUF065_0.py:56:25 | 55 | # %s + hex() 56 | logging.info("Hex: %s", hex(42)) @@ -152,7 +152,7 @@ RUF065 Unnecessary `hex()` conversion when formatting with `%s`. Use `%#x` inste | RUF065 Unnecessary `hex()` conversion when formatting with `%s`. Use `%#x` instead of `%s` - --> RUF065.py:57:28 + --> RUF065_0.py:57:28 | 55 | # %s + hex() 56 | logging.info("Hex: %s", hex(42)) @@ -161,7 +161,7 @@ RUF065 Unnecessary `hex()` conversion when formatting with `%s`. Use `%#x` inste | RUF065 Unnecessary `ascii()` conversion when formatting with `%s`. Use `%a` instead of `%s` - --> RUF065.py:63:19 + --> RUF065_0.py:63:19 | 61 | from logging import info, log 62 | @@ -171,7 +171,7 @@ RUF065 Unnecessary `ascii()` conversion when formatting with `%s`. Use `%a` inst | RUF065 Unnecessary `ascii()` conversion when formatting with `%s`. Use `%a` instead of `%s` - --> RUF065.py:64:32 + --> RUF065_0.py:64:32 | 63 | info("ASCII: %s", ascii("Hello\nWorld")) 64 | log(logging.INFO, "ASCII: %s", ascii("test")) @@ -181,7 +181,7 @@ RUF065 Unnecessary `ascii()` conversion when formatting with `%s`. Use `%a` inst | RUF065 Unnecessary `oct()` conversion when formatting with `%s`. Use `%#o` instead of `%s` - --> RUF065.py:66:19 + --> RUF065_0.py:66:19 | 64 | log(logging.INFO, "ASCII: %s", ascii("test")) 65 | @@ -191,7 +191,7 @@ RUF065 Unnecessary `oct()` conversion when formatting with `%s`. Use `%#o` inste | RUF065 Unnecessary `oct()` conversion when formatting with `%s`. 
Use `%#o` instead of `%s` - --> RUF065.py:67:32 + --> RUF065_0.py:67:32 | 66 | info("Octal: %s", oct(42)) 67 | log(logging.INFO, "Octal: %s", oct(255)) @@ -201,7 +201,7 @@ RUF065 Unnecessary `oct()` conversion when formatting with `%s`. Use `%#o` inste | RUF065 Unnecessary `hex()` conversion when formatting with `%s`. Use `%#x` instead of `%s` - --> RUF065.py:69:17 + --> RUF065_0.py:69:17 | 67 | log(logging.INFO, "Octal: %s", oct(255)) 68 | @@ -211,7 +211,7 @@ RUF065 Unnecessary `hex()` conversion when formatting with `%s`. Use `%#x` inste | RUF065 Unnecessary `hex()` conversion when formatting with `%s`. Use `%#x` instead of `%s` - --> RUF065.py:70:30 + --> RUF065_0.py:70:30 | 69 | info("Hex: %s", hex(42)) 70 | log(logging.INFO, "Hex: %s", hex(255)) diff --git a/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF065_RUF065_1.py.snap b/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF065_RUF065_1.py.snap new file mode 100644 index 0000000000..56fa9ec243 --- /dev/null +++ b/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF065_RUF065_1.py.snap @@ -0,0 +1,10 @@ +--- +source: crates/ruff_linter/src/rules/ruff/mod.rs +--- +RUF065 Unnecessary `str()` conversion when formatting with `%s` + --> RUF065_1.py:17:23 + | +16 | # str() with single keyword argument - should be flagged (equivalent to str("!")) +17 | logging.warning("%s", str(object="!")) + | ^^^^^^^^^^^^^^^ + | diff --git a/crates/ruff_notebook/src/index.rs b/crates/ruff_notebook/src/index.rs index eff605aa6d..951914e74a 100644 --- a/crates/ruff_notebook/src/index.rs +++ b/crates/ruff_notebook/src/index.rs @@ -8,37 +8,40 @@ use ruff_source_file::{LineColumn, OneIndexed, SourceLocation}; /// [`ruff_text_size::TextSize`] to jupyter notebook cell/row/column. 
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] pub struct NotebookIndex { - /// Enter a row (1-based), get back the cell (1-based) - pub(super) row_to_cell: Vec, - /// Enter a row (1-based), get back the row in cell (1-based) - pub(super) row_to_row_in_cell: Vec, + /// Stores the starting row and the absolute cell index for every Python (valid) cell. + /// + /// The index in this vector corresponds to the Python cell index (valid cell index). + pub(super) cell_starts: Vec, } impl NotebookIndex { - pub fn new(row_to_cell: Vec, row_to_row_in_cell: Vec) -> Self { - Self { - row_to_cell, - row_to_row_in_cell, + fn find_cell(&self, row: OneIndexed) -> Option { + match self + .cell_starts + .binary_search_by_key(&row, |start| start.start_row) + { + Ok(cell_index) => Some(self.cell_starts[cell_index]), + Err(insertion_point) => Some(self.cell_starts[insertion_point.checked_sub(1)?]), } } - /// Returns the cell number (1-based) for the given row (1-based). + /// Returns the (raw) cell number (1-based) for the given row (1-based). pub fn cell(&self, row: OneIndexed) -> Option { - self.row_to_cell.get(row.to_zero_indexed()).copied() + self.find_cell(row).map(|start| start.raw_cell_index) } /// Returns the row number (1-based) in the cell (1-based) for the /// given row (1-based). pub fn cell_row(&self, row: OneIndexed) -> Option { - self.row_to_row_in_cell.get(row.to_zero_indexed()).copied() + self.find_cell(row) + .map(|start| OneIndexed::from_zero_indexed(row.get() - start.start_row.get())) } - /// Returns an iterator over the row:cell-number pairs (both 1-based). - pub fn iter(&self) -> impl Iterator { - self.row_to_cell - .iter() - .enumerate() - .map(|(row, cell)| (OneIndexed::from_zero_indexed(row), *cell)) + /// Returns an iterator over the starting rows of each cell (1-based). + /// + /// This yields one entry per Python cell (skipping over Markdown cells).
+ pub fn iter(&self) -> impl Iterator + '_ { + self.cell_starts.iter().copied() } /// Translates the given [`LineColumn`] based on the indexing table. @@ -67,3 +70,23 @@ impl NotebookIndex { } } } + +#[derive(Debug, Copy, Clone, Eq, PartialEq, Serialize, Deserialize)] +pub struct CellStart { + /// The row in the concatenated notebook source code at which + /// this cell starts. + pub(super) start_row: OneIndexed, + + /// The absolute index of this cell in the notebook. + pub(super) raw_cell_index: OneIndexed, +} + +impl CellStart { + pub fn start_row(&self) -> OneIndexed { + self.start_row + } + + pub fn cell_index(&self) -> OneIndexed { + self.raw_cell_index + } +} diff --git a/crates/ruff_notebook/src/notebook.rs b/crates/ruff_notebook/src/notebook.rs index 124202e927..4dc01971fc 100644 --- a/crates/ruff_notebook/src/notebook.rs +++ b/crates/ruff_notebook/src/notebook.rs @@ -18,7 +18,7 @@ use ruff_text_size::TextSize; use crate::cell::CellOffsets; use crate::index::NotebookIndex; use crate::schema::{Cell, RawNotebook, SortAlphabetically, SourceValue}; -use crate::{CellMetadata, RawNotebookMetadata, schema}; +use crate::{CellMetadata, CellStart, RawNotebookMetadata, schema}; /// Run round-trip source code generation on a given Jupyter notebook file path. pub fn round_trip(path: &Path) -> anyhow::Result { @@ -320,11 +320,19 @@ impl Notebook { /// The index building is expensive as it needs to go through the content of /// every valid code cell. 
fn build_index(&self) -> NotebookIndex { - let mut row_to_cell = Vec::new(); - let mut row_to_row_in_cell = Vec::new(); + let mut cell_starts = Vec::with_capacity(self.valid_code_cells.len()); + + let mut current_row = OneIndexed::MIN; for &cell_index in &self.valid_code_cells { - let line_count = match &self.raw.cells[cell_index as usize].source() { + let raw_cell_index = cell_index as usize; + // Record the starting row of this cell + cell_starts.push(CellStart { + start_row: current_row, + raw_cell_index: OneIndexed::from_zero_indexed(raw_cell_index), + }); + + let line_count = match &self.raw.cells[raw_cell_index].source() { SourceValue::String(string) => { if string.is_empty() { 1 @@ -342,17 +350,11 @@ impl Notebook { } } }; - row_to_cell.extend(std::iter::repeat_n( - OneIndexed::from_zero_indexed(cell_index as usize), - line_count, - )); - row_to_row_in_cell.extend((0..line_count).map(OneIndexed::from_zero_indexed)); + + current_row = current_row.saturating_add(line_count); } - NotebookIndex { - row_to_cell, - row_to_row_in_cell, - } + NotebookIndex { cell_starts } } /// Return the notebook content. @@ -456,7 +458,7 @@ mod tests { use ruff_source_file::OneIndexed; - use crate::{Cell, Notebook, NotebookError, NotebookIndex}; + use crate::{Cell, CellStart, Notebook, NotebookError, NotebookIndex}; /// Construct a path to a Jupyter notebook in the `resources/test/fixtures/jupyter` directory. 
fn notebook_path(path: impl AsRef) -> std::path::PathBuf { @@ -548,39 +550,27 @@ print("after empty cells") assert_eq!( notebook.index(), &NotebookIndex { - row_to_cell: vec![ - OneIndexed::from_zero_indexed(0), - OneIndexed::from_zero_indexed(0), - OneIndexed::from_zero_indexed(0), - OneIndexed::from_zero_indexed(0), - OneIndexed::from_zero_indexed(0), - OneIndexed::from_zero_indexed(0), - OneIndexed::from_zero_indexed(2), - OneIndexed::from_zero_indexed(2), - OneIndexed::from_zero_indexed(2), - OneIndexed::from_zero_indexed(2), - OneIndexed::from_zero_indexed(2), - OneIndexed::from_zero_indexed(4), - OneIndexed::from_zero_indexed(6), - OneIndexed::from_zero_indexed(6), - OneIndexed::from_zero_indexed(7) - ], - row_to_row_in_cell: vec![ - OneIndexed::from_zero_indexed(0), - OneIndexed::from_zero_indexed(1), - OneIndexed::from_zero_indexed(2), - OneIndexed::from_zero_indexed(3), - OneIndexed::from_zero_indexed(4), - OneIndexed::from_zero_indexed(5), - OneIndexed::from_zero_indexed(0), - OneIndexed::from_zero_indexed(1), - OneIndexed::from_zero_indexed(2), - OneIndexed::from_zero_indexed(3), - OneIndexed::from_zero_indexed(4), - OneIndexed::from_zero_indexed(0), - OneIndexed::from_zero_indexed(0), - OneIndexed::from_zero_indexed(1), - OneIndexed::from_zero_indexed(0) + cell_starts: vec![ + CellStart { + start_row: OneIndexed::MIN, + raw_cell_index: OneIndexed::MIN + }, + CellStart { + start_row: OneIndexed::from_zero_indexed(6), + raw_cell_index: OneIndexed::from_zero_indexed(2) + }, + CellStart { + start_row: OneIndexed::from_zero_indexed(11), + raw_cell_index: OneIndexed::from_zero_indexed(4) + }, + CellStart { + start_row: OneIndexed::from_zero_indexed(12), + raw_cell_index: OneIndexed::from_zero_indexed(6) + }, + CellStart { + start_row: OneIndexed::from_zero_indexed(14), + raw_cell_index: OneIndexed::from_zero_indexed(7) + } ], } ); diff --git a/crates/ruff_python_ast/src/helpers.rs b/crates/ruff_python_ast/src/helpers.rs index 9d5159a829..66ad66d9b1 100644 --- 
a/crates/ruff_python_ast/src/helpers.rs +++ b/crates/ruff_python_ast/src/helpers.rs @@ -1318,9 +1318,19 @@ impl Truthiness { if arguments.is_empty() { // Ex) `list()` Self::Falsey - } else if arguments.args.len() == 1 && arguments.keywords.is_empty() { + } else if let [argument] = &*arguments.args + && arguments.keywords.is_empty() + { // Ex) `list([1, 2, 3])` - Self::from_expr(&arguments.args[0], is_builtin) + // For tuple(generator), we can't determine statically if the result will + // be empty or not, so return Unknown. The generator itself is truthy, but + // tuple(empty_generator) is falsy. ListComp and SetComp are handled by + // recursing into Self::from_expr below, which returns Unknown for them. + if argument.is_generator_expr() { + Self::Unknown + } else { + Self::from_expr(argument, is_builtin) + } } else { Self::Unknown } diff --git a/crates/ruff_python_ast/src/nodes.rs b/crates/ruff_python_ast/src/nodes.rs index f71f420d09..b57b86aaf2 100644 --- a/crates/ruff_python_ast/src/nodes.rs +++ b/crates/ruff_python_ast/src/nodes.rs @@ -3269,6 +3269,13 @@ impl<'a> ArgOrKeyword<'a> { ArgOrKeyword::Keyword(keyword) => &keyword.value, } } + + pub const fn is_variadic(self) -> bool { + match self { + ArgOrKeyword::Arg(expr) => expr.is_starred_expr(), + ArgOrKeyword::Keyword(keyword) => keyword.arg.is_none(), + } + } } impl<'a> From<&'a Expr> for ArgOrKeyword<'a> { @@ -3372,7 +3379,7 @@ impl Arguments { pub fn arguments_source_order(&self) -> impl Iterator> { let args = self.args.iter().map(ArgOrKeyword::Arg); let keywords = self.keywords.iter().map(ArgOrKeyword::Keyword); - args.merge_by(keywords, |left, right| left.start() < right.start()) + args.merge_by(keywords, |left, right| left.start() <= right.start()) } pub fn inner_range(&self) -> TextRange { diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/newlines.py b/crates/ruff_python_formatter/resources/test/fixtures/ruff/newlines.py index 2afbd18229..18c810ead8 100644 --- 
a/crates/ruff_python_formatter/resources/test/fixtures/ruff/newlines.py +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/newlines.py @@ -335,3 +335,96 @@ def overload4(): # trailing comment def overload4(a: int): ... + + +# In preview, we preserve these newlines at the start of functions: +def preserved1(): + + return 1 + +def preserved2(): + + pass + +def preserved3(): + + def inner(): ... + +def preserved4(): + + def inner(): + print("with a body") + return 1 + + return 2 + +def preserved5(): + + ... + # trailing comment prevents collapsing the stub + + +def preserved6(): + + # Comment + + return 1 + + +def preserved7(): + + # comment + # another line + # and a third + + return 0 + + +def preserved8(): # this also prevents collapsing the stub + + ... + + +# But we still discard these newlines: +def removed1(): + + "Docstring" + + return 1 + + +def removed2(): + + ... + + +def removed3(): + + ... # trailing same-line comment does not prevent collapsing the stub + + +# And we discard empty lines after the first: +def partially_preserved1(): + + + return 1 + + +# We only preserve blank lines, not add new ones +def untouched1(): + # comment + + return 0 + + +def untouched2(): + # comment + return 0 + + +def untouched3(): + # comment + # another line + # and a third + + return 0 diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/range_formatting/indent.py b/crates/ruff_python_formatter/resources/test/fixtures/ruff/range_formatting/indent.py index 1fb1522aa0..e10ffe55ee 100644 --- a/crates/ruff_python_formatter/resources/test/fixtures/ruff/range_formatting/indent.py +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/range_formatting/indent.py @@ -61,3 +61,9 @@ def test6 (): print("Format" ) print(3 + 4) print("Format to fix indentation" ) + + +def test7 (): + print("Format" ) + print(3 + 4) + print("Format to fix indentation" ) diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/match.py 
b/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/match.py index 4067d508c0..4a403b1541 100644 --- a/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/match.py +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/match.py @@ -613,3 +613,58 @@ match guard_comments: ): pass + +# regression tests from https://github.com/astral-sh/ruff/issues/17796 +match class_pattern: + case Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture: + pass + case Class( + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + ) as capture: + pass + case Class( + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + ) as capture: + pass + case Class( + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + ) as really_really_really_really_really_really_really_really_really_really_really_really_long_capture: + pass + +match sequence_pattern_brackets: + case [xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx] as capture: + pass + case [ + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + ] as capture: + pass + case [ + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + ] as capture: + pass + + +match class_pattern: + # 1 + case Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture: # 2 + # 3 + pass # 4 + # 5 + case Class( # 6 + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 7 + ) as capture: # 8 + pass + case Class( # 9 + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 10 + ) as capture: # 11 + pass + case Class( # 12 + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 13 + ) as really_really_really_really_really_really_really_really_really_really_really_really_long_capture: # 14 + pass + case Class( # 0 + # 1 + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 2 + # 3 + ) as capture: + pass diff --git 
a/crates/ruff_python_formatter/src/lib.rs b/crates/ruff_python_formatter/src/lib.rs index e6b2f9e7b8..bf68598e13 100644 --- a/crates/ruff_python_formatter/src/lib.rs +++ b/crates/ruff_python_formatter/src/lib.rs @@ -334,7 +334,7 @@ class A: ... let options = PyFormatOptions::from_source_type(source_type); let printed = format_range(&source, TextRange::new(start, end), options).unwrap(); - let mut formatted = source.to_string(); + let mut formatted = source.clone(); formatted.replace_range( std::ops::Range::::from(printed.source_range()), printed.as_code(), diff --git a/crates/ruff_python_formatter/src/pattern/mod.rs b/crates/ruff_python_formatter/src/pattern/mod.rs index e255d59359..a379aeb849 100644 --- a/crates/ruff_python_formatter/src/pattern/mod.rs +++ b/crates/ruff_python_formatter/src/pattern/mod.rs @@ -1,5 +1,5 @@ use ruff_formatter::{FormatOwnedWithRule, FormatRefWithRule, FormatRule, FormatRuleWithOptions}; -use ruff_python_ast::{AnyNodeRef, Expr}; +use ruff_python_ast::{AnyNodeRef, Expr, PatternMatchAs}; use ruff_python_ast::{MatchCase, Pattern}; use ruff_python_trivia::CommentRanges; use ruff_python_trivia::{ @@ -14,6 +14,7 @@ use crate::expression::parentheses::{ NeedsParentheses, OptionalParentheses, Parentheses, optional_parentheses, parenthesized, }; use crate::prelude::*; +use crate::preview::is_avoid_parens_for_long_as_captures_enabled; pub(crate) mod pattern_arguments; pub(crate) mod pattern_keyword; @@ -242,8 +243,14 @@ pub(crate) fn can_pattern_omit_optional_parentheses( Pattern::MatchValue(_) | Pattern::MatchSingleton(_) | Pattern::MatchStar(_) - | Pattern::MatchAs(_) | Pattern::MatchOr(_) => false, + Pattern::MatchAs(PatternMatchAs { pattern, .. 
}) => match pattern { + Some(pattern) => { + is_avoid_parens_for_long_as_captures_enabled(context) + && has_parentheses_and_is_non_empty(pattern, context) + } + None => false, + }, Pattern::MatchSequence(sequence) => { !sequence.patterns.is_empty() || context.comments().has_dangling(pattern) } @@ -299,7 +306,7 @@ impl<'a> CanOmitOptionalParenthesesVisitor<'a> { } // `case 4+3j:` or `case 4-3j: - // Can not contain arbitrary expressions. Limited to complex numbers. + // Cannot contain arbitrary expressions. Limited to complex numbers. Expr::BinOp(_) => { self.update_max_precedence(OperatorPrecedence::Additive, 1); } @@ -318,7 +325,14 @@ impl<'a> CanOmitOptionalParenthesesVisitor<'a> { // The pattern doesn't start with a parentheses pattern, but with the class's identifier. self.first.set_if_none(First::Token); } - Pattern::MatchStar(_) | Pattern::MatchSingleton(_) | Pattern::MatchAs(_) => {} + Pattern::MatchAs(PatternMatchAs { pattern, .. }) => { + if let Some(pattern) = pattern + && is_avoid_parens_for_long_as_captures_enabled(context) + { + self.visit_sub_pattern(pattern, context); + } + } + Pattern::MatchStar(_) | Pattern::MatchSingleton(_) => {} Pattern::MatchOr(or_pattern) => { self.update_max_precedence( OperatorPrecedence::Or, diff --git a/crates/ruff_python_formatter/src/preview.rs b/crates/ruff_python_formatter/src/preview.rs index b6479ab1b4..9d307390d6 100644 --- a/crates/ruff_python_formatter/src/preview.rs +++ b/crates/ruff_python_formatter/src/preview.rs @@ -36,3 +36,19 @@ pub(crate) const fn is_remove_parens_around_except_types_enabled( ) -> bool { context.is_preview() } + +/// Returns `true` if the +/// [`allow_newline_after_block_open`](https://github.com/astral-sh/ruff/pull/21110) preview style +/// is enabled. 
+pub(crate) const fn is_allow_newline_after_block_open_enabled(context: &PyFormatContext) -> bool { + context.is_preview() +} + +/// Returns `true` if the +/// [`avoid_parens_for_long_as_captures`](https://github.com/astral-sh/ruff/pull/21176) preview +/// style is enabled. +pub(crate) const fn is_avoid_parens_for_long_as_captures_enabled( + context: &PyFormatContext, +) -> bool { + context.is_preview() +} diff --git a/crates/ruff_python_formatter/src/statement/clause.rs b/crates/ruff_python_formatter/src/statement/clause.rs index a5c172f4f8..1554c30d0f 100644 --- a/crates/ruff_python_formatter/src/statement/clause.rs +++ b/crates/ruff_python_formatter/src/statement/clause.rs @@ -8,7 +8,7 @@ use ruff_python_trivia::{SimpleToken, SimpleTokenKind, SimpleTokenizer}; use ruff_text_size::{Ranged, TextRange, TextSize}; use crate::comments::{SourceComment, leading_alternate_branch_comments, trailing_comments}; -use crate::statement::suite::{SuiteKind, contains_only_an_ellipsis}; +use crate::statement::suite::{SuiteKind, as_only_an_ellipsis}; use crate::verbatim::write_suppressed_clause_header; use crate::{has_skip_comment, prelude::*}; @@ -449,17 +449,10 @@ impl Format> for FormatClauseBody<'_> { || matches!(self.kind, SuiteKind::Function | SuiteKind::Class); if should_collapse_stub - && contains_only_an_ellipsis(self.body, f.context().comments()) + && let Some(ellipsis) = as_only_an_ellipsis(self.body, f.context().comments()) && self.trailing_comments.is_empty() { - write!( - f, - [ - space(), - self.body.format().with_options(self.kind), - hard_line_break() - ] - ) + write!(f, [space(), ellipsis.format(), hard_line_break()]) } else { write!( f, diff --git a/crates/ruff_python_formatter/src/statement/suite.rs b/crates/ruff_python_formatter/src/statement/suite.rs index 4071b4ba1f..9ed32beb76 100644 --- a/crates/ruff_python_formatter/src/statement/suite.rs +++ b/crates/ruff_python_formatter/src/statement/suite.rs @@ -13,7 +13,9 @@ use crate::comments::{ use 
crate::context::{NodeLevel, TopLevelStatementPosition, WithIndentLevel, WithNodeLevel}; use crate::other::string_literal::StringLiteralKind; use crate::prelude::*; -use crate::preview::is_blank_line_before_decorated_class_in_stub_enabled; +use crate::preview::{ + is_allow_newline_after_block_open_enabled, is_blank_line_before_decorated_class_in_stub_enabled, +}; use crate::statement::stmt_expr::FormatStmtExpr; use crate::verbatim::{ suppressed_node, write_suppressed_statements_starting_with_leading_comment, @@ -169,6 +171,22 @@ impl FormatRule> for FormatSuite { false, ) } else { + // Allow an empty line after a function header in preview, if the function has no + // docstring and no initial comment. + let allow_newline_after_block_open = + is_allow_newline_after_block_open_enabled(f.context()) + && matches!(self.kind, SuiteKind::Function) + && matches!(first, SuiteChildStatement::Other(_)); + + let start = comments + .leading(first) + .first() + .map_or_else(|| first.start(), Ranged::start); + + if allow_newline_after_block_open && lines_before(start, f.context().source()) > 1 { + empty_line().fmt(f)?; + } + first.fmt(f)?; let empty_line_after_docstring = if matches!(first, SuiteChildStatement::Docstring(_)) @@ -218,7 +236,7 @@ impl FormatRule> for FormatSuite { )?; } else { // Preserve empty lines after a stub implementation but don't insert a new one if there isn't any present in the source. - // This is useful when having multiple function overloads that should be grouped to getter by omitting new lines between them. + // This is useful when having multiple function overloads that should be grouped together by omitting new lines between them. let is_preceding_stub_function_without_empty_line = following .is_function_def_stmt() && preceding @@ -728,17 +746,21 @@ fn stub_suite_can_omit_empty_line(preceding: &Stmt, following: &Stmt, f: &PyForm /// Returns `true` if a function or class body contains only an ellipsis with no comments. 
pub(crate) fn contains_only_an_ellipsis(body: &[Stmt], comments: &Comments) -> bool { - match body { - [Stmt::Expr(ast::StmtExpr { value, .. })] => { - let [node] = body else { - return false; - }; - value.is_ellipsis_literal_expr() - && !comments.has_leading(node) - && !comments.has_trailing_own_line(node) - } - _ => false, + as_only_an_ellipsis(body, comments).is_some() +} + +/// Returns `Some(Stmt::Ellipsis)` if a function or class body contains only an ellipsis with no +/// comments. +pub(crate) fn as_only_an_ellipsis<'a>(body: &'a [Stmt], comments: &Comments) -> Option<&'a Stmt> { + if let [node @ Stmt::Expr(ast::StmtExpr { value, .. })] = body + && value.is_ellipsis_literal_expr() + && !comments.has_leading(node) + && !comments.has_trailing_own_line(node) + { + return Some(node); } + + None } /// Returns `true` if a [`Stmt`] is a class or function definition. diff --git a/crates/ruff_python_formatter/tests/snapshots/format@newlines.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@newlines.py.snap index 84bd4283c4..260de915fc 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@newlines.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@newlines.py.snap @@ -1,7 +1,6 @@ --- source: crates/ruff_python_formatter/tests/fixtures.rs input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/newlines.py -snapshot_kind: text --- ## Input ```python @@ -342,6 +341,99 @@ def overload4(): # trailing comment def overload4(a: int): ... + + +# In preview, we preserve these newlines at the start of functions: +def preserved1(): + + return 1 + +def preserved2(): + + pass + +def preserved3(): + + def inner(): ... + +def preserved4(): + + def inner(): + print("with a body") + return 1 + + return 2 + +def preserved5(): + + ... 
+ # trailing comment prevents collapsing the stub + + +def preserved6(): + + # Comment + + return 1 + + +def preserved7(): + + # comment + # another line + # and a third + + return 0 + + +def preserved8(): # this also prevents collapsing the stub + + ... + + +# But we still discard these newlines: +def removed1(): + + "Docstring" + + return 1 + + +def removed2(): + + ... + + +def removed3(): + + ... # trailing same-line comment does not prevent collapsing the stub + + +# And we discard empty lines after the first: +def partially_preserved1(): + + + return 1 + + +# We only preserve blank lines, not add new ones +def untouched1(): + # comment + + return 0 + + +def untouched2(): + # comment + return 0 + + +def untouched3(): + # comment + # another line + # and a third + + return 0 ``` ## Output @@ -732,6 +824,88 @@ def overload4(): def overload4(a: int): ... + + +# In preview, we preserve these newlines at the start of functions: +def preserved1(): + return 1 + + +def preserved2(): + pass + + +def preserved3(): + def inner(): ... + + +def preserved4(): + def inner(): + print("with a body") + return 1 + + return 2 + + +def preserved5(): + ... + # trailing comment prevents collapsing the stub + + +def preserved6(): + # Comment + + return 1 + + +def preserved7(): + # comment + # another line + # and a third + + return 0 + + +def preserved8(): # this also prevents collapsing the stub + ... + + +# But we still discard these newlines: +def removed1(): + "Docstring" + + return 1 + + +def removed2(): ... + + +def removed3(): ... # trailing same-line comment does not prevent collapsing the stub + + +# And we discard empty lines after the first: +def partially_preserved1(): + return 1 + + +# We only preserve blank lines, not add new ones +def untouched1(): + # comment + + return 0 + + +def untouched2(): + # comment + return 0 + + +def untouched3(): + # comment + # another line + # and a third + + return 0 ``` @@ -739,7 +913,15 @@ def overload4(a: int): ... 
```diff --- Stable +++ Preview -@@ -277,6 +277,7 @@ +@@ -253,6 +253,7 @@ + + + def fakehttp(): ++ + class FakeHTTPConnection: + if mock_close: + +@@ -277,6 +278,7 @@ def a(): return 1 @@ -747,7 +929,7 @@ def overload4(a: int): ... else: pass -@@ -293,6 +294,7 @@ +@@ -293,6 +295,7 @@ def a(): return 1 @@ -755,7 +937,7 @@ def overload4(a: int): ... case 1: def a(): -@@ -303,6 +305,7 @@ +@@ -303,6 +306,7 @@ def a(): return 1 @@ -763,7 +945,7 @@ def overload4(a: int): ... except RuntimeError: def a(): -@@ -313,6 +316,7 @@ +@@ -313,6 +317,7 @@ def a(): return 1 @@ -771,7 +953,7 @@ def overload4(a: int): ... finally: def a(): -@@ -323,18 +327,22 @@ +@@ -323,18 +328,22 @@ def a(): return 1 @@ -794,4 +976,64 @@ def overload4(a: int): ... finally: def a(): +@@ -388,18 +397,22 @@ + + # In preview, we preserve these newlines at the start of functions: + def preserved1(): ++ + return 1 + + + def preserved2(): ++ + pass + + + def preserved3(): ++ + def inner(): ... + + + def preserved4(): ++ + def inner(): + print("with a body") + return 1 +@@ -408,17 +421,20 @@ + + + def preserved5(): ++ + ... + # trailing comment prevents collapsing the stub + + + def preserved6(): ++ + # Comment + + return 1 + + + def preserved7(): ++ + # comment + # another line + # and a third +@@ -427,6 +443,7 @@ + + + def preserved8(): # this also prevents collapsing the stub ++ + ... 
+ + +@@ -445,6 +462,7 @@ + + # And we discard empty lines after the first: + def partially_preserved1(): ++ + return 1 + + ``` diff --git a/crates/ruff_python_formatter/tests/snapshots/format@range_formatting__indent.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@range_formatting__indent.py.snap index 1609cf657e..213c843da1 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@range_formatting__indent.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@range_formatting__indent.py.snap @@ -67,6 +67,12 @@ def test6 (): print("Format" ) print(3 + 4) print("Format to fix indentation" ) + + +def test7 (): + print("Format" ) + print(3 + 4) + print("Format to fix indentation" ) ``` ## Outputs @@ -146,6 +152,27 @@ def test6 (): print("Format") print(3 + 4) print("Format to fix indentation" ) + + +def test7 (): + print("Format") + print(3 + 4) + print("Format to fix indentation" ) +``` + + +#### Preview changes +```diff +--- Stable ++++ Preview +@@ -55,6 +55,7 @@ + + + def test6 (): ++ + print("Format") + print(3 + 4) + print("Format to fix indentation" ) ``` @@ -225,6 +252,27 @@ def test6 (): print("Format") print(3 + 4) print("Format to fix indentation") + + +def test7 (): + print("Format") + print(3 + 4) + print("Format to fix indentation") +``` + + +#### Preview changes +```diff +--- Stable ++++ Preview +@@ -55,6 +55,7 @@ + + + def test6 (): ++ + print("Format") + print(3 + 4) + print("Format to fix indentation") ``` @@ -304,4 +352,25 @@ def test6 (): print("Format") print(3 + 4) print("Format to fix indentation") + + +def test7 (): + print("Format") + print(3 + 4) + print("Format to fix indentation") +``` + + +#### Preview changes +```diff +--- Stable ++++ Preview +@@ -55,6 +55,7 @@ + + + def test6 (): ++ + print("Format") + print(3 + 4) + print("Format to fix indentation") ``` diff --git a/crates/ruff_python_formatter/tests/snapshots/format@statement__match.py.snap 
b/crates/ruff_python_formatter/tests/snapshots/format@statement__match.py.snap index 852740fa6d..a94ee5e636 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@statement__match.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@statement__match.py.snap @@ -1,7 +1,6 @@ --- source: crates/ruff_python_formatter/tests/fixtures.rs input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/match.py -snapshot_kind: text --- ## Input ```python @@ -620,6 +619,61 @@ match guard_comments: ): pass + +# regression tests from https://github.com/astral-sh/ruff/issues/17796 +match class_pattern: + case Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture: + pass + case Class( + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + ) as capture: + pass + case Class( + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + ) as capture: + pass + case Class( + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + ) as really_really_really_really_really_really_really_really_really_really_really_really_long_capture: + pass + +match sequence_pattern_brackets: + case [xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx] as capture: + pass + case [ + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + ] as capture: + pass + case [ + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + ] as capture: + pass + + +match class_pattern: + # 1 + case Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture: # 2 + # 3 + pass # 4 + # 5 + case Class( # 6 + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 7 + ) as capture: # 8 + pass + case Class( # 9 + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 10 + ) as capture: # 11 + pass + case Class( # 12 + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 13 + ) as 
really_really_really_really_really_really_really_really_really_really_really_really_long_capture: # 14 + pass + case Class( # 0 + # 1 + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 2 + # 3 + ) as capture: + pass ``` ## Output @@ -1285,4 +1339,175 @@ match guard_comments: # trailing own line comment ): pass + + +# regression tests from https://github.com/astral-sh/ruff/issues/17796 +match class_pattern: + case Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture: + pass + case ( + Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture + ): + pass + case ( + Class( + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + ) as capture + ): + pass + case ( + Class( + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + ) as really_really_really_really_really_really_really_really_really_really_really_really_long_capture + ): + pass + +match sequence_pattern_brackets: + case [xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx] as capture: + pass + case ( + [xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx] as capture + ): + pass + case ( + [ + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx + ] as capture + ): + pass + + +match class_pattern: + # 1 + case ( + Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture + ): # 2 + # 3 + pass # 4 + # 5 + case ( + Class( # 6 + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 7 + ) as capture + ): # 8 + pass + case ( + Class( # 9 + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 10 + ) as capture + ): # 11 + pass + case ( + Class( # 12 + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 13 + ) as really_really_really_really_really_really_really_really_really_really_really_really_long_capture + ): # 14 + pass + case ( + Class( # 0 + # 1 + xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 2 + # 3 + ) as 
capture + ): + pass +``` + + +## Preview changes +```diff +--- Stable ++++ Preview +@@ -665,15 +665,13 @@ + match class_pattern: + case Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture: + pass +- case ( +- Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture +- ): ++ case Class( ++ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ++ ) as capture: + pass +- case ( +- Class( +- xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx +- ) as capture +- ): ++ case Class( ++ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ++ ) as capture: + pass + case ( + Class( +@@ -685,37 +683,31 @@ + match sequence_pattern_brackets: + case [xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx] as capture: + pass +- case ( +- [xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx] as capture +- ): ++ case [ ++ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ++ ] as capture: + pass +- case ( +- [ +- xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx +- ] as capture +- ): ++ case [ ++ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ++ ] as capture: + pass + + + match class_pattern: + # 1 +- case ( +- Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture +- ): # 2 ++ case Class( ++ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ++ ) as capture: # 2 + # 3 + pass # 4 + # 5 +- case ( +- Class( # 6 +- xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 7 +- ) as capture +- ): # 8 ++ case Class( # 6 ++ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 7 ++ ) as capture: # 8 + pass +- case ( +- Class( # 9 +- xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 10 +- ) as capture +- ): # 11 ++ case Class( # 9 ++ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 10 ++ ) as capture: # 11 + pass + case ( + Class( # 12 +@@ -723,11 
+715,9 @@ + ) as really_really_really_really_really_really_really_really_really_really_really_really_long_capture + ): # 14 + pass +- case ( +- Class( # 0 +- # 1 +- xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 2 +- # 3 +- ) as capture +- ): ++ case Class( # 0 ++ # 1 ++ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 2 ++ # 3 ++ ) as capture: + pass ``` diff --git a/crates/ruff_python_parser/resources/inline/ok/nested_alternative_patterns.py b/crates/ruff_python_parser/resources/inline/ok/nested_alternative_patterns.py index d322fa3899..a12be52ac3 100644 --- a/crates/ruff_python_parser/resources/inline/ok/nested_alternative_patterns.py +++ b/crates/ruff_python_parser/resources/inline/ok/nested_alternative_patterns.py @@ -5,3 +5,5 @@ match 42: case [[x] | [x]] | x: ... match 42: case [[x | x] | [x]] | x: ... +match 42: + case ast.Subscript(n, ast.Constant() | ast.Slice()) | ast.Attribute(n): ... diff --git a/crates/ruff_python_parser/src/error.rs b/crates/ruff_python_parser/src/error.rs index 2c2baa8dd7..8b02546d3b 100644 --- a/crates/ruff_python_parser/src/error.rs +++ b/crates/ruff_python_parser/src/error.rs @@ -78,9 +78,9 @@ pub enum InterpolatedStringErrorType { impl std::fmt::Display for InterpolatedStringErrorType { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { - Self::UnclosedLbrace => write!(f, "expecting '}}'"), + Self::UnclosedLbrace => write!(f, "expecting `}}`"), Self::InvalidConversionFlag => write!(f, "invalid conversion character"), - Self::SingleRbrace => write!(f, "single '}}' is not allowed"), + Self::SingleRbrace => write!(f, "single `}}` is not allowed"), Self::UnterminatedString => write!(f, "unterminated string"), Self::UnterminatedTripleQuotedString => write!(f, "unterminated triple-quoted string"), Self::LambdaWithoutParentheses => { @@ -232,7 +232,7 @@ impl std::fmt::Display for ParseErrorType { ParseErrorType::UnexpectedTokenAfterAsync(kind) => { write!( f, - "Expected 'def', 
'with' or 'for' to follow 'async', found {kind}", + "Expected `def`, `with` or `for` to follow `async`, found {kind}", ) } ParseErrorType::InvalidArgumentUnpackingOrder => { @@ -286,10 +286,10 @@ impl std::fmt::Display for ParseErrorType { f.write_str("Parameter without a default cannot follow a parameter with a default") } ParseErrorType::ExpectedKeywordParam => { - f.write_str("Expected one or more keyword parameter after '*' separator") + f.write_str("Expected one or more keyword parameter after `*` separator") } ParseErrorType::VarParameterWithDefault => { - f.write_str("Parameter with '*' or '**' cannot have default value") + f.write_str("Parameter with `*` or `**` cannot have default value") } ParseErrorType::InvalidStarPatternUsage => { f.write_str("Star pattern cannot be used here") diff --git a/crates/ruff_python_parser/src/semantic_errors.rs b/crates/ruff_python_parser/src/semantic_errors.rs index f35029d4b9..1577f80cb5 100644 --- a/crates/ruff_python_parser/src/semantic_errors.rs +++ b/crates/ruff_python_parser/src/semantic_errors.rs @@ -219,7 +219,7 @@ impl SemanticSyntaxChecker { AwaitOutsideAsyncFunctionKind::AsyncWith, ); } - Stmt::Nonlocal(ast::StmtNonlocal { range, .. }) => { + Stmt::Nonlocal(ast::StmtNonlocal { names, range, .. }) => { // test_ok nonlocal_declaration_at_module_level // def _(): // nonlocal x @@ -234,6 +234,18 @@ impl SemanticSyntaxChecker { *range, ); } + + if !ctx.in_module_scope() { + for name in names { + if !ctx.has_nonlocal_binding(name) { + Self::add_error( + ctx, + SemanticSyntaxErrorKind::NonlocalWithoutBinding(name.to_string()), + name.range, + ); + } + } + } } Stmt::Break(ast::StmtBreak { range, .. 
}) => { if !ctx.in_loop_context() { @@ -1154,6 +1166,9 @@ impl Display for SemanticSyntaxError { SemanticSyntaxErrorKind::DifferentMatchPatternBindings => { write!(f, "alternative patterns bind different names") } + SemanticSyntaxErrorKind::NonlocalWithoutBinding(name) => { + write!(f, "no binding for nonlocal `{name}` found") + } } } } @@ -1554,6 +1569,9 @@ pub enum SemanticSyntaxErrorKind { /// ... /// ``` DifferentMatchPatternBindings, + + /// Represents a nonlocal statement for a name that has no binding in an enclosing scope. + NonlocalWithoutBinding(String), } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, get_size2::GetSize)] @@ -1850,6 +1868,8 @@ impl<'a, Ctx: SemanticSyntaxContext> MatchPatternVisitor<'a, Ctx> { // case [[x] | [x]] | x: ... // match 42: // case [[x | x] | [x]] | x: ... + // match 42: + // case ast.Subscript(n, ast.Constant() | ast.Slice()) | ast.Attribute(n): ... SemanticSyntaxChecker::add_error( self.ctx, SemanticSyntaxErrorKind::DifferentMatchPatternBindings, @@ -1857,7 +1877,7 @@ impl<'a, Ctx: SemanticSyntaxContext> MatchPatternVisitor<'a, Ctx> { ); break; } - self.names = visitor.names; + self.names.extend(visitor.names); } } } @@ -2004,6 +2024,9 @@ pub trait SemanticSyntaxContext { /// Return the [`TextRange`] at which a name is declared as `global` in the current scope. fn global(&self, name: &str) -> Option; + /// Returns `true` if `name` has a binding in an enclosing scope. + fn has_nonlocal_binding(&self, name: &str) -> bool; + /// Returns `true` if the visitor is currently in an async context, i.e. an async function. 
fn in_async_context(&self) -> bool; diff --git a/crates/ruff_python_parser/src/token.rs b/crates/ruff_python_parser/src/token.rs index 18b7648c4c..a5790a9597 100644 --- a/crates/ruff_python_parser/src/token.rs +++ b/crates/ruff_python_parser/src/token.rs @@ -635,93 +635,93 @@ impl fmt::Display for TokenKind { TokenKind::TStringEnd => "TStringEnd", TokenKind::IpyEscapeCommand => "IPython escape command", TokenKind::Comment => "comment", - TokenKind::Question => "'?'", - TokenKind::Exclamation => "'!'", - TokenKind::Lpar => "'('", - TokenKind::Rpar => "')'", - TokenKind::Lsqb => "'['", - TokenKind::Rsqb => "']'", - TokenKind::Lbrace => "'{'", - TokenKind::Rbrace => "'}'", - TokenKind::Equal => "'='", - TokenKind::ColonEqual => "':='", - TokenKind::Dot => "'.'", - TokenKind::Colon => "':'", - TokenKind::Semi => "';'", - TokenKind::Comma => "','", - TokenKind::Rarrow => "'->'", - TokenKind::Plus => "'+'", - TokenKind::Minus => "'-'", - TokenKind::Star => "'*'", - TokenKind::DoubleStar => "'**'", - TokenKind::Slash => "'/'", - TokenKind::DoubleSlash => "'//'", - TokenKind::Percent => "'%'", - TokenKind::Vbar => "'|'", - TokenKind::Amper => "'&'", - TokenKind::CircumFlex => "'^'", - TokenKind::LeftShift => "'<<'", - TokenKind::RightShift => "'>>'", - TokenKind::Tilde => "'~'", - TokenKind::At => "'@'", - TokenKind::Less => "'<'", - TokenKind::Greater => "'>'", - TokenKind::EqEqual => "'=='", - TokenKind::NotEqual => "'!='", - TokenKind::LessEqual => "'<='", - TokenKind::GreaterEqual => "'>='", - TokenKind::PlusEqual => "'+='", - TokenKind::MinusEqual => "'-='", - TokenKind::StarEqual => "'*='", - TokenKind::DoubleStarEqual => "'**='", - TokenKind::SlashEqual => "'/='", - TokenKind::DoubleSlashEqual => "'//='", - TokenKind::PercentEqual => "'%='", - TokenKind::VbarEqual => "'|='", - TokenKind::AmperEqual => "'&='", - TokenKind::CircumflexEqual => "'^='", - TokenKind::LeftShiftEqual => "'<<='", - TokenKind::RightShiftEqual => "'>>='", - TokenKind::AtEqual => "'@='", - 
TokenKind::Ellipsis => "'...'", - TokenKind::False => "'False'", - TokenKind::None => "'None'", - TokenKind::True => "'True'", - TokenKind::And => "'and'", - TokenKind::As => "'as'", - TokenKind::Assert => "'assert'", - TokenKind::Async => "'async'", - TokenKind::Await => "'await'", - TokenKind::Break => "'break'", - TokenKind::Class => "'class'", - TokenKind::Continue => "'continue'", - TokenKind::Def => "'def'", - TokenKind::Del => "'del'", - TokenKind::Elif => "'elif'", - TokenKind::Else => "'else'", - TokenKind::Except => "'except'", - TokenKind::Finally => "'finally'", - TokenKind::For => "'for'", - TokenKind::From => "'from'", - TokenKind::Global => "'global'", - TokenKind::If => "'if'", - TokenKind::Import => "'import'", - TokenKind::In => "'in'", - TokenKind::Is => "'is'", - TokenKind::Lambda => "'lambda'", - TokenKind::Nonlocal => "'nonlocal'", - TokenKind::Not => "'not'", - TokenKind::Or => "'or'", - TokenKind::Pass => "'pass'", - TokenKind::Raise => "'raise'", - TokenKind::Return => "'return'", - TokenKind::Try => "'try'", - TokenKind::While => "'while'", - TokenKind::Match => "'match'", - TokenKind::Type => "'type'", - TokenKind::Case => "'case'", - TokenKind::With => "'with'", - TokenKind::Yield => "'yield'", + TokenKind::Question => "`?`", + TokenKind::Exclamation => "`!`", + TokenKind::Lpar => "`(`", + TokenKind::Rpar => "`)`", + TokenKind::Lsqb => "`[`", + TokenKind::Rsqb => "`]`", + TokenKind::Lbrace => "`{`", + TokenKind::Rbrace => "`}`", + TokenKind::Equal => "`=`", + TokenKind::ColonEqual => "`:=`", + TokenKind::Dot => "`.`", + TokenKind::Colon => "`:`", + TokenKind::Semi => "`;`", + TokenKind::Comma => "`,`", + TokenKind::Rarrow => "`->`", + TokenKind::Plus => "`+`", + TokenKind::Minus => "`-`", + TokenKind::Star => "`*`", + TokenKind::DoubleStar => "`**`", + TokenKind::Slash => "`/`", + TokenKind::DoubleSlash => "`//`", + TokenKind::Percent => "`%`", + TokenKind::Vbar => "`|`", + TokenKind::Amper => "`&`", + TokenKind::CircumFlex => "`^`", + 
TokenKind::LeftShift => "`<<`", + TokenKind::RightShift => "`>>`", + TokenKind::Tilde => "`~`", + TokenKind::At => "`@`", + TokenKind::Less => "`<`", + TokenKind::Greater => "`>`", + TokenKind::EqEqual => "`==`", + TokenKind::NotEqual => "`!=`", + TokenKind::LessEqual => "`<=`", + TokenKind::GreaterEqual => "`>=`", + TokenKind::PlusEqual => "`+=`", + TokenKind::MinusEqual => "`-=`", + TokenKind::StarEqual => "`*=`", + TokenKind::DoubleStarEqual => "`**=`", + TokenKind::SlashEqual => "`/=`", + TokenKind::DoubleSlashEqual => "`//=`", + TokenKind::PercentEqual => "`%=`", + TokenKind::VbarEqual => "`|=`", + TokenKind::AmperEqual => "`&=`", + TokenKind::CircumflexEqual => "`^=`", + TokenKind::LeftShiftEqual => "`<<=`", + TokenKind::RightShiftEqual => "`>>=`", + TokenKind::AtEqual => "`@=`", + TokenKind::Ellipsis => "`...`", + TokenKind::False => "`False`", + TokenKind::None => "`None`", + TokenKind::True => "`True`", + TokenKind::And => "`and`", + TokenKind::As => "`as`", + TokenKind::Assert => "`assert`", + TokenKind::Async => "`async`", + TokenKind::Await => "`await`", + TokenKind::Break => "`break`", + TokenKind::Class => "`class`", + TokenKind::Continue => "`continue`", + TokenKind::Def => "`def`", + TokenKind::Del => "`del`", + TokenKind::Elif => "`elif`", + TokenKind::Else => "`else`", + TokenKind::Except => "`except`", + TokenKind::Finally => "`finally`", + TokenKind::For => "`for`", + TokenKind::From => "`from`", + TokenKind::Global => "`global`", + TokenKind::If => "`if`", + TokenKind::Import => "`import`", + TokenKind::In => "`in`", + TokenKind::Is => "`is`", + TokenKind::Lambda => "`lambda`", + TokenKind::Nonlocal => "`nonlocal`", + TokenKind::Not => "`not`", + TokenKind::Or => "`or`", + TokenKind::Pass => "`pass`", + TokenKind::Raise => "`raise`", + TokenKind::Return => "`return`", + TokenKind::Try => "`try`", + TokenKind::While => "`while`", + TokenKind::Match => "`match`", + TokenKind::Type => "`type`", + TokenKind::Case => "`case`", + TokenKind::With => 
"`with`", + TokenKind::Yield => "`yield`", }; f.write_str(value) } diff --git a/crates/ruff_python_parser/tests/fixtures.rs b/crates/ruff_python_parser/tests/fixtures.rs index c646fe525b..2de49e6d68 100644 --- a/crates/ruff_python_parser/tests/fixtures.rs +++ b/crates/ruff_python_parser/tests/fixtures.rs @@ -527,6 +527,10 @@ impl SemanticSyntaxContext for SemanticSyntaxCheckerVisitor<'_> { None } + fn has_nonlocal_binding(&self, _name: &str) -> bool { + true + } + fn in_async_context(&self) -> bool { if let Some(scope) = self.scopes.iter().next_back() { match scope { diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@assert_invalid_test_expr.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@assert_invalid_test_expr.py.snap index 1a843b29c9..87c0dcf672 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@assert_invalid_test_expr.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@assert_invalid_test_expr.py.snap @@ -131,7 +131,7 @@ Module( | 1 | assert *x 2 | assert assert x - | ^^^^^^ Syntax Error: Expected an identifier, but found a keyword 'assert' that cannot be used here + | ^^^^^^ Syntax Error: Expected an identifier, but found a keyword `assert` that cannot be used here 3 | assert yield x 4 | assert x := 1 | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@assign_stmt_keyword_target.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@assign_stmt_keyword_target.py.snap index 6264d907b5..e59c71cea8 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@assign_stmt_keyword_target.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@assign_stmt_keyword_target.py.snap @@ -148,7 +148,7 @@ Module( | 1 | a = pass = c - | ^^^^ Syntax Error: Expected an identifier, but found a keyword 'pass' that cannot be used here + | ^^^^ Syntax Error: Expected an identifier, but found a keyword `pass` that cannot be used here 2 | a + b 3 | a = 
b = pass = c | @@ -158,6 +158,6 @@ Module( 1 | a = pass = c 2 | a + b 3 | a = b = pass = c - | ^^^^ Syntax Error: Expected an identifier, but found a keyword 'pass' that cannot be used here + | ^^^^ Syntax Error: Expected an identifier, but found a keyword `pass` that cannot be used here 4 | a + b | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@async_unexpected_token.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@async_unexpected_token.py.snap index 2dd2bddfc4..dc62f1b446 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@async_unexpected_token.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@async_unexpected_token.py.snap @@ -181,7 +181,7 @@ Module( | 1 | async class Foo: ... - | ^^^^^ Syntax Error: Expected 'def', 'with' or 'for' to follow 'async', found 'class' + | ^^^^^ Syntax Error: Expected `def`, `with` or `for` to follow `async`, found `class` 2 | async while test: ... 3 | async x = 1 | @@ -190,7 +190,7 @@ Module( | 1 | async class Foo: ... 2 | async while test: ... - | ^^^^^ Syntax Error: Expected 'def', 'with' or 'for' to follow 'async', found 'while' + | ^^^^^ Syntax Error: Expected `def`, `with` or `for` to follow `async`, found `while` 3 | async x = 1 4 | async async def foo(): ... | @@ -200,7 +200,7 @@ Module( 1 | async class Foo: ... 2 | async while test: ... 3 | async x = 1 - | ^ Syntax Error: Expected 'def', 'with' or 'for' to follow 'async', found name + | ^ Syntax Error: Expected `def`, `with` or `for` to follow `async`, found name 4 | async async def foo(): ... 5 | async match test: | @@ -210,7 +210,7 @@ Module( 2 | async while test: ... 3 | async x = 1 4 | async async def foo(): ... - | ^^^^^ Syntax Error: Expected 'def', 'with' or 'for' to follow 'async', found 'async' + | ^^^^^ Syntax Error: Expected `def`, `with` or `for` to follow `async`, found `async` 5 | async match test: 6 | case _: ... 
| @@ -220,6 +220,6 @@ Module( 3 | async x = 1 4 | async async def foo(): ... 5 | async match test: - | ^^^^^ Syntax Error: Expected 'def', 'with' or 'for' to follow 'async', found 'match' + | ^^^^^ Syntax Error: Expected `def`, `with` or `for` to follow `async`, found `match` 6 | case _: ... | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@aug_assign_stmt_invalid_target.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@aug_assign_stmt_invalid_target.py.snap index 0d1311ca25..dbe201539e 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@aug_assign_stmt_invalid_target.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@aug_assign_stmt_invalid_target.py.snap @@ -245,7 +245,7 @@ Module( 3 | *x += 1 4 | pass += 1 5 | x += pass - | ^^^^ Syntax Error: Expected an identifier, but found a keyword 'pass' that cannot be used here + | ^^^^ Syntax Error: Expected an identifier, but found a keyword `pass` that cannot be used here 6 | (x + y) += 1 | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@class_def_unclosed_type_param_list.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@class_def_unclosed_type_param_list.py.snap index 3246bdb0ce..515534ed26 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@class_def_unclosed_type_param_list.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@class_def_unclosed_type_param_list.py.snap @@ -121,7 +121,7 @@ Module( | 1 | class Foo[T1, *T2(a, b): - | ^ Syntax Error: Expected ']', found '(' + | ^ Syntax Error: Expected `]`, found `(` 2 | pass 3 | x = 10 | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comma_separated_missing_comma.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comma_separated_missing_comma.py.snap index bf33de094f..84c88e8d1f 100644 --- 
a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comma_separated_missing_comma.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comma_separated_missing_comma.py.snap @@ -68,7 +68,7 @@ Module( | 1 | call(**x := 1) - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comma_separated_missing_comma_between_elements.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comma_separated_missing_comma_between_elements.py.snap index 5f39e94515..4da8a91dea 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comma_separated_missing_comma_between_elements.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comma_separated_missing_comma_between_elements.py.snap @@ -61,5 +61,5 @@ Module( | 1 | # The comma between the first two elements is expected in `parse_list_expression`. 2 | [0, 1 2] - | ^ Syntax Error: Expected ',', found int + | ^ Syntax Error: Expected `,`, found int | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comprehension_missing_for_after_async.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comprehension_missing_for_after_async.py.snap index aee9bf7056..a31a055919 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comprehension_missing_for_after_async.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comprehension_missing_for_after_async.py.snap @@ -77,7 +77,7 @@ Module( | 1 | (async) - | ^^^^^ Syntax Error: Expected an identifier, but found a keyword 'async' that cannot be used here + | ^^^^^ Syntax Error: Expected an identifier, but found a keyword `async` that cannot be used here 2 | (x async x in iter) | @@ -85,5 +85,5 @@ Module( | 1 | (async) 2 | (x async x in iter) - | ^ Syntax Error: Expected 'for', found name + | ^ Syntax Error: Expected `for`, found name | diff --git 
a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@decorator_missing_expression.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@decorator_missing_expression.py.snap index 2c5bbd3a03..dd7225493f 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@decorator_missing_expression.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@decorator_missing_expression.py.snap @@ -169,7 +169,7 @@ Module( | 1 | @def foo(): ... - | ^^^ Syntax Error: Expected an identifier, but found a keyword 'def' that cannot be used here + | ^^^ Syntax Error: Expected an identifier, but found a keyword `def` that cannot be used here 2 | @ 3 | def foo(): ... | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@decorator_missing_newline.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@decorator_missing_newline.py.snap index 948fc24fe2..ea573c4cde 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@decorator_missing_newline.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@decorator_missing_newline.py.snap @@ -161,7 +161,7 @@ Module( | 1 | @x def foo(): ... - | ^^^ Syntax Error: Expected newline, found 'def' + | ^^^ Syntax Error: Expected newline, found `def` 2 | @x async def foo(): ... 3 | @x class Foo: ... | @@ -170,7 +170,7 @@ Module( | 1 | @x def foo(): ... 2 | @x async def foo(): ... - | ^^^^^ Syntax Error: Expected newline, found 'async' + | ^^^^^ Syntax Error: Expected newline, found `async` 3 | @x class Foo: ... | @@ -179,5 +179,5 @@ Module( 1 | @x def foo(): ... 2 | @x async def foo(): ... 3 | @x class Foo: ... 
- | ^^^^^ Syntax Error: Expected newline, found 'class' + | ^^^^^ Syntax Error: Expected newline, found `class` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__double_starred.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__double_starred.py.snap index e3f633b879..8656ee03e8 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__double_starred.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__double_starred.py.snap @@ -238,7 +238,7 @@ Module( 3 | call(***x) 4 | 5 | call(**x := 1) - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__missing_comma.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__missing_comma.py.snap index 37e891b89a..0f781f1e53 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__missing_comma.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__missing_comma.py.snap @@ -61,5 +61,5 @@ Module( | 1 | call(x y) - | ^ Syntax Error: Expected ',', found name + | ^ Syntax Error: Expected `,`, found name | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_0.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_0.py.snap index 655f45ed24..cc6fba138b 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_0.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_0.py.snap @@ -76,7 +76,7 @@ Module( | 1 | call( - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 2 | 3 | def foo(): 4 | pass diff --git 
a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_1.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_1.py.snap index 99e0e4fbcd..cdb11a8ebc 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_1.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_1.py.snap @@ -85,7 +85,7 @@ Module( | 1 | call(x - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 2 | 3 | def foo(): 4 | pass diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_2.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_2.py.snap index 2b4270bf2c..e28cecdd9f 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_2.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_2.py.snap @@ -85,7 +85,7 @@ Module( | 1 | call(x, - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 2 | 3 | def foo(): 4 | pass diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__compare__invalid_order.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__compare__invalid_order.py.snap index 419cc7854b..ccc649ea7c 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__compare__invalid_order.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__compare__invalid_order.py.snap @@ -175,7 +175,7 @@ Module( | 6 | # Same here as well, `not` without `in` is considered to be a unary operator 7 | x not is y - | ^^ Syntax Error: Expected an identifier, but found a keyword 'is' that cannot be used here + | ^^ Syntax Error: Expected an identifier, but found a keyword `is` 
that cannot be used here | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__double_star.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__double_star.py.snap index a579afac89..64fb0233e4 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__double_star.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__double_star.py.snap @@ -544,7 +544,7 @@ Module( 2 | # the ones which are higher than that. 3 | 4 | {**x := 1} - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` 5 | {a: 1, **x if True else y} 6 | {**lambda x: x, b: 2} | @@ -554,7 +554,7 @@ Module( 2 | # the ones which are higher than that. 3 | 4 | {**x := 1} - | ^ Syntax Error: Expected ':', found '}' + | ^ Syntax Error: Expected `:`, found `}` 5 | {a: 1, **x if True else y} 6 | {**lambda x: x, b: 2} | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__double_star_comprehension.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__double_star_comprehension.py.snap index 9c0cde63d3..e5e7f7e3ee 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__double_star_comprehension.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__double_star_comprehension.py.snap @@ -134,7 +134,7 @@ Module( 2 | # it's actually a comprehension. 3 | 4 | {**x: y for x, y in data} - | ^^^ Syntax Error: Expected ':', found 'for' + | ^^^ Syntax Error: Expected `:`, found `for` 5 | 6 | # TODO(dhruvmanila): This test case fails because there's no way to represent `**y` | @@ -144,7 +144,7 @@ Module( 2 | # it's actually a comprehension. 
3 | 4 | {**x: y for x, y in data} - | ^ Syntax Error: Expected ',', found name + | ^ Syntax Error: Expected `,`, found name 5 | 6 | # TODO(dhruvmanila): This test case fails because there's no way to represent `**y` | @@ -154,7 +154,7 @@ Module( 2 | # it's actually a comprehension. 3 | 4 | {**x: y for x, y in data} - | ^ Syntax Error: Expected ':', found ',' + | ^ Syntax Error: Expected `:`, found `,` 5 | 6 | # TODO(dhruvmanila): This test case fails because there's no way to represent `**y` | @@ -164,7 +164,7 @@ Module( 2 | # it's actually a comprehension. 3 | 4 | {**x: y for x, y in data} - | ^ Syntax Error: Expected ':', found '}' + | ^ Syntax Error: Expected `:`, found `}` 5 | 6 | # TODO(dhruvmanila): This test case fails because there's no way to represent `**y` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__missing_closing_brace_0.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__missing_closing_brace_0.py.snap index 99d310bc87..31bd7feb9f 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__missing_closing_brace_0.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__missing_closing_brace_0.py.snap @@ -86,7 +86,7 @@ Module( 1 | {x: 2 | 3 | def foo(): - | ^^^ Syntax Error: Expected an identifier, but found a keyword 'def' that cannot be used here + | ^^^ Syntax Error: Expected an identifier, but found a keyword `def` that cannot be used here 4 | pass | @@ -95,7 +95,7 @@ Module( 1 | {x: 2 | 3 | def foo(): - | ^^^ Syntax Error: Expected ',', found name + | ^^^ Syntax Error: Expected `,`, found name 4 | pass | @@ -103,7 +103,7 @@ Module( | 3 | def foo(): 4 | pass - | ^^^^ Syntax Error: Expected an identifier, but found a keyword 'pass' that cannot be used here + | ^^^^ Syntax Error: Expected an identifier, but found a keyword `pass` that cannot be used here | diff --git 
a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__missing_closing_brace_2.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__missing_closing_brace_2.py.snap index a5a08be0be..a54264becd 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__missing_closing_brace_2.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__missing_closing_brace_2.py.snap @@ -85,7 +85,7 @@ Module( | 1 | {x: 1, - | ^ Syntax Error: Expected '}', found newline + | ^ Syntax Error: Expected `}`, found newline 2 | 3 | def foo(): 4 | pass diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__named_expression_0.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__named_expression_0.py.snap index 5db7a61381..824f3261f7 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__named_expression_0.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__named_expression_0.py.snap @@ -149,7 +149,7 @@ Module( 1 | # Unparenthesized named expression not allowed in key 2 | 3 | {x := 1: y, z := 2: a} - | ^^ Syntax Error: Expected ':', found ':=' + | ^^ Syntax Error: Expected `:`, found `:=` 4 | 5 | x + y | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__named_expression_1.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__named_expression_1.py.snap index 58509cc935..ccf8ead1b9 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__named_expression_1.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__named_expression_1.py.snap @@ -145,7 +145,7 @@ Module( 1 | # Unparenthesized named expression not allowed in value 2 | 3 | {x: y := 1, z: a := 2} - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ 
Syntax Error: Expected `,`, found `:=` 4 | 5 | x + y | @@ -155,7 +155,7 @@ Module( 1 | # Unparenthesized named expression not allowed in value 2 | 3 | {x: y := 1, z: a := 2} - | ^ Syntax Error: Expected ':', found ',' + | ^ Syntax Error: Expected `:`, found `,` 4 | 5 | x + y | @@ -165,7 +165,7 @@ Module( 1 | # Unparenthesized named expression not allowed in value 2 | 3 | {x: y := 1, z: a := 2} - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` 4 | 5 | x + y | @@ -175,7 +175,7 @@ Module( 1 | # Unparenthesized named expression not allowed in value 2 | 3 | {x: y := 1, z: a := 2} - | ^ Syntax Error: Expected ':', found '}' + | ^ Syntax Error: Expected `:`, found `}` 4 | 5 | x + y | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__recover.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__recover.py.snap index c4c4f242c8..b1ad5d8255 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__recover.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__recover.py.snap @@ -504,7 +504,7 @@ Module( | 9 | # Missing comma 10 | {1: 2 3: 4} - | ^ Syntax Error: Expected ',', found int + | ^ Syntax Error: Expected `,`, found int 11 | 12 | # No value | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__lambda_duplicate_parameters.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__lambda_duplicate_parameters.py.snap index 986a07de03..39ddc3c4c5 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__lambda_duplicate_parameters.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__lambda_duplicate_parameters.py.snap @@ -338,7 +338,7 @@ Module( 7 | lambda a, *a: 1 8 | 9 | lambda a, *, **a: 1 - | ^^^ Syntax Error: Expected one or more keyword parameter after '*' separator + | ^^^ Syntax 
Error: Expected one or more keyword parameter after `*` separator | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__list__missing_closing_bracket_3.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__list__missing_closing_bracket_3.py.snap index 4f21ca4ddf..1870af8aa0 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__list__missing_closing_bracket_3.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__list__missing_closing_bracket_3.py.snap @@ -85,7 +85,7 @@ Module( 2 | # token starts a statement. 3 | 4 | [1, 2 - | ^ Syntax Error: Expected ']', found newline + | ^ Syntax Error: Expected `]`, found newline 5 | 6 | def foo(): 7 | pass diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__list__recover.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__list__recover.py.snap index 3b1ba32aac..3fa2a32578 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__list__recover.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__list__recover.py.snap @@ -305,7 +305,7 @@ Module( | 9 | # Missing comma 10 | [1 2] - | ^ Syntax Error: Expected ',', found int + | ^ Syntax Error: Expected `,`, found int 11 | 12 | # Dictionary element in a list | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__named__missing_expression_2.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__named__missing_expression_2.py.snap index 884fb234b2..057477b761 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__named__missing_expression_2.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__named__missing_expression_2.py.snap @@ -84,7 +84,7 @@ Module( 3 | (x := 4 | 5 | def foo(): - | ^^^ Syntax Error: Expected an identifier, but found a keyword 
'def' that cannot be used here + | ^^^ Syntax Error: Expected an identifier, but found a keyword `def` that cannot be used here 6 | pass | @@ -93,7 +93,7 @@ Module( 3 | (x := 4 | 5 | def foo(): - | ^^^ Syntax Error: Expected ')', found name + | ^^^ Syntax Error: Expected `)`, found name 6 | pass | @@ -101,7 +101,7 @@ Module( | 5 | def foo(): 6 | pass - | ^^^^ Syntax Error: Expected an identifier, but found a keyword 'pass' that cannot be used here + | ^^^^ Syntax Error: Expected an identifier, but found a keyword `pass` that cannot be used here | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__generator.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__generator.py.snap index 3e8a85dc97..7ebe57ede2 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__generator.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__generator.py.snap @@ -142,14 +142,14 @@ Module( | 1 | (*x for x in y) 2 | (x := 1, for x in y) - | ^^^ Syntax Error: Expected ')', found 'for' + | ^^^ Syntax Error: Expected `)`, found `for` | | 1 | (*x for x in y) 2 | (x := 1, for x in y) - | ^ Syntax Error: Expected ':', found ')' + | ^ Syntax Error: Expected `:`, found `)` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__missing_closing_paren_3.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__missing_closing_paren_3.py.snap index b98aae283e..24ae8dd4bb 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__missing_closing_paren_3.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__missing_closing_paren_3.py.snap @@ -86,7 +86,7 @@ Module( 2 | # token starts a statement. 
3 | 4 | (1, 2 - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 5 | 6 | def foo(): 7 | pass diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__tuple.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__tuple.py.snap index 768381c483..7670ed0edd 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__tuple.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__tuple.py.snap @@ -315,7 +315,7 @@ Module( | 9 | # Missing comma 10 | (1 2) - | ^ Syntax Error: Expected ')', found int + | ^ Syntax Error: Expected `)`, found int 11 | 12 | # Dictionary element in a list | @@ -343,7 +343,7 @@ Module( | 12 | # Dictionary element in a list 13 | (1: 2) - | ^ Syntax Error: Expected ')', found ':' + | ^ Syntax Error: Expected `)`, found `:` 14 | 15 | # Missing expression | @@ -390,7 +390,7 @@ Module( 16 | (1, x + ) 17 | 18 | (1; 2) - | ^ Syntax Error: Expected ')', found ';' + | ^ Syntax Error: Expected `)`, found `;` 19 | 20 | # Unparenthesized named expression is not allowed | @@ -420,5 +420,5 @@ Module( | 20 | # Unparenthesized named expression is not allowed 21 | x, y := 2, z - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__tuple_starred_expr.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__tuple_starred_expr.py.snap index da92fa1991..05cd9dbaca 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__tuple_starred_expr.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__tuple_starred_expr.py.snap @@ -1542,5 +1542,5 @@ Module( 18 | *x if True else y, z, *x if True else y 19 | *lambda x: x, 
z, *lambda x: x 20 | *x := 2, z, *x := 2 - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__set__missing_closing_curly_brace_3.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__set__missing_closing_curly_brace_3.py.snap index 311eaae530..0be8d06138 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__set__missing_closing_curly_brace_3.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__set__missing_closing_curly_brace_3.py.snap @@ -84,7 +84,7 @@ Module( 2 | # token starts a statement. 3 | 4 | {1, 2 - | ^ Syntax Error: Expected '}', found newline + | ^ Syntax Error: Expected `}`, found newline 5 | 6 | def foo(): 7 | pass diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__set__recover.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__set__recover.py.snap index b489b1c64f..74e95fe8c7 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__set__recover.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__set__recover.py.snap @@ -302,7 +302,7 @@ Module( | 11 | # Missing comma 12 | {1 2} - | ^ Syntax Error: Expected ',', found int + | ^ Syntax Error: Expected `,`, found int 13 | 14 | # Dictionary element in a list | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__subscript__unclosed_slice_1.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__subscript__unclosed_slice_1.py.snap index d3e57ddfc0..05de28c275 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__subscript__unclosed_slice_1.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__subscript__unclosed_slice_1.py.snap @@ -95,7 +95,7 @@ Module( 1 | x[:: 2 | 3 | def 
foo(): - | ^^^ Syntax Error: Expected an identifier, but found a keyword 'def' that cannot be used here + | ^^^ Syntax Error: Expected an identifier, but found a keyword `def` that cannot be used here 4 | pass | @@ -104,7 +104,7 @@ Module( 1 | x[:: 2 | 3 | def foo(): - | ^^^ Syntax Error: Expected ']', found name + | ^^^ Syntax Error: Expected `]`, found name 4 | pass | @@ -112,7 +112,7 @@ Module( | 3 | def foo(): 4 | pass - | ^^^^ Syntax Error: Expected an identifier, but found a keyword 'pass' that cannot be used here + | ^^^^ Syntax Error: Expected an identifier, but found a keyword `pass` that cannot be used here | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__yield__named_expression.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__yield__named_expression.py.snap index 5feebcc55f..49a4e5362b 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__yield__named_expression.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__yield__named_expression.py.snap @@ -125,5 +125,5 @@ Module( 2 | yield x := 1 3 | 4 | yield 1, x := 2, 3 - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_lambda_without_parentheses.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_lambda_without_parentheses.py.snap index b7da154352..2f5d767448 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_lambda_without_parentheses.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_lambda_without_parentheses.py.snap @@ -117,7 +117,7 @@ Module( | 1 | f"{lambda x: x}" - | ^^ Syntax Error: f-string: expecting '}' + | ^^ Syntax Error: f-string: expecting `}` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_unclosed_lbrace.py.snap 
b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_unclosed_lbrace.py.snap index c8b75ce3f9..004ae87faa 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_unclosed_lbrace.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_unclosed_lbrace.py.snap @@ -267,7 +267,7 @@ Module( | 1 | f"{" 2 | f"{foo!r" - | ^ Syntax Error: f-string: expecting '}' + | ^ Syntax Error: f-string: expecting `}` 3 | f"{foo=" 4 | f"{" | @@ -277,7 +277,7 @@ Module( 1 | f"{" 2 | f"{foo!r" 3 | f"{foo=" - | ^ Syntax Error: f-string: expecting '}' + | ^ Syntax Error: f-string: expecting `}` 4 | f"{" 5 | f"""{""" | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_unclosed_lbrace_in_format_spec.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_unclosed_lbrace_in_format_spec.py.snap index cf843119c2..ac1d7c98f4 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_unclosed_lbrace_in_format_spec.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_unclosed_lbrace_in_format_spec.py.snap @@ -146,7 +146,7 @@ Module( | 1 | f"hello {x:" - | ^ Syntax Error: f-string: expecting '}' + | ^ Syntax Error: f-string: expecting `}` 2 | f"hello {x:.3f" | @@ -154,5 +154,5 @@ Module( | 1 | f"hello {x:" 2 | f"hello {x:.3f" - | ^ Syntax Error: f-string: expecting '}' + | ^ Syntax Error: f-string: expecting `}` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_invalid_iter_expr.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_invalid_iter_expr.py.snap index 907c07e8ce..ab00df4afe 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_invalid_iter_expr.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_invalid_iter_expr.py.snap @@ -192,7 +192,7 @@ Module( 1 | for x in *a and b: ... 2 | for x in yield a: ... 3 | for target in x := 1: ... 
- | ^^ Syntax Error: Expected ':', found ':=' + | ^^ Syntax Error: Expected `:`, found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_invalid_target.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_invalid_target.py.snap index 04caa94916..88050de12e 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_invalid_target.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_invalid_target.py.snap @@ -498,7 +498,7 @@ Module( 4 | for *x | y in z: ... 5 | for await x in z: ... 6 | for yield x in y: ... - | ^ Syntax Error: Expected 'in', found ':' + | ^ Syntax Error: Expected `in`, found `:` 7 | for [x, 1, y, *["a"]] in z: ... | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_missing_in_keyword.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_missing_in_keyword.py.snap index 8052c314b5..a2bf0f699d 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_missing_in_keyword.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_missing_in_keyword.py.snap @@ -94,7 +94,7 @@ Module( | 1 | for a b: ... - | ^ Syntax Error: Expected 'in', found name + | ^ Syntax Error: Expected `in`, found name 2 | for a: ... | @@ -102,5 +102,5 @@ Module( | 1 | for a b: ... 2 | for a: ... - | ^ Syntax Error: Expected 'in', found ':' + | ^ Syntax Error: Expected `in`, found `:` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_missing_target.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_missing_target.py.snap index 84d8b4f8cd..7742223cd0 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_missing_target.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_missing_target.py.snap @@ -56,11 +56,11 @@ Module( | 1 | for in x: ... 
- | ^^ Syntax Error: Expected an identifier, but found a keyword 'in' that cannot be used here + | ^^ Syntax Error: Expected an identifier, but found a keyword `in` that cannot be used here | | 1 | for in x: ... - | ^ Syntax Error: Expected 'in', found name + | ^ Syntax Error: Expected `in`, found name | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@from_import_dotted_names.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@from_import_dotted_names.py.snap index 2520cfbe49..a0fbe287f6 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@from_import_dotted_names.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@from_import_dotted_names.py.snap @@ -166,7 +166,7 @@ Module( | 1 | from x import a. - | ^ Syntax Error: Expected ',', found '.' + | ^ Syntax Error: Expected `,`, found `.` 2 | from x import a.b 3 | from x import a, b.c, d, e.f, g | @@ -175,7 +175,7 @@ Module( | 1 | from x import a. 2 | from x import a.b - | ^ Syntax Error: Expected ',', found '.' + | ^ Syntax Error: Expected `,`, found `.` 3 | from x import a, b.c, d, e.f, g | @@ -184,7 +184,7 @@ Module( 1 | from x import a. 2 | from x import a.b 3 | from x import a, b.c, d, e.f, g - | ^ Syntax Error: Expected ',', found '.' + | ^ Syntax Error: Expected `,`, found `.` | @@ -192,5 +192,5 @@ Module( 1 | from x import a. 2 | from x import a.b 3 | from x import a, b.c, d, e.f, g - | ^ Syntax Error: Expected ',', found '.' 
+ | ^ Syntax Error: Expected `,`, found `.` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@from_import_missing_rpar.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@from_import_missing_rpar.py.snap index f53eb5aeff..d1792e0e09 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@from_import_missing_rpar.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@from_import_missing_rpar.py.snap @@ -152,7 +152,7 @@ Module( | 1 | from x import (a, b - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 2 | 1 + 1 3 | from x import (a, b, 4 | 2 + 2 @@ -163,6 +163,6 @@ Module( 1 | from x import (a, b 2 | 1 + 1 3 | from x import (a, b, - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 4 | 2 + 2 | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@function_def_unclosed_parameter_list.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@function_def_unclosed_parameter_list.py.snap index 9028296eeb..9efb6d3fac 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@function_def_unclosed_parameter_list.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@function_def_unclosed_parameter_list.py.snap @@ -234,7 +234,7 @@ Module( | 1 | def foo(a: int, b: - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 2 | def foo(): 3 | return 42 4 | def foo(a: int, b: str @@ -254,7 +254,7 @@ Module( 3 | return 42 4 | def foo(a: int, b: str 5 | x = 10 - | ^ Syntax Error: Expected ',', found name + | ^ Syntax Error: Expected `,`, found name | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@function_def_unclosed_type_param_list.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@function_def_unclosed_type_param_list.py.snap index fa71509d1f..dd2412c32a 100644 --- 
a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@function_def_unclosed_type_param_list.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@function_def_unclosed_type_param_list.py.snap @@ -163,7 +163,7 @@ Module( | 1 | def foo[T1, *T2(a, b): - | ^ Syntax Error: Expected ']', found '(' + | ^ Syntax Error: Expected `]`, found `(` 2 | return a + b 3 | x = 10 | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@if_stmt_elif_missing_colon.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@if_stmt_elif_missing_colon.py.snap index d8ac7c86be..4238e23e7a 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@if_stmt_elif_missing_colon.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@if_stmt_elif_missing_colon.py.snap @@ -79,7 +79,7 @@ Module( 1 | if x: 2 | pass 3 | elif y - | ^ Syntax Error: Expected ':', found newline + | ^ Syntax Error: Expected `:`, found newline 4 | pass 5 | else: 6 | pass diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@if_stmt_missing_colon.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@if_stmt_missing_colon.py.snap index 8092bc7d7c..5dc30d6ce6 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@if_stmt_missing_colon.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@if_stmt_missing_colon.py.snap @@ -82,7 +82,7 @@ Module( | 1 | if x - | ^ Syntax Error: Expected ':', found newline + | ^ Syntax Error: Expected `:`, found newline 2 | if x 3 | pass 4 | a = 1 @@ -101,7 +101,7 @@ Module( | 1 | if x 2 | if x - | ^ Syntax Error: Expected ':', found newline + | ^ Syntax Error: Expected `:`, found newline 3 | pass 4 | a = 1 | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@match_expected_colon.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@match_expected_colon.py.snap index f352512262..da778df7a1 100644 --- 
a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@match_expected_colon.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@match_expected_colon.py.snap @@ -80,6 +80,6 @@ Module( | 1 | match [1, 2] - | ^ Syntax Error: Expected ':', found newline + | ^ Syntax Error: Expected `:`, found newline 2 | case _: ... | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@match_stmt_no_newline_before_case.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@match_stmt_no_newline_before_case.py.snap index 324f3480ff..7888bcd48e 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@match_stmt_no_newline_before_case.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@match_stmt_no_newline_before_case.py.snap @@ -61,7 +61,7 @@ Module( | 1 | match foo: case _: ... - | ^^^^ Syntax Error: Expected newline, found 'case' + | ^^^^ Syntax Error: Expected newline, found `case` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@multiple_clauses_on_same_line.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@multiple_clauses_on_same_line.py.snap index 0fb6c83f46..be571b2cd2 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@multiple_clauses_on_same_line.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@multiple_clauses_on_same_line.py.snap @@ -326,7 +326,7 @@ Module( | 1 | if True: pass elif False: pass else: pass - | ^^^^ Syntax Error: Expected newline, found 'elif' + | ^^^^ Syntax Error: Expected newline, found `elif` 2 | if True: pass; elif False: pass; else: pass 3 | for x in iter: break else: pass | @@ -334,7 +334,7 @@ Module( | 1 | if True: pass elif False: pass else: pass - | ^^^^ Syntax Error: Expected newline, found 'else' + | ^^^^ Syntax Error: Expected newline, found `else` 2 | if True: pass; elif False: pass; else: pass 3 | for x in iter: break else: pass | @@ -343,7 +343,7 @@ Module( | 1 | if True: pass elif 
False: pass else: pass 2 | if True: pass; elif False: pass; else: pass - | ^^^^ Syntax Error: Expected newline, found 'elif' + | ^^^^ Syntax Error: Expected newline, found `elif` 3 | for x in iter: break else: pass 4 | for x in iter: break; else: pass | @@ -352,7 +352,7 @@ Module( | 1 | if True: pass elif False: pass else: pass 2 | if True: pass; elif False: pass; else: pass - | ^^^^ Syntax Error: Expected newline, found 'else' + | ^^^^ Syntax Error: Expected newline, found `else` 3 | for x in iter: break else: pass 4 | for x in iter: break; else: pass | @@ -362,7 +362,7 @@ Module( 1 | if True: pass elif False: pass else: pass 2 | if True: pass; elif False: pass; else: pass 3 | for x in iter: break else: pass - | ^^^^ Syntax Error: Expected newline, found 'else' + | ^^^^ Syntax Error: Expected newline, found `else` 4 | for x in iter: break; else: pass 5 | try: pass except exc: pass else: pass finally: pass | @@ -372,7 +372,7 @@ Module( 2 | if True: pass; elif False: pass; else: pass 3 | for x in iter: break else: pass 4 | for x in iter: break; else: pass - | ^^^^ Syntax Error: Expected newline, found 'else' + | ^^^^ Syntax Error: Expected newline, found `else` 5 | try: pass except exc: pass else: pass finally: pass 6 | try: pass; except exc: pass; else: pass; finally: pass | @@ -382,7 +382,7 @@ Module( 3 | for x in iter: break else: pass 4 | for x in iter: break; else: pass 5 | try: pass except exc: pass else: pass finally: pass - | ^^^^^^ Syntax Error: Expected newline, found 'except' + | ^^^^^^ Syntax Error: Expected newline, found `except` 6 | try: pass; except exc: pass; else: pass; finally: pass | @@ -391,7 +391,7 @@ Module( 3 | for x in iter: break else: pass 4 | for x in iter: break; else: pass 5 | try: pass except exc: pass else: pass finally: pass - | ^^^^ Syntax Error: Expected newline, found 'else' + | ^^^^ Syntax Error: Expected newline, found `else` 6 | try: pass; except exc: pass; else: pass; finally: pass | @@ -400,7 +400,7 @@ Module( 3 | for x in 
iter: break else: pass 4 | for x in iter: break; else: pass 5 | try: pass except exc: pass else: pass finally: pass - | ^^^^^^^ Syntax Error: Expected newline, found 'finally' + | ^^^^^^^ Syntax Error: Expected newline, found `finally` 6 | try: pass; except exc: pass; else: pass; finally: pass | @@ -409,7 +409,7 @@ Module( 4 | for x in iter: break; else: pass 5 | try: pass except exc: pass else: pass finally: pass 6 | try: pass; except exc: pass; else: pass; finally: pass - | ^^^^^^ Syntax Error: Expected newline, found 'except' + | ^^^^^^ Syntax Error: Expected newline, found `except` | @@ -417,7 +417,7 @@ Module( 4 | for x in iter: break; else: pass 5 | try: pass except exc: pass else: pass finally: pass 6 | try: pass; except exc: pass; else: pass; finally: pass - | ^^^^ Syntax Error: Expected newline, found 'else' + | ^^^^ Syntax Error: Expected newline, found `else` | @@ -425,5 +425,5 @@ Module( 4 | for x in iter: break; else: pass 5 | try: pass except exc: pass else: pass finally: pass 6 | try: pass; except exc: pass; else: pass; finally: pass - | ^^^^^^^ Syntax Error: Expected newline, found 'finally' + | ^^^^^^^ Syntax Error: Expected newline, found `finally` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@named_expr_slice.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@named_expr_slice.py.snap index d521c935f4..ab0bcdf9ca 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@named_expr_slice.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@named_expr_slice.py.snap @@ -238,7 +238,7 @@ Module( 1 | # even after 3.9, an unparenthesized named expression is not allowed in a slice 2 | lst[x:=1:-1] 3 | lst[1:x:=1] - | ^^ Syntax Error: Expected ']', found ':=' + | ^^ Syntax Error: Expected `]`, found `:=` 4 | lst[1:3:x:=1] | @@ -265,7 +265,7 @@ Module( 2 | lst[x:=1:-1] 3 | lst[1:x:=1] 4 | lst[1:3:x:=1] - | ^^ Syntax Error: Expected ']', found ':=' + | ^^ Syntax Error: Expected `]`, 
found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@nested_quote_in_format_spec_py312.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@nested_quote_in_format_spec_py312.py.snap index c39b322387..7eec20f80b 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@nested_quote_in_format_spec_py312.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@nested_quote_in_format_spec_py312.py.snap @@ -88,5 +88,5 @@ Module( | 1 | # parse_options: {"target-version": "3.12"} 2 | f"{1:""}" # this is a ParseError on all versions - | ^ Syntax Error: f-string: expecting '}' + | ^ Syntax Error: f-string: expecting `}` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@node_range_with_gaps.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@node_range_with_gaps.py.snap index c3a8cdca25..361fe1288b 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@node_range_with_gaps.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@node_range_with_gaps.py.snap @@ -106,7 +106,7 @@ Module( | 1 | def foo # comment - | ^ Syntax Error: Expected '(', found newline + | ^ Syntax Error: Expected `(`, found newline 2 | def bar(): ... 3 | def baz | @@ -115,7 +115,7 @@ Module( | 1 | def foo # comment 2 | def bar(): ... - | ^^^ Syntax Error: Expected ')', found 'def' + | ^^^ Syntax Error: Expected `)`, found `def` 3 | def baz | @@ -124,12 +124,12 @@ Module( 1 | def foo # comment 2 | def bar(): ... 3 | def baz - | ^ Syntax Error: Expected '(', found newline + | ^ Syntax Error: Expected `(`, found newline | | 2 | def bar(): ... 
3 | def baz - | ^ Syntax Error: Expected ')', found end of file + | ^ Syntax Error: Expected `)`, found end of file | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@param_with_invalid_annotation.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@param_with_invalid_annotation.py.snap index 149cc7b4ce..d6b1a5944e 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@param_with_invalid_annotation.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@param_with_invalid_annotation.py.snap @@ -255,7 +255,7 @@ Module( 1 | def foo(arg: *int): ... 2 | def foo(arg: yield int): ... 3 | def foo(arg: x := int): ... - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_expected_after_star_separator.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_expected_after_star_separator.py.snap index b1d8bdaa92..f343b562c6 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_expected_after_star_separator.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_expected_after_star_separator.py.snap @@ -251,7 +251,7 @@ Module( | 1 | def foo(*): ... - | ^ Syntax Error: Expected one or more keyword parameter after '*' separator + | ^ Syntax Error: Expected one or more keyword parameter after `*` separator 2 | def foo(*,): ... 3 | def foo(a, *): ... | @@ -260,7 +260,7 @@ Module( | 1 | def foo(*): ... 2 | def foo(*,): ... - | ^ Syntax Error: Expected one or more keyword parameter after '*' separator + | ^ Syntax Error: Expected one or more keyword parameter after `*` separator 3 | def foo(a, *): ... 4 | def foo(a, *,): ... | @@ -270,7 +270,7 @@ Module( 1 | def foo(*): ... 2 | def foo(*,): ... 3 | def foo(a, *): ... 
- | ^ Syntax Error: Expected one or more keyword parameter after '*' separator + | ^ Syntax Error: Expected one or more keyword parameter after `*` separator 4 | def foo(a, *,): ... 5 | def foo(*, **kwargs): ... | @@ -280,7 +280,7 @@ Module( 2 | def foo(*,): ... 3 | def foo(a, *): ... 4 | def foo(a, *,): ... - | ^ Syntax Error: Expected one or more keyword parameter after '*' separator + | ^ Syntax Error: Expected one or more keyword parameter after `*` separator 5 | def foo(*, **kwargs): ... | @@ -289,5 +289,5 @@ Module( 3 | def foo(a, *): ... 4 | def foo(a, *,): ... 5 | def foo(*, **kwargs): ... - | ^^^^^^^^ Syntax Error: Expected one or more keyword parameter after '*' separator + | ^^^^^^^^ Syntax Error: Expected one or more keyword parameter after `*` separator | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_kwarg_after_star_separator.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_kwarg_after_star_separator.py.snap index 324b7246ea..589d8905af 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_kwarg_after_star_separator.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_kwarg_after_star_separator.py.snap @@ -67,5 +67,5 @@ Module( | 1 | def foo(*, **kwargs): ... - | ^^^^^^^^ Syntax Error: Expected one or more keyword parameter after '*' separator + | ^^^^^^^^ Syntax Error: Expected one or more keyword parameter after `*` separator | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_var_keyword_with_default.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_var_keyword_with_default.py.snap index 3a8644b3a8..87adc006a3 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_var_keyword_with_default.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_var_keyword_with_default.py.snap @@ -165,19 +165,19 @@ Module( | 1 | def foo(a, **kwargs={'b': 1, 'c': 2}): ... 
- | ^ Syntax Error: Parameter with '*' or '**' cannot have default value + | ^ Syntax Error: Parameter with `*` or `**` cannot have default value | | 1 | def foo(a, **kwargs={'b': 1, 'c': 2}): ... - | ^ Syntax Error: Expected ')', found '{' + | ^ Syntax Error: Expected `)`, found `{` | | 1 | def foo(a, **kwargs={'b': 1, 'c': 2}): ... - | ^ Syntax Error: Expected newline, found ')' + | ^ Syntax Error: Expected newline, found `)` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_var_positional_with_default.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_var_positional_with_default.py.snap index 1e84abfa40..e965d4a0b4 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_var_positional_with_default.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_var_positional_with_default.py.snap @@ -117,19 +117,19 @@ Module( | 1 | def foo(a, *args=(1, 2)): ... - | ^ Syntax Error: Parameter with '*' or '**' cannot have default value + | ^ Syntax Error: Parameter with `*` or `**` cannot have default value | | 1 | def foo(a, *args=(1, 2)): ... - | ^ Syntax Error: Expected ')', found '(' + | ^ Syntax Error: Expected `)`, found `(` | | 1 | def foo(a, *args=(1, 2)): ... - | ^ Syntax Error: Expected newline, found ')' + | ^ Syntax Error: Expected newline, found `)` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@pos_only_py37.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@pos_only_py37.py.snap index f6650bb5d9..5328fcf7dd 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@pos_only_py37.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@pos_only_py37.py.snap @@ -298,7 +298,7 @@ Module( 3 | def foo(a, /, b, /): ... 4 | def foo(a, *args, /, b): ... 5 | def foo(a, //): ... 
- | ^^ Syntax Error: Expected ',', found '//' + | ^^ Syntax Error: Expected `,`, found `//` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token.py.snap index 45a5b27c78..61f3230855 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token.py.snap @@ -785,7 +785,7 @@ Module( | 1 | # No indentation before the function definition 2 | if call(foo - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 3 | def bar(): 4 | pass | @@ -803,7 +803,7 @@ Module( | 7 | # Indented function definition 8 | if call(foo - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 9 | def bar(): 10 | pass | @@ -812,7 +812,7 @@ Module( | 13 | # There are multiple non-logical newlines (blank lines) in the `if` body 14 | if call(foo - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 15 | 16 | 17 | def bar(): @@ -822,7 +822,7 @@ Module( | 21 | # There are trailing whitespaces in the blank line inside the `if` body 22 | if call(foo - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 23 | 24 | def bar(): 25 | pass @@ -832,7 +832,7 @@ Module( | 28 | # The lexer is nested with multiple levels of parentheses 29 | if call(foo, [a, b - | ^ Syntax Error: Expected ']', found NonLogicalNewline + | ^ Syntax Error: Expected `]`, found NonLogicalNewline 30 | def bar(): 31 | pass | @@ -841,7 +841,7 @@ Module( | 34 | # The outer parenthesis is closed but the inner bracket isn't 35 | if call(foo, [a, b) - | ^ Syntax Error: Expected ']', found ')' + | ^ Syntax Error: Expected `]`, found `)` 36 | def bar(): 37 | pass | @@ -850,7 +850,7 @@ Module( | 34 | # The outer parenthesis is closed but the 
inner bracket isn't 35 | if call(foo, [a, b) - | ^ Syntax Error: Expected ':', found newline + | ^ Syntax Error: Expected `:`, found newline 36 | def bar(): 37 | pass | @@ -860,7 +860,7 @@ Module( 41 | # test is to make sure it emits a `NonLogicalNewline` token after `b`. 42 | if call(foo, [a, 43 | b - | ^ Syntax Error: Expected ']', found NonLogicalNewline + | ^ Syntax Error: Expected `]`, found NonLogicalNewline 44 | ) 45 | def bar(): 46 | pass @@ -871,7 +871,7 @@ Module( 42 | if call(foo, [a, 43 | b 44 | ) - | ^ Syntax Error: Expected ':', found newline + | ^ Syntax Error: Expected `:`, found newline 45 | def bar(): 46 | pass | @@ -890,7 +890,7 @@ Module( 49 | # F-strings uses normal list parsing, so test those as well 50 | if call(f"hello {x 51 | def bar(): - | ^^^ Syntax Error: f-string: expecting '}' + | ^^^ Syntax Error: f-string: expecting `}` 52 | pass | @@ -923,7 +923,7 @@ Module( | 55 | if call(f"hello 56 | def bar(): - | ^^^^ Syntax Error: Expected ',', found indent + | ^^^^ Syntax Error: Expected `,`, found indent 57 | pass | @@ -931,7 +931,7 @@ Module( | 55 | if call(f"hello 56 | def bar(): - | ^^^ Syntax Error: Expected ')', found 'def' + | ^^^ Syntax Error: Expected `)`, found `def` 57 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_mac_eol.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_mac_eol.py.snap index d9082066b8..5567459c70 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_mac_eol.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_mac_eol.py.snap @@ -113,5 +113,5 @@ Module( | 1 | if call(foo, [a, b def bar(): pass - | ^ Syntax Error: Expected ']', found NonLogicalNewline + | ^ Syntax Error: Expected `]`, found NonLogicalNewline | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_windows_eol.py.snap 
b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_windows_eol.py.snap index c3b23f4fbd..ae03fee095 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_windows_eol.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_windows_eol.py.snap @@ -113,7 +113,7 @@ Module( | 1 | if call(foo, [a, b - | ^ Syntax Error: Expected ']', found NonLogicalNewline + | ^ Syntax Error: Expected `]`, found NonLogicalNewline 2 | def bar(): 3 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__fstring_format_spec_1.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__fstring_format_spec_1.py.snap index 1e2ad65fcf..d836318aea 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__fstring_format_spec_1.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__fstring_format_spec_1.py.snap @@ -399,7 +399,7 @@ Module( | 5 | f'middle {'string':\ 6 | 'format spec'} - | ^ Syntax Error: f-string: expecting '}' + | ^ Syntax Error: f-string: expecting `}` 7 | 8 | f'middle {'string':\\ | @@ -445,7 +445,7 @@ Module( 6 | 'format spec'} 7 | 8 | f'middle {'string':\\ - | ^ Syntax Error: f-string: expecting '}' + | ^ Syntax Error: f-string: expecting `}` 9 | 'format spec'} 10 | 11 | f'middle {'string':\\\ @@ -492,7 +492,7 @@ Module( | 11 | f'middle {'string':\\\ 12 | 'format spec'} - | ^ Syntax Error: f-string: expecting '}' + | ^ Syntax Error: f-string: expecting `}` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__line_continuation_1.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__line_continuation_1.py.snap index 54c66a5216..21f4465c4e 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__line_continuation_1.py.snap +++ 
b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__line_continuation_1.py.snap @@ -110,7 +110,7 @@ Module( | 1 | call(a, b, \\\ 2 | - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 3 | def bar(): 4 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__line_continuation_windows_eol.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__line_continuation_windows_eol.py.snap index f1ae6a18c9..cde70bca42 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__line_continuation_windows_eol.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__line_continuation_windows_eol.py.snap @@ -93,7 +93,7 @@ Module( | 1 | call(a, b, # comment \ - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 2 | 3 | def bar(): 4 | pass diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_1.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_1.py.snap index 1a9af6dacc..f58010350b 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_1.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_1.py.snap @@ -83,7 +83,7 @@ Module( | 5 | f"""hello {x # comment 6 | y = 1 - | ^ Syntax Error: f-string: expecting '}' + | ^ Syntax Error: f-string: expecting `}` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_2.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_2.py.snap index 50bb114c7e..7a5f85ab4a 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_2.py.snap +++ 
b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_2.py.snap @@ -80,5 +80,5 @@ Module( | 5 | f'''{foo:.3f 6 | ''' - | ^^^ Syntax Error: f-string: expecting '}' + | ^^^ Syntax Error: f-string: expecting `}` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_3.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_3.py.snap index 174ebceee4..3c611d4fbe 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_3.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_3.py.snap @@ -110,7 +110,7 @@ Module( | 5 | if call(f'''{x:.3f 6 | ''' - | ^^^ Syntax Error: f-string: expecting '}' + | ^^^ Syntax Error: f-string: expecting `}` 7 | pass | @@ -118,6 +118,6 @@ Module( | 5 | if call(f'''{x:.3f 6 | ''' - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 7 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__function_type_parameters.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__function_type_parameters.py.snap index 9618f4200b..18b90424fa 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__function_type_parameters.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__function_type_parameters.py.snap @@ -375,7 +375,7 @@ Module( 9 | # on following lines. 10 | 11 | def keyword[A, await](): ... - | ^^^^^ Syntax Error: Expected an identifier, but found a keyword 'await' that cannot be used here + | ^^^^^ Syntax Error: Expected an identifier, but found a keyword `await` that cannot be used here 12 | 13 | def not_a_type_param[A, |, B](): ... | @@ -385,7 +385,7 @@ Module( 11 | def keyword[A, await](): ... 12 | 13 | def not_a_type_param[A, |, B](): ... 
- | ^ Syntax Error: Expected ',', found '|' + | ^ Syntax Error: Expected `,`, found `|` 14 | 15 | def multiple_commas[A,,B](): ... | @@ -433,7 +433,7 @@ Module( 17 | def multiple_trailing_commas[A,,](): ... 18 | 19 | def multiple_commas_and_recovery[A,,100](): ... - | ^^^ Syntax Error: Expected ']', found int + | ^^^ Syntax Error: Expected `]`, found int | @@ -441,7 +441,7 @@ Module( 17 | def multiple_trailing_commas[A,,](): ... 18 | 19 | def multiple_commas_and_recovery[A,,100](): ... - | ^ Syntax Error: Expected newline, found ']' + | ^ Syntax Error: Expected newline, found `]` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__if_extra_closing_parentheses.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__if_extra_closing_parentheses.py.snap index 886c21590c..780f943a96 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__if_extra_closing_parentheses.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__if_extra_closing_parentheses.py.snap @@ -40,7 +40,7 @@ Module( | 1 | # FIXME(micha): This creates two syntax errors instead of just one (and overlapping ones) 2 | if True)): - | ^ Syntax Error: Expected ':', found ')' + | ^ Syntax Error: Expected `:`, found `)` 3 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_2.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_2.py.snap index 34e3500f26..4bd787208c 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_2.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_2.py.snap @@ -111,7 +111,7 @@ Module( 2 | # This `as` pattern is unparenthesied so the parser never takes the path 3 | # where it might be confused as a complex literal pattern. 
4 | case x as y + 1j: - | ^ Syntax Error: Expected ':', found '+' + | ^ Syntax Error: Expected `:`, found `+` 5 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_3.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_3.py.snap index 07cd1a64e6..ad4dbb0ec8 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_3.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_3.py.snap @@ -113,7 +113,7 @@ Module( 2 | # Not in the mapping start token set, so the list parsing bails 3 | # v 4 | case {(x as y): 1}: - | ^ Syntax Error: Expected '}', found '(' + | ^ Syntax Error: Expected `}`, found `(` 5 | pass | @@ -131,7 +131,7 @@ Module( 2 | # Not in the mapping start token set, so the list parsing bails 3 | # v 4 | case {(x as y): 1}: - | ^ Syntax Error: Expected newline, found '}' + | ^ Syntax Error: Expected newline, found `}` 5 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_4.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_4.py.snap index 771706eaed..fde6679f01 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_4.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_4.py.snap @@ -114,7 +114,7 @@ Module( 2 | # This `as` pattern is unparenthesized so the parser never takes the path 3 | # where it might be confused as a mapping key pattern. 4 | case {x as y: 1}: - | ^^ Syntax Error: Expected ':', found 'as' + | ^^ Syntax Error: Expected `:`, found `as` 5 | pass | @@ -123,6 +123,6 @@ Module( 2 | # This `as` pattern is unparenthesized so the parser never takes the path 3 | # where it might be confused as a mapping key pattern. 
4 | case {x as y: 1}: - | ^ Syntax Error: Expected ',', found name + | ^ Syntax Error: Expected `,`, found name 5 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__invalid_mapping_pattern.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__invalid_mapping_pattern.py.snap index 5592524488..1fbaa9df86 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__invalid_mapping_pattern.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__invalid_mapping_pattern.py.snap @@ -540,7 +540,7 @@ Module( 1 | # Starred expression is not allowed as a mapping pattern key 2 | match subject: 3 | case {*key}: - | ^ Syntax Error: Expected ':', found '}' + | ^ Syntax Error: Expected `:`, found `}` 4 | pass 5 | case {*key: 1}: | @@ -570,7 +570,7 @@ Module( 5 | case {*key: 1}: 6 | pass 7 | case {*key 1}: - | ^ Syntax Error: Expected ':', found int + | ^ Syntax Error: Expected `:`, found int 8 | pass 9 | case {*key, None: 1}: | @@ -589,7 +589,7 @@ Module( 7 | case {*key 1}: 8 | pass 9 | case {*key, None: 1}: - | ^ Syntax Error: Expected ':', found ',' + | ^ Syntax Error: Expected `:`, found `,` 10 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__star_pattern_usage.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__star_pattern_usage.py.snap index 364d382ff2..d87687110b 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__star_pattern_usage.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__star_pattern_usage.py.snap @@ -580,7 +580,7 @@ Module( 15 | case Foo(x=*_): 16 | pass 17 | case {*_}: - | ^ Syntax Error: Expected ':', found '}' + | ^ Syntax Error: Expected `:`, found `}` 18 | pass 19 | case {*_: 1}: | diff --git 
a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__with__ambiguous_lpar_with_items.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__with__ambiguous_lpar_with_items.py.snap index 9d872e920e..61ff19b25d 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__with__ambiguous_lpar_with_items.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__with__ambiguous_lpar_with_items.py.snap @@ -1580,7 +1580,7 @@ Module( | 4 | with (item1, item2),: ... 5 | with (item1, item2), as f: ... - | ^^ Syntax Error: Expected ',', found 'as' + | ^^ Syntax Error: Expected `,`, found `as` 6 | with (item1, item2), item3,: ... 7 | with (*item): ... | @@ -1640,7 +1640,7 @@ Module( 9 | with (item := 10 as f): ... 10 | with (item1, item2 := 10 as f): ... 11 | with (x for x in range(10), item): ... - | ^ Syntax Error: Expected ')', found ',' + | ^ Syntax Error: Expected `)`, found `,` 12 | with (item, x for x in range(10)): ... | @@ -1649,7 +1649,7 @@ Module( 9 | with (item := 10 as f): ... 10 | with (item1, item2 := 10 as f): ... 11 | with (x for x in range(10), item): ... - | ^ Syntax Error: Expected ',', found ')' + | ^ Syntax Error: Expected `,`, found `)` 12 | with (item, x for x in range(10)): ... | @@ -1658,7 +1658,7 @@ Module( 10 | with (item1, item2 := 10 as f): ... 11 | with (x for x in range(10), item): ... 12 | with (item, x for x in range(10)): ... - | ^^^ Syntax Error: Expected ')', found 'for' + | ^^^ Syntax Error: Expected `)`, found `for` 13 | 14 | # Make sure the parser doesn't report the same error twice | @@ -1668,7 +1668,7 @@ Module( 10 | with (item1, item2 := 10 as f): ... 11 | with (x for x in range(10), item): ... 12 | with (item, x for x in range(10)): ... - | ^ Syntax Error: Expected ':', found ')' + | ^ Syntax Error: Expected `:`, found `)` 13 | 14 | # Make sure the parser doesn't report the same error twice | @@ -1707,7 +1707,7 @@ Module( 15 | with ((*item)): ... 
16 | 17 | with (*x for x in iter, item): ... - | ^ Syntax Error: Expected ')', found ',' + | ^ Syntax Error: Expected `)`, found `,` 18 | with (item1, *x for x in iter, item2): ... 19 | with (x as f, *y): ... | @@ -1717,7 +1717,7 @@ Module( 15 | with ((*item)): ... 16 | 17 | with (*x for x in iter, item): ... - | ^ Syntax Error: Expected ',', found ')' + | ^ Syntax Error: Expected `,`, found `)` 18 | with (item1, *x for x in iter, item2): ... 19 | with (x as f, *y): ... | @@ -1726,7 +1726,7 @@ Module( | 17 | with (*x for x in iter, item): ... 18 | with (item1, *x for x in iter, item2): ... - | ^^^ Syntax Error: Expected ')', found 'for' + | ^^^ Syntax Error: Expected `)`, found `for` 19 | with (x as f, *y): ... 20 | with (*x, y as f): ... | @@ -1735,7 +1735,7 @@ Module( | 17 | with (*x for x in iter, item): ... 18 | with (item1, *x for x in iter, item2): ... - | ^ Syntax Error: Expected ':', found ')' + | ^ Syntax Error: Expected `:`, found `)` 19 | with (x as f, *y): ... 20 | with (*x, y as f): ... | @@ -1804,7 +1804,7 @@ Module( 22 | with (x, yield y, z): ... 23 | with (x, yield from y): ... 24 | with (x as f, y) as f: ... - | ^^ Syntax Error: Expected ':', found 'as' + | ^^ Syntax Error: Expected `:`, found `as` 25 | with (x for x in iter as y): ... | @@ -1813,7 +1813,7 @@ Module( 23 | with (x, yield from y): ... 24 | with (x as f, y) as f: ... 25 | with (x for x in iter as y): ... - | ^^ Syntax Error: Expected ')', found 'as' + | ^^ Syntax Error: Expected `)`, found `as` 26 | 27 | # The inner `(...)` is parsed as parenthesized expression | @@ -1823,7 +1823,7 @@ Module( 23 | with (x, yield from y): ... 24 | with (x as f, y) as f: ... 25 | with (x for x in iter as y): ... - | ^ Syntax Error: Expected ',', found ')' + | ^ Syntax Error: Expected `,`, found `)` 26 | 27 | # The inner `(...)` is parsed as parenthesized expression | @@ -1832,7 +1832,7 @@ Module( | 27 | # The inner `(...)` is parsed as parenthesized expression 28 | with ((item as f)): ... 
- | ^^ Syntax Error: Expected ')', found 'as' + | ^^ Syntax Error: Expected `)`, found `as` 29 | 30 | with (item as f), x: ... | @@ -1841,7 +1841,7 @@ Module( | 27 | # The inner `(...)` is parsed as parenthesized expression 28 | with ((item as f)): ... - | ^ Syntax Error: Expected ':', found ')' + | ^ Syntax Error: Expected `:`, found `)` 29 | 30 | with (item as f), x: ... | @@ -1860,7 +1860,7 @@ Module( 28 | with ((item as f)): ... 29 | 30 | with (item as f), x: ... - | ^ Syntax Error: Expected ':', found ',' + | ^ Syntax Error: Expected `:`, found `,` 31 | with (item as f1) as f2: ... 32 | with (item1 as f, item2 := 0): ... | @@ -1869,7 +1869,7 @@ Module( | 30 | with (item as f), x: ... 31 | with (item as f1) as f2: ... - | ^^ Syntax Error: Expected ':', found 'as' + | ^^ Syntax Error: Expected `:`, found `as` 32 | with (item1 as f, item2 := 0): ... | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__with__unparenthesized_with_items.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__with__unparenthesized_with_items.py.snap index 7e2d808bc6..daebbd0a96 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__with__unparenthesized_with_items.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__with__unparenthesized_with_items.py.snap @@ -401,5 +401,5 @@ Module( 7 | with *item1, item2 as f: pass 8 | with item1 as f, *item2: pass 9 | with item := 0 as f: pass - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_lambda_without_parentheses.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_lambda_without_parentheses.py.snap index 0d23f0c0d2..0de7376e01 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_lambda_without_parentheses.py.snap +++ 
b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_lambda_without_parentheses.py.snap @@ -118,7 +118,7 @@ Module( | 1 | # parse_options: {"target-version": "3.14"} 2 | t"{lambda x: x}" - | ^^ Syntax Error: t-string: expecting '}' + | ^^ Syntax Error: t-string: expecting `}` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_unclosed_lbrace.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_unclosed_lbrace.py.snap index 4ff0a7d78f..f39f719d1d 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_unclosed_lbrace.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_unclosed_lbrace.py.snap @@ -259,7 +259,7 @@ Module( 1 | # parse_options: {"target-version": "3.14"} 2 | t"{" 3 | t"{foo!r" - | ^ Syntax Error: t-string: expecting '}' + | ^ Syntax Error: t-string: expecting `}` 4 | t"{foo=" 5 | t"{" | @@ -269,7 +269,7 @@ Module( 2 | t"{" 3 | t"{foo!r" 4 | t"{foo=" - | ^ Syntax Error: t-string: expecting '}' + | ^ Syntax Error: t-string: expecting `}` 5 | t"{" 6 | t"""{""" | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_unclosed_lbrace_in_format_spec.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_unclosed_lbrace_in_format_spec.py.snap index bc20f6172c..9789ed8922 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_unclosed_lbrace_in_format_spec.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_unclosed_lbrace_in_format_spec.py.snap @@ -143,7 +143,7 @@ Module( | 1 | # parse_options: {"target-version": "3.14"} 2 | t"hello {x:" - | ^ Syntax Error: t-string: expecting '}' + | ^ Syntax Error: t-string: expecting `}` 3 | t"hello {x:.3f" | @@ -152,5 +152,5 @@ Module( 1 | # parse_options: {"target-version": "3.14"} 2 | t"hello {x:" 3 | t"hello {x:.3f" - | ^ Syntax Error: t-string: expecting '}' + | ^ Syntax Error: t-string: expecting `}` | diff 
--git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_invalid_bound_expr.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_invalid_bound_expr.py.snap index 321704cd05..7876c6b6ec 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_invalid_bound_expr.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_invalid_bound_expr.py.snap @@ -288,7 +288,7 @@ Module( 2 | type X[T: yield x] = int 3 | type X[T: yield from x] = int 4 | type X[T: x := int] = int - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_param_spec_bound.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_param_spec_bound.py.snap index de9da4848d..351a141b60 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_param_spec_bound.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_param_spec_bound.py.snap @@ -88,7 +88,7 @@ Module( | 1 | type X[**T: int] = int - | ^ Syntax Error: Expected ']', found ':' + | ^ Syntax Error: Expected `]`, found `:` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_param_spec_invalid_default_expr.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_param_spec_invalid_default_expr.py.snap index dad7c709de..0bcbd3cc52 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_param_spec_invalid_default_expr.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_param_spec_invalid_default_expr.py.snap @@ -343,7 +343,7 @@ Module( 2 | type X[**P = yield x] = int 3 | type X[**P = yield from x] = int 4 | type X[**P = x := int] = int - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` 5 | type X[**P = *int] = int | diff --git 
a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_invalid_default_expr.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_invalid_default_expr.py.snap index 2831009c20..c3e38b8e9a 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_invalid_default_expr.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_invalid_default_expr.py.snap @@ -417,7 +417,7 @@ Module( 3 | type X[T = (yield x)] = int 4 | type X[T = yield from x] = int 5 | type X[T = x := int] = int - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` 6 | type X[T: int = *int] = int | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_tuple_bound.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_tuple_bound.py.snap index e1693e1722..ae228c0e30 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_tuple_bound.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_tuple_bound.py.snap @@ -88,7 +88,7 @@ Module( | 1 | type X[*T: int] = int - | ^ Syntax Error: Expected ']', found ':' + | ^ Syntax Error: Expected `]`, found `:` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_tuple_invalid_default_expr.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_tuple_invalid_default_expr.py.snap index 9b2d1c6de9..4aff137a73 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_tuple_invalid_default_expr.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_tuple_invalid_default_expr.py.snap @@ -361,7 +361,7 @@ Module( 3 | type X[*Ts = yield x] = int 4 | type X[*Ts = yield from x] = int 5 | type X[*Ts = x := int] = int - | ^^ Syntax Error: Expected ',', 
found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@unterminated_fstring_newline_recovery.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@unterminated_fstring_newline_recovery.py.snap index 0595f124f2..7ed7f32534 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@unterminated_fstring_newline_recovery.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@unterminated_fstring_newline_recovery.py.snap @@ -376,7 +376,7 @@ Module( 2 | 1 + 1 3 | f"hello {x 4 | 2 + 2 - | ^ Syntax Error: f-string: expecting '}' + | ^ Syntax Error: f-string: expecting `}` 5 | f"hello {x: 6 | 3 + 3 | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@while_stmt_invalid_test_expr.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@while_stmt_invalid_test_expr.py.snap index 0af23c288c..acdd7532ad 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@while_stmt_invalid_test_expr.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@while_stmt_invalid_test_expr.py.snap @@ -201,7 +201,7 @@ Module( 1 | while *x: ... 2 | while yield x: ... 3 | while a, b: ... - | ^ Syntax Error: Expected ':', found ',' + | ^ Syntax Error: Expected `:`, found `,` 4 | while a := 1, b: ... | @@ -210,5 +210,5 @@ Module( 2 | while yield x: ... 3 | while a, b: ... 4 | while a := 1, b: ... 
- | ^ Syntax Error: Expected ':', found ',' + | ^ Syntax Error: Expected `:`, found `,` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@while_stmt_missing_colon.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@while_stmt_missing_colon.py.snap index 67fb75a824..9ad6e62132 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@while_stmt_missing_colon.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@while_stmt_missing_colon.py.snap @@ -63,6 +63,6 @@ Module( 1 | while ( 2 | a < 30 # comment 3 | ) - | ^ Syntax Error: Expected ':', found newline + | ^ Syntax Error: Expected `:`, found newline 4 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@with_items_parenthesized_missing_colon.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@with_items_parenthesized_missing_colon.py.snap index 668c7c2c08..817368aa22 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@with_items_parenthesized_missing_colon.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@with_items_parenthesized_missing_colon.py.snap @@ -62,6 +62,6 @@ Module( | 1 | # `)` followed by a newline 2 | with (item1, item2) - | ^ Syntax Error: Expected ':', found newline + | ^ Syntax Error: Expected `:`, found newline 3 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@with_items_parenthesized_missing_comma.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@with_items_parenthesized_missing_comma.py.snap index ea060453a9..0c4b117dcf 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@with_items_parenthesized_missing_comma.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@with_items_parenthesized_missing_comma.py.snap @@ -338,7 +338,7 @@ Module( | 1 | with (item1 item2): ... 
- | ^^^^^ Syntax Error: Expected ',', found name + | ^^^^^ Syntax Error: Expected `,`, found name 2 | with (item1 as f1 item2): ... 3 | with (item1, item2 item3, item4): ... | @@ -347,7 +347,7 @@ Module( | 1 | with (item1 item2): ... 2 | with (item1 as f1 item2): ... - | ^^^^^ Syntax Error: Expected ',', found name + | ^^^^^ Syntax Error: Expected `,`, found name 3 | with (item1, item2 item3, item4): ... 4 | with (item1, item2 as f1 item3, item4): ... | @@ -357,7 +357,7 @@ Module( 1 | with (item1 item2): ... 2 | with (item1 as f1 item2): ... 3 | with (item1, item2 item3, item4): ... - | ^^^^^ Syntax Error: Expected ',', found name + | ^^^^^ Syntax Error: Expected `,`, found name 4 | with (item1, item2 as f1 item3, item4): ... 5 | with (item1, item2: ... | @@ -367,7 +367,7 @@ Module( 2 | with (item1 as f1 item2): ... 3 | with (item1, item2 item3, item4): ... 4 | with (item1, item2 as f1 item3, item4): ... - | ^^^^^ Syntax Error: Expected ',', found name + | ^^^^^ Syntax Error: Expected `,`, found name 5 | with (item1, item2: ... | @@ -376,5 +376,5 @@ Module( 3 | with (item1, item2 item3, item4): ... 4 | with (item1, item2 as f1 item3, item4): ... 5 | with (item1, item2: ... 
- | ^ Syntax Error: Expected ')', found ':' + | ^ Syntax Error: Expected `)`, found `:` | diff --git a/crates/ruff_python_parser/tests/snapshots/valid_syntax@nested_alternative_patterns.py.snap b/crates/ruff_python_parser/tests/snapshots/valid_syntax@nested_alternative_patterns.py.snap index 8d241b04ef..b685a0656d 100644 --- a/crates/ruff_python_parser/tests/snapshots/valid_syntax@nested_alternative_patterns.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/valid_syntax@nested_alternative_patterns.py.snap @@ -8,7 +8,7 @@ input_file: crates/ruff_python_parser/resources/inline/ok/nested_alternative_pat Module( ModModule { node_index: NodeIndex(None), - range: 0..181, + range: 0..271, body: [ Match( StmtMatch { @@ -489,6 +489,216 @@ Module( ], }, ), + Match( + StmtMatch { + node_index: NodeIndex(None), + range: 181..270, + subject: NumberLiteral( + ExprNumberLiteral { + node_index: NodeIndex(None), + range: 187..189, + value: Int( + 42, + ), + }, + ), + cases: [ + MatchCase { + range: 195..270, + node_index: NodeIndex(None), + pattern: MatchOr( + PatternMatchOr { + node_index: NodeIndex(None), + range: 200..265, + patterns: [ + MatchClass( + PatternMatchClass { + node_index: NodeIndex(None), + range: 200..246, + cls: Attribute( + ExprAttribute { + node_index: NodeIndex(None), + range: 200..213, + value: Name( + ExprName { + node_index: NodeIndex(None), + range: 200..203, + id: Name("ast"), + ctx: Load, + }, + ), + attr: Identifier { + id: Name("Subscript"), + range: 204..213, + node_index: NodeIndex(None), + }, + ctx: Load, + }, + ), + arguments: PatternArguments { + range: 213..246, + node_index: NodeIndex(None), + patterns: [ + MatchAs( + PatternMatchAs { + node_index: NodeIndex(None), + range: 214..215, + pattern: None, + name: Some( + Identifier { + id: Name("n"), + range: 214..215, + node_index: NodeIndex(None), + }, + ), + }, + ), + MatchOr( + PatternMatchOr { + node_index: NodeIndex(None), + range: 217..245, + patterns: [ + MatchClass( + PatternMatchClass 
{ + node_index: NodeIndex(None), + range: 217..231, + cls: Attribute( + ExprAttribute { + node_index: NodeIndex(None), + range: 217..229, + value: Name( + ExprName { + node_index: NodeIndex(None), + range: 217..220, + id: Name("ast"), + ctx: Load, + }, + ), + attr: Identifier { + id: Name("Constant"), + range: 221..229, + node_index: NodeIndex(None), + }, + ctx: Load, + }, + ), + arguments: PatternArguments { + range: 229..231, + node_index: NodeIndex(None), + patterns: [], + keywords: [], + }, + }, + ), + MatchClass( + PatternMatchClass { + node_index: NodeIndex(None), + range: 234..245, + cls: Attribute( + ExprAttribute { + node_index: NodeIndex(None), + range: 234..243, + value: Name( + ExprName { + node_index: NodeIndex(None), + range: 234..237, + id: Name("ast"), + ctx: Load, + }, + ), + attr: Identifier { + id: Name("Slice"), + range: 238..243, + node_index: NodeIndex(None), + }, + ctx: Load, + }, + ), + arguments: PatternArguments { + range: 243..245, + node_index: NodeIndex(None), + patterns: [], + keywords: [], + }, + }, + ), + ], + }, + ), + ], + keywords: [], + }, + }, + ), + MatchClass( + PatternMatchClass { + node_index: NodeIndex(None), + range: 249..265, + cls: Attribute( + ExprAttribute { + node_index: NodeIndex(None), + range: 249..262, + value: Name( + ExprName { + node_index: NodeIndex(None), + range: 249..252, + id: Name("ast"), + ctx: Load, + }, + ), + attr: Identifier { + id: Name("Attribute"), + range: 253..262, + node_index: NodeIndex(None), + }, + ctx: Load, + }, + ), + arguments: PatternArguments { + range: 262..265, + node_index: NodeIndex(None), + patterns: [ + MatchAs( + PatternMatchAs { + node_index: NodeIndex(None), + range: 263..264, + pattern: None, + name: Some( + Identifier { + id: Name("n"), + range: 263..264, + node_index: NodeIndex(None), + }, + ), + }, + ), + ], + keywords: [], + }, + }, + ), + ], + }, + ), + guard: None, + body: [ + Expr( + StmtExpr { + node_index: NodeIndex(None), + range: 267..270, + value: EllipsisLiteral( 
+ ExprEllipsisLiteral { + node_index: NodeIndex(None), + range: 267..270, + }, + ), + }, + ), + ], + }, + ], + }, + ), ], }, ) diff --git a/crates/ruff_server/src/server/api/diagnostics.rs b/crates/ruff_server/src/server/api/diagnostics.rs index 2c8faab6db..6f8efe47e8 100644 --- a/crates/ruff_server/src/server/api/diagnostics.rs +++ b/crates/ruff_server/src/server/api/diagnostics.rs @@ -1,7 +1,4 @@ -use lsp_types::Url; - use crate::{ - Session, lint::DiagnosticsMap, session::{Client, DocumentQuery, DocumentSnapshot}, }; @@ -22,21 +19,10 @@ pub(super) fn generate_diagnostics(snapshot: &DocumentSnapshot) -> DiagnosticsMa } pub(super) fn publish_diagnostics_for_document( - session: &Session, - url: &Url, + snapshot: &DocumentSnapshot, client: &Client, ) -> crate::server::Result<()> { - // Publish diagnostics if the client doesn't support pull diagnostics - if session.resolved_client_capabilities().pull_diagnostics { - return Ok(()); - } - - let snapshot = session - .take_snapshot(url.clone()) - .ok_or_else(|| anyhow::anyhow!("Unable to take snapshot for document with URL {url}")) - .with_failure_code(lsp_server::ErrorCode::InternalError)?; - - for (uri, diagnostics) in generate_diagnostics(&snapshot) { + for (uri, diagnostics) in generate_diagnostics(snapshot) { client .send_notification::( lsp_types::PublishDiagnosticsParams { @@ -52,14 +38,9 @@ pub(super) fn publish_diagnostics_for_document( } pub(super) fn clear_diagnostics_for_document( - session: &Session, query: &DocumentQuery, client: &Client, ) -> crate::server::Result<()> { - if session.resolved_client_capabilities().pull_diagnostics { - return Ok(()); - } - client .send_notification::( lsp_types::PublishDiagnosticsParams { diff --git a/crates/ruff_server/src/server/api/notifications/did_change.rs b/crates/ruff_server/src/server/api/notifications/did_change.rs index 5ac7a1f606..8e77cb593f 100644 --- a/crates/ruff_server/src/server/api/notifications/did_change.rs +++ 
b/crates/ruff_server/src/server/api/notifications/did_change.rs @@ -31,7 +31,11 @@ impl super::SyncNotificationHandler for DidChange { .update_text_document(&key, content_changes, new_version) .with_failure_code(ErrorCode::InternalError)?; - publish_diagnostics_for_document(session, &key.into_url(), client)?; + // Publish diagnostics if the client doesn't support pull diagnostics + if !session.resolved_client_capabilities().pull_diagnostics { + let snapshot = session.take_snapshot(key.into_url()).unwrap(); + publish_diagnostics_for_document(&snapshot, client)?; + } Ok(()) } diff --git a/crates/ruff_server/src/server/api/notifications/did_change_notebook.rs b/crates/ruff_server/src/server/api/notifications/did_change_notebook.rs index da11755d71..d092ccacb8 100644 --- a/crates/ruff_server/src/server/api/notifications/did_change_notebook.rs +++ b/crates/ruff_server/src/server/api/notifications/did_change_notebook.rs @@ -27,7 +27,10 @@ impl super::SyncNotificationHandler for DidChangeNotebook { .with_failure_code(ErrorCode::InternalError)?; // publish new diagnostics - publish_diagnostics_for_document(session, &key.into_url(), client)?; + let snapshot = session + .take_snapshot(key.into_url()) + .expect("snapshot should be available"); + publish_diagnostics_for_document(&snapshot, client)?; Ok(()) } diff --git a/crates/ruff_server/src/server/api/notifications/did_change_watched_files.rs b/crates/ruff_server/src/server/api/notifications/did_change_watched_files.rs index cb157d81f9..bc97231411 100644 --- a/crates/ruff_server/src/server/api/notifications/did_change_watched_files.rs +++ b/crates/ruff_server/src/server/api/notifications/did_change_watched_files.rs @@ -31,13 +31,19 @@ impl super::SyncNotificationHandler for DidChangeWatchedFiles { } else { // publish diagnostics for text documents for url in session.text_document_urls() { - publish_diagnostics_for_document(session, url, client)?; + let snapshot = session + .take_snapshot(url.clone()) + .expect("snapshot 
should be available"); + publish_diagnostics_for_document(&snapshot, client)?; } } // always publish diagnostics for notebook files (since they don't use pull diagnostics) for url in session.notebook_document_urls() { - publish_diagnostics_for_document(session, url, client)?; + let snapshot = session + .take_snapshot(url.clone()) + .expect("snapshot should be available"); + publish_diagnostics_for_document(&snapshot, client)?; } } diff --git a/crates/ruff_server/src/server/api/notifications/did_close.rs b/crates/ruff_server/src/server/api/notifications/did_close.rs index 5a482c4fcc..a3075a4846 100644 --- a/crates/ruff_server/src/server/api/notifications/did_close.rs +++ b/crates/ruff_server/src/server/api/notifications/did_close.rs @@ -27,7 +27,7 @@ impl super::SyncNotificationHandler for DidClose { ); return Ok(()); }; - clear_diagnostics_for_document(session, snapshot.query(), client)?; + clear_diagnostics_for_document(snapshot.query(), client)?; session .close_document(&key) diff --git a/crates/ruff_server/src/server/api/notifications/did_open.rs b/crates/ruff_server/src/server/api/notifications/did_open.rs index fa5f6b92df..41a6fb6cf8 100644 --- a/crates/ruff_server/src/server/api/notifications/did_open.rs +++ b/crates/ruff_server/src/server/api/notifications/did_open.rs @@ -1,5 +1,6 @@ use crate::TextDocument; use crate::server::Result; +use crate::server::api::LSPResult; use crate::server::api::diagnostics::publish_diagnostics_for_document; use crate::session::{Client, Session}; use lsp_types as types; @@ -29,7 +30,16 @@ impl super::SyncNotificationHandler for DidOpen { session.open_text_document(uri.clone(), document); - publish_diagnostics_for_document(session, &uri, client)?; + // Publish diagnostics if the client doesn't support pull diagnostics + if !session.resolved_client_capabilities().pull_diagnostics { + let snapshot = session + .take_snapshot(uri.clone()) + .ok_or_else(|| { + anyhow::anyhow!("Unable to take snapshot for document with URL {uri}") + 
}) + .with_failure_code(lsp_server::ErrorCode::InternalError)?; + publish_diagnostics_for_document(&snapshot, client)?; + } Ok(()) } diff --git a/crates/ruff_server/src/server/api/notifications/did_open_notebook.rs b/crates/ruff_server/src/server/api/notifications/did_open_notebook.rs index 3ce27168e4..a75e88ecc5 100644 --- a/crates/ruff_server/src/server/api/notifications/did_open_notebook.rs +++ b/crates/ruff_server/src/server/api/notifications/did_open_notebook.rs @@ -40,7 +40,10 @@ impl super::SyncNotificationHandler for DidOpenNotebook { session.open_notebook_document(uri.clone(), notebook); // publish diagnostics - publish_diagnostics_for_document(session, &uri, client)?; + let snapshot = session + .take_snapshot(uri) + .expect("snapshot should be available"); + publish_diagnostics_for_document(&snapshot, client)?; Ok(()) } diff --git a/crates/ruff_wasm/Cargo.toml b/crates/ruff_wasm/Cargo.toml index f399ef1007..aa10112777 100644 --- a/crates/ruff_wasm/Cargo.toml +++ b/crates/ruff_wasm/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ruff_wasm" -version = "0.14.2" +version = "0.14.4" publish = false authors = { workspace = true } edition = { workspace = true } diff --git a/crates/ruff_workspace/src/options.rs b/crates/ruff_workspace/src/options.rs index 47ee0fe738..708d6dcf0b 100644 --- a/crates/ruff_workspace/src/options.rs +++ b/crates/ruff_workspace/src/options.rs @@ -59,13 +59,20 @@ pub struct Options { )] pub cache_dir: Option, - /// A path to a local `pyproject.toml` file to merge into this + /// A path to a local `pyproject.toml` or `ruff.toml` file to merge into this /// configuration. User home directory and environment variables will be /// expanded. /// - /// To resolve the current `pyproject.toml` file, Ruff will first resolve - /// this base configuration file, then merge in any properties defined - /// in the current configuration file. 
+ /// To resolve the current configuration file, Ruff will first load + /// this base configuration file, then merge in properties defined + /// in the current configuration file. Most settings follow simple override + /// behavior where the child value replaces the parent value. However, + /// rule selection (`lint.select` and `lint.ignore`) has special merging + /// behavior: if the child configuration specifies `lint.select`, it + /// establishes a new baseline rule set and the parent's `lint.ignore` + /// rules are discarded; if the child configuration omits `lint.select`, + /// the parent's rule selection is inherited and both parent and child + /// `lint.ignore` rules are accumulated together. #[option( default = r#"null"#, value_type = "str", diff --git a/crates/ruff_workspace/src/pyproject.rs b/crates/ruff_workspace/src/pyproject.rs index 9cfa01a35d..53649a31e8 100644 --- a/crates/ruff_workspace/src/pyproject.rs +++ b/crates/ruff_workspace/src/pyproject.rs @@ -468,6 +468,62 @@ line-length = 500 "line-length must be between 1 and 320 (got 500)" ); + // Test value at u16::MAX boundary (65535) - should show range error + let invalid_line_length_65535 = toml::from_str::( + r" +[tool.ruff] +line-length = 65535 +", + ) + .expect_err("Deserialization should have failed for line-length at u16::MAX"); + + assert_eq!( + invalid_line_length_65535.message(), + "line-length must be between 1 and 320 (got 65535)" + ); + + // Test value exceeding u16::MAX (65536) - should show clear error + let invalid_line_length_65536 = toml::from_str::( + r" +[tool.ruff] +line-length = 65536 +", + ) + .expect_err("Deserialization should have failed for line-length exceeding u16::MAX"); + + assert_eq!( + invalid_line_length_65536.message(), + "line-length must be between 1 and 320 (got 65536)" + ); + + // Test value far exceeding u16::MAX (99_999) - should show clear error + let invalid_line_length_99999 = toml::from_str::( + r" +[tool.ruff] +line-length = 99_999 +", + ) + 
.expect_err("Deserialization should have failed for line-length far exceeding u16::MAX"); + + assert_eq!( + invalid_line_length_99999.message(), + "line-length must be between 1 and 320 (got 99999)" + ); + + // Test negative value - should show clear error + let invalid_line_length_negative = toml::from_str::( + r" +[tool.ruff] +line-length = -5 +", + ) + .expect_err("Deserialization should have failed for negative line-length"); + + assert_eq!( + invalid_line_length_negative.message(), + "line-length must be between 1 and 320 (got -5)" + ); + Ok(()) } diff --git a/crates/ty/docs/rules.md b/crates/ty/docs/rules.md index 858f1f0c7c..58b1db584c 100644 --- a/crates/ty/docs/rules.md +++ b/crates/ty/docs/rules.md @@ -39,7 +39,7 @@ def test(): -> "int": Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -63,7 +63,7 @@ Calling a non-callable object will raise a `TypeError` at runtime. Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -95,7 +95,7 @@ f(int) # error Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -126,7 +126,7 @@ a = 1 Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -158,7 +158,7 @@ class C(A, B): ... Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -190,7 +190,7 @@ class B(A): ... Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -217,7 +217,7 @@ class B(A, A): ... Default level: error · Added in 0.0.1-alpha.12 · Related issues · -View source +View source @@ -329,7 +329,7 @@ def test(): -> "Literal[5]": Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -359,7 +359,7 @@ class C(A, B): ... 
Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -385,7 +385,7 @@ t[3] # IndexError: tuple index out of range Default level: error · Added in 0.0.1-alpha.12 · Related issues · -View source +View source @@ -474,7 +474,7 @@ an atypical memory layout. Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -501,7 +501,7 @@ func("foo") # error: [invalid-argument-type] Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -529,7 +529,7 @@ a: int = '' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -563,7 +563,7 @@ C.instance_var = 3 # error: Cannot assign to instance variable Default level: error · Added in 0.0.1-alpha.19 · Related issues · -View source +View source @@ -599,7 +599,7 @@ asyncio.run(main()) Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -623,7 +623,7 @@ class A(42): ... # error: [invalid-base] Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -650,7 +650,7 @@ with 1: Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -679,7 +679,7 @@ a: str Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -723,7 +723,7 @@ except ZeroDivisionError: Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -756,17 +756,21 @@ class C[U](Generic[T]): ... Default level: error · Added in 0.0.1-alpha.17 · Related issues · -View source +View source **What it does** -Checks for subscript accesses with invalid keys. +Checks for subscript accesses with invalid keys and `TypedDict` construction with an +unknown key. **Why is this bad?** -Using an invalid key will raise a `KeyError` at runtime. +Subscripting with an invalid key will raise a `KeyError` at runtime. 
+ +Creating a `TypedDict` with an unknown key is likely a mistake; if the `TypedDict` is +`closed=true` it also violates the expectations of the type. **Examples** @@ -779,6 +783,10 @@ class Person(TypedDict): alice = Person(name="Alice", age=30) alice["height"] # KeyError: 'height' + +bob: Person = { "name": "Bob", "age": 30 } # typo! + +carol = Person(name="Carol", age=25) # typo! ``` ## `invalid-legacy-type-variable` @@ -787,7 +795,7 @@ alice["height"] # KeyError: 'height' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -822,7 +830,7 @@ def f(t: TypeVar("U")): ... Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -856,7 +864,7 @@ class B(metaclass=f): ... Default level: error · Added in 0.0.1-alpha.19 · Related issues · -View source +View source @@ -882,13 +890,43 @@ in a class's bases list. TypeError: can only inherit from a NamedTuple type and Generic ``` +## `invalid-newtype` + + +Default level: error · +Preview (since 1.0.0) · +Related issues · +View source + + + +**What it does** + +Checks for the creation of invalid `NewType`s + +**Why is this bad?** + +There are several requirements that you must follow when creating a `NewType`. + +**Examples** + +```python +from typing import NewType + +def get_name() -> str: ... + +Foo = NewType("Foo", int) # okay +Bar = NewType(get_name(), int) # error: The first argument to `NewType` must be a string literal +Baz = NewType("Baz", int | str) # error: invalid base for `typing.NewType` +``` + ## `invalid-overload` Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -938,7 +976,7 @@ def foo(x: int) -> int: ... Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -958,13 +996,44 @@ weakens a type checker's ability to accurately reason about your code. def f(a: int = ''): ... 
``` +## `invalid-paramspec` + + +Default level: error · +Added in 0.0.1-alpha.1 · +Related issues · +View source + + + +**What it does** + +Checks for the creation of invalid `ParamSpec`s + +**Why is this bad?** + +There are several requirements that you must follow when creating a `ParamSpec`. + +**Examples** + +```python +from typing import ParamSpec + +P1 = ParamSpec("P1") # okay +P2 = ParamSpec("S2") # error: ParamSpec name must match the variable it's assigned to +``` + +**References** + +- [Typing spec: ParamSpec](https://typing.python.org/en/latest/spec/generics.html#paramspec) + ## `invalid-protocol` Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -998,7 +1067,7 @@ TypeError: Protocols can only inherit from other protocols, got Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1047,7 +1116,7 @@ def g(): Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1072,7 +1141,7 @@ def func() -> int: Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1130,7 +1199,7 @@ TODO #14889 Default level: error · Added in 0.0.1-alpha.6 · Related issues · -View source +View source @@ -1157,7 +1226,7 @@ NewAlias = TypeAliasType(get_name(), int) # error: TypeAliasType name mus Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1187,7 +1256,7 @@ TYPE_CHECKING = '' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1217,7 +1286,7 @@ b: Annotated[int] # `Annotated` expects at least two arguments Default level: error · Added in 0.0.1-alpha.11 · Related issues · -View source +View source @@ -1251,7 +1320,7 @@ f(10) # Error Default level: error · Added in 0.0.1-alpha.11 · Related issues · -View source +View source @@ -1285,7 +1354,7 @@ class C: Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source 
+View source @@ -1320,7 +1389,7 @@ T = TypeVar('T', bound=str) # valid bound TypeVar Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1345,7 +1414,7 @@ func() # TypeError: func() missing 1 required positional argument: 'x' Default level: error · Added in 0.0.1-alpha.20 · Related issues · -View source +View source @@ -1378,7 +1447,7 @@ alice["age"] # KeyError Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1407,7 +1476,7 @@ func("string") # error: [no-matching-overload] Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1431,7 +1500,7 @@ Subscripting an object that does not support it will raise a `TypeError` at runt Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1457,7 +1526,7 @@ for i in 34: # TypeError: 'int' object is not iterable Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1484,7 +1553,7 @@ f(1, x=2) # Error raised here Default level: error · Added in 0.0.1-alpha.22 · Related issues · -View source +View source @@ -1542,7 +1611,7 @@ def test(): -> "int": Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1572,7 +1641,7 @@ static_assert(int(2.0 * 3.0) == 6) # error: does not have a statically known tr Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1601,7 +1670,7 @@ class B(A): ... 
# Error raised here Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1628,7 +1697,7 @@ f("foo") # Error raised here Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1656,7 +1725,7 @@ def _(x: int): Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1702,7 +1771,7 @@ class A: Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1729,7 +1798,7 @@ f(x=1, y=2) # Error raised here Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1757,7 +1826,7 @@ A().foo # AttributeError: 'A' object has no attribute 'foo' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1782,7 +1851,7 @@ import foo # ModuleNotFoundError: No module named 'foo' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1807,7 +1876,7 @@ print(x) # NameError: name 'x' is not defined Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1844,7 +1913,7 @@ b1 < b2 < b1 # exception raised here Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1872,7 +1941,7 @@ A() + A() # TypeError: unsupported operand type(s) for +: 'A' and 'A' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1897,7 +1966,7 @@ l[1:10:0] # ValueError: slice step cannot be zero Default level: warn · Added in 0.0.1-alpha.20 · Related issues · -View source +View source @@ -1938,7 +2007,7 @@ class SubProto(BaseProto, Protocol): Default level: warn · Added in 0.0.1-alpha.16 · Related issues · -View source +View source @@ -2026,7 +2095,7 @@ a = 20 / 0 # type: ignore Default level: warn · Added in 0.0.1-alpha.22 · Related issues · -View source +View source @@ -2054,7 +2123,7 @@ A.c # AttributeError: type object 'A' has 
no attribute 'c' Default level: warn · Added in 0.0.1-alpha.22 · Related issues · -View source +View source @@ -2086,7 +2155,7 @@ A()[0] # TypeError: 'A' object is not subscriptable Default level: warn · Added in 0.0.1-alpha.22 · Related issues · -View source +View source @@ -2118,7 +2187,7 @@ from module import a # ImportError: cannot import name 'a' from 'module' Default level: warn · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -2145,7 +2214,7 @@ cast(int, f()) # Redundant Default level: warn · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -2169,7 +2238,7 @@ reveal_type(1) # NameError: name 'reveal_type' is not defined Default level: warn · Added in 0.0.1-alpha.15 · Related issues · -View source +View source @@ -2227,7 +2296,7 @@ def g(): Default level: warn · Added in 0.0.1-alpha.7 · Related issues · -View source +View source @@ -2266,7 +2335,7 @@ class D(C): ... # error: [unsupported-base] Default level: warn · Added in 0.0.1-alpha.22 · Related issues · -View source +View source @@ -2329,7 +2398,7 @@ def foo(x: int | str) -> int | str: Default level: ignore · Preview (since 0.0.1-alpha.1) · Related issues · -View source +View source @@ -2353,7 +2422,7 @@ Dividing by zero raises a `ZeroDivisionError` at runtime. 
Default level: ignore · Added in 0.0.1-alpha.1 · Related issues · -View source +View source diff --git a/crates/ty/src/args.rs b/crates/ty/src/args.rs index f6a52a3c8c..ac334f37bf 100644 --- a/crates/ty/src/args.rs +++ b/crates/ty/src/args.rs @@ -1,5 +1,7 @@ use crate::logging::Verbosity; use crate::python_version::PythonVersion; +use clap::builder::Styles; +use clap::builder::styling::{AnsiColor, Effects}; use clap::error::ErrorKind; use clap::{ArgAction, ArgMatches, Error, Parser}; use ruff_db::system::SystemPathBuf; @@ -8,9 +10,17 @@ use ty_project::metadata::options::{EnvironmentOptions, Options, SrcOptions, Ter use ty_project::metadata::value::{RangedValue, RelativeGlobPattern, RelativePathBuf, ValueSource}; use ty_python_semantic::lint; +// Configures Clap v3-style help menu colors +const STYLES: Styles = Styles::styled() + .header(AnsiColor::Green.on_default().effects(Effects::BOLD)) + .usage(AnsiColor::Green.on_default().effects(Effects::BOLD)) + .literal(AnsiColor::Cyan.on_default().effects(Effects::BOLD)) + .placeholder(AnsiColor::Cyan.on_default()); + #[derive(Debug, Parser)] #[command(author, name = "ty", about = "An extremely fast Python type checker.")] #[command(long_version = crate::version::version())] +#[command(styles = STYLES)] pub struct Cli { #[command(subcommand)] pub(crate) command: Command, diff --git a/crates/ty/src/lib.rs b/crates/ty/src/lib.rs index 2b28329f3f..9c667b0c82 100644 --- a/crates/ty/src/lib.rs +++ b/crates/ty/src/lib.rs @@ -450,12 +450,12 @@ impl ty_project::ProgressReporter for IndicatifReporter { self.bar.set_draw_target(self.printer.progress_target()); } - fn report_checked_file(&self, db: &dyn Db, file: File, diagnostics: &[Diagnostic]) { + fn report_checked_file(&self, db: &ProjectDatabase, file: File, diagnostics: &[Diagnostic]) { self.collector.report_checked_file(db, file, diagnostics); self.bar.inc(1); } - fn report_diagnostics(&mut self, db: &dyn Db, diagnostics: Vec) { + fn report_diagnostics(&mut self, db: 
&ProjectDatabase, diagnostics: Vec) { self.collector.report_diagnostics(db, diagnostics); } } diff --git a/crates/ty/tests/cli/file_selection.rs b/crates/ty/tests/cli/file_selection.rs index 5668e5829d..46f9106c21 100644 --- a/crates/ty/tests/cli/file_selection.rs +++ b/crates/ty/tests/cli/file_selection.rs @@ -589,6 +589,81 @@ fn explicit_path_overrides_exclude() -> anyhow::Result<()> { Ok(()) } +#[test] +fn cli_and_configuration_exclude() -> anyhow::Result<()> { + let case = CliTest::with_files([ + ( + "src/main.py", + r#" + print(undefined_var) # error: unresolved-reference + "#, + ), + ( + "tests/generated.py", + r#" + print(dist_undefined_var) # error: unresolved-reference + "#, + ), + ( + "my_dist/other.py", + r#" + print(other_undefined_var) # error: unresolved-reference + "#, + ), + ( + "ty.toml", + r#" + [src] + exclude = ["tests/"] + "#, + ), + ])?; + + assert_cmd_snapshot!(case.command(), @r" + success: false + exit_code: 1 + ----- stdout ----- + error[unresolved-reference]: Name `other_undefined_var` used when not defined + --> my_dist/other.py:2:7 + | + 2 | print(other_undefined_var) # error: unresolved-reference + | ^^^^^^^^^^^^^^^^^^^ + | + info: rule `unresolved-reference` is enabled by default + + error[unresolved-reference]: Name `undefined_var` used when not defined + --> src/main.py:2:7 + | + 2 | print(undefined_var) # error: unresolved-reference + | ^^^^^^^^^^^^^ + | + info: rule `unresolved-reference` is enabled by default + + Found 2 diagnostics + + ----- stderr ----- + "); + + assert_cmd_snapshot!(case.command().arg("--exclude").arg("my_dist/"), @r" + success: false + exit_code: 1 + ----- stdout ----- + error[unresolved-reference]: Name `undefined_var` used when not defined + --> src/main.py:2:7 + | + 2 | print(undefined_var) # error: unresolved-reference + | ^^^^^^^^^^^^^ + | + info: rule `unresolved-reference` is enabled by default + + Found 1 diagnostic + + ----- stderr ----- + "); + + Ok(()) +} + #[test] fn invalid_include_pattern() -> 
anyhow::Result<()> { let case = CliTest::with_files([ diff --git a/crates/ty/tests/cli/main.rs b/crates/ty/tests/cli/main.rs index e911e300c0..446ba86611 100644 --- a/crates/ty/tests/cli/main.rs +++ b/crates/ty/tests/cli/main.rs @@ -5,6 +5,7 @@ mod python_environment; mod rule_selection; use anyhow::Context as _; +use insta::Settings; use insta::internals::SettingsBindDropGuard; use insta_cmd::{assert_cmd_snapshot, get_cargo_bin}; use std::{ @@ -760,8 +761,10 @@ fn can_handle_large_binop_expressions() -> anyhow::Result<()> { pub(crate) struct CliTest { _temp_dir: TempDir, - _settings_scope: SettingsBindDropGuard, + settings: Settings, + settings_scope: Option, project_dir: PathBuf, + ty_binary_path: PathBuf, } impl CliTest { @@ -794,7 +797,9 @@ impl CliTest { Ok(Self { project_dir, _temp_dir: temp_dir, - _settings_scope: settings_scope, + settings, + settings_scope: Some(settings_scope), + ty_binary_path: get_cargo_bin("ty"), }) } @@ -823,6 +828,30 @@ impl CliTest { Ok(()) } + /// Return [`Self`] with the ty binary copied to the specified path instead. + pub(crate) fn with_ty_at(mut self, dest_path: impl AsRef) -> anyhow::Result { + let dest_path = dest_path.as_ref(); + let dest_path = self.project_dir.join(dest_path); + + Self::ensure_parent_directory(&dest_path)?; + std::fs::copy(&self.ty_binary_path, &dest_path) + .with_context(|| format!("Failed to copy ty binary to `{}`", dest_path.display()))?; + + self.ty_binary_path = dest_path; + Ok(self) + } + + /// Add a filter to the settings and rebind them. + pub(crate) fn with_filter(mut self, pattern: &str, replacement: &str) -> Self { + self.settings.add_filter(pattern, replacement); + // Drop the old scope before binding a new one, otherwise the old scope is dropped _after_ + // binding and assigning the new one, restoring the settings to their state before the old + // scope was bound. 
+ drop(self.settings_scope.take()); + self.settings_scope = Some(self.settings.bind_to_scope()); + self + } + fn ensure_parent_directory(path: &Path) -> anyhow::Result<()> { if let Some(parent) = path.parent() { std::fs::create_dir_all(parent) @@ -868,7 +897,7 @@ impl CliTest { } pub(crate) fn command(&self) -> Command { - let mut command = Command::new(get_cargo_bin("ty")); + let mut command = Command::new(&self.ty_binary_path); command.current_dir(&self.project_dir).arg("check"); // Unset all environment variables because they can affect test behavior. @@ -881,3 +910,11 @@ impl CliTest { fn tempdir_filter(path: &Path) -> String { format!(r"{}\\?/?", regex::escape(path.to_str().unwrap())) } + +fn site_packages_filter(python_version: &str) -> String { + if cfg!(windows) { + "Lib/site-packages".to_string() + } else { + format!("lib/python{}/site-packages", regex::escape(python_version)) + } +} diff --git a/crates/ty/tests/cli/python_environment.rs b/crates/ty/tests/cli/python_environment.rs index 04fa8be88f..638fc6c4ca 100644 --- a/crates/ty/tests/cli/python_environment.rs +++ b/crates/ty/tests/cli/python_environment.rs @@ -1,7 +1,7 @@ use insta_cmd::assert_cmd_snapshot; use ruff_python_ast::PythonVersion; -use crate::CliTest; +use crate::{CliTest, site_packages_filter}; /// Specifying an option on the CLI should take precedence over the same setting in the /// project's configuration. Here, this is tested for the Python version. @@ -323,6 +323,231 @@ fn python_version_inferred_from_system_installation() -> anyhow::Result<()> { Ok(()) } +/// This attempts to simulate the tangled web of symlinks that a homebrew install has +/// which can easily confuse us if we're ever told to use it. +/// +/// The main thing this is regression-testing is a panic in one *extremely* specific case +/// that you have to try really hard to hit (but vscode, hilariously, did hit). 
+#[cfg(unix)] +#[test] +fn python_argument_trapped_in_a_symlink_factory() -> anyhow::Result<()> { + let case = CliTest::with_files([ + // This is the real python binary. + ( + "opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/bin/python3.13", + "", + ), + // There's a real site-packages here (although it's basically empty). + ( + "opt/homebrew/Cellar/python@3.13/3.13.5/lib/python3.13/site-packages/foo.py", + "", + ), + // There's also a real site-packages here (although it's basically empty). + ("opt/homebrew/lib/python3.13/site-packages/bar.py", ""), + // This has the real stdlib, but the site-packages in this dir is a symlink. + ( + "opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/lib/python3.13/abc.py", + "", + ), + // It's important that this our faux-homebrew not be in the same dir as our working directory + // to reproduce the crash, don't ask me why. + ( + "project/test.py", + "\ +import foo +import bar +import colorama +", + ), + ])?; + + // many python symlinks pointing to a single real python (the longest path) + case.write_symlink( + "opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/bin/python3.13", + "opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/bin/python3", + )?; + case.write_symlink( + "opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/bin/python3", + "opt/homebrew/Cellar/python@3.13/3.13.5/bin/python3", + )?; + case.write_symlink( + "opt/homebrew/Cellar/python@3.13/3.13.5/bin/python3", + "opt/homebrew/bin/python3", + )?; + // the "real" python's site-packages is a symlink to a different dir + case.write_symlink( + "opt/homebrew/Cellar/python@3.13/3.13.5/lib/python3.13/site-packages", + "opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/lib/python3.13/site-packages", + )?; + + // Try all 4 pythons with absolute paths to our fauxbrew install + 
assert_cmd_snapshot!(case.command() + .current_dir(case.root().join("project")) + .arg("--python").arg(case.root().join("opt/homebrew/bin/python3")), @r" + success: false + exit_code: 1 + ----- stdout ----- + error[unresolved-import]: Cannot resolve imported module `foo` + --> test.py:1:8 + | + 1 | import foo + | ^^^ + 2 | import bar + 3 | import colorama + | + info: Searched in the following paths during module resolution: + info: 1. /project (first-party code) + info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty) + info: 3. /opt/homebrew/lib/python3.13/site-packages (site-packages) + info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment + info: rule `unresolved-import` is enabled by default + + error[unresolved-import]: Cannot resolve imported module `colorama` + --> test.py:3:8 + | + 1 | import foo + 2 | import bar + 3 | import colorama + | ^^^^^^^^ + | + info: Searched in the following paths during module resolution: + info: 1. /project (first-party code) + info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty) + info: 3. /opt/homebrew/lib/python3.13/site-packages (site-packages) + info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment + info: rule `unresolved-import` is enabled by default + + Found 2 diagnostics + + ----- stderr ----- + "); + + assert_cmd_snapshot!(case.command() + .current_dir(case.root().join("project")) + .arg("--python").arg(case.root().join("opt/homebrew/Cellar/python@3.13/3.13.5/bin/python3")), @r" + success: false + exit_code: 1 + ----- stdout ----- + error[unresolved-import]: Cannot resolve imported module `bar` + --> test.py:2:8 + | + 1 | import foo + 2 | import bar + | ^^^ + 3 | import colorama + | + info: Searched in the following paths during module resolution: + info: 1. /project (first-party code) + info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty) + info: 3. 
/opt/homebrew/Cellar/python@3.13/3.13.5/lib/python3.13/site-packages (site-packages) + info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment + info: rule `unresolved-import` is enabled by default + + error[unresolved-import]: Cannot resolve imported module `colorama` + --> test.py:3:8 + | + 1 | import foo + 2 | import bar + 3 | import colorama + | ^^^^^^^^ + | + info: Searched in the following paths during module resolution: + info: 1. /project (first-party code) + info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty) + info: 3. /opt/homebrew/Cellar/python@3.13/3.13.5/lib/python3.13/site-packages (site-packages) + info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment + info: rule `unresolved-import` is enabled by default + + Found 2 diagnostics + + ----- stderr ----- + "); + + assert_cmd_snapshot!(case.command() + .current_dir(case.root().join("project")) + .arg("--python").arg(case.root().join("opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/bin/python3")), @r" + success: false + exit_code: 1 + ----- stdout ----- + error[unresolved-import]: Cannot resolve imported module `bar` + --> test.py:2:8 + | + 1 | import foo + 2 | import bar + | ^^^ + 3 | import colorama + | + info: Searched in the following paths during module resolution: + info: 1. /project (first-party code) + info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty) + info: 3. 
/opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/lib/python3.13/site-packages (site-packages) + info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment + info: rule `unresolved-import` is enabled by default + + error[unresolved-import]: Cannot resolve imported module `colorama` + --> test.py:3:8 + | + 1 | import foo + 2 | import bar + 3 | import colorama + | ^^^^^^^^ + | + info: Searched in the following paths during module resolution: + info: 1. /project (first-party code) + info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty) + info: 3. /opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/lib/python3.13/site-packages (site-packages) + info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment + info: rule `unresolved-import` is enabled by default + + Found 2 diagnostics + + ----- stderr ----- + "); + + assert_cmd_snapshot!(case.command() + .current_dir(case.root().join("project")) + .arg("--python").arg(case.root().join("opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/bin/python3.13")), @r" + success: false + exit_code: 1 + ----- stdout ----- + error[unresolved-import]: Cannot resolve imported module `bar` + --> test.py:2:8 + | + 1 | import foo + 2 | import bar + | ^^^ + 3 | import colorama + | + info: Searched in the following paths during module resolution: + info: 1. /project (first-party code) + info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty) + info: 3. 
/opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/lib/python3.13/site-packages (site-packages) + info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment + info: rule `unresolved-import` is enabled by default + + error[unresolved-import]: Cannot resolve imported module `colorama` + --> test.py:3:8 + | + 1 | import foo + 2 | import bar + 3 | import colorama + | ^^^^^^^^ + | + info: Searched in the following paths during module resolution: + info: 1. /project (first-party code) + info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty) + info: 3. /opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/lib/python3.13/site-packages (site-packages) + info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment + info: rule `unresolved-import` is enabled by default + + Found 2 diagnostics + + ----- stderr ----- + "); + + Ok(()) +} + /// On Unix systems, it's common for a Python installation at `.venv/bin/python` to only be a symlink /// to a system Python installation. We must be careful not to resolve the symlink too soon! /// If we do, we will incorrectly add the system installation's `site-packages` as a search path, @@ -1654,6 +1879,278 @@ home = ./ Ok(()) } +/// ty should include site packages from its own environment when no other environment is found. 
+#[test] +fn ty_environment_is_only_environment() -> anyhow::Result<()> { + let ty_venv_site_packages = if cfg!(windows) { + "ty-venv/Lib/site-packages" + } else { + "ty-venv/lib/python3.13/site-packages" + }; + + let ty_executable_path = if cfg!(windows) { + "ty-venv/Scripts/ty.exe" + } else { + "ty-venv/bin/ty" + }; + + let ty_package_path = format!("{ty_venv_site_packages}/ty_package/__init__.py"); + + let case = CliTest::with_files([ + (ty_package_path.as_str(), "class TyEnvClass: ..."), + ( + "ty-venv/pyvenv.cfg", + r" + home = ./ + version = 3.13 + ", + ), + ( + "test.py", + r" + from ty_package import TyEnvClass + ", + ), + ])?; + + let case = case.with_ty_at(ty_executable_path)?; + assert_cmd_snapshot!(case.command(), @r###" + success: true + exit_code: 0 + ----- stdout ----- + All checks passed! + + ----- stderr ----- + "###); + + Ok(()) +} + +/// ty should include site packages from both its own environment and a local `.venv`. The packages +/// from ty's environment should take precedence. 
+#[test] +fn ty_environment_and_discovered_venv() -> anyhow::Result<()> { + let ty_venv_site_packages = if cfg!(windows) { + "ty-venv/Lib/site-packages" + } else { + "ty-venv/lib/python3.13/site-packages" + }; + + let ty_executable_path = if cfg!(windows) { + "ty-venv/Scripts/ty.exe" + } else { + "ty-venv/bin/ty" + }; + + let local_venv_site_packages = if cfg!(windows) { + ".venv/Lib/site-packages" + } else { + ".venv/lib/python3.13/site-packages" + }; + + let ty_unique_package = format!("{ty_venv_site_packages}/ty_package/__init__.py"); + let local_unique_package = format!("{local_venv_site_packages}/local_package/__init__.py"); + let ty_conflicting_package = format!("{ty_venv_site_packages}/shared_package/__init__.py"); + let local_conflicting_package = + format!("{local_venv_site_packages}/shared_package/__init__.py"); + + let case = CliTest::with_files([ + (ty_unique_package.as_str(), "class TyEnvClass: ..."), + (local_unique_package.as_str(), "class LocalClass: ..."), + (ty_conflicting_package.as_str(), "class FromTyEnv: ..."), + ( + local_conflicting_package.as_str(), + "class FromLocalVenv: ...", + ), + ( + "ty-venv/pyvenv.cfg", + r" + home = ./ + version = 3.13 + ", + ), + ( + ".venv/pyvenv.cfg", + r" + home = ./ + version = 3.13 + ", + ), + ( + "test.py", + r" + # Should resolve from ty's environment + from ty_package import TyEnvClass + # Should resolve from local .venv + from local_package import LocalClass + # Should resolve from ty's environment (takes precedence) + from shared_package import FromTyEnv + # Should NOT resolve (shadowed by ty's environment version) + from shared_package import FromLocalVenv + ", + ), + ])? 
+ .with_ty_at(ty_executable_path)?; + + assert_cmd_snapshot!(case.command(), @r###" + success: false + exit_code: 1 + ----- stdout ----- + error[unresolved-import]: Module `shared_package` has no member `FromLocalVenv` + --> test.py:9:28 + | + 7 | from shared_package import FromTyEnv + 8 | # Should NOT resolve (shadowed by ty's environment version) + 9 | from shared_package import FromLocalVenv + | ^^^^^^^^^^^^^ + | + info: rule `unresolved-import` is enabled by default + + Found 1 diagnostic + + ----- stderr ----- + "###); + + Ok(()) +} + +/// When `VIRTUAL_ENV` is set, ty should *not* discover its own environment's site-packages. +#[test] +fn ty_environment_and_active_environment() -> anyhow::Result<()> { + let ty_venv_site_packages = if cfg!(windows) { + "ty-venv/Lib/site-packages" + } else { + "ty-venv/lib/python3.13/site-packages" + }; + + let ty_executable_path = if cfg!(windows) { + "ty-venv/Scripts/ty.exe" + } else { + "ty-venv/bin/ty" + }; + + let active_venv_site_packages = if cfg!(windows) { + "active-venv/Lib/site-packages" + } else { + "active-venv/lib/python3.13/site-packages" + }; + + let ty_package_path = format!("{ty_venv_site_packages}/ty_package/__init__.py"); + let active_package_path = format!("{active_venv_site_packages}/active_package/__init__.py"); + + let case = CliTest::with_files([ + (ty_package_path.as_str(), "class TyEnvClass: ..."), + ( + "ty-venv/pyvenv.cfg", + r" + home = ./ + version = 3.13 + ", + ), + (active_package_path.as_str(), "class ActiveClass: ..."), + ( + "active-venv/pyvenv.cfg", + r" + home = ./ + version = 3.13 + ", + ), + ( + "test.py", + r" + from ty_package import TyEnvClass + from active_package import ActiveClass + ", + ), + ])? + .with_ty_at(ty_executable_path)? 
+ .with_filter(&site_packages_filter("3.13"), ""); + + assert_cmd_snapshot!( + case.command() + .env("VIRTUAL_ENV", case.root().join("active-venv")), + @r" + success: false + exit_code: 1 + ----- stdout ----- + error[unresolved-import]: Cannot resolve imported module `ty_package` + --> test.py:2:6 + | + 2 | from ty_package import TyEnvClass + | ^^^^^^^^^^ + 3 | from active_package import ActiveClass + | + info: Searched in the following paths during module resolution: + info: 1. / (first-party code) + info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty) + info: 3. /active-venv/ (site-packages) + info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment + info: rule `unresolved-import` is enabled by default + + Found 1 diagnostic + + ----- stderr ----- + " + ); + + Ok(()) +} + +/// When ty is installed in a system environment rather than a virtual environment, it should +/// not include the environment's site-packages in its search path. +#[test] +fn ty_environment_is_system_not_virtual() -> anyhow::Result<()> { + let ty_system_site_packages = if cfg!(windows) { + "system-python/Lib/site-packages" + } else { + "system-python/lib/python3.13/site-packages" + }; + + let ty_executable_path = if cfg!(windows) { + "system-python/Scripts/ty.exe" + } else { + "system-python/bin/ty" + }; + + let ty_package_path = format!("{ty_system_site_packages}/system_package/__init__.py"); + + let case = CliTest::with_files([ + // Package in system Python installation (should NOT be discovered) + (ty_package_path.as_str(), "class SystemClass: ..."), + // Note: NO pyvenv.cfg - this is a system installation, not a venv + ( + "test.py", + r" + from system_package import SystemClass + ", + ), + ])? 
+ .with_ty_at(ty_executable_path)?; + + assert_cmd_snapshot!(case.command(), @r###" + success: false + exit_code: 1 + ----- stdout ----- + error[unresolved-import]: Cannot resolve imported module `system_package` + --> test.py:2:6 + | + 2 | from system_package import SystemClass + | ^^^^^^^^^^^^^^ + | + info: Searched in the following paths during module resolution: + info: 1. / (first-party code) + info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty) + info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment + info: rule `unresolved-import` is enabled by default + + Found 1 diagnostic + + ----- stderr ----- + "###); + + Ok(()) +} + #[test] fn src_root_deprecation_warning() -> anyhow::Result<()> { let case = CliTest::with_files([ diff --git a/crates/ty_completion_eval/completion-evaluation-tasks.csv b/crates/ty_completion_eval/completion-evaluation-tasks.csv index 01b8ca4373..93b6cd4736 100644 --- a/crates/ty_completion_eval/completion-evaluation-tasks.csv +++ b/crates/ty_completion_eval/completion-evaluation-tasks.csv @@ -1,5 +1,5 @@ name,file,index,rank -auto-import-skips-current-module,main.py,0,4 +auto-import-skips-current-module,main.py,0,1 fstring-completions,main.py,0,1 higher-level-symbols-preferred,main.py,0, higher-level-symbols-preferred,main.py,1,1 @@ -10,17 +10,19 @@ import-deprioritizes-type_check_only,main.py,1,1 import-deprioritizes-type_check_only,main.py,2,1 import-deprioritizes-type_check_only,main.py,3,2 import-deprioritizes-type_check_only,main.py,4,3 -internal-typeshed-hidden,main.py,0,4 -none-completion,main.py,0,11 +import-keyword-completion,main.py,0,1 +internal-typeshed-hidden,main.py,0,5 +none-completion,main.py,0,2 numpy-array,main.py,0, numpy-array,main.py,1,1 object-attr-instance-methods,main.py,0,1 object-attr-instance-methods,main.py,1,1 +pass-keyword-completion,main.py,0,1 raise-uses-base-exception,main.py,0,2 -scope-existing-over-new-import,main.py,0,474 
+scope-existing-over-new-import,main.py,0,1 scope-prioritize-closer,main.py,0,2 scope-simple-long-identifier,main.py,0,1 tstring-completions,main.py,0,1 ty-extensions-lower-stdlib,main.py,0,8 type-var-typing-over-ast,main.py,0,3 -type-var-typing-over-ast,main.py,1,270 +type-var-typing-over-ast,main.py,1,279 diff --git a/crates/ty_completion_eval/truth/import-keyword-completion/completion.toml b/crates/ty_completion_eval/truth/import-keyword-completion/completion.toml new file mode 100644 index 0000000000..1c3c4b8ea4 --- /dev/null +++ b/crates/ty_completion_eval/truth/import-keyword-completion/completion.toml @@ -0,0 +1,2 @@ +[settings] +auto-import = false diff --git a/crates/ty_completion_eval/truth/import-keyword-completion/main.py b/crates/ty_completion_eval/truth/import-keyword-completion/main.py new file mode 100644 index 0000000000..beb6aff8e4 --- /dev/null +++ b/crates/ty_completion_eval/truth/import-keyword-completion/main.py @@ -0,0 +1 @@ +from collections im diff --git a/crates/ty_completion_eval/truth/import-keyword-completion/pyproject.toml b/crates/ty_completion_eval/truth/import-keyword-completion/pyproject.toml new file mode 100644 index 0000000000..cd277d8097 --- /dev/null +++ b/crates/ty_completion_eval/truth/import-keyword-completion/pyproject.toml @@ -0,0 +1,5 @@ +[project] +name = "test" +version = "0.1.0" +requires-python = ">=3.13" +dependencies = [] diff --git a/crates/ty_completion_eval/truth/import-keyword-completion/uv.lock b/crates/ty_completion_eval/truth/import-keyword-completion/uv.lock new file mode 100644 index 0000000000..a4937d10d3 --- /dev/null +++ b/crates/ty_completion_eval/truth/import-keyword-completion/uv.lock @@ -0,0 +1,8 @@ +version = 1 +revision = 3 +requires-python = ">=3.13" + +[[package]] +name = "test" +version = "0.1.0" +source = { virtual = "." 
} diff --git a/crates/ty_completion_eval/truth/pass-keyword-completion/completion.toml b/crates/ty_completion_eval/truth/pass-keyword-completion/completion.toml new file mode 100644 index 0000000000..1c3c4b8ea4 --- /dev/null +++ b/crates/ty_completion_eval/truth/pass-keyword-completion/completion.toml @@ -0,0 +1,2 @@ +[settings] +auto-import = false diff --git a/crates/ty_completion_eval/truth/pass-keyword-completion/main.py b/crates/ty_completion_eval/truth/pass-keyword-completion/main.py new file mode 100644 index 0000000000..48bd0f44a3 --- /dev/null +++ b/crates/ty_completion_eval/truth/pass-keyword-completion/main.py @@ -0,0 +1,3 @@ +match x: + case int(): + pa diff --git a/crates/ty_completion_eval/truth/pass-keyword-completion/pyproject.toml b/crates/ty_completion_eval/truth/pass-keyword-completion/pyproject.toml new file mode 100644 index 0000000000..cd277d8097 --- /dev/null +++ b/crates/ty_completion_eval/truth/pass-keyword-completion/pyproject.toml @@ -0,0 +1,5 @@ +[project] +name = "test" +version = "0.1.0" +requires-python = ">=3.13" +dependencies = [] diff --git a/crates/ty_completion_eval/truth/pass-keyword-completion/uv.lock b/crates/ty_completion_eval/truth/pass-keyword-completion/uv.lock new file mode 100644 index 0000000000..a4937d10d3 --- /dev/null +++ b/crates/ty_completion_eval/truth/pass-keyword-completion/uv.lock @@ -0,0 +1,8 @@ +version = 1 +revision = 3 +requires-python = ">=3.13" + +[[package]] +name = "test" +version = "0.1.0" +source = { virtual = "." 
} diff --git a/crates/ty_ide/src/completion.rs b/crates/ty_ide/src/completion.rs index 99debdf4d9..8c90597c74 100644 --- a/crates/ty_ide/src/completion.rs +++ b/crates/ty_ide/src/completion.rs @@ -127,7 +127,8 @@ impl<'db> Completion<'db> { Type::NominalInstance(_) | Type::PropertyInstance(_) | Type::BoundSuper(_) - | Type::TypedDict(_) => CompletionKind::Struct, + | Type::TypedDict(_) + | Type::NewTypeInstance(_) => CompletionKind::Struct, Type::IntLiteral(_) | Type::BooleanLiteral(_) | Type::TypeIs(_) @@ -160,6 +161,34 @@ impl<'db> Completion<'db> { .and_then(|ty| imp(db, ty, &CompletionKindVisitor::default())) }) } + + fn keyword(name: &str) -> Self { + Completion { + name: name.into(), + insert: None, + ty: None, + kind: Some(CompletionKind::Keyword), + module_name: None, + import: None, + builtin: false, + is_type_check_only: false, + documentation: None, + } + } + + fn value_keyword(name: &str, ty: Type<'db>) -> Completion<'db> { + Completion { + name: name.into(), + insert: None, + ty: Some(ty), + kind: Some(CompletionKind::Keyword), + module_name: None, + import: None, + builtin: true, + is_type_check_only: false, + documentation: None, + } + } } /// The "kind" of a completion. 
@@ -212,11 +241,16 @@ pub fn completion<'db>( offset: TextSize, ) -> Vec> { let parsed = parsed_module(db, file).load(db); - if is_in_comment(&parsed, offset) || is_in_string(&parsed, offset) { + let tokens = tokens_start_before(parsed.tokens(), offset); + let typed = find_typed_text(db, file, &parsed, offset); + + if is_in_no_completions_place(db, tokens, file) { return vec![]; } + if let Some(completions) = only_keyword_completion(tokens, typed.as_deref()) { + return vec![completions]; + } - let typed = find_typed_text(db, file, &parsed, offset); let typed_query = typed .as_deref() .map(QueryPattern::new) @@ -255,7 +289,7 @@ pub fn completion<'db>( .collect(); if scoped.is_some() { - add_keyword_value_completions(db, &typed_query, &mut completions); + add_keyword_completions(db, &typed_query, &mut completions); } if settings.auto_import { if let Some(scoped) = scoped { @@ -274,36 +308,57 @@ pub fn completion<'db>( completions } -/// Adds a subset of completions derived from keywords. +/// Adds completions derived from keywords. /// -/// Note that at present, these should only be added to "scoped" -/// completions. i.e., This will include `None`, `True`, `False`, etc. -fn add_keyword_value_completions<'db>( +/// This should generally only be used when offering "scoped" completions. +/// This will include keywords corresponding to Python values (like `None`) +/// and general language keywords (like `raise`). 
+fn add_keyword_completions<'db>( db: &'db dyn Db, query: &QueryPattern, completions: &mut Vec>, ) { - let keywords = [ + let keyword_values = [ ("None", Type::none(db)), ("True", Type::BooleanLiteral(true)), ("False", Type::BooleanLiteral(false)), ]; - for (name, ty) in keywords { + for (name, ty) in keyword_values { if !query.is_match_symbol_name(name) { continue; } - completions.push(Completion { - name: ast::name::Name::new(name), - insert: None, - ty: Some(ty), - kind: None, - module_name: None, - import: None, - is_type_check_only: false, - builtin: true, - documentation: None, - }); + completions.push(Completion::value_keyword(name, ty)); } + + // Note that we specifically omit the `type` keyword here, since + // it will be included via `builtins`. This does make its sorting + // priority slighty different than other keywords, but it's not + // clear (to me, AG) if that's an issue or not. Since the builtin + // completion has an actual type associated with it, we use that + // instead of a keyword completion. + let keywords = [ + "and", "as", "assert", "async", "await", "break", "class", "continue", "def", "del", + "elif", "else", "except", "finally", "for", "from", "global", "if", "import", "in", "is", + "lambda", "nonlocal", "not", "or", "pass", "raise", "return", "try", "while", "with", + "yield", "case", "match", + ]; + for name in keywords { + if !query.is_match_symbol_name(name) { + continue; + } + completions.push(Completion::keyword(name)); + } +} + +/// When the tokens indicate that the last token should be precisely one +/// possible keyword, we provide a single completion for it. +/// +/// `typed` should be the text that we think the user has typed so far. +fn only_keyword_completion<'db>(tokens: &[Token], typed: Option<&str>) -> Option> { + if is_import_from_incomplete(tokens, typed) { + return Some(Completion::keyword("import")); + } + None } /// Adds completions not in scope. 
@@ -798,6 +853,67 @@ fn import_tokens(tokens: &[Token]) -> Option<(&Token, &Token)> { None } +/// Looks for the start of a `from module ` statement. +/// +/// If found, `true` is returned. +/// +/// `typed` should be the text that we think the user has typed so far. +fn is_import_from_incomplete(tokens: &[Token], typed: Option<&str>) -> bool { + // N.B. The implementation here is very similar to + // `from_import_tokens`. The main difference is that + // we're just looking for whether we should suggest + // the `import` keyword. So this is a little simpler. + + use TokenKind as TK; + + const LIMIT: usize = 1_000; + + /// A state used to "parse" the tokens preceding the user's cursor, + /// in reverse, to detect a "from import" statement. + enum S { + Start, + ImportKeyword, + ModulePossiblyDotted, + ModuleOnlyDotted, + } + + let mut state = S::Start; + if typed.is_none() { + state = S::ImportKeyword; + } + // Move backward through the tokens until we get to + // the `from` token. + for token in tokens.iter().rev().take(LIMIT) { + state = match (state, token.kind()) { + // Match an incomplete `import` keyword. + // + // It's okay to pop off a newline token here initially, + // since it may occur before the user starts typing + // `import` but after the module name. + (S::Start, TK::Newline | TK::Name | TK::Import) => S::ImportKeyword, + // We are a bit more careful with how we parse the module + // here than in `from_import_tokens`. In particular, we + // want to make sure we don't incorrectly suggest `import` + // for `from os.i`. If we aren't careful, then + // `i` could be considered an incomplete `import` keyword + // and `os.` is the module. But of course, ending with a + // `.` (unless the entire module is dots) is invalid. 
+ (S::ImportKeyword, TK::Dot | TK::Ellipsis) => S::ModuleOnlyDotted, + (S::ImportKeyword, TK::Name | TK::Case | TK::Match | TK::Type | TK::Unknown) => { + S::ModulePossiblyDotted + } + (S::ModuleOnlyDotted, TK::Dot | TK::Ellipsis) => S::ModuleOnlyDotted, + ( + S::ModulePossiblyDotted, + TK::Name | TK::Dot | TK::Ellipsis | TK::Case | TK::Match | TK::Type | TK::Unknown, + ) => S::ModulePossiblyDotted, + (S::ModulePossiblyDotted | S::ModuleOnlyDotted, TK::From) => return true, + _ => return false, + }; + } + false +} + /// Looks for the text typed immediately before the cursor offset /// given. /// @@ -812,7 +928,10 @@ fn find_typed_text( let source = source_text(db, file); let tokens = tokens_start_before(parsed.tokens(), offset); let last = tokens.last()?; - if !matches!(last.kind(), TokenKind::Name) { + // It's odd to include `TokenKind::Import` here, but it + // indicates that the user has typed `import`. This is + // useful to know in some contexts. + if !matches!(last.kind(), TokenKind::Name | TokenKind::Import) { return None; } // This one's weird, but if the cursor is beyond @@ -827,20 +946,21 @@ fn find_typed_text( Some(source[last.range()].to_string()) } -/// Whether the given offset within the parsed module is within -/// a comment or not. -fn is_in_comment(parsed: &ParsedModuleRef, offset: TextSize) -> bool { - let tokens = tokens_start_before(parsed.tokens(), offset); +/// Whether the last token is in a place where we should not provide completions. +fn is_in_no_completions_place(db: &dyn Db, tokens: &[Token], file: File) -> bool { + is_in_comment(tokens) || is_in_string(tokens) || is_in_definition_place(db, tokens, file) +} + +/// Whether the last token is within a comment or not. +fn is_in_comment(tokens: &[Token]) -> bool { tokens.last().is_some_and(|t| t.kind().is_comment()) } -/// Returns true when the cursor at `offset` is positioned within -/// a string token (regular, f-string, t-string, etc). 
+/// Whether the last token is positioned within a string token (regular, f-string, t-string, etc). /// -/// Note that this will return `false` when positioned within an +/// Note that this will return `false` when the last token is positioned within an /// interpolation block in an f-string or a t-string. -fn is_in_string(parsed: &ParsedModuleRef, offset: TextSize) -> bool { - let tokens = tokens_start_before(parsed.tokens(), offset); +fn is_in_string(tokens: &[Token]) -> bool { tokens.last().is_some_and(|t| { matches!( t.kind(), @@ -849,6 +969,29 @@ fn is_in_string(parsed: &ParsedModuleRef, offset: TextSize) -> bool { }) } +/// Returns true when the tokens indicate that the definition of a new name is being introduced at the end. +fn is_in_definition_place(db: &dyn Db, tokens: &[Token], file: File) -> bool { + let is_definition_keyword = |token: &Token| { + if matches!( + token.kind(), + TokenKind::Def | TokenKind::Class | TokenKind::Type + ) { + true + } else if token.kind() == TokenKind::Name { + let source = source_text(db, file); + &source[token.range()] == "type" + } else { + false + } + }; + + tokens + .len() + .checked_sub(2) + .and_then(|i| tokens.get(i)) + .is_some_and(is_definition_keyword) +} + /// Order completions according to the following rules: /// /// 1) Names with no underscore prefix @@ -861,9 +1004,26 @@ fn is_in_string(parsed: &ParsedModuleRef, offset: TextSize) -> bool { /// This has the effect of putting all dunder attributes after "normal" /// attributes, and all single-underscore attributes after dunder attributes. fn compare_suggestions(c1: &Completion, c2: &Completion) -> Ordering { - let (kind1, kind2) = (NameKind::classify(&c1.name), NameKind::classify(&c2.name)); + fn key<'a>(completion: &'a Completion) -> (bool, bool, bool, NameKind, bool, &'a Name) { + ( + completion.module_name.is_some(), + // At time of writing (2025-11-11), keyword completions + // are classified as builtins, which makes them sort after + // everything else. 
But we probably want keyword completions + // to sort *before* anything else since they are so common. + // Moreover, it seems VS Code forcefully does this sorting. + // By doing it ourselves, we make our natural sorting match + // VS Code's, and thus our completion evaluation framework + // should be more representative of real world conditions. + completion.kind != Some(CompletionKind::Keyword), + completion.builtin, + NameKind::classify(&completion.name), + completion.is_type_check_only, + &completion.name, + ) + } - (kind1, c1.is_type_check_only, &c1.name).cmp(&(kind2, c2.is_type_check_only, &c2.name)) + key(c1).cmp(&key(c2)) } #[cfg(test)] @@ -952,7 +1112,7 @@ mod tests { ); assert_snapshot!( - test.skip_builtins().build().snapshot(), + test.skip_keywords().skip_builtins().build().snapshot(), @"", ); } @@ -974,6 +1134,73 @@ mod tests { // See `private_symbols_in_stub` for more comprehensive testing private of symbol filtering. } + #[test] + fn keywords() { + let test = completion_test_builder( + "\ + +", + ); + + assert_snapshot!( + test.skip_builtins().build().snapshot(), + @r" + and + as + assert + async + await + break + case + class + continue + def + del + elif + else + except + finally + for + from + global + if + import + in + is + lambda + match + nonlocal + not + or + pass + raise + return + try + while + with + yield + ", + ); + } + + #[test] + fn type_keyword_dedup() { + let test = completion_test_builder( + "\ +type +", + ); + + assert_snapshot!( + test.type_signatures().build().snapshot(), + @r" + TypeError :: + type :: + _NotImplementedType :: + ", + ); + } + #[test] fn builtins_not_included_object_attr() { let builder = completion_test_builder( @@ -1006,7 +1233,7 @@ import re ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"re"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"re"); } #[test] @@ -1019,7 +1246,7 @@ from os import path ", ); - 
assert_snapshot!(builder.skip_builtins().build().snapshot(), @"path"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"path"); } // N.B. We don't currently explore module APIs. This @@ -1161,7 +1388,7 @@ f ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); } #[test] @@ -1175,7 +1402,7 @@ g ); assert_snapshot!( - builder.skip_builtins().build().snapshot(), + builder.skip_keywords().skip_builtins().build().snapshot(), @"", ); } @@ -1190,7 +1417,7 @@ def foo(): ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo "); } @@ -1206,7 +1433,7 @@ f ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); } #[test] @@ -1220,7 +1447,7 @@ def foo(): ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo "); } @@ -1235,7 +1462,7 @@ def foo(): ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo foofoo "); @@ -1269,7 +1496,7 @@ def foo(): // matches the current cursor's indentation. This seems fraught // however. It's not clear to me that we can always assume a // correspondence between scopes and indentation level. 
- assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo "); } @@ -1285,7 +1512,7 @@ def foo(): ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo foofoo "); @@ -1301,7 +1528,7 @@ def foo(): f", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo foofoo "); @@ -1319,7 +1546,7 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo foofoo frob @@ -1338,7 +1565,7 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo frob "); @@ -1356,7 +1583,7 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo foofoo foofoofoo @@ -1384,7 +1611,7 @@ def foo(): // account for the indented whitespace, or some other technique // needs to be used to get the scope containing `foofoo` but not // `foofoofoo`. - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo "); } @@ -1400,7 +1627,7 @@ def foo(): ); // FIXME: Should include `foofoo` (but not `foofoofoo`). - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo "); } @@ -1418,7 +1645,7 @@ def frob(): ... ); // FIXME: Should include `foofoo` (but not `foofoofoo`). 
- assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo frob "); @@ -1438,7 +1665,7 @@ def frob(): ... ); // FIXME: Should include `foofoo` (but not `foofoofoo`). - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo frob "); @@ -1459,7 +1686,7 @@ def frob(): ... ); // FIXME: Should include `foofoo` (but not `foofoofoo`). - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo frob "); @@ -1494,7 +1721,7 @@ def frob(): ... // the list comprehension is not yet valid and so we do not // detect this as a definition of `bar`. assert_snapshot!( - builder.skip_builtins().build().snapshot(), + builder.skip_keywords().skip_builtins().build().snapshot(), @"", ); } @@ -1507,7 +1734,7 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); } #[test] @@ -1518,7 +1745,7 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); } #[test] @@ -1529,7 +1756,7 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); } #[test] @@ -1540,7 +1767,7 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); } #[test] @@ -1551,7 +1778,7 @@ def frob(): ... 
", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); } #[test] @@ -1562,7 +1789,7 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); } #[test] @@ -1585,7 +1812,7 @@ def frob(): ... // The `lambda_blank1` test works because there are expressions // on either side of . assert_snapshot!( - builder.skip_builtins().build().snapshot(), + builder.skip_keywords().skip_builtins().build().snapshot(), @"", ); } @@ -1600,7 +1827,7 @@ def frob(): ... // FIXME: Should include `foo`. assert_snapshot!( - builder.skip_builtins().build().snapshot(), + builder.skip_keywords().skip_builtins().build().snapshot(), @"", ); } @@ -1615,7 +1842,7 @@ def frob(): ... // FIXME: Should include `foo`. assert_snapshot!( - builder.skip_builtins().build().snapshot(), + builder.skip_keywords().skip_builtins().build().snapshot(), @"", ); } @@ -1631,7 +1858,7 @@ class Foo: ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" bar frob "); @@ -1647,7 +1874,7 @@ class Foo: ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"bar"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"bar"); } #[test] @@ -1667,7 +1894,7 @@ class Foo: // // These don't work for similar reasons as other // tests above with the inside of whitespace. - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" Foo "); } @@ -1686,7 +1913,7 @@ class Foo: // FIXME: Should include `bar`, `quux` and `frob`. // (Unclear if `Foo` should be included, but a false // positive isn't the end of the world.) 
- assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" Foo "); } @@ -1702,7 +1929,7 @@ class Foo(): ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" Bar Foo "); @@ -1719,7 +1946,7 @@ class Bar: ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" Bar Foo "); @@ -1736,7 +1963,7 @@ class Bar: ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" Bar Foo "); @@ -1751,7 +1978,7 @@ class Bar: ... class Foo(", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" Bar Foo "); @@ -1772,7 +1999,8 @@ quux. ", ); - assert_snapshot!(builder.skip_builtins().type_signatures().build().snapshot(), @r" + assert_snapshot!( + builder.skip_keywords().skip_builtins().type_signatures().build().snapshot(), @r" bar :: Unknown | Literal[2] baz :: Unknown | Literal[3] foo :: Unknown | Literal[1] @@ -1817,7 +2045,8 @@ quux.b ", ); - assert_snapshot!(builder.skip_builtins().type_signatures().build().snapshot(), @r" + assert_snapshot!( + builder.skip_keywords().skip_builtins().type_signatures().build().snapshot(), @r" bar :: Unknown | Literal[2] baz :: Unknown | Literal[3] __getattribute__ :: bound method Quux.__getattribute__(name: str, /) -> Any @@ -1841,7 +2070,8 @@ C. ", ); - assert_snapshot!(builder.skip_builtins().type_signatures().build().snapshot(), @r" + assert_snapshot!( + builder.skip_keywords().skip_builtins().type_signatures().build().snapshot(), @r" meta_attr :: int mro :: bound method .mro() -> list[type] __annotate__ :: @Todo | None @@ -1912,7 +2142,8 @@ Meta. 
// just redact them. ---AG filters => [(r"(?m)\s*__(annotations|new|annotate)__.+$", "")]}, { - assert_snapshot!(builder.skip_builtins().type_signatures().build().snapshot(), @r" + assert_snapshot!( + builder.skip_keywords().skip_builtins().type_signatures().build().snapshot(), @r" meta_attr :: property mro :: def mro(self) -> list[type] __base__ :: type | None @@ -1973,7 +2204,7 @@ class Quux: ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" bar baz foo @@ -2034,7 +2265,8 @@ Quux. ", ); - assert_snapshot!(builder.skip_builtins().type_signatures().build().snapshot(), @r" + assert_snapshot!( + builder.skip_keywords().skip_builtins().type_signatures().build().snapshot(), @r" mro :: bound method .mro() -> list[type] some_attribute :: int some_class_method :: bound method .some_class_method() -> int @@ -2106,7 +2338,8 @@ Answer. // rendered differently in release mode. filters => [(r"(?m)\s*__(call|reduce_ex|annotate|signature)__.+$", "")]}, { - assert_snapshot!(builder.skip_builtins().type_signatures().build().snapshot(), @r" + assert_snapshot!( + builder.skip_keywords().skip_builtins().type_signatures().build().snapshot(), @r" NO :: Literal[Answer.NO] YES :: Literal[Answer.YES] mro :: bound method .mro() -> list[type] @@ -2191,7 +2424,7 @@ bar(o ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); } #[test] @@ -2206,7 +2439,7 @@ bar( ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" bar foo "); @@ -2225,7 +2458,7 @@ class C: ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo self "); @@ -2242,7 +2475,7 @@ class C: ", ); - 
assert_snapshot!(builder.skip_builtins().build().snapshot(), @"C"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"C"); } #[test] @@ -2259,7 +2492,7 @@ class C: // FIXME: Should NOT include `foo` here, since // that is only a method that can be called on // `self`. - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo self "); @@ -2275,7 +2508,10 @@ class ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"classy_variable_name"); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"classy_variable_name", + ); } #[test] @@ -2288,7 +2524,10 @@ print(f\"{some ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"some_symbol"); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"some_symbol", + ); } #[test] @@ -2302,7 +2541,10 @@ hidden_ ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -2322,7 +2564,7 @@ if sys.platform == \"not-my-current-platform\": // currently make no effort to provide a good IDE experience within sections that // are unreachable assert_snapshot!( - builder.skip_builtins().build().snapshot(), + builder.skip_keywords().skip_builtins().build().snapshot(), @"", ); } @@ -2428,7 +2670,10 @@ A(). 
", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -2649,7 +2894,10 @@ q.foo.xyz ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -2660,7 +2908,7 @@ q.foo.xyz ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" __annotations__ __class__ __delattr__ @@ -2695,7 +2943,10 @@ class Foo: ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -2740,7 +2991,10 @@ def m ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } // Ref: https://github.com/astral-sh/ty/issues/572 @@ -2752,7 +3006,10 @@ def m(): pass ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } // Ref: https://github.com/astral-sh/ty/issues/572 @@ -2765,9 +3022,7 @@ def m(): pass ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" - m - "); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r"m"); } // Ref: https://github.com/astral-sh/ty/issues/572 @@ -2779,7 +3034,10 @@ class M ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } // Ref: https://github.com/astral-sh/ty/issues/572 @@ -2791,7 +3049,10 @@ Fo = float ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"Fo"); + assert_snapshot!( + 
builder.skip_keywords().skip_builtins().build().snapshot(), + @"Fo", + ); } // Ref: https://github.com/astral-sh/ty/issues/572 @@ -2807,7 +3068,14 @@ import fo // which is kind of annoying. So just assert that it // runs without panicking and produces some non-empty // output. - assert!(!builder.skip_builtins().build().completions().is_empty()); + assert!( + !builder + .skip_keywords() + .skip_builtins() + .build() + .completions() + .is_empty() + ); } // Ref: https://github.com/astral-sh/ty/issues/572 @@ -2823,7 +3091,14 @@ import foo as ba // which is kind of annoying. So just assert that it // runs without panicking and produces some non-empty // output. - assert!(!builder.skip_builtins().build().completions().is_empty()); + assert!( + !builder + .skip_keywords() + .skip_builtins() + .build() + .completions() + .is_empty() + ); } // Ref: https://github.com/astral-sh/ty/issues/572 @@ -2839,7 +3114,14 @@ from fo import wat // which is kind of annoying. So just assert that it // runs without panicking and produces some non-empty // output. 
- assert!(!builder.skip_builtins().build().completions().is_empty()); + assert!( + !builder + .skip_keywords() + .skip_builtins() + .build() + .completions() + .is_empty() + ); } // Ref: https://github.com/astral-sh/ty/issues/572 @@ -2851,7 +3133,10 @@ from foo import wa ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } // Ref: https://github.com/astral-sh/ty/issues/572 @@ -2863,7 +3148,10 @@ from foo import wat as ba ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } // Ref: https://github.com/astral-sh/ty/issues/572 @@ -2879,7 +3167,7 @@ except Type: ); assert_snapshot!( - builder.skip_builtins().build().snapshot(), + builder.skip_keywords().skip_builtins().build().snapshot(), @"", ); } @@ -2894,7 +3182,10 @@ def _(): ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -2914,7 +3205,10 @@ f = Foo() "#, ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @r"", + ); } #[test] @@ -3100,7 +3394,10 @@ foo = 1 from ? import ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @r"", + ); } #[test] @@ -3418,8 +3715,8 @@ from os. .build() .snapshot(); assert_snapshot!(snapshot, @r" - AbraKadabra :: Unavailable :: package Kadabra :: Literal[1] :: Current module + AbraKadabra :: Unavailable :: package "); } @@ -3494,7 +3791,7 @@ from os. 
); assert_snapshot!( - test.skip_builtins().build().snapshot(), + test.skip_keywords().skip_builtins().build().snapshot(), @"", ); } @@ -3523,7 +3820,10 @@ zqzqzq = 1 ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3534,7 +3834,10 @@ zqzqzq = 1 print(\"zqzq\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3543,7 +3846,10 @@ class Foo: print(\"Foo.zqzq\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3554,7 +3860,10 @@ zqzqzq = 1 print(\"zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3563,7 +3872,10 @@ class Foo: print(\"Foo.zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3574,7 +3886,10 @@ zqzqzq = 1 print('zqzq') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3583,7 +3898,10 @@ class Foo: print('Foo.zqzq') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3594,7 +3912,10 @@ zqzqzq = 1 print('zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = 
completion_test_builder( "\ @@ -3603,7 +3924,10 @@ class Foo: print('Foo.zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3614,7 +3938,10 @@ zqzqzq = 1 print(\"\"\"zqzq\"\"\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3623,7 +3950,10 @@ class Foo: print(\"\"\"Foo.zqzq\"\"\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3634,7 +3964,10 @@ zqzqzq = 1 print(\"\"\"zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3643,7 +3976,10 @@ class Foo: print(\"\"\"Foo.zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3654,7 +3990,10 @@ zqzqzq = 1 print('''zqzq''') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3663,7 +4002,10 @@ class Foo: print('''Foo.zqzq''') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3674,7 +4016,10 @@ zqzqzq = 1 print('''zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3683,7 +4028,10 @@ class Foo: 
print('''Foo.zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3694,7 +4042,10 @@ zqzqzq = 1 print(f\"zqzq\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3703,7 +4054,10 @@ class Foo: print(f\"{Foo} and Foo.zqzq\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3714,7 +4068,10 @@ zqzqzq = 1 print(f\"zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3723,7 +4080,10 @@ class Foo: print(f\"{Foo} and Foo.zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3734,7 +4094,10 @@ zqzqzq = 1 print(f'zqzq') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3743,7 +4106,10 @@ class Foo: print(f'{Foo} and Foo.zqzq') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3754,7 +4120,10 @@ zqzqzq = 1 print(f'zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3763,7 +4132,10 @@ class Foo: print(f'{Foo} and Foo.zqzq ", ); - 
assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3774,7 +4146,10 @@ zqzqzq = 1 print(f\"\"\"zqzq\"\"\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3783,7 +4158,10 @@ class Foo: print(f\"\"\"{Foo} and Foo.zqzq\"\"\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3794,7 +4172,10 @@ zqzqzq = 1 print(f\"\"\"zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3803,7 +4184,10 @@ class Foo: print(f\"\"\"{Foo} and Foo.zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3814,7 +4198,10 @@ zqzqzq = 1 print(f'''zqzq''') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3823,7 +4210,10 @@ class Foo: print(f'''{Foo} and Foo.zqzq''') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3834,7 +4224,10 @@ zqzqzq = 1 print(f'''zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3843,7 +4236,10 @@ class Foo: print(f'''{Foo} and Foo.zqzq ", ); - 
assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3854,7 +4250,10 @@ zqzqzq = 1 print(t\"zqzq\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3863,7 +4262,10 @@ class Foo: print(t\"{Foo} and Foo.zqzq\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3874,7 +4276,10 @@ zqzqzq = 1 print(t\"zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3883,7 +4288,10 @@ class Foo: print(t\"{Foo} and Foo.zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3894,7 +4302,10 @@ zqzqzq = 1 print(t'zqzq') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3903,7 +4314,10 @@ class Foo: print(t'{Foo} and Foo.zqzq') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3914,7 +4328,10 @@ zqzqzq = 1 print(t'zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3923,7 +4340,10 @@ class Foo: print(t'{Foo} and Foo.zqzq ", ); - 
assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3934,7 +4354,10 @@ zqzqzq = 1 print(t\"\"\"zqzq\"\"\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3943,7 +4366,10 @@ class Foo: print(t\"\"\"{Foo} and Foo.zqzq\"\"\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3954,7 +4380,10 @@ zqzqzq = 1 print(t\"\"\"zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3963,7 +4392,10 @@ class Foo: print(t\"\"\"{Foo} and Foo.zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3974,7 +4406,10 @@ zqzqzq = 1 print(t'''zqzq''') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3983,7 +4418,10 @@ class Foo: print(t'''{Foo} and Foo.zqzq''') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3994,7 +4432,10 @@ zqzqzq = 1 print(t'''zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -4003,7 +4444,10 @@ class Foo: print(t'''{Foo} and Foo.zqzq ", ); - 
assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -4058,6 +4502,266 @@ def f[T](x: T): test.build().contains("__repr__"); } + #[test] + fn no_completions_in_function_def_name() { + let builder = completion_test_builder( + "\ +foo = 1 + +def f + ", + ); + + assert!(builder.build().completions().is_empty()); + } + + #[test] + fn completions_in_function_def_empty_name() { + let builder = completion_test_builder( + "\ +def + ", + ); + + // This is okay because the ide will not request completions when the cursor is in this position. + assert!(!builder.build().completions().is_empty()); + } + + #[test] + fn no_completions_in_class_def_name() { + let builder = completion_test_builder( + "\ +foo = 1 + +class f + ", + ); + + assert!(builder.build().completions().is_empty()); + } + + #[test] + fn completions_in_class_def_empty_name() { + let builder = completion_test_builder( + "\ +class + ", + ); + + // This is okay because the ide will not request completions when the cursor is in this position. + assert!(!builder.build().completions().is_empty()); + } + + #[test] + fn no_completions_in_type_def_name() { + let builder = completion_test_builder( + "\ +foo = 1 + +type f = int + ", + ); + + assert!(builder.build().completions().is_empty()); + } + + #[test] + fn no_completions_in_maybe_type_def_name() { + let builder = completion_test_builder( + "\ +foo = 1 + +type f + ", + ); + + assert!(builder.build().completions().is_empty()); + } + + #[test] + fn completions_in_type_def_empty_name() { + let builder = completion_test_builder( + "\ +type + ", + ); + + // This is okay because the ide will not request completions when the cursor is in this position. 
+ assert!(!builder.build().completions().is_empty()); + } + + #[test] + fn favour_symbols_currently_imported() { + let snapshot = CursorTest::builder() + .source("main.py", "long_nameb = 1\nlong_name") + .source("foo.py", "def long_namea(): ...") + .completion_test_builder() + .type_signatures() + .auto_import() + .module_names() + .filter(|c| c.name.contains("long_name")) + .build() + .snapshot(); + + // Even though long_namea is alphabetically before long_nameb, + // long_nameb is currently imported and should be preferred. + assert_snapshot!(snapshot, @r" + long_nameb :: Literal[1] :: Current module + long_namea :: Unavailable :: foo + "); + } + + #[test] + fn favour_imported_over_builtin() { + let snapshot = + completion_test_builder("from typing import Protocol\nclass Foo(P: ...") + .filter(|c| c.name.starts_with('P')) + .build() + .snapshot(); + + // Here we favour `Protocol` over the other completions + // because `Protocol` has been imported, and the other completions are builtin. 
+ assert_snapshot!(snapshot, @r" + Protocol + PendingDeprecationWarning + PermissionError + ProcessLookupError + PythonFinalizationError + "); + } + + #[test] + fn from_import_i_suggests_import() { + let builder = completion_test_builder("from typing i"); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_import_suggests_nothing() { + let builder = completion_test_builder("from typing import"); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_importt_suggests_import() { + let builder = completion_test_builder("from typing importt"); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_space_suggests_import() { + let builder = completion_test_builder("from typing "); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_no_space_not_suggests_import() { + let builder = completion_test_builder("from typing"); + assert_snapshot!(builder.build().snapshot(), @r" + typing + typing_extensions + "); + } + + #[test] + fn from_import_two_imports_suggests_import() { + let builder = completion_test_builder( + "from collections.abc import Sequence + from typing i", + ); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + /// The following behaviour may not be reflected in editors, since LSP + /// clients may do their own filtering of completion suggestions. 
+ #[test] + fn from_import_random_name_suggests_import() { + let builder = completion_test_builder("from typing aa"); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_dotted_name_suggests_import() { + let builder = completion_test_builder("from collections.abc i"); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_relative_import_suggests_import() { + let builder = CursorTest::builder() + .source("main.py", "from .foo i") + .source("foo.py", "") + .completion_test_builder(); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_dotted_name_relative_import_suggests_import() { + let builder = CursorTest::builder() + .source("main.py", "from .foo.bar i") + .source("foo/bar.py", "") + .completion_test_builder(); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_nested_dotted_name_relative_import_suggests_import() { + let builder = CursorTest::builder() + .source("src/main.py", "from ..foo i") + .source("foo.py", "") + .completion_test_builder(); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_nested_very_dotted_name_relative_import_suggests_import() { + let builder = CursorTest::builder() + // N.B. the `...` tokenizes as `TokenKind::Ellipsis` + .source("src/main.py", "from ...foo i") + .source("foo.py", "") + .completion_test_builder(); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_only_dot() { + let builder = CursorTest::builder() + .source( + "main.py", + " + import_zqzqzq = 1 + from . 
+ ", + ) + .completion_test_builder(); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_only_dot_incomplete() { + let builder = CursorTest::builder() + .source( + "main.py", + " + import_zqzqzq = 1 + from .imp + ", + ) + .completion_test_builder(); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_incomplete() { + let builder = completion_test_builder( + "from collections.abc i + + ZQZQZQ = 1 + ZQ", + ); + assert_snapshot!(builder.build().snapshot(), @"ZQZQZQ"); + } + /// A way to create a simple single-file (named `main.py`) completion test /// builder. /// @@ -4076,10 +4780,12 @@ def f[T](x: T): /// of completions from a cursor position in source code. Most of /// the options involve some kind of filtering or adjustment to /// apply to the snapshots, depending on what one wants to test. + #[expect(clippy::struct_excessive_bools)] // free the bools! struct CompletionTestBuilder { cursor_test: CursorTest, settings: CompletionSettings, skip_builtins: bool, + skip_keywords: bool, type_signatures: bool, module_names: bool, // This doesn't seem like a "very complex" type to me... ---AG @@ -4099,6 +4805,7 @@ def f[T](x: T): let filtered = original .iter() .filter(|c| !self.skip_builtins || !c.builtin) + .filter(|c| !self.skip_keywords || c.kind != Some(CompletionKind::Keyword)) .filter(|c| { self.predicate .as_ref() @@ -4139,6 +4846,21 @@ def f[T](x: T): self } + /// When set, keywords from completions are skipped. This + /// is useful in tests to reduce noise for scope based + /// completions. + /// + /// Not enabled by default. + /// + /// Note that, at time of writing (2025-11-11), keywords are + /// *also* considered builtins. So `skip_builtins()` will also + /// skip keywords. But this may not always be true. And one + /// might want to skip keywords but *not* builtins. 
+ fn skip_keywords(mut self) -> CompletionTestBuilder { + self.skip_keywords = true; + self + } + /// When set, type signatures of each completion item are /// included in the snapshot. This is useful when one wants /// to specifically test types, but it usually best to leave @@ -4258,6 +4980,7 @@ def f[T](x: T): cursor_test: self.build(), settings: CompletionSettings::default(), skip_builtins: false, + skip_keywords: false, type_signatures: false, module_names: false, predicate: None, diff --git a/crates/ty_ide/src/goto.rs b/crates/ty_ide/src/goto.rs index d7a7091f94..094d2008d2 100644 --- a/crates/ty_ide/src/goto.rs +++ b/crates/ty_ide/src/goto.rs @@ -209,16 +209,11 @@ impl<'db> DefinitionsOrTargets<'db> { ty_python_semantic::types::TypeDefinition::Module(module) => { ResolvedDefinition::Module(module.file(db)?) } - ty_python_semantic::types::TypeDefinition::Class(definition) => { - ResolvedDefinition::Definition(definition) - } - ty_python_semantic::types::TypeDefinition::Function(definition) => { - ResolvedDefinition::Definition(definition) - } - ty_python_semantic::types::TypeDefinition::TypeVar(definition) => { - ResolvedDefinition::Definition(definition) - } - ty_python_semantic::types::TypeDefinition::TypeAlias(definition) => { + ty_python_semantic::types::TypeDefinition::Class(definition) + | ty_python_semantic::types::TypeDefinition::Function(definition) + | ty_python_semantic::types::TypeDefinition::TypeVar(definition) + | ty_python_semantic::types::TypeDefinition::TypeAlias(definition) + | ty_python_semantic::types::TypeDefinition::NewType(definition) => { ResolvedDefinition::Definition(definition) } }; diff --git a/crates/ty_ide/src/goto_definition.rs b/crates/ty_ide/src/goto_definition.rs index fc0fc28fb9..81caea650f 100644 --- a/crates/ty_ide/src/goto_definition.rs +++ b/crates/ty_ide/src/goto_definition.rs @@ -1592,6 +1592,111 @@ a = Test() "); } + #[test] + fn float_annotation() { + let test = CursorTest::builder() + .source( + "main.py", + " +a: 
float = 3.14 +", + ) + .build(); + + assert_snapshot!(test.goto_definition(), @r#" + info[goto-definition]: Definition + --> stdlib/builtins.pyi:346:7 + | + 345 | @disjoint_base + 346 | class int: + | ^^^ + 347 | """int([x]) -> integer + 348 | int(x, base=10) -> integer + | + info: Source + --> main.py:2:4 + | + 2 | a: float = 3.14 + | ^^^^^ + | + + info[goto-definition]: Definition + --> stdlib/builtins.pyi:659:7 + | + 658 | @disjoint_base + 659 | class float: + | ^^^^^ + 660 | """Convert a string or number to a floating-point number, if possible.""" + | + info: Source + --> main.py:2:4 + | + 2 | a: float = 3.14 + | ^^^^^ + | + "#); + } + + #[test] + fn complex_annotation() { + let test = CursorTest::builder() + .source( + "main.py", + " +a: complex = 3.14 +", + ) + .build(); + + assert_snapshot!(test.goto_definition(), @r#" + info[goto-definition]: Definition + --> stdlib/builtins.pyi:346:7 + | + 345 | @disjoint_base + 346 | class int: + | ^^^ + 347 | """int([x]) -> integer + 348 | int(x, base=10) -> integer + | + info: Source + --> main.py:2:4 + | + 2 | a: complex = 3.14 + | ^^^^^^^ + | + + info[goto-definition]: Definition + --> stdlib/builtins.pyi:659:7 + | + 658 | @disjoint_base + 659 | class float: + | ^^^^^ + 660 | """Convert a string or number to a floating-point number, if possible.""" + | + info: Source + --> main.py:2:4 + | + 2 | a: complex = 3.14 + | ^^^^^^^ + | + + info[goto-definition]: Definition + --> stdlib/builtins.pyi:820:7 + | + 819 | @disjoint_base + 820 | class complex: + | ^^^^^^^ + 821 | """Create a complex number from a string or numbers. + | + info: Source + --> main.py:2:4 + | + 2 | a: complex = 3.14 + | ^^^^^^^ + | + "#); + } + /// Regression test for . /// We must ensure we respect re-import convention for stub files for /// imports in builtins.pyi. 
diff --git a/crates/ty_ide/src/goto_type_definition.rs b/crates/ty_ide/src/goto_type_definition.rs index f39e5dc4b6..7d86743af4 100644 --- a/crates/ty_ide/src/goto_type_definition.rs +++ b/crates/ty_ide/src/goto_type_definition.rs @@ -276,10 +276,20 @@ mod tests { "#, ); - // TODO: Goto type definition currently doesn't work for type param specs - // because the inference doesn't support them yet. - // This snapshot should show a single target pointing to `T` - assert_snapshot!(test.goto_type_definition(), @"No type definitions found"); + assert_snapshot!(test.goto_type_definition(), @r" + info[goto-type-definition]: Type definition + --> main.py:2:14 + | + 2 | type Alias[**P = [int, str]] = Callable[P, int] + | ^ + | + info: Source + --> main.py:2:41 + | + 2 | type Alias[**P = [int, str]] = Callable[P, int] + | ^ + | + "); } #[test] diff --git a/crates/ty_ide/src/hover.rs b/crates/ty_ide/src/hover.rs index b555295678..c1278637a1 100644 --- a/crates/ty_ide/src/hover.rs +++ b/crates/ty_ide/src/hover.rs @@ -1633,11 +1633,12 @@ def ab(a: int, *, c: int): "#, ); + // TODO: This should be `P@Alias ()` assert_snapshot!(test.hover(), @r" - @Todo + typing.ParamSpec --------------------------------------------- ```python - @Todo + typing.ParamSpec ``` --------------------------------------------- info[hover]: Hovered content is @@ -2633,6 +2634,40 @@ def ab(a: int, *, c: int): "); } + #[test] + fn hover_float_annotation() { + let test = cursor_test( + r#" + a: float = 3.14 + "#, + ); + + assert_snapshot!(test.hover(), @r" + int | float + --------------------------------------------- + Convert a string or number to a floating-point number, if possible. + + --------------------------------------------- + ```python + int | float + ``` + --- + ```text + Convert a string or number to a floating-point number, if possible. 
+ + ``` + --------------------------------------------- + info[hover]: Hovered content is + --> main.py:2:4 + | + 2 | a: float = 3.14 + | ^^^^^- Cursor offset + | | + | source + | + "); + } + impl CursorTest { fn hover(&self) -> String { use std::fmt::Write; diff --git a/crates/ty_ide/src/inlay_hints.rs b/crates/ty_ide/src/inlay_hints.rs index 1e3f545aff..353f761e90 100644 --- a/crates/ty_ide/src/inlay_hints.rs +++ b/crates/ty_ide/src/inlay_hints.rs @@ -4,7 +4,7 @@ use crate::Db; use ruff_db::files::File; use ruff_db::parsed::parsed_module; use ruff_python_ast::visitor::source_order::{self, SourceOrderVisitor, TraversalSignal}; -use ruff_python_ast::{AnyNodeRef, Expr, Stmt}; +use ruff_python_ast::{AnyNodeRef, ArgOrKeyword, Expr, ExprUnaryOp, Stmt, UnaryOp}; use ruff_text_size::{Ranged, TextRange, TextSize}; use ty_python_semantic::types::Type; use ty_python_semantic::types::ide_support::inlay_hint_function_argument_details; @@ -231,7 +231,7 @@ impl SourceOrderVisitor<'_> for InlayHintVisitor<'_, '_> { match stmt { Stmt::Assign(assign) => { - self.in_assignment = true; + self.in_assignment = !type_hint_is_excessive_for_expr(&assign.value); for target in &assign.targets { self.visit_expr(target); } @@ -283,7 +283,9 @@ impl SourceOrderVisitor<'_> for InlayHintVisitor<'_, '_> { self.visit_expr(&call.func); for (index, arg_or_keyword) in call.arguments.arguments_source_order().enumerate() { - if let Some(name) = argument_names.get(&index) { + if let Some(name) = argument_names.get(&index) + && !arg_matches_name(&arg_or_keyword, name) + { self.add_call_argument_name(arg_or_keyword.range().start(), name); } self.visit_expr(arg_or_keyword.value()); @@ -296,13 +298,67 @@ impl SourceOrderVisitor<'_> for InlayHintVisitor<'_, '_> { } } +/// Given a positional argument, check if the expression is the "same name" +/// as the function argument itself. +/// +/// This allows us to filter out reptitive inlay hints like `x=x`, `x=y.x`, etc. 
+fn arg_matches_name(arg_or_keyword: &ArgOrKeyword, name: &str) -> bool { + // Only care about positional args + let ArgOrKeyword::Arg(arg) = arg_or_keyword else { + return false; + }; + + let mut expr = *arg; + loop { + match expr { + // `x=x(1, 2)` counts as a match, recurse for it + Expr::Call(expr_call) => expr = &expr_call.func, + // `x=x[0]` is a match, recurse for it + Expr::Subscript(expr_subscript) => expr = &expr_subscript.value, + // `x=x` is a match + Expr::Name(expr_name) => return expr_name.id.as_str() == name, + // `x=y.x` is a match + Expr::Attribute(expr_attribute) => return expr_attribute.attr.as_str() == name, + _ => return false, + } + } +} + +/// Given an expression that's the RHS of an assignment, would it be excessive to +/// emit an inlay type hint for the variable assigned to it? +/// +/// This is used to suppress inlay hints for things like `x = 1`, `x, y = (1, 2)`, etc. +fn type_hint_is_excessive_for_expr(expr: &Expr) -> bool { + match expr { + // A tuple of all literals is excessive to typehint + Expr::Tuple(expr_tuple) => expr_tuple.elts.iter().all(type_hint_is_excessive_for_expr), + + // Various Literal[...] types which are always excessive to hint + | Expr::BytesLiteral(_) + | Expr::NumberLiteral(_) + | Expr::BooleanLiteral(_) + | Expr::StringLiteral(_) + // `None` isn't terribly verbose, but still redundant + | Expr::NoneLiteral(_) + // This one expands to `str` which isn't verbose but is redundant + | Expr::FString(_) + // This one expands to `Template` which isn't verbose but is redundant + | Expr::TString(_)=> true, + + // You too `+1 and `-1`, get back here + Expr::UnaryOp(ExprUnaryOp { op: UnaryOp::UAdd | UnaryOp::USub, operand, .. 
}) => matches!(**operand, Expr::NumberLiteral(_)), + + // Everything else is reasonable + _ => false, + } +} + #[cfg(test)] mod tests { use super::*; use insta::assert_snapshot; use ruff_db::{ - Db as _, files::{File, system_path_to_file}, source::source_text, }; @@ -311,9 +367,6 @@ mod tests { use ruff_db::system::{DbWithWritableSystem, SystemPathBuf}; use ty_project::ProjectMetadata; - use ty_python_semantic::{ - Program, ProgramSettings, PythonPlatform, PythonVersionWithSource, SearchPathSettings, - }; pub(super) fn inlay_hint_test(source: &str) -> InlayHintTest { const START: &str = ""; @@ -324,6 +377,8 @@ mod tests { SystemPathBuf::from("/"), )); + db.init_program().unwrap(); + let source = dedent(source); let start = source.find(START); @@ -345,19 +400,6 @@ mod tests { let file = system_path_to_file(&db, "main.py").expect("newly written file to existing"); - let search_paths = SearchPathSettings::new(vec![SystemPathBuf::from("/")]) - .to_search_paths(db.system(), db.vendored()) - .expect("Valid search path settings"); - - Program::from_settings( - &db, - ProgramSettings { - python_version: PythonVersionWithSource::default(), - python_platform: PythonPlatform::default(), - search_paths, - }, - ); - InlayHintTest { db, file, range } } @@ -402,47 +444,183 @@ mod tests { #[test] fn test_assign_statement() { - let test = inlay_hint_test("x = 1"); + let test = inlay_hint_test( + " + def i(x: int, /) -> int: + return x + + x = 1 + y = x + z = i(1) + w = z + ", + ); assert_snapshot!(test.inlay_hints(), @r" - x[: Literal[1]] = 1 + def i(x: int, /) -> int: + return x + + x = 1 + y[: Literal[1]] = x + z[: int] = i(1) + w[: int] = z "); } #[test] - fn test_tuple_assignment() { - let test = inlay_hint_test("x, y = (1, 'abc')"); + fn test_unpacked_tuple_assignment() { + let test = inlay_hint_test( + " + def i(x: int, /) -> int: + return x + def s(x: str, /) -> str: + return x + + x1, y1 = (1, 'abc') + x2, y2 = (x1, y1) + x3, y3 = (i(1), s('abc')) + x4, y4 = (x3, y3) + ", + 
); assert_snapshot!(test.inlay_hints(), @r#" - x[: Literal[1]], y[: Literal["abc"]] = (1, 'abc') + def i(x: int, /) -> int: + return x + def s(x: str, /) -> str: + return x + + x1, y1 = (1, 'abc') + x2[: Literal[1]], y2[: Literal["abc"]] = (x1, y1) + x3[: int], y3[: str] = (i(1), s('abc')) + x4[: int], y4[: str] = (x3, y3) + "#); + } + + #[test] + fn test_multiple_assignment() { + let test = inlay_hint_test( + " + def i(x: int, /) -> int: + return x + def s(x: str, /) -> str: + return x + + x1, y1 = 1, 'abc' + x2, y2 = x1, y1 + x3, y3 = i(1), s('abc') + x4, y4 = x3, y3 + ", + ); + + assert_snapshot!(test.inlay_hints(), @r#" + def i(x: int, /) -> int: + return x + def s(x: str, /) -> str: + return x + + x1, y1 = 1, 'abc' + x2[: Literal[1]], y2[: Literal["abc"]] = x1, y1 + x3[: int], y3[: str] = i(1), s('abc') + x4[: int], y4[: str] = x3, y3 + "#); + } + + #[test] + fn test_tuple_assignment() { + let test = inlay_hint_test( + " + def i(x: int, /) -> int: + return x + def s(x: str, /) -> str: + return x + + x = (1, 'abc') + y = x + z = (i(1), s('abc')) + w = z + ", + ); + + assert_snapshot!(test.inlay_hints(), @r#" + def i(x: int, /) -> int: + return x + def s(x: str, /) -> str: + return x + + x = (1, 'abc') + y[: tuple[Literal[1], Literal["abc"]]] = x + z[: tuple[int, str]] = (i(1), s('abc')) + w[: tuple[int, str]] = z "#); } #[test] fn test_nested_tuple_assignment() { - let test = inlay_hint_test("x, (y, z) = (1, ('abc', 2))"); + let test = inlay_hint_test( + " + def i(x: int, /) -> int: + return x + def s(x: str, /) -> str: + return x + + x1, (y1, z1) = (1, ('abc', 2)) + x2, (y2, z2) = (x1, (y1, z1)) + x3, (y3, z3) = (i(1), (s('abc'), i(2))) + x4, (y4, z4) = (x3, (y3, z3))", + ); assert_snapshot!(test.inlay_hints(), @r#" - x[: Literal[1]], (y[: Literal["abc"]], z[: Literal[2]]) = (1, ('abc', 2)) + def i(x: int, /) -> int: + return x + def s(x: str, /) -> str: + return x + + x1, (y1, z1) = (1, ('abc', 2)) + x2[: Literal[1]], (y2[: Literal["abc"]], z2[: Literal[2]]) 
= (x1, (y1, z1)) + x3[: int], (y3[: str], z3[: int]) = (i(1), (s('abc'), i(2))) + x4[: int], (y4[: str], z4[: int]) = (x3, (y3, z3)) "#); } #[test] fn test_assign_statement_with_type_annotation() { - let test = inlay_hint_test("x: int = 1"); + let test = inlay_hint_test( + " + def i(x: int, /) -> int: + return x + + x: int = 1 + y = x + z: int = i(1) + w = z", + ); assert_snapshot!(test.inlay_hints(), @r" + def i(x: int, /) -> int: + return x + x: int = 1 + y[: Literal[1]] = x + z: int = i(1) + w[: int] = z "); } #[test] fn test_assign_statement_out_of_range() { - let test = inlay_hint_test("x = 1\ny = 2"); + let test = inlay_hint_test( + " + def i(x: int, /) -> int: + return x + x = i(1) + z = x", + ); assert_snapshot!(test.inlay_hints(), @r" - x[: Literal[1]] = 1 - y = 2 + def i(x: int, /) -> int: + return x + x[: int] = i(1) + z = x "); } @@ -452,28 +630,256 @@ mod tests { " class A: def __init__(self, y): - self.x = 1 + self.x = int(1) self.y = y a = A(2) - a.y = 3 + a.y = int(3) ", ); assert_snapshot!(test.inlay_hints(), @r" class A: def __init__(self, y): - self.x[: Literal[1]] = 1 + self.x[: int] = int(1) self.y[: Unknown] = y a[: A] = A([y=]2) - a.y[: Literal[3]] = 3 + a.y[: int] = int(3) "); } + #[test] + fn test_many_literals() { + let test = inlay_hint_test( + r#" + a = 1 + b = 1.0 + c = True + d = None + e = "hello" + f = 'there' + g = f"{e} {f}" + h = t"wow %d" + i = b'\x00' + j = +1 + k = -1.0 + "#, + ); + + assert_snapshot!(test.inlay_hints(), @r#" + a = 1 + b = 1.0 + c = True + d = None + e = "hello" + f = 'there' + g = f"{e} {f}" + h = t"wow %d" + i = b'\x00' + j = +1 + k = -1.0 + "#); + } + + #[test] + fn test_many_literals_tuple() { + let test = inlay_hint_test( + r#" + a = (1, 2) + b = (1.0, 2.0) + c = (True, False) + d = (None, None) + e = ("hel", "lo") + f = ('the', 're') + g = (f"{ft}", f"{ft}") + h = (t"wow %d", t"wow %d") + i = (b'\x01', b'\x02') + j = (+1, +2.0) + k = (-1, -2.0) + "#, + ); + + assert_snapshot!(test.inlay_hints(), @r#" + a 
= (1, 2) + b = (1.0, 2.0) + c = (True, False) + d = (None, None) + e = ("hel", "lo") + f = ('the', 're') + g = (f"{ft}", f"{ft}") + h = (t"wow %d", t"wow %d") + i = (b'\x01', b'\x02') + j = (+1, +2.0) + k = (-1, -2.0) + "#); + } + + #[test] + fn test_many_literals_unpacked_tuple() { + let test = inlay_hint_test( + r#" + a1, a2 = (1, 2) + b1, b2 = (1.0, 2.0) + c1, c2 = (True, False) + d1, d2 = (None, None) + e1, e2 = ("hel", "lo") + f1, f2 = ('the', 're') + g1, g2 = (f"{ft}", f"{ft}") + h1, h2 = (t"wow %d", t"wow %d") + i1, i2 = (b'\x01', b'\x02') + j1, j2 = (+1, +2.0) + k1, k2 = (-1, -2.0) + "#, + ); + + assert_snapshot!(test.inlay_hints(), @r#" + a1, a2 = (1, 2) + b1, b2 = (1.0, 2.0) + c1, c2 = (True, False) + d1, d2 = (None, None) + e1, e2 = ("hel", "lo") + f1, f2 = ('the', 're') + g1, g2 = (f"{ft}", f"{ft}") + h1, h2 = (t"wow %d", t"wow %d") + i1, i2 = (b'\x01', b'\x02') + j1, j2 = (+1, +2.0) + k1, k2 = (-1, -2.0) + "#); + } + + #[test] + fn test_many_literals_multiple() { + let test = inlay_hint_test( + r#" + a1, a2 = 1, 2 + b1, b2 = 1.0, 2.0 + c1, c2 = True, False + d1, d2 = None, None + e1, e2 = "hel", "lo" + f1, f2 = 'the', 're' + g1, g2 = f"{ft}", f"{ft}" + h1, h2 = t"wow %d", t"wow %d" + i1, i2 = b'\x01', b'\x02' + j1, j2 = +1, +2.0 + k1, k2 = -1, -2.0 + "#, + ); + + assert_snapshot!(test.inlay_hints(), @r#" + a1, a2 = 1, 2 + b1, b2 = 1.0, 2.0 + c1, c2 = True, False + d1, d2 = None, None + e1, e2 = "hel", "lo" + f1, f2 = 'the', 're' + g1, g2 = f"{ft}", f"{ft}" + h1, h2 = t"wow %d", t"wow %d" + i1, i2 = b'\x01', b'\x02' + j1, j2 = +1, +2.0 + k1, k2 = -1, -2.0 + "#); + } + + #[test] + fn test_many_literals_list() { + let test = inlay_hint_test( + r#" + a = [1, 2] + b = [1.0, 2.0] + c = [True, False] + d = [None, None] + e = ["hel", "lo"] + f = ['the', 're'] + g = [f"{ft}", f"{ft}"] + h = [t"wow %d", t"wow %d"] + i = [b'\x01', b'\x02'] + j = [+1, +2.0] + k = [-1, -2.0] + "#, + ); + + assert_snapshot!(test.inlay_hints(), @r#" + a[: list[Unknown | int]] = [1, 
2] + b[: list[Unknown | float]] = [1.0, 2.0] + c[: list[Unknown | bool]] = [True, False] + d[: list[Unknown | None]] = [None, None] + e[: list[Unknown | str]] = ["hel", "lo"] + f[: list[Unknown | str]] = ['the', 're'] + g[: list[Unknown | str]] = [f"{ft}", f"{ft}"] + h[: list[Unknown | Template]] = [t"wow %d", t"wow %d"] + i[: list[Unknown | bytes]] = [b'\x01', b'\x02'] + j[: list[Unknown | int | float]] = [+1, +2.0] + k[: list[Unknown | int | float]] = [-1, -2.0] + "#); + } + + #[test] + fn test_simple_init_call() { + let test = inlay_hint_test( + r#" + class MyClass: + def __init__(self): + self.x: int = 1 + + x = MyClass() + y = (MyClass(), MyClass()) + a, b = MyClass(), MyClass() + c, d = (MyClass(), MyClass()) + "#, + ); + + assert_snapshot!(test.inlay_hints(), @r" + class MyClass: + def __init__(self): + self.x: int = 1 + + x[: MyClass] = MyClass() + y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass()) + a[: MyClass], b[: MyClass] = MyClass(), MyClass() + c[: MyClass], d[: MyClass] = (MyClass(), MyClass()) + "); + } + + #[test] + fn test_generic_init_call() { + let test = inlay_hint_test( + r#" + class MyClass[T, U]: + def __init__(self, x: list[T], y: tuple[U, U]): + self.x = x + self.y = y + + x = MyClass([42], ("a", "b")) + y = (MyClass([42], ("a", "b")), MyClass([42], ("a", "b"))) + a, b = MyClass([42], ("a", "b")), MyClass([42], ("a", "b")) + c, d = (MyClass([42], ("a", "b")), MyClass([42], ("a", "b"))) + "#, + ); + + assert_snapshot!(test.inlay_hints(), @r#" + class MyClass[T, U]: + def __init__(self, x: list[T], y: tuple[U, U]): + self.x[: list[T@MyClass]] = x + self.y[: tuple[U@MyClass, U@MyClass]] = y + + x[: MyClass[Unknown | int, str]] = MyClass([x=][42], [y=]("a", "b")) + y[: tuple[MyClass[Unknown | int, str], MyClass[Unknown | int, str]]] = (MyClass([x=][42], [y=]("a", "b")), MyClass([x=][42], [y=]("a", "b"))) + a[: MyClass[Unknown | int, str]], b[: MyClass[Unknown | int, str]] = MyClass([x=][42], [y=]("a", "b")), MyClass([x=][42], [y=]("a", 
"b")) + c[: MyClass[Unknown | int, str]], d[: MyClass[Unknown | int, str]] = (MyClass([x=][42], [y=]("a", "b")), MyClass([x=][42], [y=]("a", "b"))) + "#); + } + #[test] fn test_disabled_variable_types() { - let test = inlay_hint_test("x = 1"); + let test = inlay_hint_test( + " + def i(x: int, /) -> int: + return x + + x = i(1) + ", + ); assert_snapshot!( test.inlay_hints_with_settings(&InlayHintSettings { @@ -481,7 +887,10 @@ mod tests { ..Default::default() }), @r" - x = 1 + def i(x: int, /) -> int: + return x + + x = i(1) " ); } @@ -500,6 +909,173 @@ mod tests { "); } + #[test] + fn test_function_call_with_positional_or_keyword_parameter_redundant_name() { + let test = inlay_hint_test( + " + def foo(x: int): pass + x = 1 + y = 2 + foo(x) + foo(y)", + ); + + assert_snapshot!(test.inlay_hints(), @r" + def foo(x: int): pass + x = 1 + y = 2 + foo(x) + foo([x=]y) + "); + } + + #[test] + fn test_function_call_with_positional_or_keyword_parameter_redundant_attribute() { + let test = inlay_hint_test( + " + def foo(x: int): pass + class MyClass: + def __init__(self): + self.x: int = 1 + self.y: int = 2 + val = MyClass() + + foo(val.x) + foo(val.y)", + ); + + assert_snapshot!(test.inlay_hints(), @r" + def foo(x: int): pass + class MyClass: + def __init__(self): + self.x: int = 1 + self.y: int = 2 + val[: MyClass] = MyClass() + + foo(val.x) + foo([x=]val.y) + "); + } + + #[test] + fn test_function_call_with_positional_or_keyword_parameter_redundant_attribute_not() { + // This one checks that we don't allow elide `x=` for `x.y` + let test = inlay_hint_test( + " + def foo(x: int): pass + class MyClass: + def __init__(self): + self.x: int = 1 + self.y: int = 2 + x = MyClass() + + foo(x.x) + foo(x.y)", + ); + + assert_snapshot!(test.inlay_hints(), @r" + def foo(x: int): pass + class MyClass: + def __init__(self): + self.x: int = 1 + self.y: int = 2 + x[: MyClass] = MyClass() + + foo(x.x) + foo([x=]x.y) + "); + } + + #[test] + fn 
test_function_call_with_positional_or_keyword_parameter_redundant_call() { + let test = inlay_hint_test( + " + def foo(x: int): pass + class MyClass: + def __init__(self): + def x() -> int: + return 1 + def y() -> int: + return 2 + val = MyClass() + + foo(val.x()) + foo(val.y())", + ); + + assert_snapshot!(test.inlay_hints(), @r" + def foo(x: int): pass + class MyClass: + def __init__(self): + def x() -> int: + return 1 + def y() -> int: + return 2 + val[: MyClass] = MyClass() + + foo(val.x()) + foo([x=]val.y()) + "); + } + + #[test] + fn test_function_call_with_positional_or_keyword_parameter_redundant_complex() { + let test = inlay_hint_test( + " + from typing import List + + def foo(x: int): pass + class MyClass: + def __init__(self): + def x() -> List[int]: + return 1 + def y() -> List[int]: + return 2 + val = MyClass() + + foo(val.x()[0]) + foo(val.y()[1])", + ); + + assert_snapshot!(test.inlay_hints(), @r" + from typing import List + + def foo(x: int): pass + class MyClass: + def __init__(self): + def x() -> List[int]: + return 1 + def y() -> List[int]: + return 2 + val[: MyClass] = MyClass() + + foo(val.x()[0]) + foo([x=]val.y()[1]) + "); + } + + #[test] + fn test_function_call_with_positional_or_keyword_parameter_redundant_subscript() { + let test = inlay_hint_test( + " + def foo(x: int): pass + x = [1] + y = [2] + + foo(x[0]) + foo(y[0])", + ); + + assert_snapshot!(test.inlay_hints(), @r" + def foo(x: int): pass + x[: list[Unknown | int]] = [1] + y[: list[Unknown | int]] = [2] + + foo(x[0]) + foo([x=]y[0]) + "); + } + #[test] fn test_function_call_with_positional_only_parameter() { let test = inlay_hint_test( diff --git a/crates/ty_ide/src/lib.rs b/crates/ty_ide/src/lib.rs index 6a23302561..057d75b688 100644 --- a/crates/ty_ide/src/lib.rs +++ b/crates/ty_ide/src/lib.rs @@ -338,9 +338,6 @@ mod tests { use ruff_python_trivia::textwrap::dedent; use ruff_text_size::TextSize; use ty_project::ProjectMetadata; - use ty_python_semantic::{ - Program, 
ProgramSettings, PythonPlatform, PythonVersionWithSource, SearchPathSettings, - }; /// A way to create a simple single-file (named `main.py`) cursor test. /// @@ -417,18 +414,7 @@ mod tests { SystemPathBuf::from("/"), )); - let search_paths = SearchPathSettings::new(vec![SystemPathBuf::from("/")]) - .to_search_paths(db.system(), db.vendored()) - .expect("Valid search path settings"); - - Program::from_settings( - &db, - ProgramSettings { - python_version: PythonVersionWithSource::default(), - python_platform: PythonPlatform::default(), - search_paths, - }, - ); + db.init_program().unwrap(); let mut cursor: Option = None; for &Source { diff --git a/crates/ty_ide/src/semantic_tokens.rs b/crates/ty_ide/src/semantic_tokens.rs index 12e5e6581b..a4df094f21 100644 --- a/crates/ty_ide/src/semantic_tokens.rs +++ b/crates/ty_ide/src/semantic_tokens.rs @@ -1,3 +1,37 @@ +//! This module walks the AST and collects a set of "semantic tokens" for a file +//! or a range within a file. Each semantic token provides a "token type" and zero +//! or more "modifiers". This information can be used by an editor to provide +//! color coding based on semantic meaning. +//! +//! Visual Studio has a very useful debugger that allows you to inspect the +//! semantic tokens for any given position in the code. Not only is this useful +//! to debug our semantic highlighting, it also allows easy comparison with +//! how Pylance (or other LSPs) highlight a certain token. You can open the scope inspector, +//! with the Command Palette (Command/Ctrl+Shift+P), then select the +//! `Developer: Inspect Editor Tokens and Scopes` command. +//! +//! Current limitations and areas for future improvement: +//! +//! TODO: Need to handle semantic tokens within quoted annotations. +//! +//! TODO: Need to properly handle Annotated expressions. All type arguments other +//! than the first should be treated as value expressions, not as type expressions. +//! +//! 
TODO: An identifier that resolves to a parameter when used within a function +//! should be classified as a parameter, selfParameter, or clsParameter token. +//! +//! TODO: Properties (or perhaps more generally, descriptor objects?) should be +//! classified as property tokens rather than just variables. +//! +//! TODO: Special forms like `Protocol` and `TypedDict` should probably be classified +//! as class tokens, but they are currently classified as variables. +//! +//! TODO: Type aliases (including those defined with the Python 3.12 "type" statement) +//! do not currently have a dedicated semantic token type, but they maybe should. +//! +//! TODO: Additional token modifiers might be added (e.g. for static methods, +//! abstract methods and classes). + use crate::Db; use bitflags::bitflags; use itertools::Itertools; @@ -11,45 +45,15 @@ use ruff_python_ast::{ AnyNodeRef, BytesLiteral, Expr, FString, InterpolatedStringElement, Stmt, StringLiteral, TypeParam, }; -use ruff_text_size::{Ranged, TextLen, TextRange}; +use ruff_text_size::{Ranged, TextLen, TextRange, TextSize}; use std::ops::Deref; +use ty_python_semantic::semantic_index::definition::Definition; +use ty_python_semantic::types::TypeVarKind; use ty_python_semantic::{ HasType, SemanticModel, semantic_index::definition::DefinitionKind, types::Type, - types::ide_support::definition_kind_for_name, + types::ide_support::definition_for_name, }; -// This module walks the AST and collects a set of "semantic tokens" for a file -// or a range within a file. Each semantic token provides a "token type" and zero -// or more "modifiers". This information can be used by an editor to provide -// color coding based on semantic meaning. - -// Current limitations and areas for future improvement: - -// TODO: Need to provide better classification for name tokens that are imported -// from other modules. 
Currently, these are classified based on their types, -// which often means they're classified as variables when they should be classes -// in many cases. - -// TODO: Need to handle semantic tokens within quoted annotations. - -// TODO: Need to properly handle Annotated expressions. All type arguments other -// than the first should be treated as value expressions, not as type expressions. - -// TODO: An identifier that resolves to a parameter when used within a function -// should be classified as a parameter, selfParameter, or clsParameter token. - -// TODO: Properties (or perhaps more generally, descriptor objects?) should be -// classified as property tokens rather than just variables. - -// TODO: Special forms like Protocol and TypedDict should probably be classified -// as class tokens, but they are currently classified as variables. - -// TODO: Type aliases (including those defined with the Python 3.12 "type" statement) -// do not currently have a dedicated semantic token type, but they maybe should. - -// TODO: Additional token modifiers might be added (e.g. for static methods, -// abstract methods and classes). - /// Semantic token types supported by the language server. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum SemanticTokenType { @@ -198,6 +202,7 @@ struct SemanticTokenVisitor<'db> { tokens: Vec, in_class_scope: bool, in_type_annotation: bool, + in_target_creating_definition: bool, range_filter: Option, } @@ -212,6 +217,7 @@ impl<'db> SemanticTokenVisitor<'db> { file, tokens: Vec::new(), in_class_scope: false, + in_target_creating_definition: false, in_type_annotation: false, range_filter, } @@ -226,7 +232,12 @@ impl<'db> SemanticTokenVisitor<'db> { let range = ranged.range(); // Only emit tokens that intersect with the range filter, if one is specified if let Some(range_filter) = self.range_filter { - if range.intersect(range_filter).is_none() { + // Only include ranges that have a non-empty overlap. 
Adjacent ranges + // should be excluded. + if range + .intersect(range_filter) + .is_none_or(TextRange::is_empty) + { return; } } @@ -254,13 +265,11 @@ impl<'db> SemanticTokenVisitor<'db> { fn classify_name(&self, name: &ast::ExprName) -> (SemanticTokenType, SemanticTokenModifier) { // First try to classify the token based on its definition kind. - let definition_kind = definition_kind_for_name(self.semantic_model.db(), self.file, name); + let definition = definition_for_name(self.semantic_model.db(), self.file, name); - if let Some(definition_kind) = definition_kind { + if let Some(definition) = definition { let name_str = name.id.as_str(); - if let Some(classification) = - self.classify_from_definition_kind(&definition_kind, name_str) - { + if let Some(classification) = self.classify_from_definition(definition, name_str) { return classification; } } @@ -271,14 +280,16 @@ impl<'db> SemanticTokenVisitor<'db> { self.classify_from_type_and_name_str(ty, name_str) } - fn classify_from_definition_kind( + fn classify_from_definition( &self, - definition_kind: &DefinitionKind<'_>, + definition: Definition, name_str: &str, ) -> Option<(SemanticTokenType, SemanticTokenModifier)> { let mut modifiers = SemanticTokenModifier::empty(); + let db = self.semantic_model.db(); + let model = SemanticModel::new(db, definition.file(db)); - match definition_kind { + match definition.kind(db) { DefinitionKind::Function(_) => { // Check if this is a method based on current scope if self.in_class_scope { @@ -289,7 +300,24 @@ impl<'db> SemanticTokenVisitor<'db> { } DefinitionKind::Class(_) => Some((SemanticTokenType::Class, modifiers)), DefinitionKind::TypeVar(_) => Some((SemanticTokenType::TypeParameter, modifiers)), - DefinitionKind::Parameter(_) => Some((SemanticTokenType::Parameter, modifiers)), + DefinitionKind::Parameter(parameter) => { + let parsed = parsed_module(db, definition.file(db)); + let ty = parameter.node(&parsed.load(db)).inferred_type(&model); + + if let 
Type::TypeVar(type_var) = ty { + match type_var.typevar(db).kind(db) { + TypeVarKind::TypingSelf => { + return Some((SemanticTokenType::SelfParameter, modifiers)); + } + TypeVarKind::Legacy + | TypeVarKind::ParamSpec + | TypeVarKind::Pep695ParamSpec + | TypeVarKind::Pep695 => {} + } + } + + Some((SemanticTokenType::Parameter, modifiers)) + } DefinitionKind::VariadicPositionalParameter(_) => { Some((SemanticTokenType::Parameter, modifiers)) } @@ -310,6 +338,25 @@ impl<'db> SemanticTokenVisitor<'db> { if Self::is_constant_name(name_str) { modifiers |= SemanticTokenModifier::READONLY; } + + let parsed = parsed_module(db, definition.file(db)); + let parsed = parsed.load(db); + let value = match definition.kind(db) { + DefinitionKind::Assignment(assignment) => Some(assignment.value(&parsed)), + _ => None, + }; + + if let Some(value) = value { + let value_ty = value.inferred_type(&model); + + if value_ty.is_class_literal() + || value_ty.is_subclass_of() + || value_ty.is_generic_alias() + { + return Some((SemanticTokenType::Class, modifiers)); + } + } + Some((SemanticTokenType::Variable, modifiers)) } } @@ -446,11 +493,11 @@ impl<'db> SemanticTokenVisitor<'db> { let name_start = name.start(); // Split the dotted name and calculate positions for each part - let mut current_offset = ruff_text_size::TextSize::default(); + let mut current_offset = TextSize::default(); for part in name_str.split('.') { if !part.is_empty() { self.add_token( - ruff_text_size::TextRange::at(name_start + current_offset, part.text_len()), + TextRange::at(name_start + current_offset, part.text_len()), token_type, SemanticTokenModifier::empty(), ); @@ -584,6 +631,7 @@ impl SourceOrderVisitor<'_> for SemanticTokenVisitor<'_> { // Clear the in_class_scope flag so inner functions // are not treated as methods let prev_in_class = self.in_class_scope; + self.in_class_scope = false; self.visit_body(&func.body); self.in_class_scope = prev_in_class; @@ -679,6 +727,27 @@ impl SourceOrderVisitor<'_> for 
SemanticTokenVisitor<'_> { ); } } + ast::Stmt::Assign(assignment) => { + self.in_target_creating_definition = true; + for element in &assignment.targets { + self.visit_expr(element); + } + self.in_target_creating_definition = false; + + self.visit_expr(&assignment.value); + } + ast::Stmt::AnnAssign(assignment) => { + self.in_target_creating_definition = true; + self.visit_expr(&assignment.target); + self.in_target_creating_definition = false; + + self.visit_expr(&assignment.annotation); + + if let Some(value) = &assignment.value { + self.visit_expr(value); + } + } + _ => { // For all other statement types, let the default visitor handle them walk_stmt(self, stmt); @@ -696,7 +765,10 @@ impl SourceOrderVisitor<'_> for SemanticTokenVisitor<'_> { fn visit_expr(&mut self, expr: &Expr) { match expr { ast::Expr::Name(name) => { - let (token_type, modifiers) = self.classify_name(name); + let (token_type, mut modifiers) = self.classify_name(name); + if self.in_target_creating_definition && name.ctx.is_store() { + modifiers |= SemanticTokenModifier::DEFINITION; + } self.add_token(name, token_type, modifiers); walk_expr(self, expr); } @@ -740,6 +812,15 @@ impl SourceOrderVisitor<'_> for SemanticTokenVisitor<'_> { // Visit the lambda body self.visit_expr(&lambda.body); } + + ast::Expr::Named(named) => { + let prev_in_target = self.in_target_creating_definition; + self.in_target_creating_definition = true; + self.visit_expr(&named.target); + self.in_target_creating_definition = prev_in_target; + + self.visit_expr(&named.value); + } _ => { // For all other expression types, let the default visitor handle them walk_expr(self, expr); @@ -925,126 +1006,118 @@ impl SourceOrderVisitor<'_> for SemanticTokenVisitor<'_> { #[cfg(test)] mod tests { use super::*; - use crate::tests::cursor_test; + use insta::assert_snapshot; - - /// Helper function to get semantic tokens for full file (for testing) - fn semantic_tokens_full_file(db: &dyn Db, file: File) -> SemanticTokens { - 
semantic_tokens(db, file, None) - } - - /// Helper function to convert semantic tokens to a snapshot-friendly text format - fn semantic_tokens_to_snapshot(db: &dyn Db, file: File, tokens: &SemanticTokens) -> String { - use std::fmt::Write; - let source = ruff_db::source::source_text(db, file); - let mut result = String::new(); - - for token in tokens.iter() { - let token_text = &source[token.range()]; - let modifiers_text = if token.modifiers.is_empty() { - String::new() - } else { - let mut mods = Vec::new(); - if token.modifiers.contains(SemanticTokenModifier::DEFINITION) { - mods.push("definition"); - } - if token.modifiers.contains(SemanticTokenModifier::READONLY) { - mods.push("readonly"); - } - if token.modifiers.contains(SemanticTokenModifier::ASYNC) { - mods.push("async"); - } - format!(" [{}]", mods.join(", ")) - }; - - writeln!( - result, - "{:?} @ {}..{}: {:?}{}", - token_text, - u32::from(token.range().start()), - u32::from(token.range().end()), - token.token_type, - modifiers_text - ) - .unwrap(); - } - - result - } + use ruff_db::{ + files::system_path_to_file, + system::{DbWithWritableSystem, SystemPath, SystemPathBuf}, + }; + use ty_project::ProjectMetadata; #[test] fn test_semantic_tokens_basic() { - let test = cursor_test("def foo(): pass"); + let test = SemanticTokenTest::new("def foo(): pass"); - let tokens = semantic_tokens_full_file(&test.db, test.cursor.file); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r###" + assert_snapshot!(test.to_snapshot(&tokens), @r###" "foo" @ 4..7: Function [definition] "###); } #[test] fn test_semantic_tokens_class() { - let test = cursor_test("class MyClass: pass"); + let test = SemanticTokenTest::new("class MyClass: pass"); - let tokens = semantic_tokens_full_file(&test.db, test.cursor.file); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r###" + 
assert_snapshot!(test.to_snapshot(&tokens), @r###" "MyClass" @ 6..13: Class [definition] "###); } #[test] fn test_semantic_tokens_variables() { - let test = cursor_test( + let test = SemanticTokenTest::new( " x = 42 -y = 'hello' +y = 'hello' ", ); - let tokens = semantic_tokens_full_file(&test.db, test.cursor.file); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r###" - "x" @ 1..2: Variable + assert_snapshot!(test.to_snapshot(&tokens), @r#" + "x" @ 1..2: Variable [definition] "42" @ 5..7: Number - "y" @ 8..9: Variable + "y" @ 8..9: Variable [definition] "'hello'" @ 12..19: String - "###); + "#); + } + + #[test] + fn test_semantic_tokens_walrus() { + let test = SemanticTokenTest::new( + " +if x := 42: + y = 'hello' +", + ); + + let tokens = test.highlight_file(); + + assert_snapshot!(test.to_snapshot(&tokens), @r#" + "x" @ 4..5: Variable [definition] + "42" @ 9..11: Number + "y" @ 17..18: Variable [definition] + "'hello'" @ 21..28: String + "#); } #[test] fn test_semantic_tokens_self_parameter() { - let test = cursor_test( + let test = SemanticTokenTest::new( " class MyClass: - def method(self, x): pass + def method(self, x): + self.x = 10 + + def method_unidiomatic_self(self2): + print(self2.x)) ", ); - let tokens = semantic_tokens_full_file(&test.db, test.cursor.file); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r###" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "MyClass" @ 7..14: Class [definition] "method" @ 24..30: Method [definition] "self" @ 31..35: SelfParameter "x" @ 37..38: Parameter - "###); + "self" @ 49..53: SelfParameter + "x" @ 54..55: Variable + "10" @ 58..60: Number + "method_unidiomatic_self" @ 70..93: Method [definition] + "self2" @ 94..99: SelfParameter + "print" @ 110..115: Function + "self2" @ 116..121: SelfParameter + "x" @ 122..123: Variable + "#); } #[test] fn 
test_semantic_tokens_cls_parameter() { - let test = cursor_test( + let test = SemanticTokenTest::new( " class MyClass: @classmethod - def method(cls, x): pass + def method(cls, x): pass ", ); - let tokens = semantic_tokens_full_file(&test.db, test.cursor.file); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "MyClass" @ 7..14: Class [definition] "classmethod" @ 21..32: Decorator "method" @ 41..47: Method [definition] @@ -1055,17 +1128,17 @@ class MyClass: #[test] fn test_semantic_tokens_staticmethod_parameter() { - let test = cursor_test( + let test = SemanticTokenTest::new( " class MyClass: @staticmethod - def method(x, y): pass + def method(x, y): pass ", ); - let tokens = semantic_tokens_full_file(&test.db, test.cursor.file); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "MyClass" @ 7..14: Class [definition] "staticmethod" @ 21..33: Decorator "method" @ 42..48: Method [definition] @@ -1076,19 +1149,19 @@ class MyClass: #[test] fn test_semantic_tokens_custom_self_cls_names() { - let test = cursor_test( + let test = SemanticTokenTest::new( " class MyClass: def method(instance, x): pass @classmethod def other(klass, y): pass - def complex_method(instance, posonly, /, regular, *args, kwonly, **kwargs): pass + def complex_method(instance, posonly, /, regular, *args, kwonly, **kwargs): pass ", ); - let tokens = semantic_tokens_full_file(&test.db, test.cursor.file); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "MyClass" @ 7..14: Class [definition] "method" @ 24..30: Method [definition] "instance" @ 31..39: SelfParameter @@ -1109,28 +1182,28 @@ class MyClass: #[test] fn 
test_semantic_tokens_modifiers() { - let test = cursor_test( + let test = SemanticTokenTest::new( " class MyClass: CONSTANT = 42 - async def method(self): pass + async def method(self): pass ", ); - let tokens = semantic_tokens_full_file(&test.db, test.cursor.file); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r###" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "MyClass" @ 7..14: Class [definition] - "CONSTANT" @ 20..28: Variable [readonly] + "CONSTANT" @ 20..28: Variable [definition, readonly] "42" @ 31..33: Number "method" @ 48..54: Method [definition, async] "self" @ 55..59: SelfParameter - "###); + "#); } #[test] fn test_semantic_classification_vs_heuristic() { - let test = cursor_test( + let test = SemanticTokenTest::new( " import sys class MyClass: @@ -1141,22 +1214,22 @@ def my_function(): x = MyClass() y = my_function() -z = sys.version +z = sys.version ", ); - let tokens = semantic_tokens(&test.db, test.cursor.file, None); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "sys" @ 8..11: Namespace "MyClass" @ 18..25: Class [definition] "my_function" @ 41..52: Function [definition] "42" @ 67..69: Number - "x" @ 71..72: Variable + "x" @ 71..72: Variable [definition] "MyClass" @ 75..82: Class - "y" @ 85..86: Variable + "y" @ 85..86: Variable [definition] "my_function" @ 89..100: Function - "z" @ 103..104: Variable + "z" @ 103..104: Variable [definition] "sys" @ 107..110: Namespace "version" @ 111..118: Variable "#); @@ -1164,57 +1237,102 @@ z = sys.version #[test] fn test_builtin_constants() { - let test = cursor_test( + let test = SemanticTokenTest::new( " x = True y = False -z = None +z = None ", ); - let tokens = semantic_tokens(&test.db, test.cursor.file, None); + let tokens = test.highlight_file(); - 
assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r###" - "x" @ 1..2: Variable + assert_snapshot!(test.to_snapshot(&tokens), @r#" + "x" @ 1..2: Variable [definition] "True" @ 5..9: BuiltinConstant - "y" @ 10..11: Variable + "y" @ 10..11: Variable [definition] "False" @ 14..19: BuiltinConstant - "z" @ 20..21: Variable + "z" @ 20..21: Variable [definition] "None" @ 24..28: BuiltinConstant - "###); + "#); } #[test] fn test_builtin_constants_in_expressions() { - let test = cursor_test( + let test = SemanticTokenTest::new( " def check(value): if value is None: return False return True -result = check(None) +result = check(None) ", ); - let tokens = semantic_tokens(&test.db, test.cursor.file, None); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "check" @ 5..10: Function [definition] "value" @ 11..16: Parameter - "value" @ 26..31: Variable + "value" @ 26..31: Parameter "None" @ 35..39: BuiltinConstant "False" @ 56..61: BuiltinConstant "True" @ 73..77: BuiltinConstant - "result" @ 79..85: Variable + "result" @ 79..85: Variable [definition] "check" @ 88..93: Function "None" @ 94..98: BuiltinConstant "#); } + #[test] + fn test_builtin_types() { + let test = SemanticTokenTest::new( + r#" + type U = str | int + + class Test: + a: int + b: bool + c: str + d: float + e: list[int] + f: list[float] + g: int | float + h: U + "#, + ); + + assert_snapshot!(test.to_snapshot(&test.highlight_file()), @r#" + "U" @ 6..7: TypeParameter + "str" @ 10..13: Class + "int" @ 16..19: Class + "Test" @ 27..31: Class [definition] + "a" @ 37..38: Variable [definition] + "int" @ 40..43: Class + "b" @ 48..49: Variable [definition] + "bool" @ 51..55: Class + "c" @ 60..61: Variable [definition] + "str" @ 63..66: Class + "d" @ 71..72: Variable [definition] + "float" @ 74..79: Class + "e" @ 84..85: Variable [definition] + "list" @ 87..91: 
Class + "int" @ 92..95: Class + "f" @ 101..102: Variable [definition] + "list" @ 104..108: Class + "float" @ 109..114: Class + "g" @ 120..121: Variable [definition] + "int" @ 123..126: Class + "float" @ 129..134: Class + "h" @ 139..140: Variable [definition] + "U" @ 142..143: TypeParameter + "#); + } + #[test] fn test_semantic_tokens_range() { - let test = cursor_test( + let test = SemanticTokenTest::new( " def function1(): x = 42 @@ -1223,45 +1341,42 @@ def function1(): def function2(): y = \"hello\" z = True - return y + z + return y + z ", ); - let full_tokens = semantic_tokens(&test.db, test.cursor.file, None); + let full_tokens = test.highlight_file(); // Get the range that covers only the second function // Hardcoded offsets: function2 starts at position 42, source ends at position 108 - let range = ruff_text_size::TextRange::new( - ruff_text_size::TextSize::from(42u32), - ruff_text_size::TextSize::from(108u32), - ); + let range = TextRange::new(TextSize::from(42u32), TextSize::from(108u32)); - let range_tokens = semantic_tokens(&test.db, test.cursor.file, Some(range)); + let range_tokens = test.highlight_range(range); // Range-based tokens should have fewer tokens than full scan // (should exclude tokens from function1) assert!(range_tokens.len() < full_tokens.len()); // Test both full tokens and range tokens with snapshots - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &full_tokens), @r#" + assert_snapshot!(test.to_snapshot(&full_tokens), @r#" "function1" @ 5..14: Function [definition] - "x" @ 22..23: Variable + "x" @ 22..23: Variable [definition] "42" @ 26..28: Number "x" @ 40..41: Variable "function2" @ 47..56: Function [definition] - "y" @ 64..65: Variable - "/"hello/"" @ 68..75: String - "z" @ 80..81: Variable + "y" @ 64..65: Variable [definition] + "\"hello\"" @ 68..75: String + "z" @ 80..81: Variable [definition] "True" @ 84..88: BuiltinConstant "y" @ 100..101: Variable "z" @ 104..105: Variable "#); - 
assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &range_tokens), @r#" + assert_snapshot!(test.to_snapshot(&range_tokens), @r#" "function2" @ 47..56: Function [definition] - "y" @ 64..65: Variable - "/"hello/"" @ 68..75: String - "z" @ 80..81: Variable + "y" @ 64..65: Variable [definition] + "\"hello\"" @ 68..75: String + "z" @ 80..81: Variable [definition] "True" @ 84..88: BuiltinConstant "y" @ 100..101: Variable "z" @ 104..105: Variable @@ -1278,20 +1393,45 @@ def function2(): } } + /// When a token starts right at where the requested range ends, + /// don't include it in the semantic tokens. + #[test] + fn test_semantic_tokens_range_excludes_boundary_tokens() { + let test = SemanticTokenTest::new( + " +x = 1 +y = 2 +z = 3 +", + ); + + // Range [6..13) starts where "1" ends and ends where "z" starts. + // Expected: only "y" @ 7..8 and "2" @ 11..12 (non-empty overlap with target range). + // Not included: "1" @ 5..6 and "z" @ 13..14 (adjacent, but not overlapping at offsets 6 and 13). 
+ let range = TextRange::new(TextSize::from(6), TextSize::from(13)); + + let range_tokens = test.highlight_range(range); + + assert_snapshot!(test.to_snapshot(&range_tokens), @r#" + "y" @ 7..8: Variable [definition] + "2" @ 11..12: Number + "#); + } + #[test] fn test_dotted_module_names() { - let test = cursor_test( + let test = SemanticTokenTest::new( " import os.path import sys.version_info from urllib.parse import urlparse -from collections.abc import Mapping +from collections.abc import Mapping ", ); - let tokens = semantic_tokens(&test.db, test.cursor.file, None); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "os" @ 8..10: Namespace "path" @ 11..15: Namespace "sys" @ 23..26: Namespace @@ -1307,7 +1447,7 @@ from collections.abc import Mapping #[test] fn test_module_type_classification() { - let test = cursor_test( + let test = SemanticTokenTest::new( " import os import sys @@ -1315,38 +1455,38 @@ from collections import defaultdict # os and sys should be classified as namespace/module types x = os -y = sys +y = sys ", ); - let tokens = semantic_tokens(&test.db, test.cursor.file, None); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "os" @ 8..10: Namespace "sys" @ 18..21: Namespace "collections" @ 27..38: Namespace "defaultdict" @ 46..57: Class - "x" @ 119..120: Namespace + "x" @ 119..120: Variable [definition] "os" @ 123..125: Namespace - "y" @ 126..127: Namespace + "y" @ 126..127: Variable [definition] "sys" @ 130..133: Namespace "#); } #[test] fn test_import_classification() { - let test = cursor_test( + let test = SemanticTokenTest::new( " from os import path from collections import defaultdict, OrderedDict, Counter from typing import List, Dict, Optional -from mymodule import CONSTANT, 
my_function, MyClass +from mymodule import CONSTANT, my_function, MyClass ", ); - let tokens = semantic_tokens(&test.db, test.cursor.file, None); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "os" @ 6..8: Namespace "path" @ 16..20: Namespace "collections" @ 26..37: Namespace @@ -1366,7 +1506,7 @@ from mymodule import CONSTANT, my_function, MyClass #[test] fn test_attribute_classification() { - let test = cursor_test( + let test = SemanticTokenTest::new( " import os import sys @@ -1375,10 +1515,10 @@ from typing import List class MyClass: CONSTANT = 42 - + def method(self): return \"hello\" - + @property def prop(self): return self.CONSTANT @@ -1391,13 +1531,13 @@ y = obj.method # method should be method (bound method) z = obj.CONSTANT # CONSTANT should be variable with readonly modifier w = obj.prop # prop should be property v = MyClass.method # method should be method (function) -u = List.__name__ # __name__ should be variable +u = List.__name__ # __name__ should be variable ", ); - let tokens = semantic_tokens(&test.db, test.cursor.file, None); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "os" @ 8..10: Namespace "sys" @ 18..21: Namespace "collections" @ 27..38: Namespace @@ -1405,34 +1545,34 @@ u = List.__name__ # __name__ should be variable "typing" @ 63..69: Namespace "List" @ 77..81: Variable "MyClass" @ 89..96: Class [definition] - "CONSTANT" @ 102..110: Variable [readonly] + "CONSTANT" @ 102..110: Variable [definition, readonly] "42" @ 113..115: Number "method" @ 125..131: Method [definition] "self" @ 132..136: SelfParameter - "/"hello/"" @ 154..161: String + "\"hello\"" @ 154..161: String "property" @ 168..176: Decorator "prop" @ 185..189: Method [definition] "self" @ 190..194: SelfParameter - 
"self" @ 212..216: TypeParameter + "self" @ 212..216: SelfParameter "CONSTANT" @ 217..225: Variable [readonly] - "obj" @ 227..230: Variable + "obj" @ 227..230: Variable [definition] "MyClass" @ 233..240: Class - "x" @ 278..279: Namespace + "x" @ 278..279: Variable [definition] "os" @ 282..284: Namespace "path" @ 285..289: Namespace - "y" @ 339..340: Method + "y" @ 339..340: Variable [definition] "obj" @ 343..346: Variable "method" @ 347..353: Method - "z" @ 405..406: Variable + "z" @ 405..406: Variable [definition] "obj" @ 409..412: Variable "CONSTANT" @ 413..421: Variable [readonly] - "w" @ 483..484: Variable + "w" @ 483..484: Variable [definition] "obj" @ 487..490: Variable "prop" @ 491..495: Variable - "v" @ 534..535: Function + "v" @ 534..535: Variable [definition] "MyClass" @ 538..545: Class "method" @ 546..552: Method - "u" @ 596..597: Variable + "u" @ 596..597: Variable [definition] "List" @ 600..604: Variable "__name__" @ 605..613: Variable "#); @@ -1440,30 +1580,30 @@ u = List.__name__ # __name__ should be variable #[test] fn test_attribute_fallback_classification() { - let test = cursor_test( + let test = SemanticTokenTest::new( " class MyClass: some_attr = \"value\" - + obj = MyClass() # Test attribute that might not have detailed semantic info x = obj.some_attr # Should fall back to variable, not property -y = obj.unknown_attr # Should fall back to variable +y = obj.unknown_attr # Should fall back to variable ", ); - let tokens = semantic_tokens(&test.db, test.cursor.file, None); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "MyClass" @ 7..14: Class [definition] - "some_attr" @ 20..29: Variable - "/"value/"" @ 32..39: String - "obj" @ 41..44: Variable + "some_attr" @ 20..29: Variable [definition] + "\"value\"" @ 32..39: String + "obj" @ 41..44: Variable [definition] "MyClass" @ 47..54: Class - "x" @ 117..118: Variable + 
"x" @ 117..118: Variable [definition] "obj" @ 121..124: Variable "some_attr" @ 125..134: Variable - "y" @ 187..188: Variable + "y" @ 187..188: Variable [definition] "obj" @ 191..194: Variable "unknown_attr" @ 195..207: Variable "#); @@ -1471,54 +1611,54 @@ y = obj.unknown_attr # Should fall back to variable #[test] fn test_constant_name_detection() { - let test = cursor_test( + let test = SemanticTokenTest::new( " class MyClass: UPPER_CASE = 42 lower_case = 24 MixedCase = 12 A = 1 - + obj = MyClass() x = obj.UPPER_CASE # Should have readonly modifier -y = obj.lower_case # Should not have readonly modifier +y = obj.lower_case # Should not have readonly modifier z = obj.MixedCase # Should not have readonly modifier -w = obj.A # Should not have readonly modifier (length == 1) +w = obj.A # Should not have readonly modifier (length == 1) ", ); - let tokens = semantic_tokens(&test.db, test.cursor.file, None); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "MyClass" @ 7..14: Class [definition] - "UPPER_CASE" @ 20..30: Variable [readonly] + "UPPER_CASE" @ 20..30: Variable [definition, readonly] "42" @ 33..35: Number - "lower_case" @ 40..50: Variable + "lower_case" @ 40..50: Variable [definition] "24" @ 53..55: Number - "MixedCase" @ 60..69: Variable + "MixedCase" @ 60..69: Variable [definition] "12" @ 72..74: Number - "A" @ 79..80: Variable + "A" @ 79..80: Variable [definition] "1" @ 83..84: Number - "obj" @ 86..89: Variable + "obj" @ 86..89: Variable [definition] "MyClass" @ 92..99: Class - "x" @ 102..103: Variable + "x" @ 102..103: Variable [definition] "obj" @ 106..109: Variable "UPPER_CASE" @ 110..120: Variable [readonly] - "y" @ 156..157: Variable + "y" @ 156..157: Variable [definition] "obj" @ 160..163: Variable "lower_case" @ 164..174: Variable - "z" @ 216..217: Variable - "obj" @ 220..223: Variable - "MixedCase" @ 224..233: Variable - 
"w" @ 274..275: Variable - "obj" @ 278..281: Variable - "A" @ 282..283: Variable + "z" @ 214..215: Variable [definition] + "obj" @ 218..221: Variable + "MixedCase" @ 222..231: Variable + "w" @ 272..273: Variable [definition] + "obj" @ 276..279: Variable + "A" @ 280..281: Variable "#); } #[test] fn test_type_annotations() { - let test = cursor_test( + let test = SemanticTokenTest::new( r#" from typing import List, Optional @@ -1526,13 +1666,13 @@ def function_with_annotations(param1: int, param2: str) -> Optional[List[str]]: pass x: int = 42 -y: Optional[str] = None +y: Optional[str] = None "#, ); - let tokens = semantic_tokens(&test.db, test.cursor.file, None); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "typing" @ 6..12: Namespace "List" @ 20..24: Variable "Optional" @ 26..34: Variable @@ -1544,10 +1684,10 @@ y: Optional[str] = None "Optional" @ 95..103: Variable "List" @ 104..108: Variable "str" @ 109..112: Class - "x" @ 126..127: Variable + "x" @ 126..127: Variable [definition] "int" @ 129..132: Class "42" @ 135..137: Number - "y" @ 138..139: Variable + "y" @ 138..139: Variable [definition] "Optional" @ 141..149: Variable "str" @ 150..153: Class "None" @ 157..161: BuiltinConstant @@ -1556,37 +1696,37 @@ y: Optional[str] = None #[test] fn test_debug_int_classification() { - let test = cursor_test( + let test = SemanticTokenTest::new( " -x: int = 42 +x: int = 42 ", ); - let tokens = semantic_tokens(&test.db, test.cursor.file, None); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r###" - "x" @ 1..2: Variable + assert_snapshot!(test.to_snapshot(&tokens), @r#" + "x" @ 1..2: Variable [definition] "int" @ 4..7: Class "42" @ 10..12: Number - "###); + "#); } #[test] fn test_debug_user_defined_type_classification() { - let test = cursor_test( + let test = 
SemanticTokenTest::new( " class MyClass: pass -x: MyClass = MyClass() +x: MyClass = MyClass() ", ); - let tokens = semantic_tokens(&test.db, test.cursor.file, None); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "MyClass" @ 7..14: Class [definition] - "x" @ 26..27: Variable + "x" @ 26..27: Variable [definition] "MyClass" @ 29..36: Class "MyClass" @ 39..46: Class "#); @@ -1594,7 +1734,7 @@ x: MyClass = MyClass() #[test] fn test_type_annotation_vs_variable_classification() { - let test = cursor_test( + let test = SemanticTokenTest::new( " from typing import List, Optional @@ -1606,16 +1746,16 @@ def test_function(param: int, other: MyClass) -> Optional[List[str]]: x: int = 42 y: MyClass = MyClass() z: List[str] = [\"hello\"] - + # Type annotations should be Class tokens: # int, MyClass, Optional, List, str - return None + return None ", ); - let tokens = semantic_tokens(&test.db, test.cursor.file, None); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "typing" @ 6..12: Namespace "List" @ 20..24: Variable "Optional" @ 26..34: Variable @@ -1628,23 +1768,23 @@ def test_function(param: int, other: MyClass) -> Optional[List[str]]: "Optional" @ 110..118: Variable "List" @ 119..123: Variable "str" @ 124..127: Class - "x" @ 190..191: Variable + "x" @ 190..191: Variable [definition] "int" @ 193..196: Class "42" @ 199..201: Number - "y" @ 206..207: Variable + "y" @ 206..207: Variable [definition] "MyClass" @ 209..216: Class "MyClass" @ 219..226: Class - "z" @ 233..234: Variable + "z" @ 233..234: Variable [definition] "List" @ 236..240: Variable "str" @ 241..244: Class - "/"hello/"" @ 249..256: String + "\"hello\"" @ 249..256: String "None" @ 357..361: BuiltinConstant "#); } #[test] fn 
test_protocol_types_in_annotations() { - let test = cursor_test( + let test = SemanticTokenTest::new( " from typing import Protocol @@ -1653,12 +1793,12 @@ class MyProtocol(Protocol): def test_function(param: MyProtocol) -> None: pass -", +", ); - let tokens = semantic_tokens(&test.db, test.cursor.file, None); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "typing" @ 6..12: Namespace "Protocol" @ 20..28: Variable "MyProtocol" @ 36..46: Class [definition] @@ -1675,7 +1815,7 @@ def test_function(param: MyProtocol) -> None: #[test] fn test_protocol_type_annotation_vs_value_context() { - let test = cursor_test( + let test = SemanticTokenTest::new( " from typing import Protocol @@ -1685,15 +1825,15 @@ class MyProtocol(Protocol): # Value context - MyProtocol is still a class literal, so should be Class my_protocol_var = MyProtocol -# Type annotation context - should be Class +# Type annotation context - should be Class def test_function(param: MyProtocol) -> MyProtocol: return param -", +", ); - let tokens = semantic_tokens(&test.db, test.cursor.file, None); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "typing" @ 6..12: Namespace "Protocol" @ 20..28: Variable "MyProtocol" @ 36..46: Class [definition] @@ -1701,19 +1841,67 @@ def test_function(param: MyProtocol) -> MyProtocol: "method" @ 66..72: Method [definition] "self" @ 73..77: SelfParameter "int" @ 82..85: Class - "my_protocol_var" @ 166..181: Class + "my_protocol_var" @ 166..181: Class [definition] "MyProtocol" @ 184..194: Class - "test_function" @ 246..259: Function [definition] - "param" @ 260..265: Parameter - "MyProtocol" @ 267..277: Class - "MyProtocol" @ 282..292: Class - "param" @ 305..310: Parameter + "test_function" @ 244..257: Function 
[definition] + "param" @ 258..263: Parameter + "MyProtocol" @ 265..275: Class + "MyProtocol" @ 280..290: Class + "param" @ 303..308: Parameter + "#); + } + + #[test] + fn type_alias_type_of() { + let test = SemanticTokenTest::new( + " +class Test[T]: ... + +my_type_alias = Test[str] # TODO: `my_type_alias` should be classified as a Class + +def test_function(param: my_type_alias): ... +", + ); + + let tokens = test.highlight_file(); + + assert_snapshot!(test.to_snapshot(&tokens), @r#" + "Test" @ 7..11: Class [definition] + "T" @ 12..13: TypeParameter [definition] + "my_type_alias" @ 21..34: Class [definition] + "Test" @ 37..41: Class + "str" @ 42..45: Class + "test_function" @ 109..122: Function [definition] + "param" @ 123..128: Parameter + "my_type_alias" @ 130..143: Class + "#); + } + + #[test] + fn type_alias_to_generic_alias() { + let test = SemanticTokenTest::new( + " +my_type_alias = type[str] + +def test_function(param: my_type_alias): ... +", + ); + + let tokens = test.highlight_file(); + + assert_snapshot!(test.to_snapshot(&tokens), @r#" + "my_type_alias" @ 1..14: Variable [definition] + "type" @ 17..21: Class + "str" @ 22..25: Class + "test_function" @ 32..45: Function [definition] + "param" @ 46..51: Parameter + "my_type_alias" @ 53..66: Variable "#); } #[test] fn test_type_parameters_pep695() { - let test = cursor_test( + let test = SemanticTokenTest::new( " # Test Python 3.12 PEP 695 type parameter syntax @@ -1721,7 +1909,7 @@ def test_function(param: MyProtocol) -> MyProtocol: def func[T](x: T) -> T: return x -# Generic function with TypeVarTuple +# Generic function with TypeVarTuple def func_tuple[*Ts](args: tuple[*Ts]) -> tuple[*Ts]: return args @@ -1736,10 +1924,10 @@ class Container[T, U]: def __init__(self, value1: T, value2: U): self.value1: T = value1 self.value2: U = value2 - + def get_first(self) -> T: return self.value1 - + def get_second(self) -> U: return self.value2 @@ -1747,118 +1935,118 @@ class Container[T, U]: class 
BoundedContainer[T: int, U = str]: def process(self, x: T, y: U) -> tuple[T, U]: return (x, y) -", +", ); - let tokens = semantic_tokens(&test.db, test.cursor.file, None); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "func" @ 87..91: Function [definition] "T" @ 92..93: TypeParameter [definition] "x" @ 95..96: Parameter "T" @ 98..99: TypeParameter "T" @ 104..105: TypeParameter "x" @ 118..119: Parameter - "func_tuple" @ 164..174: Function [definition] - "Ts" @ 176..178: TypeParameter [definition] - "args" @ 180..184: Parameter - "tuple" @ 186..191: Class - "Ts" @ 193..195: Variable - "tuple" @ 201..206: Class - "Ts" @ 208..210: Variable - "args" @ 224..228: Parameter - "func_paramspec" @ 268..282: Function [definition] - "P" @ 285..286: TypeParameter [definition] - "func" @ 288..292: Parameter - "Callable" @ 294..302: Variable - "P" @ 303..304: Variable - "int" @ 306..309: Class - "Callable" @ 315..323: Variable - "P" @ 324..325: Variable - "str" @ 327..330: Class - "wrapper" @ 341..348: Function [definition] - "args" @ 350..354: Parameter - "P" @ 356..357: Variable - "args" @ 358..362: Variable - "kwargs" @ 366..372: Parameter - "P" @ 374..375: Variable - "kwargs" @ 376..382: Variable - "str" @ 387..390: Class - "str" @ 407..410: Class - "func" @ 411..415: Variable - "args" @ 417..421: Parameter - "kwargs" @ 425..431: Parameter - "wrapper" @ 445..452: Function - "Container" @ 506..515: Class [definition] - "T" @ 516..517: TypeParameter [definition] - "U" @ 519..520: TypeParameter [definition] - "__init__" @ 531..539: Method [definition] - "self" @ 540..544: SelfParameter - "value1" @ 546..552: Parameter - "T" @ 554..555: TypeParameter - "value2" @ 557..563: Parameter - "U" @ 565..566: TypeParameter - "self" @ 577..581: TypeParameter - "value1" @ 582..588: Variable - "T" @ 590..591: TypeParameter - "value1" @ 594..600: Parameter - 
"self" @ 609..613: TypeParameter - "value2" @ 614..620: Variable - "U" @ 622..623: TypeParameter - "value2" @ 626..632: Parameter - "get_first" @ 642..651: Method [definition] - "self" @ 652..656: SelfParameter - "T" @ 661..662: TypeParameter - "self" @ 679..683: TypeParameter - "value1" @ 684..690: Variable - "get_second" @ 700..710: Method [definition] - "self" @ 711..715: SelfParameter - "U" @ 720..721: TypeParameter - "self" @ 738..742: TypeParameter - "value2" @ 743..749: Variable - "BoundedContainer" @ 798..814: Class [definition] - "T" @ 815..816: TypeParameter [definition] - "int" @ 818..821: Class - "U" @ 823..824: TypeParameter [definition] - "str" @ 827..830: Class - "process" @ 841..848: Method [definition] - "self" @ 849..853: SelfParameter - "x" @ 855..856: Parameter - "T" @ 858..859: TypeParameter - "y" @ 861..862: Parameter - "U" @ 864..865: TypeParameter - "tuple" @ 870..875: Class - "T" @ 876..877: TypeParameter - "U" @ 879..880: TypeParameter - "x" @ 899..900: Parameter - "y" @ 902..903: Parameter + "func_tuple" @ 162..172: Function [definition] + "Ts" @ 174..176: TypeParameter [definition] + "args" @ 178..182: Parameter + "tuple" @ 184..189: Class + "Ts" @ 191..193: Variable + "tuple" @ 199..204: Class + "Ts" @ 206..208: Variable + "args" @ 222..226: Parameter + "func_paramspec" @ 266..280: Function [definition] + "P" @ 283..284: TypeParameter [definition] + "func" @ 286..290: Parameter + "Callable" @ 292..300: Variable + "P" @ 301..302: Variable + "int" @ 304..307: Class + "Callable" @ 313..321: Variable + "P" @ 322..323: Variable + "str" @ 325..328: Class + "wrapper" @ 339..346: Function [definition] + "args" @ 348..352: Parameter + "P" @ 354..355: Variable + "args" @ 356..360: Variable + "kwargs" @ 364..370: Parameter + "P" @ 372..373: Variable + "kwargs" @ 374..380: Variable + "str" @ 385..388: Class + "str" @ 405..408: Class + "func" @ 409..413: Parameter + "args" @ 415..419: Parameter + "kwargs" @ 423..429: Parameter + "wrapper" @ 
443..450: Function + "Container" @ 504..513: Class [definition] + "T" @ 514..515: TypeParameter [definition] + "U" @ 517..518: TypeParameter [definition] + "__init__" @ 529..537: Method [definition] + "self" @ 538..542: SelfParameter + "value1" @ 544..550: Parameter + "T" @ 552..553: TypeParameter + "value2" @ 555..561: Parameter + "U" @ 563..564: TypeParameter + "self" @ 575..579: SelfParameter + "value1" @ 580..586: Variable + "T" @ 588..589: TypeParameter + "value1" @ 592..598: Parameter + "self" @ 607..611: SelfParameter + "value2" @ 612..618: Variable + "U" @ 620..621: TypeParameter + "value2" @ 624..630: Parameter + "get_first" @ 640..649: Method [definition] + "self" @ 650..654: SelfParameter + "T" @ 659..660: TypeParameter + "self" @ 677..681: SelfParameter + "value1" @ 682..688: Variable + "get_second" @ 698..708: Method [definition] + "self" @ 709..713: SelfParameter + "U" @ 718..719: TypeParameter + "self" @ 736..740: SelfParameter + "value2" @ 741..747: Variable + "BoundedContainer" @ 796..812: Class [definition] + "T" @ 813..814: TypeParameter [definition] + "int" @ 816..819: Class + "U" @ 821..822: TypeParameter [definition] + "str" @ 825..828: Class + "process" @ 839..846: Method [definition] + "self" @ 847..851: SelfParameter + "x" @ 853..854: Parameter + "T" @ 856..857: TypeParameter + "y" @ 859..860: Parameter + "U" @ 862..863: TypeParameter + "tuple" @ 868..873: Class + "T" @ 874..875: TypeParameter + "U" @ 877..878: TypeParameter + "x" @ 897..898: Parameter + "y" @ 900..901: Parameter "#); } #[test] fn test_type_parameters_usage_in_function_body() { - let test = cursor_test( + let test = SemanticTokenTest::new( " def generic_function[T](value: T) -> T: # Type parameter T should be recognized here too result: T = value temp = result # This could potentially be T as well return result -", +", ); - let tokens = semantic_tokens(&test.db, test.cursor.file, None); + let tokens = test.highlight_file(); - 
assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "generic_function" @ 5..21: Function [definition] "T" @ 22..23: TypeParameter [definition] "value" @ 25..30: Parameter "T" @ 32..33: TypeParameter "T" @ 38..39: TypeParameter - "result" @ 98..104: Variable + "result" @ 98..104: Variable [definition] "T" @ 106..107: TypeParameter "value" @ 110..115: Parameter - "temp" @ 120..124: TypeParameter + "temp" @ 120..124: Variable [definition] "result" @ 127..133: Variable "result" @ 184..190: Variable "#); @@ -1866,7 +2054,7 @@ def generic_function[T](value: T) -> T: #[test] fn test_decorator_classification() { - let test = cursor_test( + let test = SemanticTokenTest::new( r#" @staticmethod @property @@ -1876,117 +2064,117 @@ def my_function(): @dataclass class MyClass: - pass + pass "#, ); - let tokens = semantic_tokens_full_file(&test.db, test.cursor.file); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r###" "staticmethod" @ 2..14: Decorator "property" @ 16..24: Decorator "app" @ 26..29: Variable "route" @ 30..35: Variable - "/"/path/"" @ 36..43: String + "\"/path\"" @ 36..43: String "my_function" @ 49..60: Function [definition] "dataclass" @ 75..84: Decorator "MyClass" @ 91..98: Class [definition] - "#); + "###); } #[test] fn test_implicitly_concatenated_strings() { - let test = cursor_test( + let test = SemanticTokenTest::new( r#"x = "hello" "world" -y = ("multi" - "line" +y = ("multi" + "line" "string") -z = 'single' "mixed" 'quotes'"#, +z = 'single' "mixed" 'quotes'"#, ); - let tokens = semantic_tokens_full_file(&test.db, test.cursor.file); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" - "x" @ 0..1: Variable - "/"hello/"" @ 4..11: String - "/"world/"" @ 12..19: String - "y" 
@ 20..21: Variable - "/"multi/"" @ 25..32: String - "/"line/"" @ 39..45: String - "/"string/"" @ 52..60: String - "z" @ 62..63: Variable - "'single'" @ 66..74: String - "/"mixed/"" @ 75..82: String - "'quotes'" @ 83..91: String + assert_snapshot!(test.to_snapshot(&tokens), @r#" + "x" @ 0..1: Variable [definition] + "\"hello\"" @ 4..11: String + "\"world\"" @ 12..19: String + "y" @ 20..21: Variable [definition] + "\"multi\"" @ 25..32: String + "\"line\"" @ 38..44: String + "\"string\"" @ 50..58: String + "z" @ 60..61: Variable [definition] + "'single'" @ 64..72: String + "\"mixed\"" @ 73..80: String + "'quotes'" @ 81..89: String "#); } #[test] fn test_bytes_literals() { - let test = cursor_test( + let test = SemanticTokenTest::new( r#"x = b"hello" b"world" -y = (b"multi" - b"line" +y = (b"multi" + b"line" b"bytes") -z = b'single' b"mixed" b'quotes'"#, +z = b'single' b"mixed" b'quotes'"#, ); - let tokens = semantic_tokens_full_file(&test.db, test.cursor.file); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" - "x" @ 0..1: Variable - "b/"hello/"" @ 4..12: String - "b/"world/"" @ 13..21: String - "y" @ 22..23: Variable - "b/"multi/"" @ 27..35: String - "b/"line/"" @ 42..49: String - "b/"bytes/"" @ 56..64: String - "z" @ 66..67: Variable - "b'single'" @ 70..79: String - "b/"mixed/"" @ 80..88: String - "b'quotes'" @ 89..98: String + assert_snapshot!(test.to_snapshot(&tokens), @r#" + "x" @ 0..1: Variable [definition] + "b\"hello\"" @ 4..12: String + "b\"world\"" @ 13..21: String + "y" @ 22..23: Variable [definition] + "b\"multi\"" @ 27..35: String + "b\"line\"" @ 41..48: String + "b\"bytes\"" @ 54..62: String + "z" @ 64..65: Variable [definition] + "b'single'" @ 68..77: String + "b\"mixed\"" @ 78..86: String + "b'quotes'" @ 87..96: String "#); } #[test] fn test_mixed_string_and_bytes_literals() { - let test = cursor_test( + let test = SemanticTokenTest::new( r#"# Test mixed string and bytes 
literals string_concat = "hello" "world" bytes_concat = b"hello" b"world" mixed_quotes_str = 'single' "double" 'single' mixed_quotes_bytes = b'single' b"double" b'single' regular_string = "just a string" -regular_bytes = b"just bytes""#, +regular_bytes = b"just bytes""#, ); - let tokens = semantic_tokens_full_file(&test.db, test.cursor.file); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" - "string_concat" @ 39..52: Variable - "/"hello/"" @ 55..62: String - "/"world/"" @ 63..70: String - "bytes_concat" @ 71..83: Variable - "b/"hello/"" @ 86..94: String - "b/"world/"" @ 95..103: String - "mixed_quotes_str" @ 104..120: Variable + assert_snapshot!(test.to_snapshot(&tokens), @r#" + "string_concat" @ 39..52: Variable [definition] + "\"hello\"" @ 55..62: String + "\"world\"" @ 63..70: String + "bytes_concat" @ 71..83: Variable [definition] + "b\"hello\"" @ 86..94: String + "b\"world\"" @ 95..103: String + "mixed_quotes_str" @ 104..120: Variable [definition] "'single'" @ 123..131: String - "/"double/"" @ 132..140: String + "\"double\"" @ 132..140: String "'single'" @ 141..149: String - "mixed_quotes_bytes" @ 150..168: Variable + "mixed_quotes_bytes" @ 150..168: Variable [definition] "b'single'" @ 171..180: String - "b/"double/"" @ 181..190: String + "b\"double\"" @ 181..190: String "b'single'" @ 191..200: String - "regular_string" @ 201..215: Variable - "/"just a string/"" @ 218..233: String - "regular_bytes" @ 234..247: Variable - "b/"just bytes/"" @ 250..263: String + "regular_string" @ 201..215: Variable [definition] + "\"just a string\"" @ 218..233: String + "regular_bytes" @ 234..247: Variable [definition] + "b\"just bytes\"" @ 250..263: String "#); } #[test] fn test_fstring_with_mixed_literals() { - let test = cursor_test( + let test = SemanticTokenTest::new( r#" # Test f-strings with various literal types name = "Alice" @@ -2000,30 +2188,30 @@ result = f"Hello {name}! 
Value: {value}, Data: {data!r}" mixed = f"prefix" + b"suffix" # Complex f-string with nested expressions -complex_fstring = f"User: {name.upper()}, Count: {len(data)}, Hex: {value:x}" +complex_fstring = f"User: {name.upper()}, Count: {len(data)}, Hex: {value:x}" "#, ); - let tokens = semantic_tokens_full_file(&test.db, test.cursor.file); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" - "name" @ 45..49: Variable - "/"Alice/"" @ 52..59: String - "data" @ 60..64: Variable - "b/"hello/"" @ 67..75: String - "value" @ 76..81: Variable + assert_snapshot!(test.to_snapshot(&tokens), @r#" + "name" @ 45..49: Variable [definition] + "\"Alice\"" @ 52..59: String + "data" @ 60..64: Variable [definition] + "b\"hello\"" @ 67..75: String + "value" @ 76..81: Variable [definition] "42" @ 84..86: Number - "result" @ 153..159: Variable + "result" @ 153..159: Variable [definition] "Hello " @ 164..170: String "name" @ 171..175: Variable "! 
Value: " @ 176..185: String "value" @ 186..191: Variable ", Data: " @ 192..200: String "data" @ 201..205: Variable - "mixed" @ 266..271: Variable + "mixed" @ 266..271: Variable [definition] "prefix" @ 276..282: String - "b/"suffix/"" @ 286..295: String - "complex_fstring" @ 340..355: Variable + "b\"suffix\"" @ 286..295: String + "complex_fstring" @ 340..355: Variable [definition] "User: " @ 360..366: String "name" @ 367..371: Variable "upper" @ 372..377: Method @@ -2038,7 +2226,7 @@ complex_fstring = f"User: {name.upper()}, Count: {len(data)}, Hex: {value:x}" + + return inner "#, ); - let tokens = semantic_tokens_full_file(&test.db, test.cursor.file); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" - "x" @ 1..2: Variable - "/"global_value/"" @ 5..19: String - "y" @ 20..21: Variable - "/"another_global/"" @ 24..40: String + assert_snapshot!(test.to_snapshot(&tokens), @r#" + "x" @ 1..2: Variable [definition] + "\"global_value\"" @ 5..19: String + "y" @ 20..21: Variable [definition] + "\"another_global\"" @ 24..40: String "outer" @ 46..51: Function [definition] - "x" @ 59..60: Variable - "/"outer_value/"" @ 63..76: String - "z" @ 81..82: Variable - "/"outer_local/"" @ 85..98: String + "x" @ 59..60: Variable [definition] + "\"outer_value\"" @ 63..76: String + "z" @ 81..82: Variable [definition] + "\"outer_local\"" @ 85..98: String "inner" @ 108..113: Function [definition] "x" @ 134..135: Variable "z" @ 137..138: Variable "y" @ 189..190: Variable - "x" @ 239..240: Variable - "/"modified/"" @ 243..253: String - "y" @ 262..263: Variable - "/"modified_global/"" @ 266..283: String - "z" @ 292..293: Variable - "/"modified_local/"" @ 296..312: String + "x" @ 239..240: Variable [definition] + "\"modified\"" @ 243..253: String + "y" @ 262..263: Variable [definition] + "\"modified_global\"" @ 266..283: String + "z" @ 292..293: Variable [definition] + "\"modified_local\"" @ 296..312: String "deeper" @ 
326..332: Function [definition] "x" @ 357..358: Variable "y" @ 398..399: Variable @@ -2100,24 +2288,24 @@ def outer(): #[test] fn test_nonlocal_global_edge_cases() { - let test = cursor_test( + let test = SemanticTokenTest::new( r#" # Single variable statements def test(): global x nonlocal y - + # Multiple variables in one statement global a, b, c nonlocal d, e, f - - return x + y + a + b + c + d + e + f + + return x + y + a + b + c + d + e + f "#, ); - let tokens = semantic_tokens_full_file(&test.db, test.cursor.file); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "test" @ 34..38: Function [definition] "x" @ 53..54: Variable "y" @ 68..69: Variable @@ -2140,7 +2328,7 @@ def test(): #[test] fn test_pattern_matching() { - let test = cursor_test( + let test = SemanticTokenTest::new( r#" def process_data(data): match data: @@ -2152,19 +2340,19 @@ def process_data(data): return sequence case value as fallback: print(f"Fallback: {fallback}") - return fallback + return fallback "#, ); - let tokens = semantic_tokens_full_file(&test.db, test.cursor.file); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "process_data" @ 5..17: Function [definition] "data" @ 18..22: Parameter - "data" @ 35..39: Variable - "/"name/"" @ 55..61: String + "data" @ 35..39: Parameter + "\"name\"" @ 55..61: String "name" @ 63..67: Variable - "/"age/"" @ 69..74: String + "\"age\"" @ 69..74: String "age" @ 76..79: Variable "rest" @ 83..87: Variable "person" @ 92..98: Variable @@ -2195,7 +2383,7 @@ def process_data(data): #[test] fn test_exception_handlers() { - let test = cursor_test( + let test = SemanticTokenTest::new( r#" try: x = 1 / 0 @@ -2206,14 +2394,14 @@ except (TypeError, RuntimeError) as re: except Exception as e: print(e) 
finally: - pass + pass "#, ); - let tokens = semantic_tokens_full_file(&test.db, test.cursor.file); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" - "x" @ 10..11: Variable + assert_snapshot!(test.to_snapshot(&tokens), @r#" + "x" @ 10..11: Variable [definition] "1" @ 14..15: Number "0" @ 18..19: Number "ValueError" @ 27..37: Class @@ -2234,7 +2422,7 @@ finally: #[test] fn test_self_attribute_expression() { - let test = cursor_test( + let test = SemanticTokenTest::new( r#" from typing import Self @@ -2244,32 +2432,30 @@ class C: self.annotated: int = 1 self.non_annotated = 1 self.x.test() - self.x() - - + self.x() "#, ); - let tokens = semantic_tokens_full_file(&test.db, test.cursor.file); + let tokens = test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "typing" @ 6..12: Namespace "Self" @ 20..24: Variable "C" @ 33..34: Class [definition] "__init__" @ 44..52: Method [definition] "self" @ 53..57: SelfParameter - "Self" @ 59..63: TypeParameter - "self" @ 74..78: Parameter + "Self" @ 59..63: Variable + "self" @ 74..78: SelfParameter "annotated" @ 79..88: Variable "int" @ 90..93: Class "1" @ 96..97: Number - "self" @ 106..110: Parameter + "self" @ 106..110: SelfParameter "non_annotated" @ 111..124: Variable "1" @ 127..128: Number - "self" @ 137..141: Parameter + "self" @ 137..141: SelfParameter "x" @ 142..143: Variable "test" @ 144..148: Variable - "self" @ 159..163: Parameter + "self" @ 159..163: SelfParameter "x" @ 164..165: Variable "#); } @@ -2277,16 +2463,16 @@ class C: /// Regression test for #[test] fn test_invalid_kwargs() { - let test = cursor_test( + let test = SemanticTokenTest::new( r#" -def foo(self, **key, value=10): +def foo(self, **key, value=10): return "#, ); - let tokens = semantic_tokens_full_file(&test.db, test.cursor.file); + let tokens = 
test.highlight_file(); - assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "foo" @ 5..8: Function [definition] "self" @ 9..13: Parameter "key" @ 17..20: Parameter @@ -2294,4 +2480,77 @@ def foo(self, **key, value=10): "10" @ 28..30: Number "#); } + + pub(super) struct SemanticTokenTest { + pub(super) db: ty_project::TestDb, + file: File, + } + + impl SemanticTokenTest { + fn new(source: &str) -> Self { + let mut db = ty_project::TestDb::new(ProjectMetadata::new( + "test".into(), + SystemPathBuf::from("/"), + )); + + db.init_program().unwrap(); + + let path = SystemPath::new("src/main.py"); + db.write_file(path, ruff_python_trivia::textwrap::dedent(source)) + .expect("Write to memory file system to always succeed"); + + let file = system_path_to_file(&db, path).expect("newly written file to existing"); + + Self { db, file } + } + + /// Get semantic tokens for the entire file + fn highlight_file(&self) -> SemanticTokens { + semantic_tokens(&self.db, self.file, None) + } + + /// Get semantic tokens for a specific range in the file + fn highlight_range(&self, range: TextRange) -> SemanticTokens { + semantic_tokens(&self.db, self.file, Some(range)) + } + + /// Helper function to convert semantic tokens to a snapshot-friendly text format + fn to_snapshot(&self, tokens: &SemanticTokens) -> String { + use std::fmt::Write; + let source = ruff_db::source::source_text(&self.db, self.file); + let mut result = String::new(); + + for token in tokens.iter() { + let token_text = &source[token.range()]; + let modifiers_text = if token.modifiers.is_empty() { + String::new() + } else { + let mut mods = Vec::new(); + if token.modifiers.contains(SemanticTokenModifier::DEFINITION) { + mods.push("definition"); + } + if token.modifiers.contains(SemanticTokenModifier::READONLY) { + mods.push("readonly"); + } + if token.modifiers.contains(SemanticTokenModifier::ASYNC) { + mods.push("async"); + } + 
format!(" [{}]", mods.join(", ")) + }; + + writeln!( + result, + "{:?} @ {}..{}: {:?}{}", + token_text, + u32::from(token.start()), + u32::from(token.end()), + token.token_type, + modifiers_text + ) + .unwrap(); + } + + result + } + } } diff --git a/crates/ty_project/src/db.rs b/crates/ty_project/src/db.rs index 5e2105839e..8f6ec20c95 100644 --- a/crates/ty_project/src/db.rs +++ b/crates/ty_project/src/db.rs @@ -516,11 +516,13 @@ pub(crate) mod tests { use std::sync::{Arc, Mutex}; use ruff_db::Db as SourceDb; - use ruff_db::files::Files; + use ruff_db::files::{FileRootKind, Files}; use ruff_db::system::{DbWithTestSystem, System, TestSystem}; use ruff_db::vendored::VendoredFileSystem; - use ty_python_semantic::Program; use ty_python_semantic::lint::{LintRegistry, RuleSelection}; + use ty_python_semantic::{ + Program, ProgramSettings, PythonPlatform, PythonVersionWithSource, SearchPathSettings, + }; use crate::db::Db; use crate::{Project, ProjectMetadata}; @@ -560,6 +562,27 @@ pub(crate) mod tests { db.project = Some(project); db } + + pub fn init_program(&mut self) -> anyhow::Result<()> { + let root = self.project().root(self); + + let search_paths = SearchPathSettings::new(vec![root.to_path_buf()]) + .to_search_paths(self.system(), self.vendored()) + .expect("Valid search path settings"); + + Program::from_settings( + self, + ProgramSettings { + python_version: PythonVersionWithSource::default(), + python_platform: PythonPlatform::default(), + search_paths, + }, + ); + + self.files().try_add_root(self, root, FileRootKind::Project); + + Ok(()) + } } impl TestDb { diff --git a/crates/ty_project/src/lib.rs b/crates/ty_project/src/lib.rs index d47476c7dc..fe034b0a07 100644 --- a/crates/ty_project/src/lib.rs +++ b/crates/ty_project/src/lib.rs @@ -124,12 +124,12 @@ pub trait ProgressReporter: Send + Sync { fn set_files(&mut self, files: usize); /// Report the completion of checking a given file along with its diagnostics. 
- fn report_checked_file(&self, db: &dyn Db, file: File, diagnostics: &[Diagnostic]); + fn report_checked_file(&self, db: &ProjectDatabase, file: File, diagnostics: &[Diagnostic]); /// Reports settings or IO related diagnostics. The diagnostics /// can belong to different files or no file at all. /// But it's never a file for which [`Self::report_checked_file`] gets called. - fn report_diagnostics(&mut self, db: &dyn Db, diagnostics: Vec); + fn report_diagnostics(&mut self, db: &ProjectDatabase, diagnostics: Vec); } /// Reporter that collects all diagnostics into a `Vec`. @@ -149,7 +149,7 @@ impl CollectReporter { impl ProgressReporter for CollectReporter { fn set_files(&mut self, _files: usize) {} - fn report_checked_file(&self, _db: &dyn Db, _file: File, diagnostics: &[Diagnostic]) { + fn report_checked_file(&self, _db: &ProjectDatabase, _file: File, diagnostics: &[Diagnostic]) { if diagnostics.is_empty() { return; } @@ -160,7 +160,7 @@ impl ProgressReporter for CollectReporter { .extend(diagnostics.iter().map(Clone::clone)); } - fn report_diagnostics(&mut self, _db: &dyn Db, diagnostics: Vec) { + fn report_diagnostics(&mut self, _db: &ProjectDatabase, diagnostics: Vec) { self.0.get_mut().unwrap().extend(diagnostics); } } @@ -751,34 +751,20 @@ mod tests { use crate::ProjectMetadata; use crate::check_file_impl; use crate::db::tests::TestDb; - use ruff_db::Db as _; use ruff_db::files::system_path_to_file; use ruff_db::source::source_text; use ruff_db::system::{DbWithTestSystem, DbWithWritableSystem as _, SystemPath, SystemPathBuf}; use ruff_db::testing::assert_function_query_was_not_run; use ruff_python_ast::name::Name; use ty_python_semantic::types::check_types; - use ty_python_semantic::{ - Program, ProgramSettings, PythonPlatform, PythonVersionWithSource, SearchPathSettings, - }; #[test] fn check_file_skips_type_checking_when_file_cant_be_read() -> ruff_db::system::Result<()> { let project = ProjectMetadata::new(Name::new_static("test"), 
SystemPathBuf::from("/")); let mut db = TestDb::new(project); + db.init_program().unwrap(); let path = SystemPath::new("test.py"); - Program::from_settings( - &db, - ProgramSettings { - python_version: PythonVersionWithSource::default(), - python_platform: PythonPlatform::default(), - search_paths: SearchPathSettings::new(vec![SystemPathBuf::from(".")]) - .to_search_paths(db.system(), db.vendored()) - .expect("Valid search path settings"), - }, - ); - db.write_file(path, "x = 10")?; let file = system_path_to_file(&db, path).unwrap(); diff --git a/crates/ty_project/src/metadata/options.rs b/crates/ty_project/src/metadata/options.rs index bcd821c53d..8fb75ab201 100644 --- a/crates/ty_project/src/metadata/options.rs +++ b/crates/ty_project/src/metadata/options.rs @@ -131,9 +131,7 @@ impl Options { ValueSource::File(path) => PythonVersionSource::ConfigFile( PythonVersionFileSource::new(path.clone(), ranged_version.range()), ), - ValueSource::PythonVSCodeExtension => { - PythonVersionSource::PythonVSCodeExtension - } + ValueSource::Editor => PythonVersionSource::Editor, }, }); @@ -153,7 +151,7 @@ impl Options { ValueSource::File(path) => { SysPrefixPathOrigin::ConfigFileSetting(path.clone(), python_path.range()) } - ValueSource::PythonVSCodeExtension => SysPrefixPathOrigin::PythonVSCodeExtension, + ValueSource::Editor => SysPrefixPathOrigin::Editor, }; Some(PythonEnvironment::new( @@ -166,14 +164,24 @@ impl Options { .context("Failed to discover local Python environment")? }; - let site_packages_paths = if let Some(python_environment) = python_environment.as_ref() { + let self_site_packages = self_environment_search_paths( python_environment - .site_packages_paths(system) - .context("Failed to discover the site-packages directory")? 
+ .as_ref() + .map(ty_python_semantic::PythonEnvironment::origin) + .cloned(), + system, + ) + .unwrap_or_default(); + + let site_packages_paths = if let Some(python_environment) = python_environment.as_ref() { + self_site_packages.concatenate( + python_environment + .site_packages_paths(system) + .context("Failed to discover the site-packages directory")?, + ) } else { tracing::debug!("No virtual environment found"); - - SitePackagesPaths::default() + self_site_packages }; let real_stdlib_path = python_environment.as_ref().and_then(|python_environment| { @@ -463,6 +471,42 @@ impl Options { } } +/// Return the site-packages from the environment ty is installed in, as derived from ty's +/// executable. +/// +/// If there's an existing environment with an origin that does not allow including site-packages +/// from ty's environment, discovery of ty's environment is skipped and [`None`] is returned. +/// +/// Since ty may be executed from an arbitrary non-Python location, errors during discovery of ty's +/// environment are not raised, instead [`None`] is returned. 
+fn self_environment_search_paths( + existing_origin: Option, + system: &dyn System, +) -> Option { + if existing_origin.is_some_and(|origin| !origin.allows_concatenation_with_self_environment()) { + return None; + } + + let Ok(exe_path) = std::env::current_exe() else { + return None; + }; + let ty_path = SystemPath::from_std_path(exe_path.as_path())?; + + let environment = PythonEnvironment::new(ty_path, SysPrefixPathOrigin::SelfEnvironment, system) + .inspect_err(|err| tracing::debug!("Failed to discover ty's environment: {err}")) + .ok()?; + + let search_paths = environment + .site_packages_paths(system) + .inspect_err(|err| { + tracing::debug!("Failed to discover site-packages in ty's environment: {err}"); + }) + .ok(); + + tracing::debug!("Using site-packages from ty's environment"); + search_paths +} + #[derive( Debug, Default, @@ -819,8 +863,8 @@ impl Rules { ValueSource::File(_) => LintSource::File, ValueSource::Cli => LintSource::Cli, - ValueSource::PythonVSCodeExtension => { - unreachable!("Can't configure rules from the Python VSCode extension") + ValueSource::Editor => { + unreachable!("Can't configure rules from the user's editor") } }; if let Ok(severity) = Severity::try_from(**level) { @@ -957,7 +1001,7 @@ fn build_include_filter( SubDiagnosticSeverity::Info, "The pattern was specified on the CLI", )), - ValueSource::PythonVSCodeExtension => unreachable!("Can't configure includes from the Python VSCode extension"), + ValueSource::Editor => unreachable!("Can't configure includes from the user's editor"), } })?; } @@ -1040,8 +1084,8 @@ fn build_exclude_filter( SubDiagnosticSeverity::Info, "The pattern was specified on the CLI", )), - ValueSource::PythonVSCodeExtension => unreachable!( - "Can't configure excludes from the Python VSCode extension" + ValueSource::Editor => unreachable!( + "Can't configure excludes from the user's editor" ) } })?; @@ -1142,6 +1186,16 @@ impl From for DiagnosticFormat { } } +impl Combine for OutputFormat { + 
#[inline(always)] + fn combine_with(&mut self, _other: Self) {} + + #[inline] + fn combine(self, _other: Self) -> Self { + self + } +} + #[derive( Debug, Default, diff --git a/crates/ty_project/src/metadata/value.rs b/crates/ty_project/src/metadata/value.rs index f1f08d718a..c69b5df51e 100644 --- a/crates/ty_project/src/metadata/value.rs +++ b/crates/ty_project/src/metadata/value.rs @@ -28,8 +28,11 @@ pub enum ValueSource { /// long argument (`--extra-paths`) or `--config key=value`. Cli, - /// The value comes from an LSP client configuration. - PythonVSCodeExtension, + /// The value comes from the user's editor, + /// while it's left open if specified as a setting + /// or if the value was auto-discovered by the editor + /// (e.g., the Python environment) + Editor, } impl ValueSource { @@ -37,7 +40,7 @@ impl ValueSource { match self { ValueSource::File(path) => Some(&**path), ValueSource::Cli => None, - ValueSource::PythonVSCodeExtension => None, + ValueSource::Editor => None, } } @@ -137,11 +140,7 @@ impl RangedValue { } pub fn python_extension(value: T) -> Self { - Self::with_range( - value, - ValueSource::PythonVSCodeExtension, - TextRange::default(), - ) + Self::with_range(value, ValueSource::Editor, TextRange::default()) } pub fn with_range(value: T, source: ValueSource, range: TextRange) -> Self { @@ -180,14 +179,13 @@ impl RangedValue { } } -impl Combine for RangedValue { - fn combine(self, _other: Self) -> Self - where - Self: Sized, - { - self +impl Combine for RangedValue +where + T: Combine, +{ + fn combine_with(&mut self, other: Self) { + self.value.combine_with(other.value); } - fn combine_with(&mut self, _other: Self) {} } impl IntoIterator for RangedValue @@ -368,7 +366,7 @@ impl RelativePathBuf { } pub fn python_extension(path: impl AsRef) -> Self { - Self::new(path, ValueSource::PythonVSCodeExtension) + Self::new(path, ValueSource::Editor) } /// Returns the relative path as specified by the user. 
@@ -398,7 +396,7 @@ impl RelativePathBuf { pub fn absolute(&self, project_root: &SystemPath, system: &dyn System) -> SystemPathBuf { let relative_to = match &self.0.source { ValueSource::File(_) => project_root, - ValueSource::Cli | ValueSource::PythonVSCodeExtension => system.current_directory(), + ValueSource::Cli | ValueSource::Editor => system.current_directory(), }; SystemPath::absolute(&self.0, relative_to) @@ -454,7 +452,7 @@ impl RelativeGlobPattern { ) -> Result { let relative_to = match &self.0.source { ValueSource::File(_) => project_root, - ValueSource::Cli | ValueSource::PythonVSCodeExtension => system.current_directory(), + ValueSource::Cli | ValueSource::Editor => system.current_directory(), }; let pattern = PortableGlobPattern::parse(&self.0, kind)?; diff --git a/crates/ty_python_semantic/resources/corpus/88_regression_pr_20962.py b/crates/ty_python_semantic/resources/corpus/88_regression_pr_20962.py new file mode 100644 index 0000000000..d0b9f706ce --- /dev/null +++ b/crates/ty_python_semantic/resources/corpus/88_regression_pr_20962.py @@ -0,0 +1,18 @@ +name_1 +{0: 0 for unique_name_0 in unique_name_1 if name_1} + + +@[name_2 for unique_name_2 in name_2] +def name_2(): + pass + + +def name_2(): + pass + + +match 0: + case name_2(): + pass + case []: + name_1 = 0 diff --git a/crates/ty_python_semantic/resources/corpus/cyclic_comprehensions.py b/crates/ty_python_semantic/resources/corpus/cyclic_comprehensions.py new file mode 100644 index 0000000000..28ba9d9091 --- /dev/null +++ b/crates/ty_python_semantic/resources/corpus/cyclic_comprehensions.py @@ -0,0 +1,10 @@ +# Regression test for https://github.com/astral-sh/ruff/pull/20962 +# error message: +# `infer_definition_types(Id(1804)): execute: too many cycle iterations` + +for name_1 in { + {{0: name_4 for unique_name_0 in unique_name_1}: 0 for unique_name_2 in unique_name_3 if name_4}: 0 + for unique_name_4 in name_1 + for name_4 in name_1 +}: + pass diff --git 
a/crates/ty_python_semantic/resources/corpus/cyclic_symbol_in_comprehension.py b/crates/ty_python_semantic/resources/corpus/cyclic_symbol_in_comprehension.py new file mode 100644 index 0000000000..b7ba910e1e --- /dev/null +++ b/crates/ty_python_semantic/resources/corpus/cyclic_symbol_in_comprehension.py @@ -0,0 +1,30 @@ +# Regression test for https://github.com/astral-sh/ruff/pull/20962 +# error message: +# `place_by_id: execute: too many cycle iterations` + +name_5(name_3) +[0 for unique_name_0 in unique_name_1 for unique_name_2 in name_3] + +@{name_3 for unique_name_3 in unique_name_4} +class name_4[**name_3](0, name_2=name_5): + pass + +try: + name_0 = name_4 +except* 0: + pass +else: + match unique_name_12: + case 0: + from name_2 import name_3 + case name_0(): + + @name_4 + def name_3(): + pass + +(name_3 := 0) + +@name_3 +async def name_5(): + pass diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/annotated.md b/crates/ty_python_semantic/resources/mdtest/annotations/annotated.md index 5089804119..7c0ba50f1a 100644 --- a/crates/ty_python_semantic/resources/mdtest/annotations/annotated.md +++ b/crates/ty_python_semantic/resources/mdtest/annotations/annotated.md @@ -76,8 +76,7 @@ from ty_extensions import reveal_mro class C(Annotated[int, "foo"]): ... -# TODO: Should be `(, , )` -reveal_mro(C) # revealed: (, @Todo(Inference of subscript on special form), ) +reveal_mro(C) # revealed: (, , ) ``` ### Not parameterized diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/any.md b/crates/ty_python_semantic/resources/mdtest/annotations/any.md index d6baf8cbf9..c2cc2d2461 100644 --- a/crates/ty_python_semantic/resources/mdtest/annotations/any.md +++ b/crates/ty_python_semantic/resources/mdtest/annotations/any.md @@ -118,7 +118,7 @@ def takes_other_protocol(f: OtherProtocol): ... 
takes_other_protocol(SubclassOfAny()) ``` -A subclass of `Any` cannot be assigned to literal types, since those can not be subclassed: +A subclass of `Any` cannot be assigned to literal types, since those cannot be subclassed: ```py from typing import Any, Literal diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/callable.md b/crates/ty_python_semantic/resources/mdtest/annotations/callable.md index 780b2a87db..e7e55f7a44 100644 --- a/crates/ty_python_semantic/resources/mdtest/annotations/callable.md +++ b/crates/ty_python_semantic/resources/mdtest/annotations/callable.md @@ -307,8 +307,9 @@ Using a `ParamSpec` in a `Callable` annotation: from typing_extensions import Callable def _[**P1](c: Callable[P1, int]): - reveal_type(P1.args) # revealed: @Todo(ParamSpec) - reveal_type(P1.kwargs) # revealed: @Todo(ParamSpec) + # TODO: Should reveal `ParamSpecArgs` and `ParamSpecKwargs` + reveal_type(P1.args) # revealed: @Todo(ParamSpecArgs / ParamSpecKwargs) + reveal_type(P1.kwargs) # revealed: @Todo(ParamSpecArgs / ParamSpecKwargs) # TODO: Signature should be (**P1) -> int reveal_type(c) # revealed: (...) 
-> int diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/deferred.md b/crates/ty_python_semantic/resources/mdtest/annotations/deferred.md index 8db8d90409..89b9324ea4 100644 --- a/crates/ty_python_semantic/resources/mdtest/annotations/deferred.md +++ b/crates/ty_python_semantic/resources/mdtest/annotations/deferred.md @@ -87,9 +87,23 @@ class Foo: class Baz[T: Foo]: pass + # error: [unresolved-reference] "Name `Foo` used when not defined" + # error: [unresolved-reference] "Name `Bar` used when not defined" + class Qux(Foo, Bar, Baz): + pass + + # error: [unresolved-reference] "Name `Foo` used when not defined" + # error: [unresolved-reference] "Name `Bar` used when not defined" + class Quux[_T](Foo, Bar, Baz): + pass + # error: [unresolved-reference] type S = a type T = b + type U = Foo + # error: [unresolved-reference] + type V = Bar + type W = Baz def h[T: Bar](): # error: [unresolved-reference] @@ -141,9 +155,23 @@ class Foo: class Baz[T: Foo]: pass + # error: [unresolved-reference] "Name `Foo` used when not defined" + # error: [unresolved-reference] "Name `Bar` used when not defined" + class Qux(Foo, Bar, Baz): + pass + + # error: [unresolved-reference] "Name `Foo` used when not defined" + # error: [unresolved-reference] "Name `Bar` used when not defined" + class Quux[_T](Foo, Bar, Baz): + pass + # error: [unresolved-reference] type S = a type T = b + type U = Foo + # error: [unresolved-reference] + type V = Bar + type W = Baz def h[T: Bar](): # error: [unresolved-reference] diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/literal.md b/crates/ty_python_semantic/resources/mdtest/annotations/literal.md index 3bd9e54c85..0c6a443afa 100644 --- a/crates/ty_python_semantic/resources/mdtest/annotations/literal.md +++ b/crates/ty_python_semantic/resources/mdtest/annotations/literal.md @@ -39,6 +39,8 @@ def f(): reveal_type(a7) # revealed: None reveal_type(a8) # revealed: Literal[1] reveal_type(b1) # revealed: Literal[Color.RED] + # 
TODO should be `Literal[MissingT.MISSING]` + reveal_type(b2) # revealed: @Todo(functional `Enum` syntax) # error: [invalid-type-form] invalid1: Literal[3 + 4] @@ -66,6 +68,188 @@ a_list: list[int] = [1, 2, 3] invalid6: Literal[a_list[0]] ``` +## Parameterizing with a type alias + +`typing.Literal` can also be parameterized with a type alias for any literal type or union of +literal types. + +### PEP 695 type alias + +```toml +[environment] +python-version = "3.12" +``` + +```py +from typing import Literal +from enum import Enum + +import mod + +class E(Enum): + A = 1 + B = 2 + +type SingleInt = Literal[1] +type SingleStr = Literal["foo"] +type SingleBytes = Literal[b"bar"] +type SingleBool = Literal[True] +type SingleNone = Literal[None] +type SingleEnum = Literal[E.A] +type UnionLiterals = Literal[1, "foo", b"bar", True, None, E.A] +# We support this because it is an equivalent type to the following union of literals, but maybe +# we should not, because it doesn't use `Literal` form? Other type checkers do not. +type AnEnum1 = E +type AnEnum2 = Literal[E.A, E.B] +# Similarly, we support this because it is equivalent to `Literal[True, False]`. 
+type Bool1 = bool +type Bool2 = Literal[True, False] + +def _( + single_int: Literal[SingleInt], + single_str: Literal[SingleStr], + single_bytes: Literal[SingleBytes], + single_bool: Literal[SingleBool], + single_none: Literal[SingleNone], + single_enum: Literal[SingleEnum], + union_literals: Literal[UnionLiterals], + an_enum1: Literal[AnEnum1], + an_enum2: Literal[AnEnum2], + bool1: Literal[Bool1], + bool2: Literal[Bool2], + multiple: Literal[SingleInt, SingleStr, SingleEnum], + single_int_other_module: Literal[mod.SingleInt], +): + reveal_type(single_int) # revealed: Literal[1] + reveal_type(single_str) # revealed: Literal["foo"] + reveal_type(single_bytes) # revealed: Literal[b"bar"] + reveal_type(single_bool) # revealed: Literal[True] + reveal_type(single_none) # revealed: None + reveal_type(single_enum) # revealed: Literal[E.A] + reveal_type(union_literals) # revealed: Literal[1, "foo", b"bar", True, E.A] | None + reveal_type(an_enum1) # revealed: E + reveal_type(an_enum2) # revealed: E + reveal_type(bool1) # revealed: bool + reveal_type(bool2) # revealed: bool + reveal_type(multiple) # revealed: Literal[1, "foo", E.A] + reveal_type(single_int_other_module) # revealed: Literal[2] +``` + +`mod.py`: + +```py +from typing import Literal + +type SingleInt = Literal[2] +``` + +### PEP 613 type alias + +```py +from typing import Literal, TypeAlias +from enum import Enum + +class E(Enum): + A = 1 + B = 2 + +SingleInt: TypeAlias = Literal[1] +SingleStr: TypeAlias = Literal["foo"] +SingleBytes: TypeAlias = Literal[b"bar"] +SingleBool: TypeAlias = Literal[True] +SingleNone: TypeAlias = Literal[None] +SingleEnum: TypeAlias = Literal[E.A] +UnionLiterals: TypeAlias = Literal[1, "foo", b"bar", True, None, E.A] +AnEnum1: TypeAlias = E +AnEnum2: TypeAlias = Literal[E.A, E.B] +Bool1: TypeAlias = bool +Bool2: TypeAlias = Literal[True, False] + +def _( + single_int: Literal[SingleInt], + single_str: Literal[SingleStr], + single_bytes: Literal[SingleBytes], + single_bool: 
Literal[SingleBool], + single_none: Literal[SingleNone], + single_enum: Literal[SingleEnum], + union_literals: Literal[UnionLiterals], + # Could also not error + an_enum1: Literal[AnEnum1], # error: [invalid-type-form] + an_enum2: Literal[AnEnum2], + # Could also not error + bool1: Literal[Bool1], # error: [invalid-type-form] + bool2: Literal[Bool2], + multiple: Literal[SingleInt, SingleStr, SingleEnum], +): + reveal_type(single_int) # revealed: Literal[1] + reveal_type(single_str) # revealed: Literal["foo"] + reveal_type(single_bytes) # revealed: Literal[b"bar"] + reveal_type(single_bool) # revealed: Literal[True] + reveal_type(single_none) # revealed: None + reveal_type(single_enum) # revealed: Literal[E.A] + reveal_type(union_literals) # revealed: Literal[1, "foo", b"bar", True, E.A] | None + # Could also be `E` + reveal_type(an_enum1) # revealed: Unknown + reveal_type(an_enum2) # revealed: E + # Could also be `bool` + reveal_type(bool1) # revealed: Unknown + reveal_type(bool2) # revealed: bool + reveal_type(multiple) # revealed: Literal[1, "foo", E.A] +``` + +### Implicit type alias + +```py +from typing import Literal +from enum import Enum + +class E(Enum): + A = 1 + B = 2 + +SingleInt = Literal[1] +SingleStr = Literal["foo"] +SingleBytes = Literal[b"bar"] +SingleBool = Literal[True] +SingleNone = Literal[None] +SingleEnum = Literal[E.A] +UnionLiterals = Literal[1, "foo", b"bar", True, None, E.A] +# For implicit type aliases, we may not want to support this. It's simpler not to, and no other +# type checker does. +AnEnum1 = E +AnEnum2 = Literal[E.A, E.B] +# For implicit type aliases, we may not want to support this. 
+Bool1 = bool +Bool2 = Literal[True, False] + +def _( + single_int: Literal[SingleInt], + single_str: Literal[SingleStr], + single_bytes: Literal[SingleBytes], + single_bool: Literal[SingleBool], + single_none: Literal[SingleNone], + single_enum: Literal[SingleEnum], + union_literals: Literal[UnionLiterals], + an_enum1: Literal[AnEnum1], # error: [invalid-type-form] + an_enum2: Literal[AnEnum2], + bool1: Literal[Bool1], # error: [invalid-type-form] + bool2: Literal[Bool2], + multiple: Literal[SingleInt, SingleStr, SingleEnum], +): + reveal_type(single_int) # revealed: Literal[1] + reveal_type(single_str) # revealed: Literal["foo"] + reveal_type(single_bytes) # revealed: Literal[b"bar"] + reveal_type(single_bool) # revealed: Literal[True] + reveal_type(single_none) # revealed: None + reveal_type(single_enum) # revealed: Literal[E.A] + reveal_type(union_literals) # revealed: Literal[1, "foo", b"bar", True, E.A] | None + reveal_type(an_enum1) # revealed: Unknown + reveal_type(an_enum2) # revealed: E + reveal_type(bool1) # revealed: Unknown + reveal_type(bool2) # revealed: bool + reveal_type(multiple) # revealed: Literal[1, "foo", E.A] +``` + ## Shortening unions of literals When a Literal is parameterized with more than one value, it’s treated as exactly to equivalent to diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/new_types.md b/crates/ty_python_semantic/resources/mdtest/annotations/new_types.md index 5dc14964cc..7a6e47ed32 100644 --- a/crates/ty_python_semantic/resources/mdtest/annotations/new_types.md +++ b/crates/ty_python_semantic/resources/mdtest/annotations/new_types.md @@ -1,7 +1,5 @@ # NewType -Currently, ty doesn't support `typing.NewType` in type annotations. - ## Valid forms ```py @@ -12,13 +10,389 @@ X = GenericAlias(type, ()) A = NewType("A", int) # TODO: typeshed for `typing.GenericAlias` uses `type` for the first argument. 
`NewType` should be special-cased # to be compatible with `type` -# error: [invalid-argument-type] "Argument to function `__new__` is incorrect: Expected `type`, found `NewType`" +# error: [invalid-argument-type] "Argument to function `__new__` is incorrect: Expected `type`, found ``" B = GenericAlias(A, ()) def _( a: A, b: B, ): - reveal_type(a) # revealed: @Todo(Support for `typing.NewType` instances in type expressions) + reveal_type(a) # revealed: A reveal_type(b) # revealed: @Todo(Support for `typing.GenericAlias` instances in type expressions) ``` + +## Subtyping + +The basic purpose of `NewType` is that it acts like a subtype of its base, but not the exact same +type (i.e. not an alias). + +```py +from typing_extensions import NewType +from ty_extensions import static_assert, is_subtype_of, is_equivalent_to + +Foo = NewType("Foo", int) +Bar = NewType("Bar", Foo) + +static_assert(is_subtype_of(Foo, int)) +static_assert(not is_equivalent_to(Foo, int)) + +static_assert(is_subtype_of(Bar, Foo)) +static_assert(is_subtype_of(Bar, int)) +static_assert(not is_equivalent_to(Bar, Foo)) + +Foo(42) +Foo(Foo(42)) # allowed: `Foo` is a subtype of `int`. +Foo(Bar(Foo(42))) # allowed: `Bar` is a subtype of `int`. +Foo(True) # allowed: `bool` is a subtype of `int`. +Foo("forty-two") # error: [invalid-argument-type] "Argument is incorrect: Expected `int`, found `Literal["forty-two"]`" + +def f(_: int): ... +def g(_: Foo): ... +def h(_: Bar): ... 
+ +f(42) +f(Foo(42)) +f(Bar(Foo(42))) + +g(42) # error: [invalid-argument-type] "Argument to function `g` is incorrect: Expected `Foo`, found `Literal[42]`" +g(Foo(42)) +g(Bar(Foo(42))) + +h(42) # error: [invalid-argument-type] "Argument to function `h` is incorrect: Expected `Bar`, found `Literal[42]`" +h(Foo(42)) # error: [invalid-argument-type] "Argument to function `h` is incorrect: Expected `Bar`, found `Foo`" +h(Bar(Foo(42))) +``` + +## Member and method lookup work + +```py +from typing_extensions import NewType + +class Foo: + foo_member: str = "hello" + def foo_method(self) -> int: + return 42 + +Bar = NewType("Bar", Foo) +Baz = NewType("Baz", Bar) +baz = Baz(Bar(Foo())) +reveal_type(baz.foo_member) # revealed: str +reveal_type(baz.foo_method()) # revealed: int +``` + +We also infer member access on the `NewType` pseudo-type itself correctly: + +```py +reveal_type(Bar.__supertype__) # revealed: type | NewType +reveal_type(Baz.__supertype__) # revealed: type | NewType +``` + +## `NewType` wrapper functions are `Callable` + +```py +from collections.abc import Callable +from typing_extensions import NewType +from ty_extensions import CallableTypeOf + +Foo = NewType("Foo", int) + +def _(obj: CallableTypeOf[Foo]): + reveal_type(obj) # revealed: (int, /) -> Foo + +def f(_: Callable[[int], Foo]): ... + +f(Foo) +map(Foo, [1, 2, 3]) + +def g(_: Callable[[str], Foo]): ... + +g(Foo) # error: [invalid-argument-type] +``` + +## `NewType` instances are `Callable` if the base type is + +```py +from typing import NewType, Callable, Any +from ty_extensions import CallableTypeOf + +N = NewType("N", int) +i = N(42) + +y: Callable[..., Any] = i # error: [invalid-assignment] "Object of type `N` is not assignable to `(...) 
-> Any`" + +# error: [invalid-type-form] "Expected the first argument to `ty_extensions.CallableTypeOf` to be a callable object, but got an object of type `N`" +def f(x: CallableTypeOf[i]): + reveal_type(x) # revealed: Unknown + +class SomethingCallable: + def __call__(self, a: str) -> bytes: + raise NotImplementedError + +N2 = NewType("N2", SomethingCallable) +j = N2(SomethingCallable()) + +z: Callable[[str], bytes] = j # fine + +def g(x: CallableTypeOf[j]): + reveal_type(x) # revealed: (a: str) -> bytes +``` + +## The name must be a string literal + +```py +from typing_extensions import NewType + +def _(name: str) -> None: + _ = NewType(name, int) # error: [invalid-newtype] "The first argument to `NewType` must be a string literal" +``` + +However, the literal doesn't necessarily need to be inline, as long as we infer it: + +```py +name = "Foo" +Foo = NewType(name, int) +reveal_type(Foo) # revealed: +``` + +## The second argument must be a class type or another newtype + +Other typing constructs like `Union` are not allowed. + +```py +from typing_extensions import NewType + +# error: [invalid-newtype] "invalid base for `typing.NewType`" +Foo = NewType("Foo", int | str) +``` + +We don't emit the "invalid base" diagnostic for `Unknown`, because that typically results from other +errors that already have a diagnostic, and there's no need to pile on. 
For example, this mistake +gives you an "Int literals are not allowed" error, and we'd rather not see an "invalid base" error +on top of that: + +```py +# error: [invalid-type-form] "Int literals are not allowed in this context in a type expression" +Foo = NewType("Foo", 42) +``` + +## A `NewType` definition must be a simple variable assignment + +```py +from typing import NewType + +N: NewType = NewType("N", int) # error: [invalid-newtype] "A `NewType` definition must be a simple variable assignment" +``` + +## Newtypes can be cyclic in various ways + +Cyclic newtypes are kind of silly, but it's possible for the user to express them, and it's +important that we don't go into infinite recursive loops and crash with a stack overflow. In fact, +this is *why* base type evaluation is deferred; otherwise Salsa itself would crash. + +```py +from typing_extensions import NewType, reveal_type, cast + +# Define a directly cyclic newtype. +A = NewType("A", "A") +reveal_type(A) # revealed: + +# Typechecking still works. We can't construct an `A` "honestly", but we can `cast` into one. +a: A +a = 42 # error: [invalid-assignment] "Object of type `Literal[42]` is not assignable to `A`" +a = A(42) # error: [invalid-argument-type] "Argument is incorrect: Expected `A`, found `Literal[42]`" +a = cast(A, 42) +reveal_type(a) # revealed: A + +# A newtype cycle might involve more than one step. +B = NewType("B", "C") +C = NewType("C", "B") +reveal_type(B) # revealed: +reveal_type(C) # revealed: +b: B = cast(B, 42) +c: C = C(b) +reveal_type(b) # revealed: B +reveal_type(c) # revealed: C +# Cyclic types behave in surprising ways. These assignments are legal, even though B and C aren't +# the same type, because each of them is a subtype of the other. +b = c +c = b + +# Another newtype could inherit from a cyclic one. 
+D = NewType("D", C) +reveal_type(D) # revealed: +d: D +d = D(42) # error: [invalid-argument-type] "Argument is incorrect: Expected `C`, found `Literal[42]`" +d = D(c) +d = D(b) # Allowed, the same surprise as above. B and C are subtypes of each other. +reveal_type(d) # revealed: D +``` + +Normal classes can't inherit from newtypes, but generic classes can be parametrized with them, so we +also need to detect "ordinary" type cycles that happen to involve a newtype. + +```py +E = NewType("E", list["E"]) +reveal_type(E) # revealed: +e: E = E([]) +reveal_type(e) # revealed: E +reveal_type(E(E(E(E(E([])))))) # revealed: E +reveal_type(E([E([E([]), E([E([])])]), E([])])) # revealed: E +E(["foo"]) # error: [invalid-argument-type] +E(E(E(["foo"]))) # error: [invalid-argument-type] +``` + +## `NewType` wrapping preserves singleton-ness and single-valued-ness + +```py +from typing_extensions import NewType +from ty_extensions import is_singleton, is_single_valued, static_assert +from types import EllipsisType + +A = NewType("A", EllipsisType) +static_assert(is_singleton(A)) +static_assert(is_single_valued(A)) +reveal_type(type(A(...)) is EllipsisType) # revealed: Literal[True] +# TODO: This should be `Literal[True]` also. +reveal_type(A(...) is ...) 
# revealed: bool + +B = NewType("B", int) +static_assert(not is_singleton(B)) +static_assert(not is_single_valued(B)) +``` + +## `NewType`s of tuples can be iterated/unpacked + +```py +from typing import NewType + +N = NewType("N", tuple[int, str]) + +a, b = N((1, "foo")) + +reveal_type(a) # revealed: int +reveal_type(b) # revealed: str +``` + +## `isinstance` of a `NewType` instance and its base class is inferred as `Literal[True]` + +```py +from typing import NewType + +N = NewType("N", int) + +def f(x: N): + reveal_type(isinstance(x, int)) # revealed: Literal[True] +``` + +However, a `NewType` isn't a real class, so it isn't a valid second argument to `isinstance`: + +```py +def f(x: N): + # error: [invalid-argument-type] "Argument to function `isinstance` is incorrect" + reveal_type(isinstance(x, N)) # revealed: bool +``` + +Because of that, we don't generate any narrowing constraints for it: + +```py +def f(x: N | str): + if isinstance(x, N): # error: [invalid-argument-type] + reveal_type(x) # revealed: N | str + else: + reveal_type(x) # revealed: N | str +``` + +## Trying to subclass a `NewType` produces an error matching CPython + + + +```py +from typing import NewType + +X = NewType("X", int) + +class Foo(X): ... # error: [invalid-base] +``` + +## Don't narrow `NewType`-wrapped `Enum`s inside of match arms + +`Literal[Foo.X]` is actually disjoint from `N` here: + +```py +from enum import Enum +from typing import NewType + +class Foo(Enum): + X = 0 + Y = 1 + +N = NewType("N", Foo) + +def f(x: N): + match x: + case Foo.X: + reveal_type(x) # revealed: N + case Foo.Y: + reveal_type(x) # revealed: N + case _: + reveal_type(x) # revealed: N +``` + +## We don't support `NewType` on Python 3.9 + +We implement `typing.NewType` as a `KnownClass`, but in Python 3.9 it's actually a function, so all +we get is the `Any` annotations from typeshed. However, `typing_extensions.NewType` is always a +class. 
This could be improved in the future, but Python 3.9 is now end-of-life, so it's not +high-priority. + +```toml +[environment] +python-version = "3.9" +``` + +```py +from typing import NewType + +Foo = NewType("Foo", int) +reveal_type(Foo) # revealed: Any +reveal_type(Foo(42)) # revealed: Any + +from typing_extensions import NewType + +Bar = NewType("Bar", int) +reveal_type(Bar) # revealed: +reveal_type(Bar(42)) # revealed: Bar +``` + +## The base of a `NewType` can't be a protocol class or a `TypedDict` + + + +```py +from typing import NewType, Protocol, TypedDict + +class Id(Protocol): + code: int + +UserId = NewType("UserId", Id) # error: [invalid-newtype] + +class Foo(TypedDict): + a: int + +Bar = NewType("Bar", Foo) # error: [invalid-newtype] +``` + +## TODO: A `NewType` cannot be generic + +```py +from typing import Any, NewType, TypeVar + +# All of these are allowed. +A = NewType("A", list) +B = NewType("B", list[int]) +B = NewType("B", list[Any]) + +# But a free typevar is not allowed. +T = TypeVar("T") +C = NewType("C", list[T]) # TODO: should be "error: [invalid-newtype]" +``` diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/self.md b/crates/ty_python_semantic/resources/mdtest/annotations/self.md index b635104a75..4d794fe6c4 100644 --- a/crates/ty_python_semantic/resources/mdtest/annotations/self.md +++ b/crates/ty_python_semantic/resources/mdtest/annotations/self.md @@ -139,7 +139,7 @@ The first parameter of instance methods always has type `Self`, if it is not exp The name `self` is not special in any way. 
```py -def some_decorator(f: Callable) -> Callable: +def some_decorator[**P, R](f: Callable[P, R]) -> Callable[P, R]: return f class B: @@ -188,10 +188,10 @@ class B: reveal_type(B().name_does_not_matter()) # revealed: B reveal_type(B().positional_only(1)) # revealed: B reveal_type(B().keyword_only(x=1)) # revealed: B +# TODO: This should deally be `B` reveal_type(B().decorated_method()) # revealed: Unknown -# TODO: this should be B -reveal_type(B().a_property) # revealed: Unknown +reveal_type(B().a_property) # revealed: B async def _(): reveal_type(await B().async_method()) # revealed: B diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/union.md b/crates/ty_python_semantic/resources/mdtest/annotations/union.md index 776d077e27..8313d7142a 100644 --- a/crates/ty_python_semantic/resources/mdtest/annotations/union.md +++ b/crates/ty_python_semantic/resources/mdtest/annotations/union.md @@ -72,9 +72,6 @@ def f(x: Union) -> None: ## Implicit type aliases using new-style unions -We don't recognize these as type aliases yet, but we also don't emit false-positive diagnostics if -you use them in type expressions: - ```toml [environment] python-version = "3.10" @@ -84,5 +81,5 @@ python-version = "3.10" X = int | str def f(y: X): - reveal_type(y) # revealed: @Todo(Support for `types.UnionType` instances in type expressions) + reveal_type(y) # revealed: int | str ``` diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/unsupported_special_forms.md b/crates/ty_python_semantic/resources/mdtest/annotations/unsupported_special_forms.md index c61a94a8d6..c5d737d9eb 100644 --- a/crates/ty_python_semantic/resources/mdtest/annotations/unsupported_special_forms.md +++ b/crates/ty_python_semantic/resources/mdtest/annotations/unsupported_special_forms.md @@ -21,8 +21,9 @@ def f(*args: Unpack[Ts]) -> tuple[Unpack[Ts]]: def g() -> TypeGuard[int]: ... 
def i(callback: Callable[Concatenate[int, P], R_co], *args: P.args, **kwargs: P.kwargs) -> R_co: - reveal_type(args) # revealed: tuple[@Todo(Support for `typing.ParamSpec`), ...] - reveal_type(kwargs) # revealed: dict[str, @Todo(Support for `typing.ParamSpec`)] + # TODO: Should reveal a type representing `P.args` and `P.kwargs` + reveal_type(args) # revealed: tuple[@Todo(ParamSpecArgs / ParamSpecKwargs), ...] + reveal_type(kwargs) # revealed: dict[str, @Todo(ParamSpecArgs / ParamSpecKwargs)] return callback(42, *args, **kwargs) class Foo: diff --git a/crates/ty_python_semantic/resources/mdtest/assignment/annotations.md b/crates/ty_python_semantic/resources/mdtest/assignment/annotations.md index adf0de358d..043380338b 100644 --- a/crates/ty_python_semantic/resources/mdtest/assignment/annotations.md +++ b/crates/ty_python_semantic/resources/mdtest/assignment/annotations.md @@ -259,7 +259,7 @@ class Color(Enum): RED = "red" f: dict[list[Literal[1]], list[Literal[Color.RED]]] = {[1]: [Color.RED, Color.RED]} -reveal_type(f) # revealed: dict[list[Literal[1]], list[Literal[Color.RED]]] +reveal_type(f) # revealed: dict[list[Literal[1]], list[Color]] class X[T]: def __init__(self, value: T): ... @@ -310,6 +310,65 @@ reveal_type(s) # revealed: list[Literal[1]] reveal_type(s) # revealed: list[Literal[1]] ``` +## Generic constructor annotations are understood + +```toml +[environment] +python-version = "3.12" +``` + +```py +from typing import Any + +class X[T]: + def __init__(self, value: T): + self.value = value + +a: X[int] = X(1) +reveal_type(a) # revealed: X[int] + +b: X[int | None] = X(1) +reveal_type(b) # revealed: X[int | None] + +c: X[int | None] | None = X(1) +reveal_type(c) # revealed: X[int | None] + +def _[T](a: X[T]): + b: X[T | int] = X(a.value) + reveal_type(b) # revealed: X[T@_ | int] + +d: X[Any] = X(1) +reveal_type(d) # revealed: X[Any] + +def _(flag: bool): + # TODO: Handle unions correctly. 
+ # error: [invalid-assignment] "Object of type `X[int]` is not assignable to `X[int | None]`" + a: X[int | None] = X(1) if flag else X(2) + reveal_type(a) # revealed: X[int | None] +``` + +```py +from dataclasses import dataclass + +@dataclass +class Y[T]: + value: T + +y1: Y[Any] = Y(value=1) +# TODO: This should reveal `Y[Any]`. +reveal_type(y1) # revealed: Y[int] +``` + +```py +class Z[T]: + def __new__(cls, value: T): + return super().__new__(cls) + +z1: Z[Any] = Z(1) +# TODO: This should reveal `Z[Any]`. +reveal_type(z1) # revealed: Z[int] +``` + ## PEP-604 annotations are supported ```py @@ -417,6 +476,8 @@ reveal_type(x) # revealed: Literal[1] python-version = "3.12" ``` +`generic_list.py`: + ```py from typing import Literal @@ -427,14 +488,13 @@ a = f("a") reveal_type(a) # revealed: list[Literal["a"]] b: list[int | Literal["a"]] = f("a") -reveal_type(b) # revealed: list[Literal["a"] | int] +reveal_type(b) # revealed: list[int | Literal["a"]] c: list[int | str] = f("a") -reveal_type(c) # revealed: list[str | int] +reveal_type(c) # revealed: list[int | str] d: list[int | tuple[int, int]] = f((1, 2)) -# TODO: We could avoid reordering the union elements here. 
-reveal_type(d) # revealed: list[tuple[int, int] | int] +reveal_type(d) # revealed: list[int | tuple[int, int]] e: list[int] = f(True) reveal_type(e) # revealed: list[int] @@ -455,10 +515,217 @@ j: int | str = f2(True) reveal_type(j) # revealed: Literal[True] ``` -Types are not widened unnecessarily: +A function's arguments are also inferred using the type context: + +`typed_dict.py`: ```py -def id[T](x: T) -> T: +from typing import TypedDict + +class TD(TypedDict): + x: int + +def f[T](x: list[T]) -> T: + return x[0] + +a: TD = f([{"x": 0}, {"x": 1}]) +reveal_type(a) # revealed: TD + +b: TD | None = f([{"x": 0}, {"x": 1}]) +reveal_type(b) # revealed: TD + +# error: [missing-typed-dict-key] "Missing required key 'x' in TypedDict `TD` constructor" +# error: [invalid-key] "Invalid key for TypedDict `TD`: Unknown key "y"" +# error: [invalid-assignment] "Object of type `Unknown | dict[Unknown | str, Unknown | int]` is not assignable to `TD`" +c: TD = f([{"y": 0}, {"x": 1}]) + +# error: [missing-typed-dict-key] "Missing required key 'x' in TypedDict `TD` constructor" +# error: [invalid-key] "Invalid key for TypedDict `TD`: Unknown key "y"" +# error: [invalid-assignment] "Object of type `Unknown | dict[Unknown | str, Unknown | int]` is not assignable to `TD | None`" +c: TD | None = f([{"y": 0}, {"x": 1}]) +``` + +But not in a way that leads to assignability errors: + +`dict_any.py`: + +```py +from typing import TypedDict, Any + +class TD(TypedDict, total=False): + x: str + +class TD2(TypedDict): + x: str + +def f(self, dt: dict[str, Any], key: str): + # TODO: This should not error once typed dict assignability is implemented. + # error: [invalid-assignment] + x1: TD = dt.get(key, {}) + reveal_type(x1) # revealed: TD + + x2: TD = dt.get(key, {"x": 0}) + reveal_type(x2) # revealed: Any + + x3: TD | None = dt.get(key, {}) + # TODO: This should reveal `Any` once typed dict assignability is implemented. 
+ reveal_type(x3) # revealed: Any | None + + x4: TD | None = dt.get(key, {"x": 0}) + reveal_type(x4) # revealed: Any + + x5: TD2 = dt.get(key, {}) + reveal_type(x5) # revealed: Any + + x6: TD2 = dt.get(key, {"x": 0}) + reveal_type(x6) # revealed: Any + + x7: TD2 | None = dt.get(key, {}) + reveal_type(x7) # revealed: Any + + x8: TD2 | None = dt.get(key, {"x": 0}) + reveal_type(x8) # revealed: Any +``` + +## Prefer the declared type of generic classes + +```toml +[environment] +python-version = "3.12" +``` + +```py +from typing import Any + +def f[T](x: T) -> list[T]: + return [x] + +def f2[T](x: T) -> list[T] | None: + return [x] + +def f3[T](x: T) -> list[T] | dict[T, T]: + return [x] + +a = f(1) +reveal_type(a) # revealed: list[Literal[1]] + +b: list[Any] = f(1) +reveal_type(b) # revealed: list[Any] + +c: list[Any] = [1] +reveal_type(c) # revealed: list[Any] + +d: list[Any] | None = f(1) +reveal_type(d) # revealed: list[Any] + +e: list[Any] | None = [1] +reveal_type(e) # revealed: list[Any] + +f: list[Any] | None = f2(1) +# TODO: Better constraint solver. +reveal_type(f) # revealed: list[Literal[1]] | None + +g: list[Any] | dict[Any, Any] = f3(1) +# TODO: Better constraint solver. 
+reveal_type(g) # revealed: list[Literal[1]] | dict[Literal[1], Literal[1]] +``` + +We currently prefer the generic declared type regardless of its variance: + +```py +class Bivariant[T]: + pass + +class Covariant[T]: + def pop(self) -> T: + raise NotImplementedError + +class Contravariant[T]: + def push(self, value: T) -> None: + pass + +class Invariant[T]: + x: T + +def bivariant[T](x: T) -> Bivariant[T]: + return Bivariant() + +def covariant[T](x: T) -> Covariant[T]: + return Covariant() + +def contravariant[T](x: T) -> Contravariant[T]: + return Contravariant() + +def invariant[T](x: T) -> Invariant[T]: + return Invariant() + +x1 = bivariant(1) +x2 = covariant(1) +x3 = contravariant(1) +x4 = invariant(1) + +reveal_type(x1) # revealed: Bivariant[Literal[1]] +reveal_type(x2) # revealed: Covariant[Literal[1]] +reveal_type(x3) # revealed: Contravariant[Literal[1]] +reveal_type(x4) # revealed: Invariant[Literal[1]] + +x5: Bivariant[Any] = bivariant(1) +x6: Covariant[Any] = covariant(1) +x7: Contravariant[Any] = contravariant(1) +x8: Invariant[Any] = invariant(1) + +reveal_type(x5) # revealed: Bivariant[Any] +reveal_type(x6) # revealed: Covariant[Any] +reveal_type(x7) # revealed: Contravariant[Any] +reveal_type(x8) # revealed: Invariant[Any] +``` + +## Narrow generic unions + +```toml +[environment] +python-version = "3.12" +``` + +```py +from typing import reveal_type, TypedDict + +def identity[T](x: T) -> T: + return x + +def _(narrow: dict[str, str], target: list[str] | dict[str, str] | None): + target = identity(narrow) + reveal_type(target) # revealed: dict[str, str] + +def _(narrow: list[str], target: list[str] | dict[str, str] | None): + target = identity(narrow) + reveal_type(target) # revealed: list[str] + +def _(narrow: list[str] | dict[str, str], target: list[str] | dict[str, str] | None): + target = identity(narrow) + reveal_type(target) # revealed: list[str] | dict[str, str] + +class TD(TypedDict): + x: int + +def _(target: list[TD] | dict[str, TD] | 
None): + target = identity([{"x": 1}]) + reveal_type(target) # revealed: list[TD] + +def _(target: list[TD] | dict[str, TD] | None): + target = identity({"x": {"x": 1}}) + reveal_type(target) # revealed: dict[str, TD] +``` + +## Prefer the inferred type of non-generic classes + +```toml +[environment] +python-version = "3.12" +``` + +```py +def identity[T](x: T) -> T: return x def lst[T](x: T) -> list[T]: @@ -466,20 +733,18 @@ def lst[T](x: T) -> list[T]: def _(i: int): a: int | None = i - b: int | None = id(i) - c: int | str | None = id(i) + b: int | None = identity(i) + c: int | str | None = identity(i) reveal_type(a) # revealed: int reveal_type(b) # revealed: int reveal_type(c) # revealed: int a: list[int | None] | None = [i] - b: list[int | None] | None = id([i]) - c: list[int | None] | int | None = id([i]) + b: list[int | None] | None = identity([i]) + c: list[int | None] | int | None = identity([i]) reveal_type(a) # revealed: list[int | None] - # TODO: these should reveal `list[int | None]` - # we currently do not use the call expression annotation as type context for argument inference - reveal_type(b) # revealed: list[Unknown | int] - reveal_type(c) # revealed: list[Unknown | int] + reveal_type(b) # revealed: list[int | None] + reveal_type(c) # revealed: list[int | None] a: list[int | None] | None = [i] b: list[int | None] | None = lst(i) @@ -489,9 +754,44 @@ def _(i: int): reveal_type(c) # revealed: list[int | None] a: list | None = [] - b: list | None = id([]) - c: list | int | None = id([]) + b: list | None = identity([]) + c: list | int | None = identity([]) reveal_type(a) # revealed: list[Unknown] reveal_type(b) # revealed: list[Unknown] reveal_type(c) # revealed: list[Unknown] + +def f[T](x: list[T]) -> T: + return x[0] + +def _(a: int, b: str, c: int | str): + x1: int = f(lst(a)) + reveal_type(x1) # revealed: int + + x2: int | str = f(lst(a)) + reveal_type(x2) # revealed: int + + x3: int | None = f(lst(a)) + reveal_type(x3) # revealed: int + + x4: 
str = f(lst(b)) + reveal_type(x4) # revealed: str + + x5: int | str = f(lst(b)) + reveal_type(x5) # revealed: str + + x6: str | None = f(lst(b)) + reveal_type(x6) # revealed: str + + x7: int | str = f(lst(c)) + reveal_type(x7) # revealed: int | str + + x8: int | str = f(lst(c)) + reveal_type(x8) # revealed: int | str + + # TODO: Ideally this would reveal `int | str`. This is a known limitation of our + # call inference solver, and would require an extra inference attempt without type + # context, or with type context of subsets of the union, both of which are impractical + # for performance reasons. + x9: int | str | None = f(lst(c)) + reveal_type(x9) # revealed: int | str | None ``` diff --git a/crates/ty_python_semantic/resources/mdtest/attributes.md b/crates/ty_python_semantic/resources/mdtest/attributes.md index 10b6d42318..012a925e87 100644 --- a/crates/ty_python_semantic/resources/mdtest/attributes.md +++ b/crates/ty_python_semantic/resources/mdtest/attributes.md @@ -369,6 +369,11 @@ reveal_type(c_instance.y) # revealed: Unknown | int #### Attributes defined in comprehensions +```toml +[environment] +python-version = "3.12" +``` + ```py class TupleIterator: def __next__(self) -> tuple[int, str]: @@ -380,19 +385,9 @@ class TupleIterable: class C: def __init__(self) -> None: - # TODO: Should not emit this diagnostic - # error: [unresolved-attribute] [... for self.a in range(3)] - # TODO: Should not emit this diagnostic - # error: [unresolved-attribute] - # error: [unresolved-attribute] [... for (self.b, self.c) in TupleIterable()] - # TODO: Should not emit this diagnostic - # error: [unresolved-attribute] - # error: [unresolved-attribute] [... for self.d in range(3) for self.e in range(3)] - # TODO: Should not emit this diagnostic - # error: [unresolved-attribute] [[... for self.f in range(3)] for _ in range(3)] [[... 
for self.g in range(3)] for self in [D()]] @@ -401,35 +396,74 @@ class D: c_instance = C() -# TODO: no error, reveal Unknown | int -# error: [unresolved-attribute] -reveal_type(c_instance.a) # revealed: Unknown +reveal_type(c_instance.a) # revealed: Unknown | int -# TODO: no error, reveal Unknown | int -# error: [unresolved-attribute] -reveal_type(c_instance.b) # revealed: Unknown +reveal_type(c_instance.b) # revealed: Unknown | int -# TODO: no error, reveal Unknown | str -# error: [unresolved-attribute] -reveal_type(c_instance.c) # revealed: Unknown +reveal_type(c_instance.c) # revealed: Unknown | str -# TODO: no error, reveal Unknown | int -# error: [unresolved-attribute] -reveal_type(c_instance.d) # revealed: Unknown +reveal_type(c_instance.d) # revealed: Unknown | int -# TODO: no error, reveal Unknown | int -# error: [unresolved-attribute] -reveal_type(c_instance.e) # revealed: Unknown +reveal_type(c_instance.e) # revealed: Unknown | int -# TODO: no error, reveal Unknown | int -# error: [unresolved-attribute] -reveal_type(c_instance.f) # revealed: Unknown +reveal_type(c_instance.f) # revealed: Unknown | int # This one is correctly not resolved as an attribute: # error: [unresolved-attribute] reveal_type(c_instance.g) # revealed: Unknown ``` +It does not matter how much the comprehension is nested. + +Similarly attributes defined by the comprehension in a generic method are recognized. + +```py +class C: + def f[T](self): + [... for self.a in [1]] + [[... for self.b in [1]] for _ in [1]] + +c_instance = C() + +reveal_type(c_instance.a) # revealed: Unknown | int +reveal_type(c_instance.b) # revealed: Unknown | int +``` + +If the comprehension is inside another scope like function then that attribute is not inferred. + +```py +class C: + def __init__(self): + def f(): + # error: [unresolved-attribute] + [... for self.a in [1]] + + def g(): + # error: [unresolved-attribute] + [... 
for self.b in [1]] + g() + +c_instance = C() + +# This attribute is in the function f and is not reachable +# error: [unresolved-attribute] +reveal_type(c_instance.a) # revealed: Unknown + +# error: [unresolved-attribute] +reveal_type(c_instance.b) # revealed: Unknown +``` + +If the comprehension is nested in any other eager scope it still can assign attributes. + +```py +class C: + def __init__(self): + class D: + [[... for self.a in [1]] for _ in [1]] + +reveal_type(C().a) # revealed: Unknown | int +``` + #### Conditionally declared / bound attributes We currently treat implicit instance attributes to be bound, even if they are only conditionally @@ -1878,7 +1912,7 @@ date.day = 8 date.month = 4 date.year = 2025 -# error: [unresolved-attribute] "Can not assign object of type `Literal["UTC"]` to attribute `tz` on type `Date` with custom `__setattr__` method." +# error: [unresolved-attribute] "Cannot assign object of type `Literal["UTC"]` to attribute `tz` on type `Date` with custom `__setattr__` method." 
date.tz = "UTC" ``` @@ -1894,10 +1928,10 @@ class Frozen: existing: int = 1 def __setattr__(self, name, value) -> Never: - raise AttributeError("Attributes can not be modified") + raise AttributeError("Attributes cannot be modified") instance = Frozen() -instance.non_existing = 2 # error: [invalid-assignment] "Can not assign to unresolved attribute `non_existing` on type `Frozen`" +instance.non_existing = 2 # error: [invalid-assignment] "Cannot assign to unresolved attribute `non_existing` on type `Frozen`" instance.existing = 2 # error: [invalid-assignment] "Cannot assign to attribute `existing` on type `Frozen` whose `__setattr__` method returns `Never`/`NoReturn`" ``` @@ -1949,7 +1983,7 @@ def flag() -> bool: class Frozen: if flag(): def __setattr__(self, name, value) -> Never: - raise AttributeError("Attributes can not be modified") + raise AttributeError("Attributes cannot be modified") instance = Frozen() instance.non_existing = 2 # error: [invalid-assignment] diff --git a/crates/ty_python_semantic/resources/mdtest/bidirectional.md b/crates/ty_python_semantic/resources/mdtest/bidirectional.md index 627492855f..1211f92fe5 100644 --- a/crates/ty_python_semantic/resources/mdtest/bidirectional.md +++ b/crates/ty_python_semantic/resources/mdtest/bidirectional.md @@ -50,8 +50,8 @@ def _(l: list[int] | None = None): def f[T](x: T, cond: bool) -> T | list[T]: return x if cond else [x] -# TODO: no error -# error: [invalid-assignment] "Object of type `Literal[1] | list[Literal[1]]` is not assignable to `int | list[int]`" +# TODO: Better constraint solver. 
+# error: [invalid-assignment] l5: int | list[int] = f(1, True) ``` @@ -76,6 +76,7 @@ def _() -> TD: def _() -> TD: # error: [missing-typed-dict-key] "Missing required key 'x' in TypedDict `TD` constructor" + # error: [invalid-return-type] return {} ``` @@ -185,12 +186,12 @@ Declared attribute types: ```py class E: - e: list[Literal[1]] + a: list[Literal[1]] + b: list[Literal[1]] def _(e: E): - # TODO: Implement attribute type context. - # error: [invalid-assignment] "Object of type `list[Unknown | int]` is not assignable to attribute `e` of type `list[Literal[1]]`" - e.e = [1] + e.a = [1] + E.b = [1] ``` Function return types: @@ -200,6 +201,59 @@ def f() -> list[Literal[1]]: return [1] ``` +## Instance attributes + +```toml +[environment] +python-version = "3.12" +``` + +Both meta and class/instance attribute annotations are used as type context: + +```py +from typing import Literal, Any + +class DataDescriptor: + def __get__(self, instance: object, owner: type | None = None) -> list[Literal[1]]: + return [] + + def __set__(self, instance: object, value: list[Literal[1]]) -> None: + pass + +def lst[T](x: T) -> list[T]: + return [x] + +def _(flag: bool): + class Meta(type): + if flag: + x: DataDescriptor = DataDescriptor() + + class C(metaclass=Meta): + x: list[int | None] + + def _(c: C): + c.x = lst(1) + C.x = lst(1) +``` + +For union targets, each element of the union is considered as a separate type context: + +```py +from typing import Literal + +class X: + x: list[int | str] + +class Y: + x: list[int | None] + +def lst[T](x: T) -> list[T]: + return [x] + +def _(xy: X | Y): + xy.x = lst(1) +``` + ## Class constructor parameters ```toml @@ -226,3 +280,72 @@ A(f(1)) # error: [invalid-argument-type] "Argument to bound method `__init__` is incorrect: Expected `list[int | None]`, found `list[list[Unknown]]`" A(f([])) ``` + +## Multi-inference diagnostics + +```toml +[environment] +python-version = "3.12" +``` + +Diagnostics unrelated to the type-context are only 
reported once: + +`call.py`: + +```py +def f[T](x: T) -> list[T]: + return [x] + +def a(x: list[bool], y: list[bool]): ... +def b(x: list[int], y: list[int]): ... +def c(x: list[int], y: list[int]): ... +def _(x: int): + if x == 0: + y = a + elif x == 1: + y = b + else: + y = c + + if x == 0: + z = True + + y(f(True), [True]) + + # error: [possibly-unresolved-reference] "Name `z` used when possibly not defined" + y(f(True), [z]) +``` + +`call_standalone_expression.py`: + +```py +def f(_: str): ... +def g(_: str): ... +def _(a: object, b: object, flag: bool): + if flag: + x = f + else: + x = g + + # error: [unsupported-operator] "Operator `>` is not supported for types `object` and `object`" + x(f"{'a' if a > b else 'b'}") +``` + +`attribute_assignment.py`: + +```py +from typing import TypedDict + +class TD(TypedDict): + y: int + +class X: + td: TD + +def _(x: X, flag: bool): + if flag: + y = 1 + + # error: [possibly-unresolved-reference] "Name `y` used when possibly not defined" + x.td = {"y": y} +``` diff --git a/crates/ty_python_semantic/resources/mdtest/binary/classes.md b/crates/ty_python_semantic/resources/mdtest/binary/classes.md index 7ae4c23e60..db42286c84 100644 --- a/crates/ty_python_semantic/resources/mdtest/binary/classes.md +++ b/crates/ty_python_semantic/resources/mdtest/binary/classes.md @@ -13,7 +13,7 @@ python-version = "3.10" class A: ... class B: ... -reveal_type(A | B) # revealed: UnionType +reveal_type(A | B) # revealed: types.UnionType ``` ## Union of two classes (prior to 3.10) @@ -43,14 +43,14 @@ class A: ... class B: ... def _(sub_a: type[A], sub_b: type[B]): - reveal_type(A | sub_b) # revealed: UnionType - reveal_type(sub_a | B) # revealed: UnionType - reveal_type(sub_a | sub_b) # revealed: UnionType + reveal_type(A | sub_b) # revealed: types.UnionType + reveal_type(sub_a | B) # revealed: types.UnionType + reveal_type(sub_a | sub_b) # revealed: types.UnionType class C[T]: ... class D[T]: ... 
-reveal_type(C | D) # revealed: UnionType +reveal_type(C | D) # revealed: types.UnionType -reveal_type(C[int] | D[str]) # revealed: UnionType +reveal_type(C[int] | D[str]) # revealed: types.UnionType ``` diff --git a/crates/ty_python_semantic/resources/mdtest/call/builtins.md b/crates/ty_python_semantic/resources/mdtest/call/builtins.md index 0eac021d1a..8de3e77d77 100644 --- a/crates/ty_python_semantic/resources/mdtest/call/builtins.md +++ b/crates/ty_python_semantic/resources/mdtest/call/builtins.md @@ -162,3 +162,38 @@ def _(x: A | B, y: list[int]): reveal_type(x) # revealed: B & ~A reveal_type(isinstance(x, B)) # revealed: Literal[True] ``` + +Certain special forms in the typing module are not instances of `type`, so are strictly-speaking +disallowed as the second argument to `isinstance()` according to typeshed's annotations. However, at +runtime they work fine as the second argument, and we implement that special case in ty: + +```py +import typing as t + +# no errors emitted for any of these: +isinstance("", t.Dict) +isinstance("", t.List) +isinstance("", t.Set) +isinstance("", t.FrozenSet) +isinstance("", t.Tuple) +isinstance("", t.ChainMap) +isinstance("", t.Counter) +isinstance("", t.Deque) +isinstance("", t.OrderedDict) +isinstance("", t.Callable) +isinstance("", t.Type) +isinstance("", t.Callable | t.Deque) + +# `Any` is valid in `issubclass()` calls but not `isinstance()` calls +issubclass(list, t.Any) +issubclass(list, t.Any | t.Dict) +``` + +But for other special forms that are not permitted as the second argument, we still emit an error: + +```py +isinstance("", t.TypeGuard) # error: [invalid-argument-type] +isinstance("", t.ClassVar) # error: [invalid-argument-type] +isinstance("", t.Final) # error: [invalid-argument-type] +isinstance("", t.Any) # error: [invalid-argument-type] +``` diff --git a/crates/ty_python_semantic/resources/mdtest/call/dunder.md b/crates/ty_python_semantic/resources/mdtest/call/dunder.md index 721517eac4..f7be30464c 100644 
--- a/crates/ty_python_semantic/resources/mdtest/call/dunder.md +++ b/crates/ty_python_semantic/resources/mdtest/call/dunder.md @@ -194,7 +194,7 @@ class_with_descriptor_dunder = ClassWithDescriptorDunder() reveal_type(class_with_descriptor_dunder[0]) # revealed: str ``` -## Dunders can not be overwritten on instances +## Dunders cannot be overwritten on instances If we attempt to overwrite a dunder method on an instance, it does not affect the behavior of implicit dunder calls: diff --git a/crates/ty_python_semantic/resources/mdtest/call/getattr_static.md b/crates/ty_python_semantic/resources/mdtest/call/getattr_static.md index a8d87bbfa6..7841d04f79 100644 --- a/crates/ty_python_semantic/resources/mdtest/call/getattr_static.md +++ b/crates/ty_python_semantic/resources/mdtest/call/getattr_static.md @@ -84,7 +84,7 @@ class E(metaclass=Meta): ... reveal_type(inspect.getattr_static(E, "attr")) # revealed: int ``` -Metaclass attributes can not be added when probing an instance of the class: +Metaclass attributes cannot be added when probing an instance of the class: ```py reveal_type(inspect.getattr_static(E(), "attr", "non_existent")) # revealed: Literal["non_existent"] diff --git a/crates/ty_python_semantic/resources/mdtest/call/methods.md b/crates/ty_python_semantic/resources/mdtest/call/methods.md index f101aa6e64..07740c2f89 100644 --- a/crates/ty_python_semantic/resources/mdtest/call/methods.md +++ b/crates/ty_python_semantic/resources/mdtest/call/methods.md @@ -308,7 +308,7 @@ reveal_type(C.f) # revealed: bound method .f(arg: int) -> str reveal_type(C.f(1)) # revealed: str ``` -The method `f` can not be accessed from an instance of the class: +The method `f` cannot be accessed from an instance of the class: ```py # error: [unresolved-attribute] "Object of type `C` has no attribute `f`" diff --git a/crates/ty_python_semantic/resources/mdtest/call/overloads.md b/crates/ty_python_semantic/resources/mdtest/call/overloads.md index 726d74a630..e6ef48276a 100644 --- 
a/crates/ty_python_semantic/resources/mdtest/call/overloads.md +++ b/crates/ty_python_semantic/resources/mdtest/call/overloads.md @@ -1685,8 +1685,7 @@ def int_or_str() -> int | str: x = f([{"x": 1}], int_or_str()) reveal_type(x) # revealed: int | str -# TODO: error: [no-matching-overload] "No overload of function `f` matches arguments" -# we currently incorrectly consider `list[dict[str, int]]` a subtype of `list[T]` +# error: [no-matching-overload] "No overload of function `f` matches arguments" f([{"y": 1}], int_or_str()) ``` diff --git a/crates/ty_python_semantic/resources/mdtest/call/union.md b/crates/ty_python_semantic/resources/mdtest/call/union.md index 1a4079204d..7bb4e02044 100644 --- a/crates/ty_python_semantic/resources/mdtest/call/union.md +++ b/crates/ty_python_semantic/resources/mdtest/call/union.md @@ -277,50 +277,6 @@ def _(flag: bool): x = f({"x": 1}) reveal_type(x) # revealed: int - # TODO: error: [invalid-argument-type] "Argument to function `f` is incorrect: Expected `T`, found `dict[str, int]`" - # we currently consider `TypedDict` instances to be subtypes of `dict` + # error: [invalid-argument-type] "Argument to function `f` is incorrect: Expected `T`, found `dict[Unknown | str, Unknown | int]`" f({"y": 1}) ``` - -Diagnostics unrelated to the type-context are only reported once: - -`expression.py`: - -```py -def f[T](x: T) -> list[T]: - return [x] - -def a(x: list[bool], y: list[bool]): ... -def b(x: list[int], y: list[int]): ... -def c(x: list[int], y: list[int]): ... -def _(x: int): - if x == 0: - y = a - elif x == 1: - y = b - else: - y = c - - if x == 0: - z = True - - y(f(True), [True]) - - # error: [possibly-unresolved-reference] "Name `z` used when possibly not defined" - y(f(True), [z]) -``` - -`standalone_expression.py`: - -```py -def f(_: str): ... -def g(_: str): ... 
-def _(a: object, b: object, flag: bool): - if flag: - x = f - else: - x = g - - # error: [unsupported-operator] "Operator `>` is not supported for types `object` and `object`" - x(f"{'a' if a > b else 'b'}") -``` diff --git a/crates/ty_python_semantic/resources/mdtest/class/super.md b/crates/ty_python_semantic/resources/mdtest/class/super.md index 5d4a4249b7..80a4bc9806 100644 --- a/crates/ty_python_semantic/resources/mdtest/class/super.md +++ b/crates/ty_python_semantic/resources/mdtest/class/super.md @@ -66,7 +66,7 @@ synthesized `Protocol`s that cannot be upcast to, or interpreted as, a non-`obje ```py import types -from typing_extensions import Callable, TypeIs, Literal, TypedDict +from typing_extensions import Callable, TypeIs, Literal, NewType, TypedDict def f(): ... @@ -81,6 +81,8 @@ class SomeTypedDict(TypedDict): x: int y: bytes +N = NewType("N", int) + # revealed: , FunctionType> reveal_type(super(object, f)) # revealed: , WrapperDescriptorType> @@ -95,6 +97,8 @@ reveal_type(super(object, Alias)) reveal_type(super(object, Foo().method)) # revealed: , property> reveal_type(super(object, Foo.some_property)) +# revealed: , int> +reveal_type(super(object, N(42))) def g(x: object) -> TypeIs[list[object]]: return isinstance(x, list) diff --git a/crates/ty_python_semantic/resources/mdtest/comprehensions/basic.md b/crates/ty_python_semantic/resources/mdtest/comprehensions/basic.md index bdd9ec435c..0f2d1e4b0c 100644 --- a/crates/ty_python_semantic/resources/mdtest/comprehensions/basic.md +++ b/crates/ty_python_semantic/resources/mdtest/comprehensions/basic.md @@ -58,6 +58,24 @@ Iterating over an unbound iterable yields `Unknown`: # error: [not-iterable] "Object of type `int` is not iterable" # revealed: tuple[int, Unknown] [reveal_type((x, z)) for x in range(3) for z in x] + +# error: [unresolved-reference] "Name `foo` used when not defined" +foo +foo = [ + # revealed: tuple[int, Unknown] + reveal_type((x, z)) + for x in range(3) + # error: 
[unresolved-reference] "Name `foo` used when not defined" + for z in [foo] +] + +baz = [ + # revealed: tuple[int, Unknown] + reveal_type((x, z)) + for x in range(3) + # error: [unresolved-reference] "Name `baz` used when not defined" + for z in [baz] +] ``` ## Starred expressions @@ -103,3 +121,95 @@ async def _(): # revealed: Unknown [reveal_type(x) async for x in range(3)] ``` + +## Comprehension expression types + +The type of the comprehension expression itself should reflect the inferred element type: + +```py +from typing import TypedDict, Literal + +# revealed: list[int | Unknown] +reveal_type([x for x in range(10)]) + +# revealed: set[int | Unknown] +reveal_type({x for x in range(10)}) + +# revealed: dict[int | Unknown, str | Unknown] +reveal_type({x: str(x) for x in range(10)}) + +# revealed: list[tuple[int, Unknown | str] | Unknown] +reveal_type([(x, y) for x in range(5) for y in ["a", "b", "c"]]) + +squares: list[int | None] = [x**2 for x in range(10)] +reveal_type(squares) # revealed: list[int | None] +``` + +Inference for comprehensions takes the type context into account: + +```py +# Without type context: +reveal_type([x for x in [1, 2, 3]]) # revealed: list[Unknown | int] +reveal_type({x: "a" for x in [1, 2, 3]}) # revealed: dict[Unknown | int, str | Unknown] +reveal_type({str(x): x for x in [1, 2, 3]}) # revealed: dict[str | Unknown, Unknown | int] +reveal_type({x for x in [1, 2, 3]}) # revealed: set[Unknown | int] + +# With type context: +xs: list[int] = [x for x in [1, 2, 3]] +reveal_type(xs) # revealed: list[int] + +ys: dict[int, str] = {x: str(x) for x in [1, 2, 3]} +reveal_type(ys) # revealed: dict[int, str] + +zs: set[int] = {x for x in [1, 2, 3]} +``` + +This also works for nested comprehensions: + +```py +table = [[(x, y) for x in range(3)] for y in range(3)] +reveal_type(table) # revealed: list[list[tuple[int, int] | Unknown] | Unknown] + +table_with_content: list[list[tuple[int, int, str | None]]] = [[(x, y, None) for x in range(3)] for y 
in range(3)] +reveal_type(table_with_content) # revealed: list[list[tuple[int, int, str | None]]] +``` + +The type context is propagated down into the comprehension: + +```py +class Person(TypedDict): + name: str + +# TODO: This should not error. +# error: [invalid-assignment] +persons: list[Person] = [{"name": n} for n in ["Alice", "Bob"]] +reveal_type(persons) # revealed: list[Person] + +# TODO: This should be an invalid-key error. +# error: [invalid-assignment] +invalid: list[Person] = [{"misspelled": n} for n in ["Alice", "Bob"]] +``` + +We promote literals to avoid overly-precise types in invariant positions: + +```py +reveal_type([x for x in ("a", "b", "c")]) # revealed: list[str | Unknown] +reveal_type({x for x in (1, 2, 3)}) # revealed: set[int | Unknown] +reveal_type({k: 0 for k in ("a", "b", "c")}) # revealed: dict[str | Unknown, int | Unknown] +``` + +Type context can prevent this promotion from happening: + +```py +list_of_literals: list[Literal["a", "b", "c"]] = [x for x in ("a", "b", "c")] +reveal_type(list_of_literals) # revealed: list[Literal["a", "b", "c"]] + +dict_with_literal_keys: dict[Literal["a", "b", "c"], int] = {k: 0 for k in ("a", "b", "c")} +reveal_type(dict_with_literal_keys) # revealed: dict[Literal["a", "b", "c"], int] + +dict_with_literal_values: dict[str, Literal[1, 2, 3]] = {str(k): k for k in (1, 2, 3)} +reveal_type(dict_with_literal_values) # revealed: dict[str, Literal[1, 2, 3]] + +set_with_literals: set[Literal[1, 2, 3]] = {k for k in (1, 2, 3)} +reveal_type(set_with_literals) # revealed: set[Literal[1, 2, 3]] +``` diff --git a/crates/ty_python_semantic/resources/mdtest/comprehensions/invalid_syntax.md b/crates/ty_python_semantic/resources/mdtest/comprehensions/invalid_syntax.md index f16ec0505d..fd2a6ef0cf 100644 --- a/crates/ty_python_semantic/resources/mdtest/comprehensions/invalid_syntax.md +++ b/crates/ty_python_semantic/resources/mdtest/comprehensions/invalid_syntax.md @@ -1,20 +1,20 @@ # Comprehensions with invalid syntax 
```py -# Missing 'in' keyword. +# Missing `in` keyword. # It's reasonably clear here what they *meant* to write, # so we'll still infer the correct type: -# error: [invalid-syntax] "Expected 'in', found name" +# error: [invalid-syntax] "Expected `in`, found name" # revealed: int [reveal_type(a) for a range(3)] # Missing iteration variable -# error: [invalid-syntax] "Expected an identifier, but found a keyword 'in' that cannot be used here" -# error: [invalid-syntax] "Expected 'in', found name" +# error: [invalid-syntax] "Expected an identifier, but found a keyword `in` that cannot be used here" +# error: [invalid-syntax] "Expected `in`, found name" # error: [unresolved-reference] # revealed: Unknown [reveal_type(b) for in range(3)] @@ -27,9 +27,9 @@ [reveal_type(c) for c in] -# Missing 'in' keyword and missing iterable +# Missing `in` keyword and missing iterable -# error: [invalid-syntax] "Expected 'in', found ']'" +# error: [invalid-syntax] "Expected `in`, found `]`" # revealed: Unknown [reveal_type(d) for d] ``` diff --git a/crates/ty_python_semantic/resources/mdtest/dataclasses/dataclasses.md b/crates/ty_python_semantic/resources/mdtest/dataclasses/dataclasses.md index e7171b6dd4..8548085302 100644 --- a/crates/ty_python_semantic/resources/mdtest/dataclasses/dataclasses.md +++ b/crates/ty_python_semantic/resources/mdtest/dataclasses/dataclasses.md @@ -424,7 +424,7 @@ from dataclasses import dataclass class MyFrozenClass: ... 
frozen = MyFrozenClass() -frozen.x = 2 # error: [invalid-assignment] "Can not assign to unresolved attribute `x` on type `MyFrozenClass`" +frozen.x = 2 # error: [invalid-assignment] "Cannot assign to unresolved attribute `x` on type `MyFrozenClass`" ``` A diagnostic is also emitted if a frozen dataclass is inherited, and an attempt is made to mutate an @@ -838,6 +838,40 @@ class WrappedIntAndExtraData[T](Wrap[int]): reveal_type(WrappedIntAndExtraData[bytes].__init__) ``` +### Non-dataclass inheriting from generic dataclass + +This is a regression test for . + +When a non-dataclass inherits from a generic dataclass, the generic type parameters should still be +properly inferred when calling the inherited `__init__` method. + +```py +from dataclasses import dataclass + +@dataclass +class ParentDataclass[T]: + value: T + +# Non-dataclass inheriting from generic dataclass +class ChildOfParentDataclass[T](ParentDataclass[T]): ... + +def uses_dataclass[T](x: T) -> ChildOfParentDataclass[T]: + return ChildOfParentDataclass(x) + +# TODO: ParentDataclass.__init__ should show generic types, not Unknown +# revealed: (self: ParentDataclass[Unknown], value: Unknown) -> None +reveal_type(ParentDataclass.__init__) + +# revealed: (self: ParentDataclass[T@ChildOfParentDataclass], value: T@ChildOfParentDataclass) -> None +reveal_type(ChildOfParentDataclass.__init__) + +result_int = uses_dataclass(42) +reveal_type(result_int) # revealed: ChildOfParentDataclass[Literal[42]] + +result_str = uses_dataclass("hello") +reveal_type(result_str) # revealed: ChildOfParentDataclass[Literal["hello"]] +``` + ## Descriptor-typed fields ### Same type in `__get__` and `__set__` diff --git a/crates/ty_python_semantic/resources/mdtest/dataclasses/fields.md b/crates/ty_python_semantic/resources/mdtest/dataclasses/fields.md index f091a1c991..28a69081e5 100644 --- a/crates/ty_python_semantic/resources/mdtest/dataclasses/fields.md +++ b/crates/ty_python_semantic/resources/mdtest/dataclasses/fields.md @@ 
-37,7 +37,7 @@ class Data: content: list[int] = field(default_factory=list) timestamp: datetime = field(default_factory=datetime.now, init=False) -# revealed: (self: Data, content: list[int] = Unknown) -> None +# revealed: (self: Data, content: list[int] = list[int]) -> None reveal_type(Data.__init__) data = Data([1, 2, 3]) @@ -63,7 +63,6 @@ class Person: age: int | None = field(default=None, kw_only=True) role: str = field(default="user", kw_only=True) -# TODO: this would ideally show a default value of `None` for `age` # revealed: (self: Person, name: str, *, age: int | None = None, role: str = Literal["user"]) -> None reveal_type(Person.__init__) diff --git a/crates/ty_python_semantic/resources/mdtest/enums.md b/crates/ty_python_semantic/resources/mdtest/enums.md index c526e11124..bdd2a92995 100644 --- a/crates/ty_python_semantic/resources/mdtest/enums.md +++ b/crates/ty_python_semantic/resources/mdtest/enums.md @@ -320,6 +320,11 @@ reveal_type(enum_members(Answer)) reveal_type(Answer.YES.value) # revealed: Literal[1] reveal_type(Answer.NO.value) # revealed: Literal[2] + +class SingleMember(Enum): + SINGLE = auto() + +reveal_type(SingleMember.SINGLE.value) # revealed: Literal[1] ``` Usages of `auto()` can be combined with manual value assignments: @@ -348,6 +353,11 @@ class Answer(StrEnum): reveal_type(Answer.YES.value) # revealed: Literal["yes"] reveal_type(Answer.NO.value) # revealed: Literal["no"] + +class SingleMember(StrEnum): + SINGLE = auto() + +reveal_type(SingleMember.SINGLE.value) # revealed: Literal["single"] ``` Using `auto()` with `IntEnum` also works as expected: @@ -363,6 +373,52 @@ reveal_type(Answer.YES.value) # revealed: Literal[1] reveal_type(Answer.NO.value) # revealed: Literal[2] ``` +As does using `auto()` for other enums that use `int` as a mixin: + +```py +from enum import Enum, auto + +class Answer(int, Enum): + YES = auto() + NO = auto() + +reveal_type(Answer.YES.value) # revealed: Literal[1] +reveal_type(Answer.NO.value) # revealed: 
Literal[2] +``` + +It's [hard to predict](https://github.com/astral-sh/ruff/pull/20541#discussion_r2381878613) what the +effect of using `auto()` will be for an arbitrary non-integer mixin, so for anything that isn't a +`StrEnum` and has a non-`int` mixin, we simply fall back to typeshed's annotation of `Any` for the +`value` property: + +```python +from enum import Enum, auto + +class A(str, Enum): + X = auto() + Y = auto() + +reveal_type(A.X.value) # revealed: Any + +class B(bytes, Enum): + X = auto() + Y = auto() + +reveal_type(B.X.value) # revealed: Any + +class C(tuple, Enum): + X = auto() + Y = auto() + +reveal_type(C.X.value) # revealed: Any + +class D(float, Enum): + X = auto() + Y = auto() + +reveal_type(D.X.value) # revealed: Any +``` + Combining aliases with `auto()`: ```py diff --git a/crates/ty_python_semantic/resources/mdtest/exhaustiveness_checking.md b/crates/ty_python_semantic/resources/mdtest/exhaustiveness_checking.md index 7218359750..4379498f2d 100644 --- a/crates/ty_python_semantic/resources/mdtest/exhaustiveness_checking.md +++ b/crates/ty_python_semantic/resources/mdtest/exhaustiveness_checking.md @@ -182,6 +182,11 @@ def match_non_exhaustive(x: Color): ## `isinstance` checks +```toml +[environment] +python-version = "3.12" +``` + ```py from typing import assert_never @@ -189,6 +194,9 @@ class A: ... class B: ... class C: ...
+class GenericClass[T]: + x: T + def if_else_exhaustive(x: A | B | C): if isinstance(x, A): pass @@ -253,6 +261,17 @@ def match_non_exhaustive(x: A | B | C): # this diagnostic is correct: the inferred type of `x` is `B & ~A & ~C` assert_never(x) # error: [type-assertion-failure] + +# Note: no invalid-return-type diagnostic; the `match` is exhaustive +def match_exhaustive_generic[T](obj: GenericClass[T]) -> GenericClass[T]: + match obj: + case GenericClass(x=42): + reveal_type(obj) # revealed: GenericClass[T@match_exhaustive_generic] + return obj + case GenericClass(x=x): + reveal_type(x) # revealed: @Todo(`match` pattern definition types) + reveal_type(obj) # revealed: GenericClass[T@match_exhaustive_generic] + return obj ``` ## `isinstance` checks with generics @@ -398,3 +417,55 @@ class Answer(Enum): case Answer.NO: return False ``` + +## Exhaustiveness checking for type variables with bounds or constraints + +```toml +[environment] +python-version = "3.12" +``` + +```py +from typing import assert_never, Literal + +def f[T: bool](x: T) -> T: + match x: + case True: + return x + case False: + return x + case _: + reveal_type(x) # revealed: Never + assert_never(x) + +def g[T: Literal["foo", "bar"]](x: T) -> T: + match x: + case "foo": + return x + case "bar": + return x + case _: + reveal_type(x) # revealed: Never + assert_never(x) + +def h[T: int | str](x: T) -> T: + if isinstance(x, int): + return x + elif isinstance(x, str): + return x + else: + reveal_type(x) # revealed: Never + assert_never(x) + +def i[T: (int, str)](x: T) -> T: + match x: + case int(): + pass + case str(): + pass + case _: + reveal_type(x) # revealed: Never + assert_never(x) + + return x +``` diff --git a/crates/ty_python_semantic/resources/mdtest/expression/yield_and_yield_from.md b/crates/ty_python_semantic/resources/mdtest/expression/yield_and_yield_from.md index 629fd2b554..a207b3414f 100644 --- a/crates/ty_python_semantic/resources/mdtest/expression/yield_and_yield_from.md +++ 
b/crates/ty_python_semantic/resources/mdtest/expression/yield_and_yield_from.md @@ -26,7 +26,7 @@ def outer_generator(): ## `yield from` with a custom iterable `yield from` can also be used with custom iterable types. In that case, the type of the `yield from` -expression can not be determined +expression cannot be determined ```py from typing import Generator, TypeVar, Generic diff --git a/crates/ty_python_semantic/resources/mdtest/function/parameters.md b/crates/ty_python_semantic/resources/mdtest/function/parameters.md index eb7316fe91..c629f26565 100644 --- a/crates/ty_python_semantic/resources/mdtest/function/parameters.md +++ b/crates/ty_python_semantic/resources/mdtest/function/parameters.md @@ -1,12 +1,9 @@ # Function parameter types Within a function scope, the declared type of each parameter is its annotated type (or Unknown if -not annotated). The initial inferred type is the union of the declared type with the type of the -default value expression (if any). If both are fully static types, this union should simplify to the -annotated type (since the default value type must be assignable to the annotated type, and for fully -static types this means subtype-of, which simplifies in unions). But if the annotated type is -Unknown or another non-fully-static type, the default value type may still be relevant as lower -bound. +not annotated). The initial inferred type is the annotated type of the parameter, if any. If there +is no annotation, it is the union of `Unknown` with the type of the default value expression (if +any). The variadic parameter is a variadic tuple of its annotated type; the variadic-keywords parameter is a dictionary from strings to its annotated type. @@ -41,13 +38,13 @@ def g(*args, **kwargs): ## Annotation is present but not a fully static type -The default value type should be a lower bound on the inferred type. +If there is an annotation, we respect it fully and don't union in the default value type. 
```py from typing import Any def f(x: Any = 1): - reveal_type(x) # revealed: Any | Literal[1] + reveal_type(x) # revealed: Any ``` ## Default value type must be assignable to annotated type @@ -64,7 +61,7 @@ def f(x: int = "foo"): from typing import Any def g(x: Any = "foo"): - reveal_type(x) # revealed: Any | Literal["foo"] + reveal_type(x) # revealed: Any ``` ## Stub functions diff --git a/crates/ty_python_semantic/resources/mdtest/generics/legacy/classes.md b/crates/ty_python_semantic/resources/mdtest/generics/legacy/classes.md index c520b7e883..7ba6803dda 100644 --- a/crates/ty_python_semantic/resources/mdtest/generics/legacy/classes.md +++ b/crates/ty_python_semantic/resources/mdtest/generics/legacy/classes.md @@ -26,9 +26,12 @@ reveal_type(generic_context(SingleTypevar)) # revealed: tuple[T@MultipleTypevars, S@MultipleTypevars] reveal_type(generic_context(MultipleTypevars)) -# TODO: support `ParamSpec`/`TypeVarTuple` properly (these should not reveal `None`) -reveal_type(generic_context(SingleParamSpec)) # revealed: None -reveal_type(generic_context(TypeVarAndParamSpec)) # revealed: None +# revealed: tuple[P@SingleParamSpec] +reveal_type(generic_context(SingleParamSpec)) +# revealed: tuple[P@TypeVarAndParamSpec, T@TypeVarAndParamSpec] +reveal_type(generic_context(TypeVarAndParamSpec)) + +# TODO: support `TypeVarTuple` properly (these should not reveal `None`) reveal_type(generic_context(SingleTypeVarTuple)) # revealed: None reveal_type(generic_context(TypeVarAndTypeVarTuple)) # revealed: None ``` @@ -433,9 +436,7 @@ def test_seq(x: Sequence[T]) -> Sequence[T]: def func8(t1: tuple[complex, list[int]], t2: tuple[int, *tuple[str, ...]], t3: tuple[()]): reveal_type(test_seq(t1)) # revealed: Sequence[int | float | complex | list[int]] reveal_type(test_seq(t2)) # revealed: Sequence[int | str] - - # TODO: this should be `Sequence[Never]` - reveal_type(test_seq(t3)) # revealed: Sequence[Unknown] + reveal_type(test_seq(t3)) # revealed: Sequence[Never] ``` ### 
`__init__` is itself generic @@ -463,6 +464,7 @@ wrong_innards: C[int] = C("five", 1) from typing_extensions import overload, Generic, TypeVar T = TypeVar("T") +U = TypeVar("U") class C(Generic[T]): @overload @@ -494,6 +496,17 @@ C[int](12) C[None]("string") # error: [no-matching-overload] C[None](b"bytes") # error: [no-matching-overload] C[None](12) + +class D(Generic[T, U]): + @overload + def __init__(self: "D[str, U]", u: U) -> None: ... + @overload + def __init__(self, t: T, u: U) -> None: ... + def __init__(self, *args) -> None: ... + +reveal_type(D("string")) # revealed: D[str, str] +reveal_type(D(1)) # revealed: D[str, int] +reveal_type(D(1, "string")) # revealed: D[int, str] ``` ### Synthesized methods with dataclasses diff --git a/crates/ty_python_semantic/resources/mdtest/generics/legacy/functions.md b/crates/ty_python_semantic/resources/mdtest/generics/legacy/functions.md index 9745fdca21..2bbe85b5ec 100644 --- a/crates/ty_python_semantic/resources/mdtest/generics/legacy/functions.md +++ b/crates/ty_python_semantic/resources/mdtest/generics/legacy/functions.md @@ -545,3 +545,28 @@ def f(x: T, y: Not[T]) -> T: y = x # error: [invalid-assignment] return x ``` + +## Prefer exact matches for constrained typevars + +```py +from typing import TypeVar + +class Base: ... +class Sub(Base): ... + +# We solve to `Sub`, regardless of the order of constraints. 
+T = TypeVar("T", Base, Sub) +T2 = TypeVar("T2", Sub, Base) + +def f(x: T) -> list[T]: + return [x] + +def f2(x: T2) -> list[T2]: + return [x] + +x: list[Sub] = f(Sub()) +reveal_type(x) # revealed: list[Sub] + +y: list[Sub] = f2(Sub()) +reveal_type(y) # revealed: list[Sub] +``` diff --git a/crates/ty_python_semantic/resources/mdtest/generics/pep695/classes.md b/crates/ty_python_semantic/resources/mdtest/generics/pep695/classes.md index 30a9ee88ae..a01b468ad0 100644 --- a/crates/ty_python_semantic/resources/mdtest/generics/pep695/classes.md +++ b/crates/ty_python_semantic/resources/mdtest/generics/pep695/classes.md @@ -375,9 +375,7 @@ def test_seq[T](x: Sequence[T]) -> Sequence[T]: def func8(t1: tuple[complex, list[int]], t2: tuple[int, *tuple[str, ...]], t3: tuple[()]): reveal_type(test_seq(t1)) # revealed: Sequence[int | float | complex | list[int]] reveal_type(test_seq(t2)) # revealed: Sequence[int | str] - - # TODO: this should be `Sequence[Never]` - reveal_type(test_seq(t3)) # revealed: Sequence[Unknown] + reveal_type(test_seq(t3)) # revealed: Sequence[Never] ``` ### `__init__` is itself generic @@ -436,6 +434,17 @@ C[int](12) C[None]("string") # error: [no-matching-overload] C[None](b"bytes") # error: [no-matching-overload] C[None](12) + +class D[T, U]: + @overload + def __init__(self: "D[str, U]", u: U) -> None: ... + @overload + def __init__(self, t: T, u: U) -> None: ... + def __init__(self, *args) -> None: ... 
+ +reveal_type(D("string")) # revealed: D[str, str] +reveal_type(D(1)) # revealed: D[str, int] +reveal_type(D(1, "string")) # revealed: D[int, str] ``` ### Synthesized methods with dataclasses diff --git a/crates/ty_python_semantic/resources/mdtest/generics/pep695/functions.md b/crates/ty_python_semantic/resources/mdtest/generics/pep695/functions.md index a5e62f6866..5db84cfd5a 100644 --- a/crates/ty_python_semantic/resources/mdtest/generics/pep695/functions.md +++ b/crates/ty_python_semantic/resources/mdtest/generics/pep695/functions.md @@ -474,6 +474,16 @@ def g(x: str): f(prefix=x, suffix=".tar.gz") ``` +If the type variable is present multiple times in the union, we choose the correct union element to +infer against based on the argument type: + +```py +def h[T](x: list[T] | dict[T, T]) -> T | None: ... +def _(x: list[int], y: dict[int, int]): + reveal_type(h(x)) # revealed: int | None + reveal_type(h(y)) # revealed: int | None +``` + ## Nested functions see typevars bound in outer function ```py diff --git a/crates/ty_python_semantic/resources/mdtest/generics/scoping.md b/crates/ty_python_semantic/resources/mdtest/generics/scoping.md index 308092f4d1..79944b263a 100644 --- a/crates/ty_python_semantic/resources/mdtest/generics/scoping.md +++ b/crates/ty_python_semantic/resources/mdtest/generics/scoping.md @@ -288,6 +288,43 @@ class C[T]: class Bad2(Iterable[T]): ... ``` +## Class bases are evaluated within the type parameter scope + +```py +class C[_T]( + # error: [unresolved-reference] "Name `C` used when not defined" + C +): ... + +# `D` in `list[D]` is resolved to be a type variable of class `D`. +class D[D](list[D]): ... + +# error: [unresolved-reference] "Name `E` used when not defined" +if E: + class E[_T]( + # error: [unresolved-reference] "Name `E` used when not defined" + E + ): ... + +# error: [unresolved-reference] "Name `F` used when not defined" +F + +# error: [unresolved-reference] "Name `F` used when not defined" +class F[_T](F): ... 
+ +def foo(): + class G[_T]( + # error: [unresolved-reference] "Name `G` used when not defined" + G + ): ... + # error: [unresolved-reference] "Name `H` used when not defined" + if H: + class H[_T]( + # error: [unresolved-reference] "Name `H` used when not defined" + H + ): ... +``` + ## Class scopes do not cover inner scopes Just like regular symbols, the typevars of a generic class are only available in that class's scope, diff --git a/crates/ty_python_semantic/resources/mdtest/ide_support/all_members.md b/crates/ty_python_semantic/resources/mdtest/ide_support/all_members.md index 03ea95b4a2..e8c19625ca 100644 --- a/crates/ty_python_semantic/resources/mdtest/ide_support/all_members.md +++ b/crates/ty_python_semantic/resources/mdtest/ide_support/all_members.md @@ -130,7 +130,7 @@ static_assert(has_member(C, "base_attr")) static_assert(not has_member(C, "non_existent")) ``` -But instance attributes can not be accessed this way: +But instance attributes cannot be accessed this way: ```py static_assert(not has_member(C, "instance_attr")) diff --git a/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md b/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md index 404d308083..0a45e9e3c4 100644 --- a/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md +++ b/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md @@ -17,6 +17,633 @@ def f(x: MyInt): f(1) ``` +## None + +```py +MyNone = None + +def g(x: MyNone): + reveal_type(x) # revealed: None + +g(None) +``` + +## Unions + +We also support unions in type aliases: + +```py +from typing_extensions import Any, Never, Literal, LiteralString, Tuple, Annotated, Optional, Union +from ty_extensions import Unknown + +IntOrStr = int | str +IntOrStrOrBytes1 = int | str | bytes +IntOrStrOrBytes2 = (int | str) | bytes +IntOrStrOrBytes3 = int | (str | bytes) +IntOrStrOrBytes4 = IntOrStr | bytes +IntOrStrOrBytes5 = int | Union[str, bytes] +IntOrStrOrBytes6 = Union[int, str] | 
bytes +BytesOrIntOrStr = bytes | IntOrStr +IntOrNone = int | None +NoneOrInt = None | int +IntOrStrOrNone = IntOrStr | None +NoneOrIntOrStr = None | IntOrStr +IntOrAny = int | Any +AnyOrInt = Any | int +NoneOrAny = None | Any +AnyOrNone = Any | None +NeverOrAny = Never | Any +AnyOrNever = Any | Never +UnknownOrInt = Unknown | int +IntOrUnknown = int | Unknown +StrOrZero = str | Literal[0] +ZeroOrStr = Literal[0] | str +LiteralStringOrInt = LiteralString | int +IntOrLiteralString = int | LiteralString +NoneOrTuple = None | Tuple[int, str] +TupleOrNone = Tuple[int, str] | None +IntOrAnnotated = int | Annotated[str, "meta"] +AnnotatedOrInt = Annotated[str, "meta"] | int +IntOrOptional = int | Optional[str] +OptionalOrInt = Optional[str] | int + +reveal_type(IntOrStr) # revealed: types.UnionType +reveal_type(IntOrStrOrBytes1) # revealed: types.UnionType +reveal_type(IntOrStrOrBytes2) # revealed: types.UnionType +reveal_type(IntOrStrOrBytes3) # revealed: types.UnionType +reveal_type(IntOrStrOrBytes4) # revealed: types.UnionType +reveal_type(IntOrStrOrBytes5) # revealed: types.UnionType +reveal_type(IntOrStrOrBytes6) # revealed: types.UnionType +reveal_type(BytesOrIntOrStr) # revealed: types.UnionType +reveal_type(IntOrNone) # revealed: types.UnionType +reveal_type(NoneOrInt) # revealed: types.UnionType +reveal_type(IntOrStrOrNone) # revealed: types.UnionType +reveal_type(NoneOrIntOrStr) # revealed: types.UnionType +reveal_type(IntOrAny) # revealed: types.UnionType +reveal_type(AnyOrInt) # revealed: types.UnionType +reveal_type(NoneOrAny) # revealed: types.UnionType +reveal_type(AnyOrNone) # revealed: types.UnionType +reveal_type(NeverOrAny) # revealed: types.UnionType +reveal_type(AnyOrNever) # revealed: types.UnionType +reveal_type(UnknownOrInt) # revealed: types.UnionType +reveal_type(IntOrUnknown) # revealed: types.UnionType +reveal_type(StrOrZero) # revealed: types.UnionType +reveal_type(ZeroOrStr) # revealed: types.UnionType +reveal_type(IntOrLiteralString) # 
revealed: types.UnionType +reveal_type(LiteralStringOrInt) # revealed: types.UnionType +reveal_type(NoneOrTuple) # revealed: types.UnionType +reveal_type(TupleOrNone) # revealed: types.UnionType +reveal_type(IntOrAnnotated) # revealed: types.UnionType +reveal_type(AnnotatedOrInt) # revealed: types.UnionType +reveal_type(IntOrOptional) # revealed: types.UnionType +reveal_type(OptionalOrInt) # revealed: types.UnionType + +def _( + int_or_str: IntOrStr, + int_or_str_or_bytes1: IntOrStrOrBytes1, + int_or_str_or_bytes2: IntOrStrOrBytes2, + int_or_str_or_bytes3: IntOrStrOrBytes3, + int_or_str_or_bytes4: IntOrStrOrBytes4, + int_or_str_or_bytes5: IntOrStrOrBytes5, + int_or_str_or_bytes6: IntOrStrOrBytes6, + bytes_or_int_or_str: BytesOrIntOrStr, + int_or_none: IntOrNone, + none_or_int: NoneOrInt, + int_or_str_or_none: IntOrStrOrNone, + none_or_int_or_str: NoneOrIntOrStr, + int_or_any: IntOrAny, + any_or_int: AnyOrInt, + none_or_any: NoneOrAny, + any_or_none: AnyOrNone, + never_or_any: NeverOrAny, + any_or_never: AnyOrNever, + unknown_or_int: UnknownOrInt, + int_or_unknown: IntOrUnknown, + str_or_zero: StrOrZero, + zero_or_str: ZeroOrStr, + literal_string_or_int: LiteralStringOrInt, + int_or_literal_string: IntOrLiteralString, + none_or_tuple: NoneOrTuple, + tuple_or_none: TupleOrNone, + int_or_annotated: IntOrAnnotated, + annotated_or_int: AnnotatedOrInt, + int_or_optional: IntOrOptional, + optional_or_int: OptionalOrInt, +): + reveal_type(int_or_str) # revealed: int | str + reveal_type(int_or_str_or_bytes1) # revealed: int | str | bytes + reveal_type(int_or_str_or_bytes2) # revealed: int | str | bytes + reveal_type(int_or_str_or_bytes3) # revealed: int | str | bytes + reveal_type(int_or_str_or_bytes4) # revealed: int | str | bytes + reveal_type(int_or_str_or_bytes5) # revealed: int | str | bytes + reveal_type(int_or_str_or_bytes6) # revealed: int | str | bytes + reveal_type(bytes_or_int_or_str) # revealed: bytes | int | str + reveal_type(int_or_none) # revealed: int | None 
+ reveal_type(none_or_int) # revealed: None | int + reveal_type(int_or_str_or_none) # revealed: int | str | None + reveal_type(none_or_int_or_str) # revealed: None | int | str + reveal_type(int_or_any) # revealed: int | Any + reveal_type(any_or_int) # revealed: Any | int + reveal_type(none_or_any) # revealed: None | Any + reveal_type(any_or_none) # revealed: Any | None + reveal_type(never_or_any) # revealed: Any + reveal_type(any_or_never) # revealed: Any + reveal_type(unknown_or_int) # revealed: Unknown | int + reveal_type(int_or_unknown) # revealed: int | Unknown + reveal_type(str_or_zero) # revealed: str | Literal[0] + reveal_type(zero_or_str) # revealed: Literal[0] | str + reveal_type(literal_string_or_int) # revealed: LiteralString | int + reveal_type(int_or_literal_string) # revealed: int | LiteralString + reveal_type(none_or_tuple) # revealed: None | tuple[int, str] + reveal_type(tuple_or_none) # revealed: tuple[int, str] | None + reveal_type(int_or_annotated) # revealed: int | str + reveal_type(annotated_or_int) # revealed: str | int + reveal_type(int_or_optional) # revealed: int | str | None + reveal_type(optional_or_int) # revealed: str | None | int +``` + +If a type is unioned with itself in a value expression, the result is just that type. 
No +`types.UnionType` instance is created: + +```py +IntOrInt = int | int +ListOfIntOrListOfInt = list[int] | list[int] + +reveal_type(IntOrInt) # revealed: +reveal_type(ListOfIntOrListOfInt) # revealed: + +def _(int_or_int: IntOrInt, list_of_int_or_list_of_int: ListOfIntOrListOfInt): + reveal_type(int_or_int) # revealed: int + reveal_type(list_of_int_or_list_of_int) # revealed: list[int] +``` + +`NoneType` has no special or-operator behavior, so this is an error: + +```py +None | None # error: [unsupported-operator] "Operator `|` is unsupported between objects of type `None` and `None`" +``` + +When constructing something nonsensical like `int | 1`, we emit a diagnostic for the expression +itself, as it leads to a `TypeError` at runtime. The result of the expression is then inferred as +`Unknown`, so we permit it to be used in a type expression. + +```py +IntOrOne = int | 1 # error: [unsupported-operator] + +reveal_type(IntOrOne) # revealed: Unknown + +def _(int_or_one: IntOrOne): + reveal_type(int_or_one) # revealed: Unknown +``` + +If you were to somehow get hold of an opaque instance of `types.UnionType`, that could not be used +as a type expression: + +```py +from types import UnionType + +def f(SomeUnionType: UnionType): + # error: [invalid-type-form] "Variable of type `UnionType` is not allowed in a type expression" + some_union: SomeUnionType + +f(int | str) +``` + +## `|` operator between class objects and non-class objects + +Using the `|` operator between a class object and a non-class object does not create a `UnionType` +instance; it calls the relevant dunder as normal: + +```py +class Foo: + def __or__(self, other) -> str: + return "foo" + +reveal_type(Foo() | int) # revealed: str +reveal_type(Foo() | list[int]) # revealed: str + +class Bar: + def __ror__(self, other) -> str: + return "bar" + +reveal_type(int | Bar()) # revealed: str +reveal_type(list[int] | Bar()) # revealed: str + +class Invalid: + def __or__(self, other: "Invalid") -> str: + return 
"Invalid" + + def __ror__(self, other: "Invalid") -> str: + return "Invalid" + +# error: [unsupported-operator] +reveal_type(int | Invalid()) # revealed: Unknown +# error: [unsupported-operator] +reveal_type(Invalid() | list[int]) # revealed: Unknown +``` + +## Custom `__(r)or__` methods on metaclasses are only partially respected + +A drawback of our extensive special casing of `|` operations between class objects is that +`__(r)or__` methods on metaclasses are completely disregarded if two classes are `|`'d together. We +respect the metaclass dunder if a class is `|`'d with a non-class, however: + +```py +class Meta(type): + def __or__(self, other) -> str: + return "Meta" + +class Foo(metaclass=Meta): ... +class Bar(metaclass=Meta): ... + +X = Foo | Bar + +# In an ideal world, perhaps we would respect `Meta.__or__` here and reveal `str`? +# But we still need to record what the elements are, since (according to the typing spec) +# `X` is still a valid type alias +reveal_type(X) # revealed: types.UnionType + +def f(obj: X): + reveal_type(obj) # revealed: Foo | Bar + +# We do respect the metaclass `__or__` if it's used between a class and a non-class, however: + +Y = Foo | 42 +reveal_type(Y) # revealed: str + +Z = Bar | 56 +reveal_type(Z) # revealed: str + +def g( + arg1: Y, # error: [invalid-type-form] + arg2: Z, # error: [invalid-type-form] +): ... +``` + +## `|` unions in stubs and `TYPE_CHECKING` blocks + +In runtime contexts, `|` unions are only permitted on Python 3.10+. But in suites of code that are +never executed at runtime (stub files, `if TYPE_CHECKING` blocks, and stringified annotations), they +are permitted even if the target version is set to Python 3.9 or earlier. 
+ +```toml +[environment] +python-version = "3.9" +``` + +`bar.pyi`: + +```pyi +Z = int | str +GLOBAL_CONSTANT: Z +``` + +`foo.py`: + +```py +from typing import TYPE_CHECKING +from bar import GLOBAL_CONSTANT + +reveal_type(GLOBAL_CONSTANT) # revealed: int | str + +if TYPE_CHECKING: + class ItsQuiteCloudyInManchester: + X = int | str + + def f(obj: X): + reveal_type(obj) # revealed: int | str + + # TODO: we currently only understand code as being inside a `TYPE_CHECKING` block + # if a whole *scope* is inside the `if TYPE_CHECKING` block + # (like the `ItsQuiteCloudyInManchester` class above); this is a false-positive + Y = int | str # error: [unsupported-operator] + + def g(obj: Y): + # TODO: should be `int | str` + reveal_type(obj) # revealed: Unknown + +Y = list["int | str"] + +def g(obj: Y): + reveal_type(obj) # revealed: list[int | str] +``` + +## Generic types + +Implicit type aliases can also refer to generic types: + +```py +from typing_extensions import TypeVar + +T = TypeVar("T") + +MyList = list[T] + +def _(my_list: MyList[int]): + # TODO: This should be `list[int]` + reveal_type(my_list) # revealed: @Todo(unknown type subscript) + +ListOrTuple = list[T] | tuple[T, ...] + +reveal_type(ListOrTuple) # revealed: types.UnionType + +def _(list_or_tuple: ListOrTuple[int]): + reveal_type(list_or_tuple) # revealed: @Todo(Generic specialization of types.UnionType) +``` + +## `Literal`s + +We also support `typing.Literal` in implicit type aliases. 
+ +```py +from typing import Literal +from enum import Enum + +IntLiteral1 = Literal[26] +IntLiteral2 = Literal[0x1A] +IntLiterals = Literal[-1, 0, 1] +NestedLiteral = Literal[Literal[1]] +StringLiteral = Literal["a"] +BytesLiteral = Literal[b"b"] +BoolLiteral = Literal[True] +MixedLiterals = Literal[1, "a", True, None] + +class Color(Enum): + RED = 0 + GREEN = 1 + BLUE = 2 + +EnumLiteral = Literal[Color.RED] + +def _( + int_literal1: IntLiteral1, + int_literal2: IntLiteral2, + int_literals: IntLiterals, + nested_literal: NestedLiteral, + string_literal: StringLiteral, + bytes_literal: BytesLiteral, + bool_literal: BoolLiteral, + mixed_literals: MixedLiterals, + enum_literal: EnumLiteral, +): + reveal_type(int_literal1) # revealed: Literal[26] + reveal_type(int_literal2) # revealed: Literal[26] + reveal_type(int_literals) # revealed: Literal[-1, 0, 1] + reveal_type(nested_literal) # revealed: Literal[1] + reveal_type(string_literal) # revealed: Literal["a"] + reveal_type(bytes_literal) # revealed: Literal[b"b"] + reveal_type(bool_literal) # revealed: Literal[True] + reveal_type(mixed_literals) # revealed: Literal[1, "a", True] | None + reveal_type(enum_literal) # revealed: Literal[Color.RED] +``` + +We reject invalid uses: + +```py +# error: [invalid-type-form] "Type arguments for `Literal` must be `None`, a literal value (int, bool, str, or bytes), or an enum member" +LiteralInt = Literal[int] + +reveal_type(LiteralInt) # revealed: Unknown + +def _(weird: LiteralInt): + reveal_type(weird) # revealed: Unknown + +# error: [invalid-type-form] "`Literal[26]` is not a generic class" +def _(weird: IntLiteral1[int]): + reveal_type(weird) # revealed: Unknown +``` + +## `Annotated` + +Basic usage: + +```py +from typing import Annotated + +MyAnnotatedInt = Annotated[int, "some metadata", 1, 2, 3] + +def _(annotated_int: MyAnnotatedInt): + reveal_type(annotated_int) # revealed: int +``` + +Usage with generics: + +```py +from typing import TypeVar + +T = TypeVar("T") + 
+Deprecated = Annotated[T, "deprecated attribute"] + +class C: + old: Deprecated[int] + +# TODO: Should be `int` +reveal_type(C().old) # revealed: @Todo(Generic specialization of typing.Annotated) +``` + +If the metadata argument is missing, we emit an error (because this code fails at runtime), but +still use the first element as the type, when used in annotations: + +```py +# error: [invalid-type-form] "Special form `typing.Annotated` expected at least 2 arguments (one type and at least one metadata element)" +WronglyAnnotatedInt = Annotated[int] + +def _(wrongly_annotated_int: WronglyAnnotatedInt): + reveal_type(wrongly_annotated_int) # revealed: int +``` + +## `Optional` + +Starting with Python 3.14, `Optional[int]` creates an instance of `typing.Union`, which is an alias +for `types.UnionType`. We only support this new behavior and do not attempt to model the details of +the pre-3.14 behavior: + +```py +from typing import Optional + +MyOptionalInt = Optional[int] + +reveal_type(MyOptionalInt) # revealed: types.UnionType + +def _(optional_int: MyOptionalInt): + reveal_type(optional_int) # revealed: int | None +``` + +A special case is `Optional[None]`, which is equivalent to `None`: + +```py +JustNone = Optional[None] + +reveal_type(JustNone) # revealed: None + +def _(just_none: JustNone): + reveal_type(just_none) # revealed: None +``` + +Invalid uses: + +```py +# error: [invalid-type-form] "`typing.Optional` requires exactly one argument" +Optional[int, str] +``` + +## `LiteralString`, `NoReturn`, `Never` + +```py +from typing_extensions import LiteralString, NoReturn, Never + +MyLiteralString = LiteralString +MyNoReturn = NoReturn +MyNever = Never + +reveal_type(MyLiteralString) # revealed: typing.LiteralString +reveal_type(MyNoReturn) # revealed: typing.NoReturn +reveal_type(MyNever) # revealed: typing.Never + +def _( + ls: MyLiteralString, + nr: MyNoReturn, + nv: MyNever, +): + reveal_type(ls) # revealed: LiteralString + reveal_type(nr) # revealed: Never + 
reveal_type(nv) # revealed: Never +``` + +## `Tuple` + +We support implicit type aliases using `typing.Tuple`: + +```py +from typing import Tuple + +IntAndStr = Tuple[int, str] +SingleInt = Tuple[int] +Ints = Tuple[int, ...] +EmptyTuple = Tuple[()] + +def _(int_and_str: IntAndStr, single_int: SingleInt, ints: Ints, empty_tuple: EmptyTuple): + reveal_type(int_and_str) # revealed: tuple[int, str] + reveal_type(single_int) # revealed: tuple[int] + reveal_type(ints) # revealed: tuple[int, ...] + reveal_type(empty_tuple) # revealed: tuple[()] +``` + +Invalid uses cause diagnostics: + +```py +from typing import Tuple + +# error: [invalid-type-form] "Int literals are not allowed in this context in a type expression" +Invalid = Tuple[int, 1] + +def _(invalid: Invalid): + reveal_type(invalid) # revealed: tuple[int, Unknown] +``` + +## `Union` + +We support implicit type aliases using `typing.Union`: + +```py +from typing import Union + +IntOrStr = Union[int, str] +IntOrStrOrBytes = Union[int, Union[str, bytes]] + +reveal_type(IntOrStr) # revealed: types.UnionType +reveal_type(IntOrStrOrBytes) # revealed: types.UnionType + +def _( + int_or_str: IntOrStr, + int_or_str_or_bytes: IntOrStrOrBytes, +): + reveal_type(int_or_str) # revealed: int | str + reveal_type(int_or_str_or_bytes) # revealed: int | str | bytes +``` + +If a single type is given, no `types.UnionType` instance is created: + +```py +JustInt = Union[int] + +reveal_type(JustInt) # revealed: + +def _(just_int: JustInt): + reveal_type(just_int) # revealed: int +``` + +An empty `typing.Union` leads to a `TypeError` at runtime, so we emit an error. 
We still infer +`Never` when used as a type expression, which seems reasonable for an empty union: + +```py +# error: [invalid-type-form] "`typing.Union` requires at least one type argument" +EmptyUnion = Union[()] + +reveal_type(EmptyUnion) # revealed: types.UnionType + +def _(empty: EmptyUnion): + reveal_type(empty) # revealed: Never +``` + +Other invalid uses are also caught: + +```py +# error: [invalid-type-form] "Int literals are not allowed in this context in a type expression" +Invalid = Union[str, 1] + +def _( + invalid: Invalid, +): + reveal_type(invalid) # revealed: str | Unknown +``` + +## Stringified annotations? + +From the [typing spec on type aliases](https://typing.python.org/en/latest/spec/aliases.html): + +> Type aliases may be as complex as type hints in annotations – anything that is acceptable as a +> type hint is acceptable in a type alias + +However, no other type checker seems to support stringified annotations in implicit type aliases. We +currently also do not support them, and we detect places where these attempted unions cause runtime +errors: + +```py +AliasForStr = "str" + +# error: [invalid-type-form] "Variable of type `Literal["str"]` is not allowed in a type expression" +def _(s: AliasForStr): + reveal_type(s) # revealed: Unknown + +IntOrStr = int | "str" # error: [unsupported-operator] + +reveal_type(IntOrStr) # revealed: Unknown + +def _(int_or_str: IntOrStr): + reveal_type(int_or_str) # revealed: Unknown +``` + +We *do* support stringified annotations if they appear in a position where a type expression is +syntactically expected: + +```py +from typing import Union + +ListOfInts = list["int"] +StrOrStyle = Union[str, "Style"] + +class Style: ... 
+ +def _( + list_of_ints: ListOfInts, + str_or_style: StrOrStyle, +): + reveal_type(list_of_ints) # revealed: list[int] + reveal_type(str_or_style) # revealed: str | Style +``` + ## Recursive ### Old union syntax diff --git a/crates/ty_python_semantic/resources/mdtest/import/invalid_syntax.md b/crates/ty_python_semantic/resources/mdtest/import/invalid_syntax.md index 6b7423f86d..3b8ef67e72 100644 --- a/crates/ty_python_semantic/resources/mdtest/import/invalid_syntax.md +++ b/crates/ty_python_semantic/resources/mdtest/import/invalid_syntax.md @@ -14,7 +14,7 @@ TODO: This is correctly flagged as an error, but we could clean up the diagnosti ```py # TODO: No second diagnostic -# error: [invalid-syntax] "Expected ',', found '.'" +# error: [invalid-syntax] "Expected `,`, found `.`" # error: [unresolved-import] "Module `a` has no member `c`" from a import b.c diff --git a/crates/ty_python_semantic/resources/mdtest/import/module_getattr.md b/crates/ty_python_semantic/resources/mdtest/import/module_getattr.md index 4c493f4b74..79cce812fe 100644 --- a/crates/ty_python_semantic/resources/mdtest/import/module_getattr.md +++ b/crates/ty_python_semantic/resources/mdtest/import/module_getattr.md @@ -60,11 +60,6 @@ def __getattr__(name: str) -> str: If a package's `__init__.py` (e.g. `mod/__init__.py`) defines a `__getattr__` function, and there is also a submodule file present (e.g. `mod/sub.py`), then: -- If you do `import mod` (without importing the submodule directly), accessing `mod.sub` will call - `mod.__getattr__('sub')`, so `reveal_type(mod.sub)` will show the return type of `__getattr__`. -- If you do `import mod.sub` (importing the submodule directly), then `mod.sub` refers to the actual - submodule, so `reveal_type(mod.sub)` will show the type of the submodule itself. 
- `mod/__init__.py`: ```py @@ -78,6 +73,9 @@ def __getattr__(name: str) -> str: value = 42 ``` +If you `import mod` (without importing the submodule directly), accessing `mod.sub` will call +`mod.__getattr__('sub')`, so `reveal_type(mod.sub)` will show the return type of `__getattr__`. + `test_import_mod.py`: ```py @@ -86,6 +84,9 @@ import mod reveal_type(mod.sub) # revealed: str ``` +If you `import mod.sub` (importing the submodule directly), then `mod.sub` refers to the actual +submodule, so `reveal_type(mod.sub)` will show the type of the submodule itself. + `test_import_mod_sub.py`: ```py @@ -93,3 +94,43 @@ import mod.sub reveal_type(mod.sub) # revealed: ``` + +If you `from mod import sub`, at runtime `sub` will be the value returned by the module +`__getattr__`, but other type checkers do not model the precedence this way. They will always prefer +a submodule over a package `__getattr__`, and thus this is the current expectation in the ecosystem. +Effectively, this assumes that a well-implemented package `__getattr__` will always raise +`AttributeError` for a name that also exists as a submodule (and in fact this is the case for many +module `__getattr__` in the ecosystem.) + +`test_from_import.py`: + +```py +from mod import sub + +reveal_type(sub) # revealed: +``` + +## Limiting names handled by `__getattr__` + +If a module `__getattr__` is annotated to only accept certain string literals, then the module +`__getattr__` will be ignored for other names. (In principle this could be a more explicit way to +handle the precedence issues discussed above, but it's not currently used in the ecosystem.) 
+ +```py +from limited_getattr_module import known_attr + +# error: [unresolved-import] +from limited_getattr_module import unknown_attr + +reveal_type(known_attr) # revealed: int +reveal_type(unknown_attr) # revealed: Unknown +``` + +`limited_getattr_module.py`: + +```py +from typing import Literal + +def __getattr__(name: Literal["known_attr"]) -> int: + return 3 +``` diff --git a/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md b/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md new file mode 100644 index 0000000000..ce5c995014 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md @@ -0,0 +1,1170 @@ +# Nonstandard Import Conventions + +This document covers ty-specific extensions to the +[standard import conventions](https://typing.python.org/en/latest/spec/distributing.html#import-conventions), +and other intentional deviations from actual python semantics. + +This file currently covers the following details: + +- **froms are locals**: a `from..import` can only define locals, it does not have global + side-effects. Specifically any submodule attribute `a` that's implicitly introduced by either + `from .a import b` or `from . import a as b` (in an `__init__.py(i)`) is a local and not a + global. However we only introduce this symbol if the `from..import` is in global-scope. This + means imports at the start of a file work as you'd expect, while imports in a function don't + introduce submodule attributes. + +- **first from first serve**: only the *first* `from..import` in an `__init__.py(i)` that imports a + particular direct submodule of the current package introduces that submodule as a local. + Subsequent imports of the submodule will not introduce that local. This reflects the fact that + in actual python only the first import of a submodule (in the entire execution of the program) + introduces it as an attribute of the package. 
By "first" we mean "the first time in global + scope". + +- **dot re-exports**: `from . import a` in an `__init__.pyi` is considered a re-export of `a` + (equivalent to `from . import a as a`). This is required to properly handle many stubs in the + wild. Equivalent imports like `from whatever.thispackage import a` also introduce a re-export + (this has essentially zero ecosystem impact, we just felt it was more consistent). The only way + to opt out of this is to rename the import to something else (`from . import a as b`). + `from .a import b` and equivalent does *not* introduce a re-export. + +Note: almost all tests in here have a stub and non-stub version, because we're interested in both +defining symbols *at all* and re-exporting them. + +## Relative `from` Import of Direct Submodule in `__init__` + +We consider the `from . import submodule` idiom in an `__init__.pyi` an explicit re-export. + +### In Stub + +`mypackage/__init__.pyi`: + +```pyi +from . import imported +``` + +`mypackage/imported.pyi`: + +```pyi +X: int = 42 +``` + +`mypackage/fails.pyi`: + +```pyi +Y: int = 47 +``` + +`main.py`: + +```py +import mypackage + +reveal_type(mypackage.imported.X) # revealed: int +# error: "has no member `fails`" +reveal_type(mypackage.fails.Y) # revealed: Unknown +``` + +### In Non-Stub + +`mypackage/__init__.py`: + +```py +from . import imported +``` + +`mypackage/imported.py`: + +```py +X: int = 42 +``` + +`mypackage/fails.py`: + +```py +Y: int = 47 +``` + +`main.py`: + +```py +import mypackage + +reveal_type(mypackage.imported.X) # revealed: int +# error: "has no member `fails`" +reveal_type(mypackage.fails.Y) # revealed: Unknown +``` + +## Absolute `from` Import of Direct Submodule in `__init__` + +If an absolute `from...import` happens to import a submodule (i.e. it's equivalent to +`from . import y`) we also treat it as a re-export. 
+ +### In Stub + +`mypackage/__init__.pyi`: + +```pyi +from mypackage import imported +``` + +`mypackage/imported.pyi`: + +```pyi +X: int = 42 +``` + +`mypackage/fails.pyi`: + +```pyi +Y: int = 47 +``` + +`main.py`: + +```py +import mypackage + +reveal_type(mypackage.imported.X) # revealed: int +# error: "has no member `fails`" +reveal_type(mypackage.fails.Y) # revealed: Unknown +``` + +### In Non-Stub + +`mypackage/__init__.py`: + +```py +from mypackage import imported +``` + +`mypackage/imported.py`: + +```py +X: int = 42 +``` + +`mypackage/fails.py`: + +```py +Y: int = 47 +``` + +`main.py`: + +```py +import mypackage + +reveal_type(mypackage.imported.X) # revealed: int +# error: "has no member `fails`" +reveal_type(mypackage.fails.Y) # revealed: Unknown +``` + +## Import of Direct Submodule in `__init__` + +An `import` that happens to import a submodule does not expose the submodule as an attribute. (This +is an arbitrary decision and can be changed!) + +### In Stub + +`mypackage/__init__.pyi`: + +```pyi +import mypackage.imported +``` + +`mypackage/imported.pyi`: + +```pyi +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +# TODO: this could work and would be nice to have? +# error: "has no member `imported`" +reveal_type(mypackage.imported.X) # revealed: Unknown +``` + +### In Non-Stub + +`mypackage/__init__.py`: + +```py +import mypackage.imported +``` + +`mypackage/imported.py`: + +```py +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +# TODO: this could work and would be nice to have +# error: "has no member `imported`" +reveal_type(mypackage.imported.X) # revealed: Unknown +``` + +## Relative `from` Import of Nested Submodule in `__init__` + +`from .submodule import nested` in an `__init__.pyi` does not re-export `mypackage.submodule`, +`mypackage.submodule.nested`, or `nested`. 
+ +### In Stub + +`mypackage/__init__.pyi`: + +```pyi +from .submodule import nested +``` + +`mypackage/submodule/__init__.pyi`: + +```pyi +``` + +`mypackage/submodule/nested.pyi`: + +```pyi +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +# error: "has no member `submodule`" +reveal_type(mypackage.submodule) # revealed: Unknown +# error: "has no member `submodule`" +reveal_type(mypackage.submodule.nested) # revealed: Unknown +# error: "has no member `submodule`" +reveal_type(mypackage.submodule.nested.X) # revealed: Unknown +# error: "has no member `nested`" +reveal_type(mypackage.nested) # revealed: Unknown +# error: "has no member `nested`" +reveal_type(mypackage.nested.X) # revealed: Unknown +``` + +### In Non-Stub + +`from .submodule import nested` in an `__init__.py` exposes `mypackage.submodule` and `nested`. + +`mypackage/__init__.py`: + +```py +from .submodule import nested +``` + +`mypackage/submodule/__init__.py`: + +```py +``` + +`mypackage/submodule/nested.py`: + +```py +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +reveal_type(mypackage.submodule) # revealed: +# TODO: this would be nice to support +# error: "has no member `nested`" +reveal_type(mypackage.submodule.nested) # revealed: Unknown +# error: "has no member `nested`" +reveal_type(mypackage.submodule.nested.X) # revealed: Unknown +reveal_type(mypackage.nested) # revealed: +reveal_type(mypackage.nested.X) # revealed: int +``` + +## Absolute `from` Import of Nested Submodule in `__init__` + +`from mypackage.submodule import nested` in an `__init__.pyi` does not re-export +`mypackage.submodule`, `mypackage.submodule.nested`, or `nested`. 
+ +### In Stub + +`mypackage/__init__.pyi`: + +```pyi +from mypackage.submodule import nested +``` + +`mypackage/submodule/__init__.pyi`: + +```pyi +``` + +`mypackage/submodule/nested.pyi`: + +```pyi +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +# TODO: this could work and would be nice to have +# error: "has no member `submodule`" +reveal_type(mypackage.submodule) # revealed: Unknown +# error: "has no member `submodule`" +reveal_type(mypackage.submodule.nested) # revealed: Unknown +# error: "has no member `submodule`" +reveal_type(mypackage.submodule.nested.X) # revealed: Unknown +# error: "has no member `nested`" +reveal_type(mypackage.nested) # revealed: Unknown +# error: "has no member `nested`" +reveal_type(mypackage.nested.X) # revealed: Unknown +``` + +### In Non-Stub + +`from mypackage.submodule import nested` in an `__init__.py` creates both `submodule` and `nested`. + +`mypackage/__init__.py`: + +```py +from mypackage.submodule import nested +``` + +`mypackage/submodule/__init__.py`: + +```py +``` + +`mypackage/submodule/nested.py`: + +```py +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +reveal_type(mypackage.submodule) # revealed: +# TODO: this would be nice to support +# error: "has no member `nested`" +reveal_type(mypackage.submodule.nested) # revealed: Unknown +# error: "has no member `nested`" +reveal_type(mypackage.submodule.nested.X) # revealed: Unknown +reveal_type(mypackage.nested) # revealed: +reveal_type(mypackage.nested.X) # revealed: int +``` + +## Import of Nested Submodule in `__init__` + +`import mypackage.submodule.nested` in an `__init__.pyi` does not re-export `mypackage.submodule` or +`mypackage.submodule.nested`. 
+ +### In Stub + +`mypackage/__init__.pyi`: + +```pyi +import mypackage.submodule.nested +``` + +`mypackage/submodule/__init__.pyi`: + +```pyi +``` + +`mypackage/submodule/nested.pyi`: + +```pyi +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +# error: "has no member `submodule`" +reveal_type(mypackage.submodule) # revealed: Unknown +# error: "has no member `submodule`" +reveal_type(mypackage.submodule.nested) # revealed: Unknown +# error: "has no member `submodule`" +reveal_type(mypackage.submodule.nested.X) # revealed: Unknown +``` + +### In Non-Stub + +`import mypackage.submodule.nested` in an `__init__.py` does not define `mypackage.submodule` or +`mypackage.submodule.nested` outside the package. + +`mypackage/__init__.py`: + +```py +import mypackage.submodule.nested +``` + +`mypackage/submodule/__init__.py`: + +```py +``` + +`mypackage/submodule/nested.py`: + +```py +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +# TODO: this would be nice to support +# error: "has no member `submodule`" +reveal_type(mypackage.submodule) # revealed: Unknown +# error: "has no member `submodule`" +reveal_type(mypackage.submodule.nested) # revealed: Unknown +# error: "has no member `submodule`" +reveal_type(mypackage.submodule.nested.X) # revealed: Unknown +``` + +## Relative `from` Import of Direct Submodule in `__init__`, Mismatched Alias + +Renaming the submodule to something else disables the `__init__.pyi` idiom. + +### In Stub + +`mypackage/__init__.pyi`: + +```pyi +from . import imported as imported_m +``` + +`mypackage/imported.pyi`: + +```pyi +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +# error: "has no member `imported`" +reveal_type(mypackage.imported.X) # revealed: Unknown +# error: "has no member `imported_m`" +reveal_type(mypackage.imported_m.X) # revealed: Unknown +``` + +### In Non-Stub + +`mypackage/__init__.py`: + +```py +from . 
import imported as imported_m +``` + +`mypackage/imported.py`: + +```py +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +# TODO: this would be nice to support, as it works at runtime +# error: "has no member `imported`" +reveal_type(mypackage.imported.X) # revealed: Unknown +reveal_type(mypackage.imported_m.X) # revealed: int +``` + +## Relative `from` Import of Direct Submodule in `__init__`, Matched Alias + +The `__init__.pyi` idiom should definitely always work if the submodule is renamed to itself, as +this is the re-export idiom. + +### In Stub + +`mypackage/__init__.pyi`: + +```pyi +from . import imported as imported +``` + +`mypackage/imported.pyi`: + +```pyi +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +reveal_type(mypackage.imported.X) # revealed: int +``` + +### In Non-Stub + +`mypackage/__init__.py`: + +```py +from . import imported as imported +``` + +`mypackage/imported.py`: + +```py +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +reveal_type(mypackage.imported.X) # revealed: int +``` + +## Star Import Unaffected + +Even if the `__init__` idiom is in effect, star imports do not pick it up. (This is an arbitrary +decision that mostly fell out of the implementation details and can be changed!) + +### In Stub + +`mypackage/__init__.pyi`: + +```pyi +from . import imported +Z: int = 17 +``` + +`mypackage/imported.pyi`: + +```pyi +X: int = 42 +``` + +`main.py`: + +```py +from mypackage import * + +# TODO: this would be nice to support +# error: "`imported` used when not defined" +reveal_type(imported.X) # revealed: Unknown +reveal_type(Z) # revealed: int +``` + +### In Non-Stub + +`mypackage/__init__.py`: + +```py +from . 
import imported + +Z: int = 17 +``` + +`mypackage/imported.py`: + +```py +X: int = 42 +``` + +`main.py`: + +```py +from mypackage import * + +reveal_type(imported.X) # revealed: int +reveal_type(Z) # revealed: int +``` + +## `from` Import of Non-Submodule + +A `from` import that imports a non-submodule isn't currently a special case here (various +proposed/tested approaches did treat this specially). + +### In Stub + +`mypackage/__init__.pyi`: + +```pyi +from .imported import X +``` + +`mypackage/imported.pyi`: + +```pyi +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +# error: "no member `imported`" +reveal_type(mypackage.imported.X) # revealed: Unknown +``` + +### In Non-Stub + +`mypackage/__init__.py`: + +```py +from .imported import X +``` + +`mypackage/imported.py`: + +```py +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +reveal_type(mypackage.imported.X) # revealed: int +``` + +## `from` Import of Other Package's Submodule + +`from mypackage import submodule` from outside the package is not modeled as a side-effect on +`mypackage`, even in the importing file (this could be changed!). 
+ +### In Stub + +`mypackage/__init__.pyi`: + +```pyi +``` + +`mypackage/imported.pyi`: + +```pyi +X: int = 42 +``` + +`main.py`: + +```py +import mypackage +from mypackage import imported + +# TODO: this would be nice to support, but it's dangerous with available_submodule_attributes +# for details, see: https://github.com/astral-sh/ty/issues/1488 +reveal_type(imported.X) # revealed: int +# error: "has no member `imported`" +reveal_type(mypackage.imported.X) # revealed: Unknown +``` + +### In Non-Stub + +`mypackage/__init__.py`: + +```py +``` + +`mypackage/imported.py`: + +```py +X: int = 42 +``` + +`main.py`: + +```py +import mypackage +from mypackage import imported + +# TODO: this would be nice to support, as it works at runtime +reveal_type(imported.X) # revealed: int +# error: "has no member `imported`" +reveal_type(mypackage.imported.X) # revealed: Unknown +``` + +## `from` Import of Sibling Module + +`from . import submodule` from a sibling module is not modeled as a side-effect on `mypackage` or a +re-export from `submodule`. + +### In Stub + +`mypackage/__init__.pyi`: + +```pyi +``` + +`mypackage/imported.pyi`: + +```pyi +from . import fails +X: int = 42 +``` + +`mypackage/fails.pyi`: + +```pyi +Y: int = 47 +``` + +`main.py`: + +```py +import mypackage +from mypackage import imported + +reveal_type(imported.X) # revealed: int +# error: "has no member `fails`" +reveal_type(imported.fails.Y) # revealed: Unknown +# error: "has no member `fails`" +reveal_type(mypackage.fails.Y) # revealed: Unknown +``` + +### In Non-Stub + +`mypackage/__init__.py`: + +```py +``` + +`mypackage/imported.py`: + +```py +from . 
import fails + +X: int = 42 +``` + +`mypackage/fails.py`: + +```py +Y: int = 47 +``` + +`main.py`: + +```py +import mypackage +from mypackage import imported + +reveal_type(imported.X) # revealed: int +reveal_type(imported.fails.Y) # revealed: int +# error: "has no member `fails`" +reveal_type(mypackage.fails.Y) # revealed: Unknown +``` + +## Fractal Re-export Nameclash Problems + +This precise configuration of: + +- a subpackage that defines a submodule with its own name +- that in turn defines a function/class with its own name +- and re-exporting that name through every layer using `from` imports and `__all__` + +Can easily result in the typechecker getting "confused" and thinking imports of the name from the +top-level package are referring to the subpackage and not the function/class. This issue can be +found with the `lobpcg` function in `scipy.sparse.linalg`. + +We avoid this by ensuring that the imported name (the right-hand `funcmod` in +`from .funcmod import funcmod`) overwrites the submodule attribute (the left-hand `funcmod`), as it +does at runtime. + +### In Stub + +`mypackage/__init__.pyi`: + +```pyi +from .funcmod import funcmod + +__all__ = ["funcmod"] +``` + +`mypackage/funcmod/__init__.pyi`: + +```pyi +from .funcmod import funcmod + +__all__ = ["funcmod"] +``` + +`mypackage/funcmod/funcmod.pyi`: + +```pyi +__all__ = ["funcmod"] + +def funcmod(x: int) -> int: ... 
+``` + +`main.py`: + +```py +from mypackage import funcmod + +x = funcmod(1) +``` + +### In Non-Stub + +`mypackage/__init__.py`: + +```py +from .funcmod import funcmod + +__all__ = ["funcmod"] +``` + +`mypackage/funcmod/__init__.py`: + +```py +from .funcmod import funcmod + +__all__ = ["funcmod"] +``` + +`mypackage/funcmod/funcmod.py`: + +```py +__all__ = ["funcmod"] + +def funcmod(x: int) -> int: + return x +``` + +`main.py`: + +```py +from mypackage import funcmod + +x = funcmod(1) +``` + +## Re-export Nameclash Problems In Functions + +`from` imports in an `__init__.py` at file scope should be visible to functions defined in the file: + +`mypackage/__init__.py`: + +```py +from .funcmod import funcmod + +funcmod(1) + +def run(): + funcmod(2) +``` + +`mypackage/funcmod.py`: + +```py +def funcmod(x: int) -> int: + return x +``` + +## Re-export Nameclash Problems In Try-Blocks + +`from` imports in an `__init__.py` at file scope in a `try` block should be visible to functions +defined in the `try` block (regression test for a bug): + +`mypackage/__init__.py`: + +```py +try: + from .funcmod import funcmod + + funcmod(1) + + def run(): + # TODO: this is a bug in how we analyze try-blocks + # error: [call-non-callable] + funcmod(2) + +finally: + x = 1 +``` + +`mypackage/funcmod.py`: + +```py +def funcmod(x: int) -> int: + return x +``` + +## RHS `from` Imports In Functions + +If a `from` import occurs in a function, the RHS symbols should only be visible in that function. 
+ +`mypackage/__init__.py`: + +```py +def run1(): + from .funcmod import funcmod + + funcmod(1) + +def run2(): + from .funcmod import funcmod + + funcmod(2) + +def run3(): + # error: [unresolved-reference] + funcmod(3) + +# error: [unresolved-reference] +funcmod(4) +``` + +`mypackage/funcmod.py`: + +```py +def funcmod(x: int) -> int: + return x +``` + +## LHS `from` Imports In Functions + +If a `from` import occurs in a function, we simply ignore its LHS effects to avoid modeling +execution-order-specific behaviour (and to discourage people writing code that has it). + +`mypackage/__init__.py`: + +```py +def run1(): + from .funcmod import other + + # TODO: this would be nice to support + # error: [unresolved-reference] + funcmod.funcmod(1) + +def run2(): + from .funcmod import other + + # TODO: this would be nice to support + # error: [unresolved-reference] + funcmod.funcmod(2) + +def run3(): + # error: [unresolved-reference] + funcmod.funcmod(3) + +# error: [unresolved-reference] +funcmod.funcmod(4) +``` + +`mypackage/funcmod.py`: + +```py +other: int = 1 + +def funcmod(x: int) -> int: + return x +``` + +## LHS `from` Imports Overwrite Locals + +The LHS of a `from..import` introduces a local symbol that overwrites any local with the same name. +This reflects actual runtime behaviour, although we're kinda assuming it hasn't been imported +already. + +`mypackage/__init__.py`: + +```py +funcmod = 0 +from .funcmod import funcmod + +funcmod(1) +``` + +`mypackage/funcmod.py`: + +```py +def funcmod(x: int) -> int: + return x +``` + +## LHS `from` Imports Overwritten By Local Function + +The LHS of a `from..import` introduces a local symbol that can be overwritten by defining a function +(or class) with the same name. + +### In Stub + +`mypackage/__init__.pyi`: + +```pyi +from .funcmod import other + +def funcmod(x: int) -> int: ... +``` + +`mypackage/funcmod/__init__.pyi`: + +```pyi +def other(int) -> int: ... 
+``` + +`main.py`: + +```py +from mypackage import funcmod + +x = funcmod(1) +``` + +### In Non-Stub + +`mypackage/__init__.py`: + +```py +from .funcmod import other + +def funcmod(x: int) -> int: + return x +``` + +`mypackage/funcmod/__init__.py`: + +```py +def other(x: int) -> int: + return x +``` + +`main.py`: + +```py +from mypackage import funcmod + +x = funcmod(1) +``` + +## LHS `from` Imports Overwritten By Local Assignment + +The LHS of a `from..import` introduces a local symbol that can be overwritten by assigning to it. + +### In Stub + +`mypackage/__init__.pyi`: + +```pyi +from .funcmod import other + +funcmod = other +``` + +`mypackage/funcmod/__init__.pyi`: + +```pyi +def other(x: int) -> int: ... +``` + +`main.py`: + +```py +from mypackage import funcmod + +x = funcmod(1) +``` + +### In Non-Stub + +`mypackage/__init__.py`: + +```py +from .funcmod import other + +funcmod = other +``` + +`mypackage/funcmod/__init__.py`: + +```py +def other(x: int) -> int: + return x +``` + +`main.py`: + +```py +from mypackage import funcmod + +x = funcmod(1) +``` + +## LHS `from` Imports Only Apply The First Time + +The LHS of a `from..import` of a submodule introduces a local symbol only the first time it +introduces a direct submodule. The second time does nothing. + +### In Stub + +`mypackage/__init__.pyi`: + +```pyi +from .funcmod import funcmod as funcmod +from .funcmod import other +``` + +`mypackage/funcmod/__init__.pyi`: + +```pyi +def other(x: int) -> int: ... +def funcmod(x: int) -> int: ... 
+``` + +`main.py`: + +```py +from mypackage import funcmod + +x = funcmod(1) +``` + +### In Non-Stub + +`mypackage/__init__.py`: + +```py +from .funcmod import funcmod +from .funcmod import other +``` + +`mypackage/funcmod/__init__.py`: + +```py +def other(x: int) -> int: + return x + +def funcmod(x: int) -> int: + return x +``` + +`main.py`: + +```py +from mypackage import funcmod + +x = funcmod(1) +``` diff --git a/crates/ty_python_semantic/resources/mdtest/intersection_types.md b/crates/ty_python_semantic/resources/mdtest/intersection_types.md index 0f5b37eb88..022e09c43b 100644 --- a/crates/ty_python_semantic/resources/mdtest/intersection_types.md +++ b/crates/ty_python_semantic/resources/mdtest/intersection_types.md @@ -444,7 +444,7 @@ def _( reveal_type(i07) # revealed: Never reveal_type(i08) # revealed: Never -# `bool` is final and can not be subclassed, so `type[bool]` is equivalent to `Literal[bool]`, which +# `bool` is final and cannot be subclassed, so `type[bool]` is equivalent to `Literal[bool]`, which # is disjoint from `type[str]`: def example_type_bool_type_str( i: Intersection[type[bool], type[str]], diff --git a/crates/ty_python_semantic/resources/mdtest/invalid_syntax.md b/crates/ty_python_semantic/resources/mdtest/invalid_syntax.md index cc90879401..9594492982 100644 --- a/crates/ty_python_semantic/resources/mdtest/invalid_syntax.md +++ b/crates/ty_python_semantic/resources/mdtest/invalid_syntax.md @@ -104,3 +104,27 @@ from typing import Callable def _(c: Callable[]): reveal_type(c) # revealed: (...) 
-> Unknown ``` + +### `typing.Tuple` + +```py +from typing import Tuple + +# error: [invalid-syntax] "Expected index or slice expression" +InvalidEmptyTuple = Tuple[] + +def _(t: InvalidEmptyTuple): + reveal_type(t) # revealed: tuple[Unknown] +``` + +### `typing.Union` + +```py +from typing import Union + +# error: [invalid-syntax] "Expected index or slice expression" +InvalidEmptyUnion = Union[] + +def _(u: InvalidEmptyUnion): + reveal_type(u) # revealed: Unknown +``` diff --git a/crates/ty_python_semantic/resources/mdtest/literal/collections/dictionary.md b/crates/ty_python_semantic/resources/mdtest/literal/collections/dictionary.md index 7e1acf4efb..ad5829da1f 100644 --- a/crates/ty_python_semantic/resources/mdtest/literal/collections/dictionary.md +++ b/crates/ty_python_semantic/resources/mdtest/literal/collections/dictionary.md @@ -51,6 +51,6 @@ reveal_type({"a": 1, "b": (1, 2), "c": (1, 2, 3)}) ## Dict comprehensions ```py -# revealed: dict[@Todo(dict comprehension key type), @Todo(dict comprehension value type)] +# revealed: dict[int | Unknown, int | Unknown] reveal_type({x: y for x, y in enumerate(range(42))}) ``` diff --git a/crates/ty_python_semantic/resources/mdtest/literal/collections/list.md b/crates/ty_python_semantic/resources/mdtest/literal/collections/list.md index 15f385fa88..325caba10d 100644 --- a/crates/ty_python_semantic/resources/mdtest/literal/collections/list.md +++ b/crates/ty_python_semantic/resources/mdtest/literal/collections/list.md @@ -41,5 +41,5 @@ reveal_type([1, (1, 2), (1, 2, 3)]) ## List comprehensions ```py -reveal_type([x for x in range(42)]) # revealed: list[@Todo(list comprehension element type)] +reveal_type([x for x in range(42)]) # revealed: list[int | Unknown] ``` diff --git a/crates/ty_python_semantic/resources/mdtest/literal/collections/set.md b/crates/ty_python_semantic/resources/mdtest/literal/collections/set.md index 6c6855e40e..d80112ee84 100644 --- 
a/crates/ty_python_semantic/resources/mdtest/literal/collections/set.md +++ b/crates/ty_python_semantic/resources/mdtest/literal/collections/set.md @@ -35,5 +35,5 @@ reveal_type({1, (1, 2), (1, 2, 3)}) ## Set comprehensions ```py -reveal_type({x for x in range(42)}) # revealed: set[@Todo(set comprehension element type)] +reveal_type({x for x in range(42)}) # revealed: set[int | Unknown] ``` diff --git a/crates/ty_python_semantic/resources/mdtest/literal_promotion.md b/crates/ty_python_semantic/resources/mdtest/literal_promotion.md new file mode 100644 index 0000000000..f13d3229ee --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/literal_promotion.md @@ -0,0 +1,68 @@ +# Literal promotion + +```toml +[environment] +python-version = "3.12" +``` + +There are certain places where we promote literals to their common supertype: + +```py +reveal_type([1, 2, 3]) # revealed: list[Unknown | int] +reveal_type({"a", "b", "c"}) # revealed: set[Unknown | str] +``` + +This promotion should not take place if the literal type appears in contravariant position: + +```py +from typing import Callable, Literal + +def in_negated_position(non_zero_number: int): + if non_zero_number == 0: + raise ValueError() + + reveal_type(non_zero_number) # revealed: int & ~Literal[0] + + reveal_type([non_zero_number]) # revealed: list[Unknown | (int & ~Literal[0])] + +def in_parameter_position(callback: Callable[[Literal[1]], None]): + reveal_type(callback) # revealed: (Literal[1], /) -> None + + reveal_type([callback]) # revealed: list[Unknown | ((Literal[1], /) -> None)] + +def double_negation(callback: Callable[[Callable[[Literal[1]], None]], None]): + reveal_type(callback) # revealed: ((Literal[1], /) -> None, /) -> None + + reveal_type([callback]) # revealed: list[Unknown | (((int, /) -> None, /) -> None)] +``` + +Literal promotion should also not apply recursively to type arguments in contravariant/invariant +position: + +```py +class Bivariant[T]: + pass + +class Covariant[T]: + def 
 pop(self) -> T: + raise NotImplementedError + +class Contravariant[T]: + def push(self, value: T) -> None: + pass + +class Invariant[T]: + x: T + +def _( + bivariant: Bivariant[Literal[1]], + covariant: Covariant[Literal[1]], + contravariant: Contravariant[Literal[1]], + invariant: Invariant[Literal[1]], +): + reveal_type([bivariant]) # revealed: list[Unknown | Bivariant[int]] + reveal_type([covariant]) # revealed: list[Unknown | Covariant[int]] + + reveal_type([contravariant]) # revealed: list[Unknown | Contravariant[Literal[1]]] + reveal_type([invariant]) # revealed: list[Unknown | Invariant[Literal[1]]] +``` diff --git a/crates/ty_python_semantic/resources/mdtest/mro.md b/crates/ty_python_semantic/resources/mdtest/mro.md index da9a40b4a7..761fb9892d 100644 --- a/crates/ty_python_semantic/resources/mdtest/mro.md +++ b/crates/ty_python_semantic/resources/mdtest/mro.md @@ -291,6 +291,20 @@ class Foo(x): ... reveal_mro(Foo) # revealed: (, Unknown, ) ``` +## `UnionType` instances are not allowed as a base + +This is not legal: + +```py +class A: ... +class B: ... + +EitherOr = A | B + +# error: [invalid-base] "Invalid class base with type `types.UnionType`" +class Foo(EitherOr): ...
+``` + ## `__bases__` is a union of a dynamic type and valid bases If a dynamic type such as `Any` or `Unknown` is one of the elements in the union, and all other diff --git a/crates/ty_python_semantic/resources/mdtest/narrow/assignment.md b/crates/ty_python_semantic/resources/mdtest/narrow/assignment.md index f692c59835..5786b465cd 100644 --- a/crates/ty_python_semantic/resources/mdtest/narrow/assignment.md +++ b/crates/ty_python_semantic/resources/mdtest/narrow/assignment.md @@ -206,7 +206,7 @@ dd: defaultdict[int, int] = defaultdict(int) dd[0] = 0 cm: ChainMap[int, int] = ChainMap({1: 1}, {0: 0}) cm[0] = 0 -reveal_type(cm) # revealed: ChainMap[Unknown | int, Unknown | int] +reveal_type(cm) # revealed: ChainMap[int | Unknown, int | Unknown] reveal_type(l[0]) # revealed: Literal[0] reveal_type(d[0]) # revealed: Literal[0] diff --git a/crates/ty_python_semantic/resources/mdtest/narrow/complex_target.md b/crates/ty_python_semantic/resources/mdtest/narrow/complex_target.md index 479238d617..96b12d845d 100644 --- a/crates/ty_python_semantic/resources/mdtest/narrow/complex_target.md +++ b/crates/ty_python_semantic/resources/mdtest/narrow/complex_target.md @@ -58,6 +58,15 @@ d.x = 1 reveal_type(d.x) # revealed: Literal[1] d.x = unknown() reveal_type(d.x) # revealed: Unknown + +class E: + x: int | None = None + +e = E() + +if e.x is not None: + class _: + reveal_type(e.x) # revealed: int ``` Narrowing can be "reset" by assigning to the attribute: diff --git a/crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md b/crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md index 60ec2fa844..b7e49971a0 100644 --- a/crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md +++ b/crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md @@ -70,6 +70,102 @@ def _(flag: bool): reveal_type(x) # revealed: Literal["a"] ``` +## `classinfo` is a PEP-604 union of types + +```toml +[environment] +python-version = "3.10" +``` + +```py +def _(x: int | str | 
bytes | memoryview | range): + if isinstance(x, int | str): + reveal_type(x) # revealed: int | str + elif isinstance(x, bytes | memoryview): + reveal_type(x) # revealed: bytes | memoryview[Unknown] + else: + reveal_type(x) # revealed: range +``` + +Although `isinstance()` usually only works if all elements in the `UnionType` are class objects, at +runtime a special exception is made for `None` so that `isinstance(x, int | None)` can work: + +```py +def _(x: int | str | bytes | range | None): + if isinstance(x, int | str | None): + reveal_type(x) # revealed: int | str | None + else: + reveal_type(x) # revealed: bytes | range +``` + +## `classinfo` is an invalid PEP-604 union of types + +Except for the `None` special case mentioned above, narrowing can only take place if all elements in +the PEP-604 union are class literals. If any elements are generic aliases or other types, the +`isinstance()` call may fail at runtime, so no narrowing can take place: + + + +```toml +[environment] +python-version = "3.10" +``` + +```py +from typing import Any, Literal, NamedTuple + +def _(x: int | list[int] | bytes): + # error: [invalid-argument-type] + if isinstance(x, list[int] | int): + reveal_type(x) # revealed: int | list[int] | bytes + # error: [invalid-argument-type] + elif isinstance(x, Literal[42] | list[int] | bytes): + reveal_type(x) # revealed: int | list[int] | bytes + # error: [invalid-argument-type] + elif isinstance(x, Any | NamedTuple | list[int]): + reveal_type(x) # revealed: int | list[int] | bytes + else: + reveal_type(x) # revealed: int | list[int] | bytes +``` + +## PEP-604 unions on Python \<3.10 + +PEP-604 unions were added in Python 3.10, so attempting to use them on Python 3.9 does not lead to +any type narrowing. 
+ +```toml +[environment] +python-version = "3.9" +``` + +```py +def _(x: int | str | bytes): + # error: [unsupported-operator] + if isinstance(x, int | str): + reveal_type(x) # revealed: (int & Unknown) | (str & Unknown) | (bytes & Unknown) + else: + reveal_type(x) # revealed: (int & Unknown) | (str & Unknown) | (bytes & Unknown) +``` + +## `classinfo` is a `typing.py` special form + +Certain special forms in `typing.py` are aliases to classes elsewhere in the standard library; these +can be used in `isinstance()` and `issubclass()` checks. We support narrowing using them: + +```py +import typing as t + +def f(x: dict[str, int] | list[str], y: object): + if isinstance(x, t.Dict): + reveal_type(x) # revealed: dict[str, int] + else: + reveal_type(x) # revealed: list[str] + + if isinstance(y, t.Callable): + # TODO: a better top-materialization for `Callable`s (https://github.com/astral-sh/ty/issues/1426) + reveal_type(y) # revealed: () -> object +``` + ## Class types ```py @@ -146,13 +242,11 @@ def _(flag: bool): def _(flag: bool): x = 1 if flag else "a" - # TODO: this should cause us to emit a diagnostic during - # type checking + # error: [invalid-argument-type] "Argument to function `isinstance` is incorrect: Expected `type | UnionType | tuple[Unknown, ...]`, found `Literal["a"]" if isinstance(x, "a"): reveal_type(x) # revealed: Literal[1, "a"] - # TODO: this should cause us to emit a diagnostic during - # type checking + # error: [invalid-argument-type] "Argument to function `isinstance` is incorrect: Expected `type | UnionType | tuple[Unknown, ...]`, found `Literal["int"]" if isinstance(x, "int"): reveal_type(x) # revealed: Literal[1, "a"] ``` diff --git a/crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md b/crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md index ce77126d32..139c479843 100644 --- a/crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md +++ b/crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md @@ 
-131,6 +131,75 @@ def _(flag1: bool, flag2: bool): reveal_type(t) # revealed: ``` +## `classinfo` is a PEP-604 union of types + +```toml +[environment] +python-version = "3.10" +``` + +```py +def f(x: type[int | str | bytes | range]): + if issubclass(x, int | str): + reveal_type(x) # revealed: type[int] | type[str] + elif issubclass(x, bytes | memoryview): + reveal_type(x) # revealed: type[bytes] + else: + reveal_type(x) # revealed: +``` + +Although `issubclass()` usually only works if all elements in the `UnionType` are class objects, at +runtime a special exception is made for `None` so that `issubclass(x, int | None)` can work: + +```py +def _(x: type): + if issubclass(x, int | str | None): + reveal_type(x) # revealed: type[int] | type[str] | + else: + reveal_type(x) # revealed: type & ~type[int] & ~type[str] & ~ +``` + +## `classinfo` is an invalid PEP-604 union of types + +Except for the `None` special case mentioned above, narrowing can only take place if all elements in +the PEP-604 union are class literals. If any elements are generic aliases or other types, the +`issubclass()` call may fail at runtime, so no narrowing can take place: + + + +```toml +[environment] +python-version = "3.10" +``` + +```py +def _(x: type[int | list | bytes]): + # error: [invalid-argument-type] + if issubclass(x, int | list[int]): + reveal_type(x) # revealed: type[int] | type[list[Unknown]] | type[bytes] + else: + reveal_type(x) # revealed: type[int] | type[list[Unknown]] | type[bytes] +``` + +## PEP-604 unions on Python \<3.10 + +PEP-604 unions were added in Python 3.10, so attempting to use them on Python 3.9 does not lead to +any type narrowing. 
+ +```toml +[environment] +python-version = "3.9" +``` + +```py +def _(x: type[int | str | bytes]): + # error: [unsupported-operator] + if issubclass(x, int | str): + reveal_type(x) # revealed: (type[int] & Unknown) | (type[str] & Unknown) | (type[bytes] & Unknown) + else: + reveal_type(x) # revealed: (type[int] & Unknown) | (type[str] & Unknown) | (type[bytes] & Unknown) +``` + ## Special cases ### Emit a diagnostic if the first argument is of wrong type @@ -214,8 +283,7 @@ def flag() -> bool: t = int if flag() else str -# TODO: this should cause us to emit a diagnostic during -# type checking +# error: [invalid-argument-type] "Argument to function `issubclass` is incorrect: Expected `type | UnionType | tuple[Unknown, ...]`, found `Literal["str"]" if issubclass(t, "str"): reveal_type(t) # revealed: | diff --git a/crates/ty_python_semantic/resources/mdtest/narrow/match.md b/crates/ty_python_semantic/resources/mdtest/narrow/match.md index ee51d50af2..f0c107851b 100644 --- a/crates/ty_python_semantic/resources/mdtest/narrow/match.md +++ b/crates/ty_python_semantic/resources/mdtest/narrow/match.md @@ -69,6 +69,81 @@ match x: reveal_type(x) # revealed: object ``` +## Class patterns with generic classes + +```toml +[environment] +python-version = "3.12" +``` + +```py +from typing import assert_never + +class Covariant[T]: + def get(self) -> T: + raise NotImplementedError + +def f(x: Covariant[int]): + match x: + case Covariant(): + reveal_type(x) # revealed: Covariant[int] + case _: + reveal_type(x) # revealed: Never + assert_never(x) +``` + +## Class patterns with generic `@final` classes + +These work the same as non-`@final` classes. 
+ +```toml +[environment] +python-version = "3.12" +``` + +```py +from typing import assert_never, final + +@final +class Covariant[T]: + def get(self) -> T: + raise NotImplementedError + +def f(x: Covariant[int]): + match x: + case Covariant(): + reveal_type(x) # revealed: Covariant[int] + case _: + reveal_type(x) # revealed: Never + assert_never(x) +``` + +## Class patterns where the class pattern does not resolve to a class + +In general this does not allow for narrowing, but we make an exception for `Any`. This is to support +[real ecosystem code](https://github.com/jax-ml/jax/blob/d2ce04b6c3d03ae18b145965b8b8b92e09e8009c/jax/_src/pallas/mosaic_gpu/lowering.py#L3372-L3387) +found in `jax`. + +```py +from typing import Any + +X = Any + +def f(obj: object): + match obj: + case int(): + reveal_type(obj) # revealed: int + case X(): + reveal_type(obj) # revealed: Any & ~int + +def g(obj: object, Y: Any): + match obj: + case int(): + reveal_type(obj) # revealed: int + case Y(): + reveal_type(obj) # revealed: Any & ~int +``` + ## Value patterns Value patterns are evaluated by equality, which is overridable. 
Therefore successfully matching on diff --git a/crates/ty_python_semantic/resources/mdtest/paramspec.md b/crates/ty_python_semantic/resources/mdtest/paramspec.md new file mode 100644 index 0000000000..4ebc336d7f --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/paramspec.md @@ -0,0 +1,159 @@ +# `ParamSpec` + +## Definition + +### Valid + +```py +from typing import ParamSpec + +P = ParamSpec("P") +reveal_type(type(P)) # revealed: +reveal_type(P) # revealed: typing.ParamSpec +reveal_type(P.__name__) # revealed: Literal["P"] +``` + +The paramspec name can also be provided as a keyword argument: + +```py +from typing import ParamSpec + +P = ParamSpec(name="P") +reveal_type(P.__name__) # revealed: Literal["P"] +``` + +### Must be directly assigned to a variable + +```py +from typing import ParamSpec + +P = ParamSpec("P") +# error: [invalid-paramspec] +P1: ParamSpec = ParamSpec("P1") + +# error: [invalid-paramspec] +tuple_with_typevar = ("foo", ParamSpec("W")) +reveal_type(tuple_with_typevar[1]) # revealed: ParamSpec +``` + +```py +from typing_extensions import ParamSpec + +T = ParamSpec("T") +# error: [invalid-paramspec] +P1: ParamSpec = ParamSpec("P1") + +# error: [invalid-paramspec] +tuple_with_typevar = ("foo", ParamSpec("P2")) +reveal_type(tuple_with_typevar[1]) # revealed: ParamSpec +``` + +### `ParamSpec` parameter must match variable name + +```py +from typing import ParamSpec + +P1 = ParamSpec("P1") + +# error: [invalid-paramspec] +P2 = ParamSpec("P3") +``` + +### Accepts only a single `name` argument + +> The runtime should accept bounds and covariant and contravariant arguments in the declaration just +> as typing.TypeVar does, but for now we will defer the standardization of the semantics of those +> options to a later PEP. 
+ +```py +from typing import ParamSpec + +# error: [invalid-paramspec] +P1 = ParamSpec("P1", bound=int) +# error: [invalid-paramspec] +P2 = ParamSpec("P2", int, str) +# error: [invalid-paramspec] +P3 = ParamSpec("P3", covariant=True) +# error: [invalid-paramspec] +P4 = ParamSpec("P4", contravariant=True) +``` + +### Defaults + +```toml +[environment] +python-version = "3.13" +``` + +The default value for a `ParamSpec` can be either a list of types, `...`, or another `ParamSpec`. + +```py +from typing import ParamSpec + +P1 = ParamSpec("P1", default=[int, str]) +P2 = ParamSpec("P2", default=...) +P3 = ParamSpec("P3", default=P2) +``` + +Other values are invalid. + +```py +# error: [invalid-paramspec] +P4 = ParamSpec("P4", default=int) +``` + +### PEP 695 + +```toml +[environment] +python-version = "3.12" +``` + +#### Valid + +```py +def foo1[**P]() -> None: + reveal_type(P) # revealed: typing.ParamSpec + +def foo2[**P = ...]() -> None: + reveal_type(P) # revealed: typing.ParamSpec + +def foo3[**P = [int, str]]() -> None: + reveal_type(P) # revealed: typing.ParamSpec + +def foo4[**P, **Q = P](): + reveal_type(P) # revealed: typing.ParamSpec + reveal_type(Q) # revealed: typing.ParamSpec +``` + +#### Invalid + +ParamSpec, when defined using the new syntax, does not allow defining bounds or constraints. + +This results in a lot of syntax errors mainly because the AST doesn't accept them in this position. +The parser could do a better job in recovering from these errors. 
+ + + +```py +# error: [invalid-syntax] +# error: [invalid-syntax] +# error: [invalid-syntax] +# error: [invalid-syntax] +# error: [invalid-syntax] +# error: [invalid-syntax] +def foo[**P: int]() -> None: + # error: [invalid-syntax] + # error: [invalid-syntax] + pass +``` + + + +#### Invalid default + +```py +# error: [invalid-paramspec] +def foo[**P = int]() -> None: + pass +``` diff --git a/crates/ty_python_semantic/resources/mdtest/properties.md b/crates/ty_python_semantic/resources/mdtest/properties.md index 90634eba39..b4c2abae9f 100644 --- a/crates/ty_python_semantic/resources/mdtest/properties.md +++ b/crates/ty_python_semantic/resources/mdtest/properties.md @@ -49,6 +49,40 @@ c.my_property = 2 c.my_property = "a" ``` +## Properties returning `Self` + +A property that returns `Self` refers to an instance of the class: + +```py +from typing_extensions import Self + +class Path: + @property + def parent(self) -> Self: + raise NotImplementedError + +reveal_type(Path().parent) # revealed: Path +``` + +This also works when a setter is defined: + +```py +class Node: + @property + def parent(self) -> Self: + raise NotImplementedError + + @parent.setter + def parent(self, value: Self) -> None: + pass + +root = Node() +child = Node() +child.parent = root + +reveal_type(child.parent) # revealed: Node +``` + ## `property.getter` `property.getter` can be used to overwrite the getter method of a property. 
This does not overwrite diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_key_type_(d3d47de65fb3bad).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_key_type_(d3d47de65fb3bad).snap new file mode 100644 index 0000000000..6081e0f5d9 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_key_type_(d3d47de65fb3bad).snap @@ -0,0 +1,31 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Invalid key type +mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | config: dict[str, int] = {} +2 | config[0] = 3 # error: [invalid-assignment] +``` + +# Diagnostics + +``` +error[invalid-assignment]: Method `__setitem__` of type `bound method dict[str, int].__setitem__(key: str, value: int, /) -> None` cannot be called with a key of type `Literal[0]` and a value of type `Literal[3]` on object of type `dict[str, int]` + --> src/mdtest_snippet.py:2:1 + | +1 | config: dict[str, int] = {} +2 | config[0] = 3 # error: [invalid-assignment] + | ^^^^^^ + | +info: rule `invalid-assignment` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_key_type_for…_(815dae276e2fd2b7).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_key_type_for…_(815dae276e2fd2b7).snap new file mode 100644 index 0000000000..2fbfb5323f --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_key_type_for…_(815dae276e2fd2b7).snap @@ -0,0 +1,36 @@ +--- +source: 
crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Invalid key type for `TypedDict` +mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | from typing import TypedDict +2 | +3 | class Config(TypedDict): +4 | retries: int +5 | +6 | def _(config: Config) -> None: +7 | config[0] = 3 # error: [invalid-key] +``` + +# Diagnostics + +``` +error[invalid-key]: Cannot access `Config` with a key of type `Literal[0]`. Only string literals are allowed as keys on TypedDicts. + --> src/mdtest_snippet.py:7:12 + | +6 | def _(config: Config) -> None: +7 | config[0] = 3 # error: [invalid-key] + | ^ + | +info: rule `invalid-key` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_value_type_(f87bd015df018509).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_value_type_(f87bd015df018509).snap new file mode 100644 index 0000000000..125bcdfac1 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_value_type_(f87bd015df018509).snap @@ -0,0 +1,31 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Invalid value type +mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | config: dict[str, int] = {} +2 | config["retries"] = "three" # error: [invalid-assignment] +``` + +# Diagnostics + +``` +error[invalid-assignment]: Method `__setitem__` of type `bound method dict[str, int].__setitem__(key: str, value: int, /) -> None` cannot be called with a key of type 
`Literal["retries"]` and a value of type `Literal["three"]` on object of type `dict[str, int]` + --> src/mdtest_snippet.py:2:1 + | +1 | config: dict[str, int] = {} +2 | config["retries"] = "three" # error: [invalid-assignment] + | ^^^^^^ + | +info: rule `invalid-assignment` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_value_type_f…_(155d53762388f9ad).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_value_type_f…_(155d53762388f9ad).snap new file mode 100644 index 0000000000..c2821569ce --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_value_type_f…_(155d53762388f9ad).snap @@ -0,0 +1,48 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Invalid value type for `TypedDict` +mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | from typing import TypedDict +2 | +3 | class Config(TypedDict): +4 | retries: int +5 | +6 | def _(config: Config) -> None: +7 | config["retries"] = "three" # error: [invalid-assignment] +``` + +# Diagnostics + +``` +error[invalid-assignment]: Invalid assignment to key "retries" with declared type `int` on TypedDict `Config` + --> src/mdtest_snippet.py:7:5 + | +6 | def _(config: Config) -> None: +7 | config["retries"] = "three" # error: [invalid-assignment] + | ------ --------- ^^^^^^^ value of type `Literal["three"]` + | | | + | | key has declared type `int` + | TypedDict `Config` + | +info: Item declaration + --> src/mdtest_snippet.py:4:5 + | +3 | class Config(TypedDict): +4 | retries: int + | ------------ Item declared here +5 | +6 | def _(config: Config) -> None: + | +info: rule 
`invalid-assignment` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Misspelled_key_for_`…_(7cf0fa634e2a2d59).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Misspelled_key_for_`…_(7cf0fa634e2a2d59).snap new file mode 100644 index 0000000000..e6036f32e0 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Misspelled_key_for_`…_(7cf0fa634e2a2d59).snap @@ -0,0 +1,38 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Misspelled key for `TypedDict` +mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | from typing import TypedDict +2 | +3 | class Config(TypedDict): +4 | retries: int +5 | +6 | def _(config: Config) -> None: +7 | config["Retries"] = 30.0 # error: [invalid-key] +``` + +# Diagnostics + +``` +error[invalid-key]: Invalid key for TypedDict `Config` + --> src/mdtest_snippet.py:7:5 + | +6 | def _(config: Config) -> None: +7 | config["Retries"] = 30.0 # error: [invalid-key] + | ------ ^^^^^^^^^ Unknown key "Retries" - did you mean "retries"? 
+ | | + | TypedDict `Config` + | +info: rule `invalid-key` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_No_`__setitem__`_met…_(468f62a3bdd1d60c).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_No_`__setitem__`_met…_(468f62a3bdd1d60c).snap new file mode 100644 index 0000000000..f9c43e5882 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_No_`__setitem__`_met…_(468f62a3bdd1d60c).snap @@ -0,0 +1,35 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - No `__setitem__` method +mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | class ReadOnlyDict: +2 | def __getitem__(self, key: str) -> int: +3 | return 42 +4 | +5 | config = ReadOnlyDict() +6 | config["retries"] = 3 # error: [invalid-assignment] +``` + +# Diagnostics + +``` +error[invalid-assignment]: Cannot assign to a subscript on an object of type `ReadOnlyDict` with no `__setitem__` method + --> src/mdtest_snippet.py:6:1 + | +5 | config = ReadOnlyDict() +6 | config["retries"] = 3 # error: [invalid-assignment] + | ^^^^^^ + | +info: rule `invalid-assignment` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Possibly_missing_`__…_(efd3f0c02e9b89e9).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Possibly_missing_`__…_(efd3f0c02e9b89e9).snap new file mode 100644 index 0000000000..a12bb7c666 --- /dev/null +++ 
b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Possibly_missing_`__…_(efd3f0c02e9b89e9).snap @@ -0,0 +1,32 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Possibly missing `__setitem__` method +mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | def _(config: dict[str, int] | None) -> None: +2 | config["retries"] = 3 # error: [invalid-assignment] +``` + +# Diagnostics + +``` +error[invalid-assignment]: Cannot assign to a subscript on an object of type `None` with no `__setitem__` method + --> src/mdtest_snippet.py:2:5 + | +1 | def _(config: dict[str, int] | None) -> None: +2 | config["retries"] = 3 # error: [invalid-assignment] + | ^^^^^^ + | +info: The full type of the subscripted object is `dict[str, int] | None` +info: rule `invalid-assignment` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_all_…_(1c685d9d10678263).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_all_…_(1c685d9d10678263).snap new file mode 100644 index 0000000000..2e7bbcfe4d --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_all_…_(1c685d9d10678263).snap @@ -0,0 +1,60 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Unknown key for all elemens of a union +mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` + 1 | from typing import TypedDict + 2 | + 3 | class 
Person(TypedDict): + 4 | name: str + 5 | + 6 | class Animal(TypedDict): + 7 | name: str + 8 | legs: int + 9 | +10 | def _(being: Person | Animal) -> None: +11 | # error: [invalid-key] +12 | # error: [invalid-key] +13 | being["surname"] = "unknown" +``` + +# Diagnostics + +``` +error[invalid-key]: Invalid key for TypedDict `Person` + --> src/mdtest_snippet.py:13:5 + | +11 | # error: [invalid-key] +12 | # error: [invalid-key] +13 | being["surname"] = "unknown" + | ----- ^^^^^^^^^ Unknown key "surname" - did you mean "name"? + | | + | TypedDict `Person` in union type `Person | Animal` + | +info: rule `invalid-key` is enabled by default + +``` + +``` +error[invalid-key]: Invalid key for TypedDict `Animal` + --> src/mdtest_snippet.py:13:5 + | +11 | # error: [invalid-key] +12 | # error: [invalid-key] +13 | being["surname"] = "unknown" + | ----- ^^^^^^^^^ Unknown key "surname" - did you mean "name"? + | | + | TypedDict `Animal` in union type `Person | Animal` + | +info: rule `invalid-key` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_one_…_(b515711c0a451a86).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_one_…_(b515711c0a451a86).snap new file mode 100644 index 0000000000..6c919e6937 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_one_…_(b515711c0a451a86).snap @@ -0,0 +1,42 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Unknown key for one element of a union +mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` + 1 | from typing import TypedDict + 2 | + 3 | class Person(TypedDict): + 4 | name: str 
+ 5 | + 6 | class Animal(TypedDict): + 7 | name: str + 8 | legs: int + 9 | +10 | def _(being: Person | Animal) -> None: +11 | being["legs"] = 4 # error: [invalid-key] +``` + +# Diagnostics + +``` +error[invalid-key]: Invalid key for TypedDict `Person` + --> src/mdtest_snippet.py:11:5 + | +10 | def _(being: Person | Animal) -> None: +11 | being["legs"] = 4 # error: [invalid-key] + | ----- ^^^^^^ Unknown key "legs" + | | + | TypedDict `Person` in union type `Person | Animal` + | +info: rule `invalid-key` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(57372b65e30392a8).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(57372b65e30392a8).snap new file mode 100644 index 0000000000..0f603931aa --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(57372b65e30392a8).snap @@ -0,0 +1,32 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Wrong value type for one element of a union +mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | def _(config: dict[str, int] | dict[str, str]) -> None: +2 | config["retries"] = 3 # error: [invalid-assignment] +``` + +# Diagnostics + +``` +error[invalid-assignment]: Method `__setitem__` of type `bound method dict[str, str].__setitem__(key: str, value: str, /) -> None` cannot be called with a key of type `Literal["retries"]` and a value of type `Literal[3]` on object of type `dict[str, str]` + --> src/mdtest_snippet.py:2:5 + | +1 | def _(config: dict[str, int] | dict[str, str]) -> None: +2 | config["retries"] = 3 # error: [invalid-assignment] + | ^^^^^^ + | 
+info: The full type of the subscripted object is `dict[str, int] | dict[str, str]` +info: rule `invalid-assignment` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(ffe39a3bae68cfe4).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(ffe39a3bae68cfe4).snap new file mode 100644 index 0000000000..635a402c9b --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(ffe39a3bae68cfe4).snap @@ -0,0 +1,49 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Wrong value type for all elements of a union +mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | def _(config: dict[str, int] | dict[str, str]) -> None: +2 | # error: [invalid-assignment] +3 | # error: [invalid-assignment] +4 | config["retries"] = 3.0 +``` + +# Diagnostics + +``` +error[invalid-assignment]: Method `__setitem__` of type `bound method dict[str, int].__setitem__(key: str, value: int, /) -> None` cannot be called with a key of type `Literal["retries"]` and a value of type `float` on object of type `dict[str, int]` + --> src/mdtest_snippet.py:4:5 + | +2 | # error: [invalid-assignment] +3 | # error: [invalid-assignment] +4 | config["retries"] = 3.0 + | ^^^^^^ + | +info: The full type of the subscripted object is `dict[str, int] | dict[str, str]` +info: rule `invalid-assignment` is enabled by default + +``` + +``` +error[invalid-assignment]: Method `__setitem__` of type `bound method dict[str, str].__setitem__(key: str, value: str, /) -> None` cannot be called with a key of type `Literal["retries"]` and a value of type `float` 
on object of type `dict[str, str]` + --> src/mdtest_snippet.py:4:5 + | +2 | # error: [invalid-assignment] +3 | # error: [invalid-assignment] +4 | config["retries"] = 3.0 + | ^^^^^^ + | +info: The full type of the subscripted object is `dict[str, int] | dict[str, str]` +info: rule `invalid-assignment` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/isinstance.md_-_Narrowing_for_`isins…_-_`classinfo`_is_an_in…_(eeef56c0ef87a30b).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/isinstance.md_-_Narrowing_for_`isins…_-_`classinfo`_is_an_in…_(eeef56c0ef87a30b).snap new file mode 100644 index 0000000000..822f9319a1 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/isinstance.md_-_Narrowing_for_`isins…_-_`classinfo`_is_an_in…_(eeef56c0ef87a30b).snap @@ -0,0 +1,88 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: isinstance.md - Narrowing for `isinstance` checks - `classinfo` is an invalid PEP-604 union of types +mdtest path: crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md +--- + +# Python source files + +## mdtest_snippet.py + +``` + 1 | from typing import Any, Literal, NamedTuple + 2 | + 3 | def _(x: int | list[int] | bytes): + 4 | # error: [invalid-argument-type] + 5 | if isinstance(x, list[int] | int): + 6 | reveal_type(x) # revealed: int | list[int] | bytes + 7 | # error: [invalid-argument-type] + 8 | elif isinstance(x, Literal[42] | list[int] | bytes): + 9 | reveal_type(x) # revealed: int | list[int] | bytes +10 | # error: [invalid-argument-type] +11 | elif isinstance(x, Any | NamedTuple | list[int]): +12 | reveal_type(x) # revealed: int | list[int] | bytes +13 | else: +14 | reveal_type(x) # revealed: int | list[int] | bytes +``` + +# Diagnostics + +``` +error[invalid-argument-type]: Invalid second argument to `isinstance` + --> src/mdtest_snippet.py:5:8 + | +3 | def _(x: int | list[int] | bytes): +4 | # error: [invalid-argument-type] 
+5 | if isinstance(x, list[int] | int): + | ^^^^^^^^^^^^^^---------------^ + | | + | This `UnionType` instance contains non-class elements +6 | reveal_type(x) # revealed: int | list[int] | bytes +7 | # error: [invalid-argument-type] + | +info: A `UnionType` instance can only be used as the second argument to `isinstance` if all elements are class objects +info: Element `` in the union is not a class object +info: rule `invalid-argument-type` is enabled by default + +``` + +``` +error[invalid-argument-type]: Invalid second argument to `isinstance` + --> src/mdtest_snippet.py:8:10 + | + 6 | reveal_type(x) # revealed: int | list[int] | bytes + 7 | # error: [invalid-argument-type] + 8 | elif isinstance(x, Literal[42] | list[int] | bytes): + | ^^^^^^^^^^^^^^-------------------------------^ + | | + | This `UnionType` instance contains non-class elements + 9 | reveal_type(x) # revealed: int | list[int] | bytes +10 | # error: [invalid-argument-type] + | +info: A `UnionType` instance can only be used as the second argument to `isinstance` if all elements are class objects +info: Elements `` and `` in the union are not class objects +info: rule `invalid-argument-type` is enabled by default + +``` + +``` +error[invalid-argument-type]: Invalid second argument to `isinstance` + --> src/mdtest_snippet.py:11:10 + | + 9 | reveal_type(x) # revealed: int | list[int] | bytes +10 | # error: [invalid-argument-type] +11 | elif isinstance(x, Any | NamedTuple | list[int]): + | ^^^^^^^^^^^^^^----------------------------^ + | | + | This `UnionType` instance contains non-class elements +12 | reveal_type(x) # revealed: int | list[int] | bytes +13 | else: + | +info: A `UnionType` instance can only be used as the second argument to `isinstance` if all elements are class objects +info: Element `typing.Any` in the union, and 2 more elements, are not class objects +info: rule `invalid-argument-type` is enabled by default + +``` diff --git 
a/crates/ty_python_semantic/resources/mdtest/snapshots/issubclass.md_-_Narrowing_for_`issub…_-_`classinfo`_is_an_in…_(7bb66a0f412caac1).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/issubclass.md_-_Narrowing_for_`issub…_-_`classinfo`_is_an_in…_(7bb66a0f412caac1).snap new file mode 100644 index 0000000000..27318dfe2b --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/issubclass.md_-_Narrowing_for_`issub…_-_`classinfo`_is_an_in…_(7bb66a0f412caac1).snap @@ -0,0 +1,42 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: issubclass.md - Narrowing for `issubclass` checks - `classinfo` is an invalid PEP-604 union of types +mdtest path: crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | def _(x: type[int | list | bytes]): +2 | # error: [invalid-argument-type] +3 | if issubclass(x, int | list[int]): +4 | reveal_type(x) # revealed: type[int] | type[list[Unknown]] | type[bytes] +5 | else: +6 | reveal_type(x) # revealed: type[int] | type[list[Unknown]] | type[bytes] +``` + +# Diagnostics + +``` +error[invalid-argument-type]: Invalid second argument to `issubclass` + --> src/mdtest_snippet.py:3:8 + | +1 | def _(x: type[int | list | bytes]): +2 | # error: [invalid-argument-type] +3 | if issubclass(x, int | list[int]): + | ^^^^^^^^^^^^^^---------------^ + | | + | This `UnionType` instance contains non-class elements +4 | reveal_type(x) # revealed: type[int] | type[list[Unknown]] | type[bytes] +5 | else: + | +info: A `UnionType` instance can only be used as the second argument to `issubclass` if all elements are class objects +info: Element `` in the union is not a class object +info: rule `invalid-argument-type` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/new_types.md_-_NewType_-_The_base_of_a_`NewTy…_(9847ea9eddc316b4).snap 
b/crates/ty_python_semantic/resources/mdtest/snapshots/new_types.md_-_NewType_-_The_base_of_a_`NewTy…_(9847ea9eddc316b4).snap new file mode 100644 index 0000000000..ee47accc9d --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/new_types.md_-_NewType_-_The_base_of_a_`NewTy…_(9847ea9eddc316b4).snap @@ -0,0 +1,58 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: new_types.md - NewType - The base of a `NewType` can't be a protocol class or a `TypedDict` +mdtest path: crates/ty_python_semantic/resources/mdtest/annotations/new_types.md +--- + +# Python source files + +## mdtest_snippet.py + +``` + 1 | from typing import NewType, Protocol, TypedDict + 2 | + 3 | class Id(Protocol): + 4 | code: int + 5 | + 6 | UserId = NewType("UserId", Id) # error: [invalid-newtype] + 7 | + 8 | class Foo(TypedDict): + 9 | a: int +10 | +11 | Bar = NewType("Bar", Foo) # error: [invalid-newtype] +``` + +# Diagnostics + +``` +error[invalid-newtype]: invalid base for `typing.NewType` + --> src/mdtest_snippet.py:6:28 + | +4 | code: int +5 | +6 | UserId = NewType("UserId", Id) # error: [invalid-newtype] + | ^^ type `Id` +7 | +8 | class Foo(TypedDict): + | +info: The base of a `NewType` is not allowed to be a protocol class. +info: rule `invalid-newtype` is enabled by default + +``` + +``` +error[invalid-newtype]: invalid base for `typing.NewType` + --> src/mdtest_snippet.py:11:22 + | + 9 | a: int +10 | +11 | Bar = NewType("Bar", Foo) # error: [invalid-newtype] + | ^^^ type `Foo` + | +info: The base of a `NewType` is not allowed to be a `TypedDict`. 
+info: rule `invalid-newtype` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/new_types.md_-_NewType_-_Trying_to_subclass_a…_(fd3c73e2a9f04).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/new_types.md_-_NewType_-_Trying_to_subclass_a…_(fd3c73e2a9f04).snap new file mode 100644 index 0000000000..9e4eac091a --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/new_types.md_-_NewType_-_Trying_to_subclass_a…_(fd3c73e2a9f04).snap @@ -0,0 +1,37 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: new_types.md - NewType - Trying to subclass a `NewType` produces an error matching CPython +mdtest path: crates/ty_python_semantic/resources/mdtest/annotations/new_types.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | from typing import NewType +2 | +3 | X = NewType("X", int) +4 | +5 | class Foo(X): ... # error: [invalid-base] +``` + +# Diagnostics + +``` +error[invalid-base]: Cannot subclass an instance of NewType + --> src/mdtest_snippet.py:5:11 + | +3 | X = NewType("X", int) +4 | +5 | class Foo(X): ... 
# error: [invalid-base] + | ^ + | +info: Perhaps you were looking for: `Foo = NewType('Foo', X)` +info: Definition of class `Foo` will raise `TypeError` at runtime +info: rule `invalid-base` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/super.md_-_Super_-_Basic_Usage_-_Explicit_Super_Objec…_(b753048091f275c0).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/super.md_-_Super_-_Basic_Usage_-_Explicit_Super_Objec…_(b753048091f275c0).snap index 245c95d394..4a1c008f9f 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/super.md_-_Super_-_Basic_Usage_-_Explicit_Super_Objec…_(b753048091f275c0).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/super.md_-_Super_-_Basic_Usage_-_Explicit_Super_Objec…_(b753048091f275c0).snap @@ -46,7 +46,7 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/class/super.md 32 | reveal_type(super(C, C()).aa) # revealed: int 33 | reveal_type(super(C, C()).bb) # revealed: int 34 | import types - 35 | from typing_extensions import Callable, TypeIs, Literal, TypedDict + 35 | from typing_extensions import Callable, TypeIs, Literal, NewType, TypedDict 36 | 37 | def f(): ... 
38 | @@ -61,59 +61,63 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/class/super.md 47 | x: int 48 | y: bytes 49 | - 50 | # revealed: , FunctionType> - 51 | reveal_type(super(object, f)) - 52 | # revealed: , WrapperDescriptorType> - 53 | reveal_type(super(object, types.FunctionType.__get__)) - 54 | # revealed: , GenericAlias> - 55 | reveal_type(super(object, Foo[int])) - 56 | # revealed: , _SpecialForm> - 57 | reveal_type(super(object, Literal)) - 58 | # revealed: , TypeAliasType> - 59 | reveal_type(super(object, Alias)) - 60 | # revealed: , MethodType> - 61 | reveal_type(super(object, Foo().method)) - 62 | # revealed: , property> - 63 | reveal_type(super(object, Foo.some_property)) - 64 | - 65 | def g(x: object) -> TypeIs[list[object]]: - 66 | return isinstance(x, list) - 67 | - 68 | def _(x: object, y: SomeTypedDict, z: Callable[[int, str], bool]): - 69 | if hasattr(x, "bar"): - 70 | # revealed: - 71 | reveal_type(x) - 72 | # error: [invalid-super-argument] - 73 | # revealed: Unknown - 74 | reveal_type(super(object, x)) - 75 | - 76 | # error: [invalid-super-argument] - 77 | # revealed: Unknown - 78 | reveal_type(super(object, z)) + 50 | N = NewType("N", int) + 51 | + 52 | # revealed: , FunctionType> + 53 | reveal_type(super(object, f)) + 54 | # revealed: , WrapperDescriptorType> + 55 | reveal_type(super(object, types.FunctionType.__get__)) + 56 | # revealed: , GenericAlias> + 57 | reveal_type(super(object, Foo[int])) + 58 | # revealed: , _SpecialForm> + 59 | reveal_type(super(object, Literal)) + 60 | # revealed: , TypeAliasType> + 61 | reveal_type(super(object, Alias)) + 62 | # revealed: , MethodType> + 63 | reveal_type(super(object, Foo().method)) + 64 | # revealed: , property> + 65 | reveal_type(super(object, Foo.some_property)) + 66 | # revealed: , int> + 67 | reveal_type(super(object, N(42))) + 68 | + 69 | def g(x: object) -> TypeIs[list[object]]: + 70 | return isinstance(x, list) + 71 | + 72 | def _(x: object, y: SomeTypedDict, z: Callable[[int, 
str], bool]): + 73 | if hasattr(x, "bar"): + 74 | # revealed: + 75 | reveal_type(x) + 76 | # error: [invalid-super-argument] + 77 | # revealed: Unknown + 78 | reveal_type(super(object, x)) 79 | - 80 | is_list = g(x) - 81 | # revealed: TypeIs[list[object] @ x] - 82 | reveal_type(is_list) - 83 | # revealed: , bool> - 84 | reveal_type(super(object, is_list)) - 85 | - 86 | # revealed: , dict[Literal["x", "y"], int | bytes]> - 87 | reveal_type(super(object, y)) - 88 | - 89 | # The first argument to `super()` must be an actual class object; - 90 | # instances of `GenericAlias` are not accepted at runtime: - 91 | # - 92 | # error: [invalid-super-argument] - 93 | # revealed: Unknown - 94 | reveal_type(super(list[int], [])) - 95 | class Super: - 96 | def method(self) -> int: - 97 | return 42 - 98 | - 99 | class Sub(Super): -100 | def method(self: Sub) -> int: -101 | # revealed: , Sub> -102 | return reveal_type(super(self.__class__, self)).method() + 80 | # error: [invalid-super-argument] + 81 | # revealed: Unknown + 82 | reveal_type(super(object, z)) + 83 | + 84 | is_list = g(x) + 85 | # revealed: TypeIs[list[object] @ x] + 86 | reveal_type(is_list) + 87 | # revealed: , bool> + 88 | reveal_type(super(object, is_list)) + 89 | + 90 | # revealed: , dict[Literal["x", "y"], int | bytes]> + 91 | reveal_type(super(object, y)) + 92 | + 93 | # The first argument to `super()` must be an actual class object; + 94 | # instances of `GenericAlias` are not accepted at runtime: + 95 | # + 96 | # error: [invalid-super-argument] + 97 | # revealed: Unknown + 98 | reveal_type(super(list[int], [])) + 99 | class Super: +100 | def method(self) -> int: +101 | return 42 +102 | +103 | class Sub(Super): +104 | def method(self: Sub) -> int: +105 | # revealed: , Sub> +106 | return reveal_type(super(self.__class__, self)).method() ``` # Diagnostics @@ -206,14 +210,14 @@ info: rule `unresolved-attribute` is enabled by default ``` error[invalid-super-argument]: `` is an abstract/structural type in 
`super(, )` call - --> src/mdtest_snippet.py:74:21 + --> src/mdtest_snippet.py:78:21 | -72 | # error: [invalid-super-argument] -73 | # revealed: Unknown -74 | reveal_type(super(object, x)) +76 | # error: [invalid-super-argument] +77 | # revealed: Unknown +78 | reveal_type(super(object, x)) | ^^^^^^^^^^^^^^^^ -75 | -76 | # error: [invalid-super-argument] +79 | +80 | # error: [invalid-super-argument] | info: rule `invalid-super-argument` is enabled by default @@ -221,14 +225,14 @@ info: rule `invalid-super-argument` is enabled by default ``` error[invalid-super-argument]: `(int, str, /) -> bool` is an abstract/structural type in `super(, (int, str, /) -> bool)` call - --> src/mdtest_snippet.py:78:17 + --> src/mdtest_snippet.py:82:17 | -76 | # error: [invalid-super-argument] -77 | # revealed: Unknown -78 | reveal_type(super(object, z)) +80 | # error: [invalid-super-argument] +81 | # revealed: Unknown +82 | reveal_type(super(object, z)) | ^^^^^^^^^^^^^^^^ -79 | -80 | is_list = g(x) +83 | +84 | is_list = g(x) | info: rule `invalid-super-argument` is enabled by default @@ -236,15 +240,15 @@ info: rule `invalid-super-argument` is enabled by default ``` error[invalid-super-argument]: `types.GenericAlias` instance `list[int]` is not a valid class - --> src/mdtest_snippet.py:94:13 - | -92 | # error: [invalid-super-argument] -93 | # revealed: Unknown -94 | reveal_type(super(list[int], [])) - | ^^^^^^^^^^^^^^^^^^^^ -95 | class Super: -96 | def method(self) -> int: - | + --> src/mdtest_snippet.py:98:13 + | + 96 | # error: [invalid-super-argument] + 97 | # revealed: Unknown + 98 | reveal_type(super(list[int], [])) + | ^^^^^^^^^^^^^^^^^^^^ + 99 | class Super: +100 | def method(self) -> int: + | info: rule `invalid-super-argument` is enabled by default ``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/typed_dict.md_-_`TypedDict`_-_Diagnostics_(e5289abf5c570c29).snap 
b/crates/ty_python_semantic/resources/mdtest/snapshots/typed_dict.md_-_`TypedDict`_-_Diagnostics_(e5289abf5c570c29).snap index b80700fa08..51b0f0ce69 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/typed_dict.md_-_`TypedDict`_-_Diagnostics_(e5289abf5c570c29).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/typed_dict.md_-_`TypedDict`_-_Diagnostics_(e5289abf5c570c29).snap @@ -37,20 +37,27 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/typed_dict.md 23 | 24 | def write_to_non_literal_string_key(person: Person, str_key: str): 25 | person[str_key] = "Alice" # error: [invalid-key] -26 | from typing_extensions import ReadOnly -27 | -28 | class Employee(TypedDict): -29 | id: ReadOnly[int] -30 | name: str -31 | -32 | def write_to_readonly_key(employee: Employee): -33 | employee["id"] = 42 # error: [invalid-assignment] +26 | +27 | def create_with_invalid_string_key(): +28 | # error: [invalid-key] +29 | alice: Person = {"name": "Alice", "age": 30, "unknown": "Foo"} +30 | +31 | # error: [invalid-key] +32 | bob = Person(name="Bob", age=25, unknown="Bar") +33 | from typing_extensions import ReadOnly +34 | +35 | class Employee(TypedDict): +36 | id: ReadOnly[int] +37 | name: str +38 | +39 | def write_to_readonly_key(employee: Employee): +40 | employee["id"] = 42 # error: [invalid-assignment] ``` # Diagnostics ``` -error[invalid-key]: Invalid key access on TypedDict `Person` +error[invalid-key]: Invalid key for TypedDict `Person` --> src/mdtest_snippet.py:8:5 | 7 | def access_invalid_literal_string_key(person: Person): @@ -66,7 +73,7 @@ info: rule `invalid-key` is enabled by default ``` ``` -error[invalid-key]: Invalid key access on TypedDict `Person` +error[invalid-key]: Invalid key for TypedDict `Person` --> src/mdtest_snippet.py:13:5 | 12 | def access_invalid_key(person: Person): @@ -82,7 +89,7 @@ info: rule `invalid-key` is enabled by default ``` ``` -error[invalid-key]: TypedDict `Person` cannot be indexed with a key of type `str` 
+error[invalid-key]: Invalid key of type `str` for TypedDict `Person` --> src/mdtest_snippet.py:16:12 | 15 | def access_with_str_key(person: Person, str_key: str): @@ -123,7 +130,7 @@ info: rule `invalid-assignment` is enabled by default ``` ``` -error[invalid-key]: Invalid key access on TypedDict `Person` +error[invalid-key]: Invalid key for TypedDict `Person` --> src/mdtest_snippet.py:22:5 | 21 | def write_to_non_existing_key(person: Person): @@ -145,7 +152,39 @@ error[invalid-key]: Cannot access `Person` with a key of type `str`. Only string 24 | def write_to_non_literal_string_key(person: Person, str_key: str): 25 | person[str_key] = "Alice" # error: [invalid-key] | ^^^^^^^ -26 | from typing_extensions import ReadOnly +26 | +27 | def create_with_invalid_string_key(): + | +info: rule `invalid-key` is enabled by default + +``` + +``` +error[invalid-key]: Invalid key for TypedDict `Person` + --> src/mdtest_snippet.py:29:21 + | +27 | def create_with_invalid_string_key(): +28 | # error: [invalid-key] +29 | alice: Person = {"name": "Alice", "age": 30, "unknown": "Foo"} + | -----------------------------^^^^^^^^^-------- + | | | + | | Unknown key "unknown" + | TypedDict `Person` +30 | +31 | # error: [invalid-key] + | +info: rule `invalid-key` is enabled by default + +``` + +``` +error[invalid-key]: Invalid key for TypedDict `Person` + --> src/mdtest_snippet.py:32:11 + | +31 | # error: [invalid-key] +32 | bob = Person(name="Bob", age=25, unknown="Bar") + | ------ TypedDict `Person` ^^^^^^^^^^^^^ Unknown key "unknown" +33 | from typing_extensions import ReadOnly | info: rule `invalid-key` is enabled by default @@ -153,21 +192,21 @@ info: rule `invalid-key` is enabled by default ``` error[invalid-assignment]: Cannot assign to key "id" on TypedDict `Employee` - --> src/mdtest_snippet.py:33:5 + --> src/mdtest_snippet.py:40:5 | -32 | def write_to_readonly_key(employee: Employee): -33 | employee["id"] = 42 # error: [invalid-assignment] +39 | def 
write_to_readonly_key(employee: Employee): +40 | employee["id"] = 42 # error: [invalid-assignment] | -------- ^^^^ key is marked read-only | | | TypedDict `Employee` | info: Item declaration - --> src/mdtest_snippet.py:29:5 + --> src/mdtest_snippet.py:36:5 | -28 | class Employee(TypedDict): -29 | id: ReadOnly[int] +35 | class Employee(TypedDict): +36 | id: ReadOnly[int] | ----------------- Read-only item declared here -30 | name: str +37 | name: str | info: rule `invalid-assignment` is enabled by default diff --git a/crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md b/crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md new file mode 100644 index 0000000000..e4959e3627 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md @@ -0,0 +1,121 @@ +# Subscript assignment diagnostics + + + +## Invalid value type + +```py +config: dict[str, int] = {} +config["retries"] = "three" # error: [invalid-assignment] +``` + +## Invalid key type + +```py +config: dict[str, int] = {} +config[0] = 3 # error: [invalid-assignment] +``` + +## Invalid value type for `TypedDict` + +```py +from typing import TypedDict + +class Config(TypedDict): + retries: int + +def _(config: Config) -> None: + config["retries"] = "three" # error: [invalid-assignment] +``` + +## Invalid key type for `TypedDict` + +```py +from typing import TypedDict + +class Config(TypedDict): + retries: int + +def _(config: Config) -> None: + config[0] = 3 # error: [invalid-key] +``` + +## Misspelled key for `TypedDict` + +```py +from typing import TypedDict + +class Config(TypedDict): + retries: int + +def _(config: Config) -> None: + config["Retries"] = 30.0 # error: [invalid-key] +``` + +## No `__setitem__` method + +```py +class ReadOnlyDict: + def __getitem__(self, key: str) -> int: + return 42 + +config = ReadOnlyDict() +config["retries"] = 3 # error: [invalid-assignment] +``` + +## Possibly missing `__setitem__` method 
+
+```py
+def _(config: dict[str, int] | None) -> None:
+    config["retries"] = 3  # error: [invalid-assignment]
+```
+
+## Unknown key for one element of a union
+
+```py
+from typing import TypedDict
+
+class Person(TypedDict):
+    name: str
+
+class Animal(TypedDict):
+    name: str
+    legs: int
+
+def _(being: Person | Animal) -> None:
+    being["legs"] = 4  # error: [invalid-key]
+```
+
+## Unknown key for all elements of a union
+
+```py
+from typing import TypedDict
+
+class Person(TypedDict):
+    name: str
+
+class Animal(TypedDict):
+    name: str
+    legs: int
+
+def _(being: Person | Animal) -> None:
+    # error: [invalid-key]
+    # error: [invalid-key]
+    being["surname"] = "unknown"
+```
+
+## Wrong value type for one element of a union
+
+```py
+def _(config: dict[str, int] | dict[str, str]) -> None:
+    config["retries"] = 3  # error: [invalid-assignment]
+```
+
+## Wrong value type for all elements of a union
+
+```py
+def _(config: dict[str, int] | dict[str, str]) -> None:
+    # error: [invalid-assignment]
+    # error: [invalid-assignment]
+    config["retries"] = 3.0
+```
diff --git a/crates/ty_python_semantic/resources/mdtest/subscript/instance.md b/crates/ty_python_semantic/resources/mdtest/subscript/instance.md
index 7d1ad7f183..b15ec4abc1 100644
--- a/crates/ty_python_semantic/resources/mdtest/subscript/instance.md
+++ b/crates/ty_python_semantic/resources/mdtest/subscript/instance.md
@@ -76,7 +76,7 @@ a[0] = 0
 class NoSetitem: ...
a = NoSetitem() -a[0] = 0 # error: "Cannot assign to object of type `NoSetitem` with no `__setitem__` method" +a[0] = 0 # error: "Cannot assign to a subscript on an object of type `NoSetitem` with no `__setitem__` method" ``` ## `__setitem__` not callable diff --git a/crates/ty_python_semantic/resources/mdtest/ty_extensions.md b/crates/ty_python_semantic/resources/mdtest/ty_extensions.md index ba88851015..4ff580954e 100644 --- a/crates/ty_python_semantic/resources/mdtest/ty_extensions.md +++ b/crates/ty_python_semantic/resources/mdtest/ty_extensions.md @@ -99,7 +99,7 @@ static_assert(is_assignable_to(int, Unknown)) def explicit_unknown(x: Unknown, y: tuple[str, Unknown], z: Unknown = 1) -> None: reveal_type(x) # revealed: Unknown reveal_type(y) # revealed: tuple[str, Unknown] - reveal_type(z) # revealed: Unknown | Literal[1] + reveal_type(z) # revealed: Unknown ``` `Unknown` can be subclassed, just like `Any`: @@ -390,7 +390,7 @@ static_assert(not is_single_valued(Literal["a"] | Literal["b"])) We use `TypeOf` to get the inferred type of an expression. This is useful when we want to refer to it in a type expression. For example, if we want to make sure that the class literal type `str` is a -subtype of `type[str]`, we can not use `is_subtype_of(str, type[str])`, as that would test if the +subtype of `type[str]`, we cannot use `is_subtype_of(str, type[str])`, as that would test if the type `str` itself is a subtype of `type[str]`. Instead, we can use `TypeOf[str]` to get the type of the expression `str`: diff --git a/crates/ty_python_semantic/resources/mdtest/type_compendium/any.md b/crates/ty_python_semantic/resources/mdtest/type_compendium/any.md index a0de2576b9..255e744af9 100644 --- a/crates/ty_python_semantic/resources/mdtest/type_compendium/any.md +++ b/crates/ty_python_semantic/resources/mdtest/type_compendium/any.md @@ -54,7 +54,7 @@ class Small(Medium): ... 
static_assert(is_assignable_to(Any | Medium, Big)) static_assert(is_assignable_to(Any | Medium, Medium)) -# `Any | Medium` is at least as large as `Medium`, so we can not assign it to `Small`: +# `Any | Medium` is at least as large as `Medium`, so we cannot assign it to `Small`: static_assert(not is_assignable_to(Any | Medium, Small)) ``` @@ -84,7 +84,7 @@ static_assert(is_assignable_to(Small, Intersection[Any, Medium])) static_assert(is_assignable_to(Medium, Intersection[Any, Medium])) ``` -`Any & Medium` is no larger than `Medium`, so we can not assign `Big` to it. There is no possible +`Any & Medium` is no larger than `Medium`, so we cannot assign `Big` to it. There is no possible materialization of `Any & Medium` that would make it as big as `Big`: ```py diff --git a/crates/ty_python_semantic/resources/mdtest/type_compendium/integer_literals.md b/crates/ty_python_semantic/resources/mdtest/type_compendium/integer_literals.md index d8d42ae7ad..66b759b9ac 100644 --- a/crates/ty_python_semantic/resources/mdtest/type_compendium/integer_literals.md +++ b/crates/ty_python_semantic/resources/mdtest/type_compendium/integer_literals.md @@ -32,8 +32,8 @@ static_assert(not is_singleton(Literal[1])) static_assert(not is_singleton(Literal[54165])) ``` -This has implications for type-narrowing. For example, you can not use the `is not` operator to -check whether a variable has a specific integer literal type, but this is not a recommended practice +This has implications for type-narrowing. For example, you cannot use the `is not` operator to check +whether a variable has a specific integer literal type, but this is not a recommended practice anyway. 
```py @@ -44,7 +44,7 @@ def f(x: int): reveal_type(x) # revealed: Literal[54165] if x is not 54165: - # But here, we can not narrow the type (to `int & ~Literal[54165]`), because `x` might also + # But here, we cannot narrow the type (to `int & ~Literal[54165]`), because `x` might also # have the value `54165`, but a different object identity. reveal_type(x) # revealed: int ``` diff --git a/crates/ty_python_semantic/resources/mdtest/type_properties/constraints.md b/crates/ty_python_semantic/resources/mdtest/type_properties/constraints.md index 00a3e2837f..f677298c51 100644 --- a/crates/ty_python_semantic/resources/mdtest/type_properties/constraints.md +++ b/crates/ty_python_semantic/resources/mdtest/type_properties/constraints.md @@ -258,6 +258,50 @@ def _[T]() -> None: reveal_type(ConstraintSet.range(SubSub, T, Sub) & ConstraintSet.range(Unrelated, T, object)) ``` +Expanding on this, when intersecting two upper bounds constraints (`(T ≤ Base) ∧ (T ≤ Other)`), we +intersect the upper bounds. Any type that satisfies both `T ≤ Base` and `T ≤ Other` must necessarily +satisfy their intersection `T ≤ Base & Other`, and vice versa. + +```py +from typing import Never +from ty_extensions import Intersection, static_assert + +# This is not final, so it's possible for a subclass to inherit from both Base and Other. +class Other: ... + +def upper_bounds[T](): + intersection_type = ConstraintSet.range(Never, T, Intersection[Base, Other]) + # revealed: ty_extensions.ConstraintSet[(T@upper_bounds ≤ Base & Other)] + reveal_type(intersection_type) + + intersection_constraint = ConstraintSet.range(Never, T, Base) & ConstraintSet.range(Never, T, Other) + # revealed: ty_extensions.ConstraintSet[(T@upper_bounds ≤ Base & Other)] + reveal_type(intersection_constraint) + + # The two constraint sets are equivalent; each satisfies the other. 
+ static_assert(intersection_type.satisfies(intersection_constraint)) + static_assert(intersection_constraint.satisfies(intersection_type)) +``` + +For an intersection of two lower bounds constraints (`(Base ≤ T) ∧ (Other ≤ T)`), we union the lower +bounds. Any type that satisfies both `Base ≤ T` and `Other ≤ T` must necessarily satisfy their union +`Base | Other ≤ T`, and vice versa. + +```py +def lower_bounds[T](): + union_type = ConstraintSet.range(Base | Other, T, object) + # revealed: ty_extensions.ConstraintSet[(Base | Other ≤ T@lower_bounds)] + reveal_type(union_type) + + intersection_constraint = ConstraintSet.range(Base, T, object) & ConstraintSet.range(Other, T, object) + # revealed: ty_extensions.ConstraintSet[(Base | Other ≤ T@lower_bounds)] + reveal_type(intersection_constraint) + + # The two constraint sets are equivalent; each satisfies the other. + static_assert(union_type.satisfies(intersection_constraint)) + static_assert(intersection_constraint.satisfies(union_type)) +``` + ### Intersection of a range and a negated range The bounds of the range constraint provide a range of types that should be included; the bounds of @@ -335,7 +379,7 @@ def _[T]() -> None: reveal_type(~ConstraintSet.range(Sub, T, Super) & ~ConstraintSet.range(Sub, T, Super)) ``` -Otherwise, the union cannot be simplified. +Otherwise, the intersection cannot be simplified. ```py def _[T]() -> None: @@ -350,13 +394,14 @@ def _[T]() -> None: In particular, the following does not simplify, even though it seems like it could simplify to `¬(SubSub ≤ T@_ ≤ Super)`. The issue is that there are types that are within the bounds of `SubSub ≤ T@_ ≤ Super`, but which are not comparable to `Base` or `Sub`, and which therefore should -be included in the union. An example would be the type that contains all instances of `Super`, -`Base`, and `SubSub` (but _not_ including instances of `Sub`). (We don't have a way to spell that -type at the moment, but it is a valid type.) 
That type is not in `SubSub ≤ T ≤ Base`, since it -includes `Super`, which is outside the range. It's also not in `Sub ≤ T ≤ Super`, because it does -not include `Sub`. That means it should be in the union. (Remember that for negated range -constraints, the lower and upper bounds define the "hole" of types that are _not_ allowed.) Since -that type _is_ in `SubSub ≤ T ≤ Super`, it is not correct to simplify the union in this way. +be included in the intersection. An example would be the type that contains all instances of +`Super`, `Base`, and `SubSub` (but _not_ including instances of `Sub`). (We don't have a way to +spell that type at the moment, but it is a valid type.) That type is not in `SubSub ≤ T ≤ Base`, +since it includes `Super`, which is outside the range. It's also not in `Sub ≤ T ≤ Super`, because +it does not include `Sub`. That means it should be in the intersection. (Remember that for negated +range constraints, the lower and upper bounds define the "hole" of types that are _not_ allowed.) +Since that type _is_ in `SubSub ≤ T ≤ Super`, it is not correct to simplify the intersection in this +way. ```py def _[T]() -> None: @@ -441,6 +486,65 @@ def _[T]() -> None: reveal_type(ConstraintSet.range(SubSub, T, Base) | ConstraintSet.range(Sub, T, Super)) ``` +The union of two upper bound constraints (`(T ≤ Base) ∨ (T ≤ Other)`) is different than the single +range constraint involving the corresponding union type (`T ≤ Base | Other`). There are types (such +as `T = Base | Other`) that satisfy the union type, but not the union constraint. But every type +that satisfies the union constraint satisfies the union type. + +```py +from typing import Never +from ty_extensions import static_assert + +# This is not final, so it's possible for a subclass to inherit from both Base and Other. +class Other: ... 
+ +def union[T](): + union_type = ConstraintSet.range(Never, T, Base | Other) + # revealed: ty_extensions.ConstraintSet[(T@union ≤ Base | Other)] + reveal_type(union_type) + + union_constraint = ConstraintSet.range(Never, T, Base) | ConstraintSet.range(Never, T, Other) + # revealed: ty_extensions.ConstraintSet[(T@union ≤ Base) ∨ (T@union ≤ Other)] + reveal_type(union_constraint) + + # (T = Base | Other) satisfies (T ≤ Base | Other) but not (T ≤ Base ∨ T ≤ Other) + specialization = ConstraintSet.range(Base | Other, T, Base | Other) + # revealed: ty_extensions.ConstraintSet[(T@union = Base | Other)] + reveal_type(specialization) + static_assert(specialization.satisfies(union_type)) + static_assert(not specialization.satisfies(union_constraint)) + + # Every specialization that satisfies (T ≤ Base ∨ T ≤ Other) also satisfies + # (T ≤ Base | Other) + static_assert(union_constraint.satisfies(union_type)) +``` + +These relationships are reversed for unions involving lower bounds. `T = Base` is an example that +satisfies the union constraint (`(Base ≤ T) ∨ (Other ≤ T)`) but not the union type +(`Base | Other ≤ T`). And every type that satisfies the union type satisfies the union constraint. 
+ +```py +def union[T](): + union_type = ConstraintSet.range(Base | Other, T, object) + # revealed: ty_extensions.ConstraintSet[(Base | Other ≤ T@union)] + reveal_type(union_type) + + union_constraint = ConstraintSet.range(Base, T, object) | ConstraintSet.range(Other, T, object) + # revealed: ty_extensions.ConstraintSet[(Base ≤ T@union) ∨ (Other ≤ T@union)] + reveal_type(union_constraint) + + # (T = Base) satisfies (Base ≤ T ∨ Other ≤ T) but not (Base | Other ≤ T) + specialization = ConstraintSet.range(Base, T, Base) + # revealed: ty_extensions.ConstraintSet[(T@union = Base)] + reveal_type(specialization) + static_assert(not specialization.satisfies(union_type)) + static_assert(specialization.satisfies(union_constraint)) + + # Every specialization that satisfies (Base | Other ≤ T) also satisfies + # (Base ≤ T ∨ Other ≤ T) + static_assert(union_type.satisfies(union_constraint)) +``` + ### Union of a range and a negated range The bounds of the range constraint provide a range of types that should be included; the bounds of @@ -729,3 +833,52 @@ def f[T](): # revealed: ty_extensions.ConstraintSet[(T@f ≤ int | str)] reveal_type(ConstraintSet.range(Never, T, int | str)) ``` + +### Constraints on the same typevar + +Any particular specialization maps each typevar to one type. That means it's not useful to constrain +a typevar with itself as an upper or lower bound. No matter what type the typevar is specialized to, +that type is always a subtype of itself. (Remember that typevars are only specialized to fully +static types.) 
+ +```py +from typing import Never +from ty_extensions import ConstraintSet + +def same_typevar[T](): + # revealed: ty_extensions.ConstraintSet[always] + reveal_type(ConstraintSet.range(Never, T, T)) + # revealed: ty_extensions.ConstraintSet[always] + reveal_type(ConstraintSet.range(T, T, object)) + # revealed: ty_extensions.ConstraintSet[always] + reveal_type(ConstraintSet.range(T, T, T)) +``` + +This is also true when the typevar appears in a union in the upper bound, or in an intersection in +the lower bound. (Note that this lines up with how we simplify the intersection of two constraints, +as shown above.) + +```py +from ty_extensions import Intersection + +def same_typevar[T](): + # revealed: ty_extensions.ConstraintSet[always] + reveal_type(ConstraintSet.range(Never, T, T | None)) + # revealed: ty_extensions.ConstraintSet[always] + reveal_type(ConstraintSet.range(Intersection[T, None], T, object)) + # revealed: ty_extensions.ConstraintSet[always] + reveal_type(ConstraintSet.range(Intersection[T, None], T, T | None)) +``` + +Similarly, if the lower bound is an intersection containing the _negation_ of the typevar, then the +constraint set can never be satisfied, since every type is disjoint with its negation. + +```py +from ty_extensions import Not + +def same_typevar[T](): + # revealed: ty_extensions.ConstraintSet[never] + reveal_type(ConstraintSet.range(Intersection[Not[T], None], T, object)) + # revealed: ty_extensions.ConstraintSet[never] + reveal_type(ConstraintSet.range(Not[T], T, object)) +``` diff --git a/crates/ty_python_semantic/resources/mdtest/type_properties/is_assignable_to.md b/crates/ty_python_semantic/resources/mdtest/type_properties/is_assignable_to.md index a39b6a6f16..1386a9e158 100644 --- a/crates/ty_python_semantic/resources/mdtest/type_properties/is_assignable_to.md +++ b/crates/ty_python_semantic/resources/mdtest/type_properties/is_assignable_to.md @@ -1171,9 +1171,7 @@ class EggsLegacy(Generic[T, P]): ... 
static_assert(not is_assignable_to(Spam, Callable[..., Any])) static_assert(not is_assignable_to(SpamLegacy, Callable[..., Any])) static_assert(not is_assignable_to(Eggs, Callable[..., Any])) - -# TODO: should pass -static_assert(not is_assignable_to(EggsLegacy, Callable[..., Any])) # error: [static-assert-error] +static_assert(not is_assignable_to(EggsLegacy, Callable[..., Any])) ``` ### Classes with `__call__` as attribute diff --git a/crates/ty_python_semantic/resources/mdtest/type_properties/is_disjoint_from.md b/crates/ty_python_semantic/resources/mdtest/type_properties/is_disjoint_from.md index d80a2b5b82..db4b0f5f98 100644 --- a/crates/ty_python_semantic/resources/mdtest/type_properties/is_disjoint_from.md +++ b/crates/ty_python_semantic/resources/mdtest/type_properties/is_disjoint_from.md @@ -45,7 +45,7 @@ class C(B1, B2): ... # ... which lies in their intersection: static_assert(is_subtype_of(C, Intersection[B1, B2])) -# However, if a class is marked final, it can not be subclassed ... +# However, if a class is marked final, it cannot be subclassed ... @final class FinalSubclass(A): ... @@ -87,6 +87,31 @@ static_assert(is_disjoint_from(memoryview, Foo)) static_assert(is_disjoint_from(type[memoryview], type[Foo])) ``` +## Specialized `@final` types + +```toml +[environment] +python-version = "3.12" +``` + +```py +from typing import final +from ty_extensions import static_assert, is_disjoint_from + +@final +class Foo[T]: + def get(self) -> T: + raise NotImplementedError + +class A: ... +class B: ... + +static_assert(not is_disjoint_from(Foo[A], Foo[B])) + +# TODO: `int` and `str` are disjoint bases, so these should be disjoint. +static_assert(not is_disjoint_from(Foo[int], Foo[str])) +``` + ## "Disjoint base" builtin types Most other builtins can be subclassed and can even be used in multiple inheritance. 
However, builtin diff --git a/crates/ty_python_semantic/resources/mdtest/type_properties/is_equivalent_to.md b/crates/ty_python_semantic/resources/mdtest/type_properties/is_equivalent_to.md index 41c7f562bb..fe846ee213 100644 --- a/crates/ty_python_semantic/resources/mdtest/type_properties/is_equivalent_to.md +++ b/crates/ty_python_semantic/resources/mdtest/type_properties/is_equivalent_to.md @@ -607,23 +607,33 @@ module: `module2.py`: ```py -import importlib -import importlib.abc +import imported +import imported.abc +``` + +`imported/__init__.pyi`: + +```pyi +``` + +`imported/abc.pyi`: + +```pyi ``` `main2.py`: ```py -import importlib -from module2 import importlib as other_importlib +import imported +from module2 import imported as other_imported from ty_extensions import TypeOf, static_assert, is_equivalent_to -# error: [unresolved-attribute] "Module `importlib` has no member `abc`" -reveal_type(importlib.abc) # revealed: Unknown +# error: [unresolved-attribute] "Module `imported` has no member `abc`" +reveal_type(imported.abc) # revealed: Unknown -reveal_type(other_importlib.abc) # revealed: +reveal_type(other_imported.abc) # revealed: -static_assert(not is_equivalent_to(TypeOf[importlib], TypeOf[other_importlib])) +static_assert(not is_equivalent_to(TypeOf[imported], TypeOf[other_imported])) ``` [materializations]: https://typing.python.org/en/latest/spec/glossary.html#term-materialize diff --git a/crates/ty_python_semantic/resources/mdtest/type_properties/satisfied_by_all_typevars.md b/crates/ty_python_semantic/resources/mdtest/type_properties/satisfied_by_all_typevars.md new file mode 100644 index 0000000000..865cfa8395 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/type_properties/satisfied_by_all_typevars.md @@ -0,0 +1,482 @@ +# Constraint set satisfaction + +```toml +[environment] +python-version = "3.12" +``` + +Constraint sets exist to help us check assignability and subtyping of types in the presence of +typevars. 
We construct a constraint set describing the conditions under which assignability holds +between the two types. Then we check whether that constraint set is satisfied for the valid +specializations of the relevant typevars. This file tests that final step. + +## Inferable vs non-inferable typevars + +Typevars can appear in _inferable_ or _non-inferable_ positions. + +When a typevar is in an inferable position, the constraint set only needs to be satisfied for _some_ +valid specialization. The most common inferable position occurs when invoking a generic function: +all of the function's typevars are inferable, because we want to use the argument types to infer +which specialization is being invoked. + +When a typevar is in a non-inferable position, the constraint set must be satisfied for _every_ +valid specialization. The most common non-inferable position occurs in the body of a generic +function or class: here we don't know in advance what type the typevar will be specialized to, and +so we have to ensure that the body is valid for all possible specializations. + +```py +def f[T](t: T) -> T: + # In the function body, T is non-inferable. All assignability checks involving T must be + # satisfied for _all_ valid specializations of T. + return t + +# When invoking the function, T is inferable — we attempt to infer a specialization that is valid +# for the particular arguments that are passed to the function. Assignability checks (in particular, +# that the argument type is assignable to the parameter type) only need to succeed for _at least +# one_ specialization. +f(1) +``` + +In all of the examples below, for ease of reproducibility, we explicitly list the typevars that are +inferable in each `satisfied_by_all_typevars` call; any typevar not listed is assumed to be +non-inferable. + +## Unbounded typevar + +If a typevar has no bound or constraints, then it can specialize to any type. 
In an inferable +position, that means we just need a single type (any type at all!) that satisfies the constraint +set. In a non-inferable position, that means the constraint set must be satisfied for every possible +type. + +```py +from typing import final, Never +from ty_extensions import ConstraintSet, static_assert + +class Super: ... +class Base(Super): ... +class Sub(Base): ... + +@final +class Unrelated: ... + +def unbounded[T](): + static_assert(ConstraintSet.always().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(ConstraintSet.always().satisfied_by_all_typevars()) + + static_assert(not ConstraintSet.never().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(not ConstraintSet.never().satisfied_by_all_typevars()) + + # (T = Never) is a valid specialization, which satisfies (T ≤ Unrelated). + static_assert(ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Base) is a valid specialization, which does not satisfy (T ≤ Unrelated). + static_assert(not ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars()) + + # (T = Base) is a valid specialization, which satisfies (T ≤ Super). + static_assert(ConstraintSet.range(Never, T, Super).satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Unrelated) is a valid specialization, which does not satisfy (T ≤ Super). + static_assert(not ConstraintSet.range(Never, T, Super).satisfied_by_all_typevars()) + + # (T = Base) is a valid specialization, which satisfies (T ≤ Base). + static_assert(ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Unrelated) is a valid specialization, which does not satisfy (T ≤ Base). + static_assert(not ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars()) + + # (T = Sub) is a valid specialization, which satisfies (T ≤ Sub). 
+ static_assert(ConstraintSet.range(Never, T, Sub).satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Unrelated) is a valid specialization, which does not satisfy (T ≤ Sub). + static_assert(not ConstraintSet.range(Never, T, Sub).satisfied_by_all_typevars()) +``` + +## Typevar with an upper bound + +If a typevar has an upper bound, then it must specialize to a type that is a subtype of that bound. +For an inferable typevar, that means we need a single type that satisfies both the constraint set +and the upper bound. For a non-inferable typevar, that means the constraint set must be satisfied +for every type that satisfies the upper bound. + +```py +from typing import final, Never +from ty_extensions import ConstraintSet, static_assert + +class Super: ... +class Base(Super): ... +class Sub(Base): ... + +@final +class Unrelated: ... + +def bounded[T: Base](): + static_assert(ConstraintSet.always().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(ConstraintSet.always().satisfied_by_all_typevars()) + + static_assert(not ConstraintSet.never().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(not ConstraintSet.never().satisfied_by_all_typevars()) + + # (T = Base) is a valid specialization, which satisfies (T ≤ Super). + static_assert(ConstraintSet.range(Never, T, Super).satisfied_by_all_typevars(inferable=tuple[T])) + # Every valid specialization satisfies (T ≤ Base). Since (Base ≤ Super), every valid + # specialization also satisfies (T ≤ Super). + static_assert(ConstraintSet.range(Never, T, Super).satisfied_by_all_typevars()) + + # (T = Base) is a valid specialization, which satisfies (T ≤ Base). + static_assert(ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars(inferable=tuple[T])) + # Every valid specialization satisfies (T ≤ Base). + static_assert(ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars()) + + # (T = Sub) is a valid specialization, which satisfies (T ≤ Sub). 
+ static_assert(ConstraintSet.range(Never, T, Sub).satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Base) is a valid specialization, which does not satisfy (T ≤ Sub). + static_assert(not ConstraintSet.range(Never, T, Sub).satisfied_by_all_typevars()) + + # (T = Never) is a valid specialization, which satisfies (T ≤ Unrelated). + constraints = ConstraintSet.range(Never, T, Unrelated) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Base) is a valid specialization, which does not satisfy (T ≤ Unrelated). + static_assert(not constraints.satisfied_by_all_typevars()) + + # Never is the only type that satisfies both (T ≤ Base) and (T ≤ Unrelated). So there is no + # valid specialization that satisfies (T ≤ Unrelated ∧ T ≠ Never). + constraints = constraints & ~ConstraintSet.range(Never, T, Never) + static_assert(not constraints.satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(not constraints.satisfied_by_all_typevars()) +``` + +If the upper bound is a gradual type, we are free to choose any materialization of the upper bound +that makes the test succeed. In non-inferable positions, it is most helpful to choose the bottom +materialization as the upper bound. That is the most restrictive possible choice, which minimizes +the number of valid specializations that must satisfy the constraint set. In inferable positions, +the opposite is true: it is most helpful to choose the top materialization. That is the most +permissive possible choice, which maximizes the number of valid specializations that might satisfy +the constraint set. 
+ +```py +from typing import Any + +def bounded_by_gradual[T: Any](): + static_assert(ConstraintSet.always().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(ConstraintSet.always().satisfied_by_all_typevars()) + + static_assert(not ConstraintSet.never().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(not ConstraintSet.never().satisfied_by_all_typevars()) + + # If we choose Base as the materialization for the upper bound, then (T = Base) is a valid + # specialization, which satisfies (T ≤ Base). + static_assert(ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars(inferable=tuple[T])) + # We are free to choose any materialization of the upper bound, and only have to show that the + # constraint set holds for that one materialization. Having chosen one materialization, we then + # have to show that the constraint set holds for all valid specializations of that + # materialization. If we choose Never as the materialization, then all valid specializations + # must satisfy (T ≤ Never). That means there is only one valid specialization, (T = Never), + # which satisfies (T ≤ Base). + static_assert(ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars()) + + # If we choose Unrelated as the materialization, then (T = Unrelated) is a valid specialization, + # which satisfies (T ≤ Unrelated). + constraints = ConstraintSet.range(Never, T, Unrelated) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + # If we choose Never as the materialization, then (T = Never) is the only valid specialization, + # which satisfies (T ≤ Unrelated). + static_assert(constraints.satisfied_by_all_typevars()) + + # If we choose Unrelated as the materialization, then (T = Unrelated) is a valid specialization, + # which satisfies (T ≤ Unrelated ∧ T ≠ Never). 
+ constraints = constraints & ~ConstraintSet.range(Never, T, Never) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + # There is no upper bound that we can choose to satisfy this constraint set in non-inferable + # position. (T = Never) will be a valid assignment no matter what, and that does not satisfy + # (T ≤ Unrelated ∧ T ≠ Never). + static_assert(not constraints.satisfied_by_all_typevars()) +``` + +When the upper bound is a more complex gradual type, we are still free to choose any materialization +that causes the check to succeed, and we will still choose the bottom materialization in +non-inferable position, and the top materialization in inferable position. The variance of the +typevar does not affect whether there is a materialization we can choose. Below, we test the most +restrictive variance (i.e., invariance), but we get the same results for other variances as well. + +```py +def bounded_by_gradual[T: list[Any]](): + static_assert(ConstraintSet.always().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(ConstraintSet.always().satisfied_by_all_typevars()) + + static_assert(not ConstraintSet.never().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(not ConstraintSet.never().satisfied_by_all_typevars()) + + # If we choose list[Base] as the materialization of the upper bound, then (T = list[Base]) is a + # valid specialization, which satisfies (T ≤ list[Base]). + static_assert(ConstraintSet.range(Never, T, list[Base]).satisfied_by_all_typevars(inferable=tuple[T])) + # If we choose Base as the materialization, then all valid specializations must satisfy + # (T ≤ list[Base]). + # We are free to choose any materialization of the upper bound, and only have to show that the + # constraint set holds for that one materialization. Having chosen one materialization, we then + # have to show that the constraint set holds for all valid specializations of that + # materialization. 
If we choose list[Base] as the materialization, then all valid specializations + # must satisfy (T ≤ list[Base]), which is exactly the constraint set that we need to satisfy. + static_assert(ConstraintSet.range(Never, T, list[Base]).satisfied_by_all_typevars()) + + # If we choose Unrelated as the materialization, then (T = list[Unrelated]) is a valid + # specialization, which satisfies (T ≤ list[Unrelated]). + constraints = ConstraintSet.range(Never, T, list[Unrelated]) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + # If we choose Unrelated as the materialization, then all valid specializations must satisfy + # (T ≤ list[Unrelated]). + static_assert(constraints.satisfied_by_all_typevars()) + + # If we choose Unrelated as the materialization, then (T = list[Unrelated]) is a valid + # specialization, which satisfies (T ≤ list[Unrelated] ∧ T ≠ Never). + constraints = constraints & ~ConstraintSet.range(Never, T, Never) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + # There is no upper bound that we can choose to satisfy this constraint set in non-inferable + # position. (T = Never) will be a valid assignment no matter what, and that does not satisfy + # (T ≤ list[Unrelated] ∧ T ≠ Never). + static_assert(not constraints.satisfied_by_all_typevars()) +``` + +## Constrained typevar + +If a typevar has constraints, then it must specialize to one of those specific types. (Not to a +subtype of one of those types!) For an inferable typevar, that means we need the constraint set to +be satisfied by any one of the constraints. For a non-inferable typevar, that means we need the +constraint set to be satisfied by all of those constraints. + +```py +from typing import final, Never +from ty_extensions import ConstraintSet, static_assert + +class Super: ... +class Base(Super): ... +class Sub(Base): ... + +@final +class Unrelated: ... 
+ +def constrained[T: (Base, Unrelated)](): + static_assert(ConstraintSet.always().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(ConstraintSet.always().satisfied_by_all_typevars()) + + static_assert(not ConstraintSet.never().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(not ConstraintSet.never().satisfied_by_all_typevars()) + + # (T = Unrelated) is a valid specialization, which satisfies (T ≤ Unrelated). + static_assert(ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Base) is a valid specialization, which does not satisfy (T ≤ Unrelated). + static_assert(not ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars()) + + # (T = Base) is a valid specialization, which satisfies (T ≤ Super). + static_assert(ConstraintSet.range(Never, T, Super).satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Unrelated) is a valid specialization, which does not satisfy (T ≤ Super). + static_assert(not ConstraintSet.range(Never, T, Super).satisfied_by_all_typevars()) + + # (T = Base) is a valid specialization, which satisfies (T ≤ Base). + static_assert(ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Unrelated) is a valid specialization, which does not satisfy (T ≤ Base). + static_assert(not ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars()) + + # Neither (T = Base) nor (T = Unrelated) satisfy (T ≤ Sub). + static_assert(not ConstraintSet.range(Never, T, Sub).satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(not ConstraintSet.range(Never, T, Sub).satisfied_by_all_typevars()) + + # (T = Base) and (T = Unrelated) both satisfy (T ≤ Super ∨ T ≤ Unrelated). 
+ constraints = ConstraintSet.range(Never, T, Super) | ConstraintSet.range(Never, T, Unrelated) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(constraints.satisfied_by_all_typevars()) + + # (T = Base) and (T = Unrelated) both satisfy (T ≤ Base ∨ T ≤ Unrelated). + constraints = ConstraintSet.range(Never, T, Base) | ConstraintSet.range(Never, T, Unrelated) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(constraints.satisfied_by_all_typevars()) + + # (T = Unrelated) is a valid specialization, which satisfies (T ≤ Sub ∨ T ≤ Unrelated). + constraints = ConstraintSet.range(Never, T, Sub) | ConstraintSet.range(Never, T, Unrelated) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Base) is a valid specialization, which does not satisfy (T ≤ Sub ∨ T ≤ Unrelated). + static_assert(not constraints.satisfied_by_all_typevars()) + + # (T = Unrelated) is a valid specialization, which satisfies (T = Super ∨ T = Unrelated). + constraints = ConstraintSet.range(Super, T, Super) | ConstraintSet.range(Unrelated, T, Unrelated) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Base) is a valid specialization, which does not satisfy (T = Super ∨ T = Unrelated). + static_assert(not constraints.satisfied_by_all_typevars()) + + # (T = Base) and (T = Unrelated) both satisfy (T = Base ∨ T = Unrelated). + constraints = ConstraintSet.range(Base, T, Base) | ConstraintSet.range(Unrelated, T, Unrelated) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(constraints.satisfied_by_all_typevars()) + + # (T = Unrelated) is a valid specialization, which satisfies (T = Sub ∨ T = Unrelated). 
+ constraints = ConstraintSet.range(Sub, T, Sub) | ConstraintSet.range(Unrelated, T, Unrelated) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Base) is a valid specialization, which does not satisfy (T = Sub ∨ T = Unrelated). + static_assert(not constraints.satisfied_by_all_typevars()) +``` + +If any of the constraints is a gradual type, we are free to choose any materialization of that +constraint that makes the test succeed. In non-inferable positions, it is most helpful to choose the +bottom materialization as the constraint. That is the most restrictive possible choice, which +minimizes the number of valid specializations that must satisfy the constraint set. In inferable +positions, the opposite is true: it is most helpful to choose the top materialization. That is the +most permissive possible choice, which maximizes the number of valid specializations that might +satisfy the constraint set. + +```py +from typing import Any + +def constrained_by_gradual[T: (Base, Any)](): + static_assert(ConstraintSet.always().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(ConstraintSet.always().satisfied_by_all_typevars()) + + static_assert(not ConstraintSet.never().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(not ConstraintSet.never().satisfied_by_all_typevars()) + + # If we choose Unrelated as the materialization of the gradual constraint, then (T = Unrelated) + # is a valid specialization, which satisfies (T ≤ Unrelated). + static_assert(ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars(inferable=tuple[T])) + # No matter which materialization we choose, (T = Base) is a valid specialization, which does + # not satisfy (T ≤ Unrelated). + static_assert(not ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars()) + + # If we choose Super as the materialization, then (T = Super) is a valid specialization, which + # satisfies (T ≤ Super). 
+ static_assert(ConstraintSet.range(Never, T, Super).satisfied_by_all_typevars(inferable=tuple[T])) + # If we choose Never as the materialization, then (T = Base) and (T = Never) are the only valid + # specializations, both of which satisfy (T ≤ Super). + static_assert(ConstraintSet.range(Never, T, Super).satisfied_by_all_typevars()) + + # If we choose Base as the materialization, then (T = Base) is a valid specialization, which + # satisfies (T ≤ Base). + static_assert(ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars(inferable=tuple[T])) + # If we choose Never as the materialization, then (T = Base) and (T = Never) are the only valid + # specializations, both of which satisfy (T ≤ Base). + static_assert(ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars()) + +def constrained_by_two_gradual[T: (Any, Any)](): + static_assert(ConstraintSet.always().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(ConstraintSet.always().satisfied_by_all_typevars()) + + static_assert(not ConstraintSet.never().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(not ConstraintSet.never().satisfied_by_all_typevars()) + + # If we choose Unrelated as the materialization of either constraint, then (T = Unrelated) is a + # valid specialization, which satisfies (T ≤ Unrelated). + static_assert(ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars(inferable=tuple[T])) + # If we choose Unrelated as the materialization of both constraints, then (T = Unrelated) is the + # only valid specialization, which satisfies (T ≤ Unrelated). + static_assert(ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars()) + + # If we choose Base as the materialization of either constraint, then (T = Base) is a valid + # specialization, which satisfies (T ≤ Base). 
+ static_assert(ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars(inferable=tuple[T])) + # If we choose Never as the materialization of both constraints, then (T = Never) is the only + # valid specialization, which satisfies (T ≤ Base). + static_assert(ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars()) +``` + +When a constraint is a more complex gradual type, we are still free to choose any materialization +that causes the check to succeed, and we will still choose the bottom materialization in +non-inferable position, and the top materialization in inferable position. The variance of the +typevar does not affect whether there is a materialization we can choose. Below, we test the most +restrictive variance (i.e., invariance), but we get the same results for other variances as well. + +```py +def constrained_by_gradual[T: (list[Base], list[Any])](): + static_assert(ConstraintSet.always().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(ConstraintSet.always().satisfied_by_all_typevars()) + + static_assert(not ConstraintSet.never().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(not ConstraintSet.never().satisfied_by_all_typevars()) + + # No matter which materialization we choose, every valid specialization will be of the form + # (T = list[X]). Because Unrelated is final, it is disjoint from all lists. There is therefore + # no materialization or specialization that satisfies (T ≤ Unrelated). + static_assert(not ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(not ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars()) + + # If we choose list[Super] as the materialization, then (T = list[Super]) is a valid + # specialization, which satisfies (T ≤ list[Super]). 
+ static_assert(ConstraintSet.range(Never, T, list[Super]).satisfied_by_all_typevars(inferable=tuple[T])) + # No matter which materialization we choose, (T = list[Base]) is a valid specialization, which + # does not satisfy (T ≤ list[Super]). + static_assert(not ConstraintSet.range(Never, T, list[Super]).satisfied_by_all_typevars()) + + # If we choose list[Base] as the materialization, then (T = list[Base]) is a valid + # specialization, which satisfies (T ≤ list[Base]). + static_assert(ConstraintSet.range(Never, T, list[Base]).satisfied_by_all_typevars(inferable=tuple[T])) + # If we choose list[Base] as the materialization, then all valid specializations must satisfy + # (T ≤ list[Base]). + static_assert(ConstraintSet.range(Never, T, list[Base]).satisfied_by_all_typevars()) + + # If we choose list[Sub] as the materialization, then (T = list[Sub]) is a valid specialization, + # which satisfies (T ≤ list[Sub]). + static_assert(ConstraintSet.range(Never, T, list[Sub]).satisfied_by_all_typevars(inferable=tuple[T])) + # No matter which materialization we choose, (T = list[Base]) is a valid specialization, which + # does not satisfy (T ≤ list[Sub]). + static_assert(not ConstraintSet.range(Never, T, list[Sub]).satisfied_by_all_typevars()) + + # If we choose list[Unrelated] as the materialization, then (T = list[Unrelated]) is a valid + # specialization, which satisfies (T ≤ list[Unrelated]). + constraints = ConstraintSet.range(Never, T, list[Unrelated]) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + # No matter which materialization we choose, (T = list[Base]) is a valid specialization, which + # does not satisfy (T ≤ list[Unrelated]). + static_assert(not constraints.satisfied_by_all_typevars()) + + # If we choose list[Unrelated] as the materialization, then (T = list[Unrelated]) is a valid + # specialization, which satisfies (T ≤ list[Unrelated] ∧ T ≠ Never). 
+ constraints = constraints & ~ConstraintSet.range(Never, T, Never) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + # There is no materialization that we can choose to satisfy this constraint set in non-inferable + # position. (T = Never) will be a valid assignment no matter what, and that does not satisfy + # (T ≤ list[Unrelated] ∧ T ≠ Never). + static_assert(not constraints.satisfied_by_all_typevars()) + +def constrained_by_two_gradual[T: (list[Any], list[Any])](): + static_assert(ConstraintSet.always().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(ConstraintSet.always().satisfied_by_all_typevars()) + + static_assert(not ConstraintSet.never().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(not ConstraintSet.never().satisfied_by_all_typevars()) + + # No matter which materialization we choose, every valid specialization will be of the form + # (T = list[X]). Because Unrelated is final, it is disjoint from all lists. There is therefore + # no materialization or specialization that satisfies (T ≤ Unrelated). + static_assert(not ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(not ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars()) + + # If we choose list[Super] as the materialization, then (T = list[Super]) is a valid + # specialization, which satisfies (T ≤ list[Super]). + static_assert(ConstraintSet.range(Never, T, list[Super]).satisfied_by_all_typevars(inferable=tuple[T])) + # No matter which materialization we choose, (T = list[Base]) is a valid specialization, which + # does not satisfy (T ≤ list[Super]). + static_assert(ConstraintSet.range(Never, T, list[Super]).satisfied_by_all_typevars()) + + # If we choose list[Base] as the materialization, then (T = list[Base]) is a valid + # specialization, which satisfies (T ≤ list[Base]). 
+ static_assert(ConstraintSet.range(Never, T, list[Base]).satisfied_by_all_typevars(inferable=tuple[T])) + # If we choose list[Base] as the materialization, then all valid specializations must satisfy + # (T ≤ list[Base]). + static_assert(ConstraintSet.range(Never, T, list[Base]).satisfied_by_all_typevars()) + + # If we choose list[Sub] as the materialization, then (T = list[Sub]) is a valid specialization, + # which satisfies (T ≤ list[Sub]). + static_assert(ConstraintSet.range(Never, T, list[Sub]).satisfied_by_all_typevars(inferable=tuple[T])) + # No matter which materialization we choose, (T = list[Base]) is a valid specialization, which + # does not satisfy (T ≤ list[Sub]). + static_assert(ConstraintSet.range(Never, T, list[Sub]).satisfied_by_all_typevars()) + + # If we choose list[Unrelated] as the materialization, then (T = list[Unrelated]) is a valid + # specialization, which satisfies (T ≤ list[Unrelated]). + constraints = ConstraintSet.range(Never, T, list[Unrelated]) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + # No matter which materialization we choose, (T = list[Base]) is a valid specialization, which + # does not satisfy (T ≤ list[Unrelated]). + static_assert(constraints.satisfied_by_all_typevars()) + + # If we choose list[Unrelated] as the materialization, then (T = list[Unrelated]) is a valid + # specialization, which satisfies (T ≤ list[Unrelated] ∧ T ≠ Never). + constraints = constraints & ~ConstraintSet.range(Never, T, Never) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + # There is no materialization that we can choose to satisfy this constraint set in non-inferable + # position. (T = Never) will be a valid assignment no matter what, and that does not satisfy + # (T ≤ list[Unrelated] ∧ T ≠ Never). 
+ static_assert(constraints.satisfied_by_all_typevars()) +``` diff --git a/crates/ty_python_semantic/resources/mdtest/type_qualifiers/final.md b/crates/ty_python_semantic/resources/mdtest/type_qualifiers/final.md index 29e3d72ec3..1c42ded723 100644 --- a/crates/ty_python_semantic/resources/mdtest/type_qualifiers/final.md +++ b/crates/ty_python_semantic/resources/mdtest/type_qualifiers/final.md @@ -88,8 +88,6 @@ class C: self.FINAL_C: Final[int] = 1 self.FINAL_D: Final = 1 self.FINAL_E: Final - # TODO: Should not be an error - # error: [invalid-assignment] "Cannot assign to final attribute `FINAL_E` on type `Self@__init__`" self.FINAL_E = 1 reveal_type(C.FINAL_A) # revealed: int @@ -186,7 +184,6 @@ class C(metaclass=Meta): self.INSTANCE_FINAL_A: Final[int] = 1 self.INSTANCE_FINAL_B: Final = 1 self.INSTANCE_FINAL_C: Final[int] - # error: [invalid-assignment] "Cannot assign to final attribute `INSTANCE_FINAL_C` on type `Self@__init__`" self.INSTANCE_FINAL_C = 1 # error: [invalid-assignment] "Cannot assign to final attribute `META_FINAL_A` on type ``" @@ -282,8 +279,6 @@ class C: def __init__(self): self.LEGAL_H: Final[int] = 1 self.LEGAL_I: Final[int] - # TODO: Should not be an error - # error: [invalid-assignment] self.LEGAL_I = 1 # error: [invalid-type-form] "`Final` is not allowed in function parameter annotations" @@ -392,15 +387,142 @@ class C: # TODO: This should be an error NO_ASSIGNMENT_B: Final[int] - # This is okay. `DEFINED_IN_INIT` is defined in `__init__`. DEFINED_IN_INIT: Final[int] def __init__(self): - # TODO: should not be an error - # error: [invalid-assignment] self.DEFINED_IN_INIT = 1 ``` +## Final attributes with Self annotation in `__init__` + +Issue #1409: Final instance attributes should be assignable in `__init__` even when using `Self` +type annotation. 
+ +```toml +[environment] +python-version = "3.11" +``` + +```py +from typing import Final, Self + +class ClassA: + ID4: Final[int] # OK because initialized in __init__ + + def __init__(self: Self): + self.ID4 = 1 # Should be OK + + def other_method(self: Self): + # error: [invalid-assignment] "Cannot assign to final attribute `ID4` on type `Self@other_method`" + self.ID4 = 2 # Should still error outside __init__ + +class ClassB: + ID5: Final[int] + + def __init__(self): # Without Self annotation + self.ID5 = 1 # Should also be OK + +reveal_type(ClassA().ID4) # revealed: int +reveal_type(ClassB().ID5) # revealed: int +``` + +## Reassignment to Final in `__init__` + +Per PEP 591 and the typing conformance suite, Final attributes can be assigned in `__init__`. +Multiple assignments within `__init__` are allowed (matching mypy and pyright behavior). However, +assignment in `__init__` is not allowed if the attribute already has a value at class level. + +```py +from typing import Final + +# Case 1: Declared in class, assigned once in __init__ - ALLOWED +class DeclaredAssignedInInit: + attr1: Final[int] + + def __init__(self): + self.attr1 = 1 # OK: First and only assignment + +# Case 2: Declared and assigned in class body - ALLOWED (no __init__ assignment) +class DeclaredAndAssignedInClass: + attr2: Final[int] = 10 + +# Case 3: Reassignment when already assigned in class body +class ReassignmentFromClass: + attr3: Final[int] = 10 + + def __init__(self): + # error: [invalid-assignment] + self.attr3 = 20 # Error: already assigned in class body + +# Case 4: Multiple assignments within __init__ itself +# Per conformance suite and PEP 591, all assignments in __init__ are allowed +class MultipleAssignmentsInInit: + attr4: Final[int] + + def __init__(self): + self.attr4 = 1 # OK: Assignment in __init__ + self.attr4 = 2 # OK: Multiple assignments in __init__ are allowed + +class ConditionalAssignment: + X: Final[int] + + def __init__(self, cond: bool): + if cond: + self.X = 42 
# OK: Assignment in __init__ + else: + self.X = 56 # OK: Multiple assignments in __init__ are allowed + +# Case 5: Declaration and assignment in __init__ - ALLOWED +class DeclareAndAssignInInit: + def __init__(self): + self.attr5: Final[int] = 1 # OK: Declare and assign in __init__ + +# Case 6: Assignment outside __init__ should still fail +class AssignmentOutsideInit: + attr6: Final[int] + + def other_method(self): + # error: [invalid-assignment] "Cannot assign to final attribute `attr6`" + self.attr6 = 1 # Error: Not in __init__ +``` + +## Final assignment restrictions in `__init__` + +`__init__` can only assign Final attributes on the class it's defining, and only to the first +parameter (`self`). + +```py +from typing import Final + +class C: + x: Final[int] = 100 + +# Assignment from standalone function (even named __init__) +def _(c: C): + # error: [invalid-assignment] "Cannot assign to final attribute `x`" + c.x = 1 # Error: Not in C.__init__ + +def __init__(c: C): + # error: [invalid-assignment] "Cannot assign to final attribute `x`" + c.x = 1 # Error: Not a method + +# Assignment from another class's __init__ +class A: + def __init__(self, c: C): + # error: [invalid-assignment] "Cannot assign to final attribute `x`" + c.x = 1 # Error: Not C's __init__ + +# Assignment to non-self parameter in __init__ +class D: + y: Final[int] + + def __init__(self, other: "D"): + self.y = 1 # OK: Assigning to self + # TODO: Should error - assigning to non-self parameter + # Requires tracking which parameter the base expression refers to + other.y = 2 +``` + ## Full diagnostics diff --git a/crates/ty_python_semantic/resources/mdtest/typed_dict.md b/crates/ty_python_semantic/resources/mdtest/typed_dict.md index d810a79efe..422711b4c1 100644 --- a/crates/ty_python_semantic/resources/mdtest/typed_dict.md +++ b/crates/ty_python_semantic/resources/mdtest/typed_dict.md @@ -29,7 +29,7 @@ alice: Person = {"name": "Alice", "age": 30} reveal_type(alice["name"]) # revealed: str 
reveal_type(alice["age"]) # revealed: int | None -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "non_existing"" +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "non_existing"" reveal_type(alice["non_existing"]) # revealed: Unknown ``` @@ -41,7 +41,7 @@ bob = Person(name="Bob", age=25) reveal_type(bob["name"]) # revealed: str reveal_type(bob["age"]) # revealed: int | None -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "non_existing"" +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "non_existing"" reveal_type(bob["non_existing"]) # revealed: Unknown ``` @@ -69,7 +69,7 @@ def name_or_age() -> Literal["name", "age"]: carol: Person = {NAME: "Carol", AGE: 20} reveal_type(carol[NAME]) # revealed: str -# error: [invalid-key] "TypedDict `Person` cannot be indexed with a key of type `str`" +# error: [invalid-key] "Invalid key of type `str` for TypedDict `Person`" reveal_type(carol[non_literal()]) # revealed: Unknown reveal_type(carol[name_or_age()]) # revealed: str | int | None @@ -81,7 +81,7 @@ def _(): CAPITALIZED_NAME = "Name" -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "Name" - did you mean "name"?" +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "Name" - did you mean "name"?" 
# error: [missing-typed-dict-key] "Missing required key 'name' in TypedDict `Person` constructor" dave: Person = {CAPITALIZED_NAME: "Dave", "age": 20} @@ -96,18 +96,30 @@ The construction of a `TypedDict` is checked for type correctness: ```py # error: [invalid-argument-type] "Invalid argument to key "name" with declared type `str` on TypedDict `Person`" eve1a: Person = {"name": b"Eve", "age": None} + # error: [invalid-argument-type] "Invalid argument to key "name" with declared type `str` on TypedDict `Person`" eve1b = Person(name=b"Eve", age=None) +reveal_type(eve1a) # revealed: Person +reveal_type(eve1b) # revealed: Person + # error: [missing-typed-dict-key] "Missing required key 'name' in TypedDict `Person` constructor" eve2a: Person = {"age": 22} + # error: [missing-typed-dict-key] "Missing required key 'name' in TypedDict `Person` constructor" eve2b = Person(age=22) -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "extra"" +reveal_type(eve2a) # revealed: Person +reveal_type(eve2b) # revealed: Person + +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" eve3a: Person = {"name": "Eve", "age": 25, "extra": True} -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "extra"" + +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" eve3b = Person(name="Eve", age=25, extra=True) + +reveal_type(eve3a) # revealed: Person +reveal_type(eve3b) # revealed: Person ``` Also, the value types ​​declared in a `TypedDict` affect generic call inference: @@ -157,10 +169,10 @@ bob["name"] = None Assignments to non-existing keys are disallowed: ```py -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "extra"" +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" alice["extra"] = True -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "extra"" +# error: [invalid-key] "Invalid key for 
TypedDict `Person`: Unknown key "extra"" bob["extra"] = True ``` @@ -185,10 +197,10 @@ alice: Person = {"inner": {"name": "Alice", "age": 30}} reveal_type(alice["inner"]["name"]) # revealed: str reveal_type(alice["inner"]["age"]) # revealed: int | None -# error: [invalid-key] "Invalid key access on TypedDict `Inner`: Unknown key "non_existing"" +# error: [invalid-key] "Invalid key for TypedDict `Inner`: Unknown key "non_existing"" reveal_type(alice["inner"]["non_existing"]) # revealed: Unknown -# error: [invalid-key] "Invalid key access on TypedDict `Inner`: Unknown key "extra"" +# error: [invalid-key] "Invalid key for TypedDict `Inner`: Unknown key "extra"" alice: Person = {"inner": {"name": "Alice", "age": 30, "extra": 1}} ``` @@ -226,15 +238,19 @@ All of these are missing the required `age` field: ```py # error: [missing-typed-dict-key] "Missing required key 'age' in TypedDict `Person` constructor" alice2: Person = {"name": "Alice"} + # error: [missing-typed-dict-key] "Missing required key 'age' in TypedDict `Person` constructor" Person(name="Alice") + # error: [missing-typed-dict-key] "Missing required key 'age' in TypedDict `Person` constructor" Person({"name": "Alice"}) # error: [missing-typed-dict-key] "Missing required key 'age' in TypedDict `Person` constructor" +# error: [invalid-argument-type] accepts_person({"name": "Alice"}) -# TODO: this should be an error, similar to the above +# TODO: this should be an invalid-key error, similar to the above +# error: [invalid-assignment] house.owner = {"name": "Alice"} a_person: Person @@ -247,19 +263,25 @@ All of these have an invalid type for the `name` field: ```py # error: [invalid-argument-type] "Invalid argument to key "name" with declared type `str` on TypedDict `Person`: value of type `None`" alice3: Person = {"name": None, "age": 30} + # error: [invalid-argument-type] "Invalid argument to key "name" with declared type `str` on TypedDict `Person`: value of type `None`" Person(name=None, age=30) + # error: 
[invalid-argument-type] "Invalid argument to key "name" with declared type `str` on TypedDict `Person`: value of type `None`" Person({"name": None, "age": 30}) # error: [invalid-argument-type] "Invalid argument to key "name" with declared type `str` on TypedDict `Person`: value of type `None`" +# error: [invalid-argument-type] accepts_person({"name": None, "age": 30}) -# TODO: this should be an error, similar to the above + +# TODO: this should be an invalid-key error +# error: [invalid-assignment] house.owner = {"name": None, "age": 30} a_person: Person # error: [invalid-argument-type] "Invalid argument to key "name" with declared type `str` on TypedDict `Person`: value of type `None`" a_person = {"name": None, "age": 30} + # error: [invalid-argument-type] "Invalid argument to key "name" with declared type `str` on TypedDict `Person`: value of type `None`" (a_person := {"name": None, "age": 30}) ``` @@ -267,22 +289,28 @@ a_person = {"name": None, "age": 30} All of these have an extra field that is not defined in the `TypedDict`: ```py -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "extra"" +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" alice4: Person = {"name": "Alice", "age": 30, "extra": True} -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "extra"" + +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" Person(name="Alice", age=30, extra=True) -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "extra"" + +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" Person({"name": "Alice", "age": 30, "extra": True}) -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "extra"" +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" +# error: [invalid-argument-type] accepts_person({"name": "Alice", "age": 30, "extra": True}) -# TODO: this 
should be an error + +# TODO: this should be an invalid-key error +# error: [invalid-assignment] house.owner = {"name": "Alice", "age": 30, "extra": True} a_person: Person -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "extra"" +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" a_person = {"name": "Alice", "age": 30, "extra": True} -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "extra"" + +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" (a_person := {"name": "Alice", "age": 30, "extra": True}) ``` @@ -323,7 +351,7 @@ user2 = User({"name": "Bob"}) # error: [invalid-argument-type] "Invalid argument to key "name" with declared type `str` on TypedDict `User`: value of type `None`" user3 = User({"name": None, "age": 25}) -# error: [invalid-key] "Invalid key access on TypedDict `User`: Unknown key "extra"" +# error: [invalid-key] "Invalid key for TypedDict `User`: Unknown key "extra"" user4 = User({"name": "Charlie", "age": 30, "extra": True}) ``` @@ -360,7 +388,7 @@ invalid = OptionalPerson(name=123) Extra fields are still not allowed, even with `total=False`: ```py -# error: [invalid-key] "Invalid key access on TypedDict `OptionalPerson`: Unknown key "extra"" +# error: [invalid-key] "Invalid key for TypedDict `OptionalPerson`: Unknown key "extra"" invalid_extra = OptionalPerson(name="George", extra=True) ``` @@ -478,6 +506,15 @@ dangerous(alice) reveal_type(alice["name"]) # revealed: str ``` +Likewise, `dict`s are not assignable to typed dictionaries: + +```py +alice: dict[str, str] = {"name": "Alice"} + +# error: [invalid-assignment] "Object of type `dict[str, str]` is not assignable to `Person`" +alice: Person = alice +``` + ## Key-based access ### Reading @@ -489,10 +526,20 @@ class Person(TypedDict): name: str age: int | None +class Animal(TypedDict): + name: str + NAME_FINAL: Final = "name" AGE_FINAL: Final[Literal["age"]] = "age" -def 
_(person: Person, literal_key: Literal["age"], union_of_keys: Literal["age", "name"], str_key: str, unknown_key: Any) -> None: +def _( + person: Person, + being: Person | Animal, + literal_key: Literal["age"], + union_of_keys: Literal["age", "name"], + str_key: str, + unknown_key: Any, +) -> None: reveal_type(person["name"]) # revealed: str reveal_type(person["age"]) # revealed: int | None @@ -503,26 +550,38 @@ def _(person: Person, literal_key: Literal["age"], union_of_keys: Literal["age", reveal_type(person[union_of_keys]) # revealed: int | None | str - # error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "non_existing"" + # error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "non_existing"" reveal_type(person["non_existing"]) # revealed: Unknown - # error: [invalid-key] "TypedDict `Person` cannot be indexed with a key of type `str`" + # error: [invalid-key] "Invalid key of type `str` for TypedDict `Person`" reveal_type(person[str_key]) # revealed: Unknown # No error here: reveal_type(person[unknown_key]) # revealed: Unknown + + reveal_type(being["name"]) # revealed: str + + # TODO: A type of `int | None | Unknown` might be better here. The `str` is mixed in + # because `Animal.__getitem__` can only return `str`. + # error: [invalid-key] "Invalid key for TypedDict `Animal`" + reveal_type(being["age"]) # revealed: int | None | str ``` ### Writing ```py from typing_extensions import TypedDict, Final, Literal, LiteralString, Any +from ty_extensions import Intersection class Person(TypedDict): name: str surname: str age: int | None +class Animal(TypedDict): + name: str + legs: int + NAME_FINAL: Final = "name" AGE_FINAL: Final[Literal["age"]] = "age" @@ -530,7 +589,7 @@ def _(person: Person): person["name"] = "Alice" person["age"] = 30 - # error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "naem" - did you mean "name"?" 
+ # error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "naem" - did you mean "name"?" person["naem"] = "Alice" def _(person: Person): @@ -543,13 +602,32 @@ def _(person: Person, literal_key: Literal["age"]): def _(person: Person, union_of_keys: Literal["name", "surname"]): person[union_of_keys] = "unknown" - # error: [invalid-assignment] "Cannot assign value of type `Literal[1]` to key of type `Literal["name", "surname"]` on TypedDict `Person`" + # error: [invalid-assignment] "Invalid assignment to key "name" with declared type `str` on TypedDict `Person`: value of type `Literal[1]`" + # error: [invalid-assignment] "Invalid assignment to key "surname" with declared type `str` on TypedDict `Person`: value of type `Literal[1]`" person[union_of_keys] = 1 +def _(being: Person | Animal): + being["name"] = "Being" + + # error: [invalid-assignment] "Invalid assignment to key "name" with declared type `str` on TypedDict `Person`: value of type `Literal[1]`" + # error: [invalid-assignment] "Invalid assignment to key "name" with declared type `str` on TypedDict `Animal`: value of type `Literal[1]`" + being["name"] = 1 + + # error: [invalid-key] "Invalid key for TypedDict `Animal`: Unknown key "surname" - did you mean "name"?" 
+ being["surname"] = "unknown" + +def _(centaur: Intersection[Person, Animal]): + centaur["name"] = "Chiron" + centaur["age"] = 100 + centaur["legs"] = 4 + + # error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "unknown"" + centaur["unknown"] = "value" + def _(person: Person, union_of_keys: Literal["name", "age"], unknown_value: Any): person[union_of_keys] = unknown_value - # error: [invalid-assignment] "Cannot assign value of type `None` to key of type `Literal["name", "age"]` on TypedDict `Person`" + # error: [invalid-assignment] "Invalid assignment to key "name" with declared type `str` on TypedDict `Person`: value of type `None`" person[union_of_keys] = None def _(person: Person, str_key: str, literalstr_key: LiteralString): @@ -646,7 +724,7 @@ def _(p: Person) -> None: reveal_type(p.setdefault("name", "Alice")) # revealed: str reveal_type(p.setdefault("extra", "default")) # revealed: str - # error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "extraz" - did you mean "extra"?" + # error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extraz" - did you mean "extra"?" reveal_type(p.setdefault("extraz", "value")) # revealed: Unknown ``` @@ -668,7 +746,7 @@ def _(p: Person) -> None: reveal_type(p.__class__) # revealed: ``` -Also, the "attributes" on the class definition can not be accessed. Neither on the class itself, nor +Also, the "attributes" on the class definition cannot be accessed. 
Neither on the class itself, nor on inhabitants of the type defined by the class: ```py @@ -702,7 +780,7 @@ reveal_type(Person.__required_keys__) # revealed: frozenset[str] reveal_type(Person.__optional_keys__) # revealed: frozenset[str] ``` -These attributes can not be accessed on inhabitants: +These attributes cannot be accessed on inhabitants: ```py def _(person: Person) -> None: @@ -711,7 +789,7 @@ def _(person: Person) -> None: person.__optional_keys__ # error: [unresolved-attribute] ``` -Also, they can not be accessed on `type(person)`, as that would be `dict` at runtime: +Also, they cannot be accessed on `type(person)`, as that would be `dict` at runtime: ```py def _(person: Person) -> None: @@ -965,7 +1043,7 @@ class Person(TypedDict): name: str age: int | None -# TODO: this should be an error +# error: [invalid-assignment] "Object of type `MyDict` is not assignable to `Person`" x: Person = MyDict({"name": "Alice", "age": 30}) ``` @@ -1015,6 +1093,13 @@ def write_to_non_existing_key(person: Person): def write_to_non_literal_string_key(person: Person, str_key: str): person[str_key] = "Alice" # error: [invalid-key] + +def create_with_invalid_string_key(): + # error: [invalid-key] + alice: Person = {"name": "Alice", "age": 30, "unknown": "Foo"} + + # error: [invalid-key] + bob = Person(name="Bob", age=25, unknown="Bar") ``` Assignment to `ReadOnly` keys: diff --git a/crates/ty_python_semantic/resources/mdtest/unreachable.md b/crates/ty_python_semantic/resources/mdtest/unreachable.md index 7321ed9b01..73e174f6a1 100644 --- a/crates/ty_python_semantic/resources/mdtest/unreachable.md +++ b/crates/ty_python_semantic/resources/mdtest/unreachable.md @@ -187,8 +187,8 @@ python-platform = "all" If `python-platform` is set to `all`, we treat the platform as unspecified. This means that we do not infer a literal type like `Literal["win32"]` for `sys.platform`, but instead fall back to -`LiteralString` (the `typeshed` annotation for `sys.platform`). 
This means that we can not -statically determine the truthiness of a branch like `sys.platform == "win32"`. +`LiteralString` (the `typeshed` annotation for `sys.platform`). This means that we cannot statically +determine the truthiness of a branch like `sys.platform == "win32"`. See for a plan on how this could be improved. diff --git a/crates/ty_python_semantic/src/ast_node_ref.rs b/crates/ty_python_semantic/src/ast_node_ref.rs index ed28bc396b..14916ec807 100644 --- a/crates/ty_python_semantic/src/ast_node_ref.rs +++ b/crates/ty_python_semantic/src/ast_node_ref.rs @@ -85,6 +85,7 @@ where /// /// This method may panic or produce unspecified results if the provided module is from a /// different file or Salsa revision than the module to which the node belongs. + #[track_caller] pub fn node<'ast>(&self, module_ref: &'ast ParsedModuleRef) -> &'ast T { #[cfg(debug_assertions)] assert_eq!(module_ref.module().addr(), self.module_addr); diff --git a/crates/ty_python_semantic/src/diagnostic.rs b/crates/ty_python_semantic/src/diagnostic.rs index 5936d0874d..f58c90191c 100644 --- a/crates/ty_python_semantic/src/diagnostic.rs +++ b/crates/ty_python_semantic/src/diagnostic.rs @@ -88,10 +88,10 @@ pub fn add_inferred_python_version_hint_to_diagnostic( or in a configuration file", ); } - crate::PythonVersionSource::PythonVSCodeExtension => { + crate::PythonVersionSource::Editor => { diagnostic.info(format_args!( "Python {version} was assumed when {action} \ - because it's the version of the selected Python interpreter in the VS Code Python extension", + because it's the version of the selected Python interpreter in your editor", )); } crate::PythonVersionSource::InstallationDirectoryLayout { diff --git a/crates/ty_python_semantic/src/module_name.rs b/crates/ty_python_semantic/src/module_name.rs index e1aa4509bd..b257d0a6df 100644 --- a/crates/ty_python_semantic/src/module_name.rs +++ b/crates/ty_python_semantic/src/module_name.rs @@ -295,6 +295,7 @@ impl ModuleName { 
Self::from_identifier_parts(db, importing_file, module.as_deref(), *level) } + /// Computes the absolute module name from the LHS components of `from LHS import RHS` pub(crate) fn from_identifier_parts( db: &dyn Db, importing_file: File, @@ -309,6 +310,16 @@ impl ModuleName { .ok_or(ModuleNameResolutionError::InvalidSyntax) } } + + /// Computes the absolute module name for the package this file belongs to. + /// + /// i.e. this resolves `.` + pub(crate) fn package_for_file( + db: &dyn Db, + importing_file: File, + ) -> Result { + Self::from_identifier_parts(db, importing_file, None, 1) + } } impl Deref for ModuleName { diff --git a/crates/ty_python_semantic/src/module_resolver/resolver.rs b/crates/ty_python_semantic/src/module_resolver/resolver.rs index 0787859049..349d685862 100644 --- a/crates/ty_python_semantic/src/module_resolver/resolver.rs +++ b/crates/ty_python_semantic/src/module_resolver/resolver.rs @@ -452,15 +452,12 @@ pub(crate) fn dynamic_resolution_paths<'db>( let site_packages_dir = site_packages_search_path .as_system_path() .expect("Expected site package path to be a system path"); - let site_packages_dir = system - .canonicalize_path(site_packages_dir) - .unwrap_or_else(|_| site_packages_dir.to_path_buf()); - if !existing_paths.insert(Cow::Owned(site_packages_dir.clone())) { + if !existing_paths.insert(Cow::Borrowed(site_packages_dir)) { continue; } - let site_packages_root = files.expect_root(db, &site_packages_dir); + let site_packages_root = files.expect_root(db, site_packages_dir); // This query needs to be re-executed each time a `.pth` file // is added, modified or removed from the `site-packages` directory. @@ -477,7 +474,7 @@ pub(crate) fn dynamic_resolution_paths<'db>( // containing a (relative or absolute) path. // Each of these paths may point to an editable install of a package, // so should be considered an additional search path. 
- let pth_file_iterator = match PthFileIterator::new(db, &site_packages_dir) { + let pth_file_iterator = match PthFileIterator::new(db, site_packages_dir) { Ok(iterator) => iterator, Err(error) => { tracing::warn!( diff --git a/crates/ty_python_semantic/src/place.rs b/crates/ty_python_semantic/src/place.rs index 3989942b04..c0b3428345 100644 --- a/crates/ty_python_semantic/src/place.rs +++ b/crates/ty_python_semantic/src/place.rs @@ -733,7 +733,7 @@ pub(crate) fn place_by_id<'db>( }; // If a symbol is undeclared, but qualified with `typing.Final`, we use the right-hand side - // inferred type, without unioning with `Unknown`, because it can not be modified. + // inferred type, without unioning with `Unknown`, because it cannot be modified. if let Some(qualifiers) = declared.is_bare_final() { let bindings = all_considered_bindings(); return place_from_bindings_impl(db, bindings, requires_explicit_reexport) diff --git a/crates/ty_python_semantic/src/program.rs b/crates/ty_python_semantic/src/program.rs index 8f06527951..1a977de985 100644 --- a/crates/ty_python_semantic/src/program.rs +++ b/crates/ty_python_semantic/src/program.rs @@ -113,8 +113,11 @@ pub enum PythonVersionSource { /// long argument (`--extra-paths`) or `--config key=value`. Cli, - /// The value comes from the Python VS Code extension (the selected interpreter). - PythonVSCodeExtension, + /// The value comes from the user's editor, + /// while it's left open if specified as a setting + /// or if the value was auto-discovered by the editor + /// (e.g., the Python environment) + Editor, /// We fell back to a default value because the value was not specified via the CLI or a config file. 
#[default] diff --git a/crates/ty_python_semantic/src/python_platform.rs b/crates/ty_python_semantic/src/python_platform.rs index b21424ee33..04f7fa3598 100644 --- a/crates/ty_python_semantic/src/python_platform.rs +++ b/crates/ty_python_semantic/src/python_platform.rs @@ -24,7 +24,7 @@ impl From for PythonPlatform { fn from(platform: String) -> Self { match platform.as_str() { "all" => PythonPlatform::All, - _ => PythonPlatform::Identifier(platform.to_string()), + _ => PythonPlatform::Identifier(platform.clone()), } } } diff --git a/crates/ty_python_semantic/src/semantic_index.rs b/crates/ty_python_semantic/src/semantic_index.rs index 558243f59c..f4ab765f08 100644 --- a/crates/ty_python_semantic/src/semantic_index.rs +++ b/crates/ty_python_semantic/src/semantic_index.rs @@ -1,4 +1,4 @@ -use std::iter::FusedIterator; +use std::iter::{FusedIterator, once}; use std::sync::Arc; use ruff_db::files::File; @@ -75,15 +75,9 @@ pub(crate) fn place_table<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc(db: &'db dyn Db, file: File) -> Arc> { semantic_index(db, file).imported_modules.clone() @@ -154,29 +148,56 @@ pub(crate) fn attribute_declarations<'db, 's>( /// /// Only call this when doing type inference on the same file as `class_body_scope`, otherwise it /// introduces a direct dependency on that file's AST. -pub(crate) fn attribute_scopes<'db, 's>( +pub(crate) fn attribute_scopes<'db>( db: &'db dyn Db, class_body_scope: ScopeId<'db>, -) -> impl Iterator + use<'s, 'db> { +) -> impl Iterator + 'db { let file = class_body_scope.file(db); let index = semantic_index(db, file); let class_scope_id = class_body_scope.file_scope_id(db); + ChildrenIter::new(&index.scopes, class_scope_id) + .filter_map(move |(child_scope_id, scope)| { + let (function_scope_id, function_scope) = + if scope.node().scope_kind() == ScopeKind::TypeParams { + // This could be a generic method with a type-params scope. + // Go one level deeper to find the function scope. 
The first + // descendant is the (potential) function scope. + let function_scope_id = scope.descendants().start; + (function_scope_id, index.scope(function_scope_id)) + } else { + (child_scope_id, scope) + }; + function_scope.node().as_function()?; + Some(function_scope_id) + }) + .flat_map(move |func_id| { + // Add any descendent scope that is eager and have eager scopes between the scope + // and the method scope. Since attributes can be defined in this scope. + let nested = index.descendent_scopes(func_id).filter_map(move |(id, s)| { + let is_eager = s.kind().is_eager(); + let parents_are_eager = { + let mut all_parents_eager = true; + let mut current = Some(id); - ChildrenIter::new(&index.scopes, class_scope_id).filter_map(move |(child_scope_id, scope)| { - let (function_scope_id, function_scope) = - if scope.node().scope_kind() == ScopeKind::TypeParams { - // This could be a generic method with a type-params scope. - // Go one level deeper to find the function scope. The first - // descendant is the (potential) function scope. - let function_scope_id = scope.descendants().start; - (function_scope_id, index.scope(function_scope_id)) - } else { - (child_scope_id, scope) - }; + while let Some(scope_id) = current { + if scope_id == func_id { + break; + } + let scope = index.scope(scope_id); + if !scope.is_eager() { + all_parents_eager = false; + break; + } + current = scope.parent(); + } - function_scope.node().as_function()?; - Some(function_scope_id) - }) + all_parents_eager + }; + + (parents_are_eager && is_eager).then_some(id) + }); + once(func_id).chain(nested) + }) } /// Returns the module global scope of `file`. 
diff --git a/crates/ty_python_semantic/src/semantic_index/builder.rs b/crates/ty_python_semantic/src/semantic_index/builder.rs index 8107f9c122..cc1b1649fa 100644 --- a/crates/ty_python_semantic/src/semantic_index/builder.rs +++ b/crates/ty_python_semantic/src/semantic_index/builder.rs @@ -26,8 +26,8 @@ use crate::semantic_index::definition::{ AnnotatedAssignmentDefinitionNodeRef, AssignmentDefinitionNodeRef, ComprehensionDefinitionNodeRef, Definition, DefinitionCategory, DefinitionNodeKey, DefinitionNodeRef, Definitions, ExceptHandlerDefinitionNodeRef, ForStmtDefinitionNodeRef, - ImportDefinitionNodeRef, ImportFromDefinitionNodeRef, MatchPatternDefinitionNodeRef, - StarImportDefinitionNodeRef, WithItemDefinitionNodeRef, + ImportDefinitionNodeRef, ImportFromDefinitionNodeRef, ImportFromSubmoduleDefinitionNodeRef, + MatchPatternDefinitionNodeRef, StarImportDefinitionNodeRef, WithItemDefinitionNodeRef, }; use crate::semantic_index::expression::{Expression, ExpressionKind}; use crate::semantic_index::place::{PlaceExpr, PlaceTableBuilder, ScopedPlaceId}; @@ -111,6 +111,7 @@ pub(super) struct SemanticIndexBuilder<'db, 'ast> { definitions_by_node: FxHashMap>, expressions_by_node: FxHashMap>, imported_modules: FxHashSet, + seen_submodule_imports: FxHashSet, /// Hashset of all [`FileScopeId`]s that correspond to [generator functions]. /// /// [generator functions]: https://docs.python.org/3/glossary.html#term-generator @@ -148,6 +149,7 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> { definitions_by_node: FxHashMap::default(), expressions_by_node: FxHashMap::default(), + seen_submodule_imports: FxHashSet::default(), imported_modules: FxHashSet::default(), generator_functions: FxHashSet::default(), @@ -184,29 +186,34 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> { self.current_scope_info().file_scope_id } - /// Returns the scope ID of the surrounding class body scope if the current scope - /// is a method inside a class body. Returns `None` otherwise, e.g. 
if the current - /// scope is a function body outside of a class, or if the current scope is not a + /// Returns the scope ID of the current scope if the current scope + /// is a method inside a class body or an eagerly executed scope inside a method. + /// Returns `None` otherwise, e.g. if the current scope is a function body outside of a class, or if the current scope is not a /// function body. - fn is_method_of_class(&self) -> Option { - let mut scopes_rev = self.scope_stack.iter().rev(); + fn is_method_or_eagerly_executed_in_method(&self) -> Option { + let mut scopes_rev = self + .scope_stack + .iter() + .rev() + .skip_while(|scope| self.scopes[scope.file_scope_id].is_eager()); let current = scopes_rev.next()?; if self.scopes[current.file_scope_id].kind() != ScopeKind::Function { return None; } + let maybe_method = current.file_scope_id; let parent = scopes_rev.next()?; match self.scopes[parent.file_scope_id].kind() { - ScopeKind::Class => Some(parent.file_scope_id), + ScopeKind::Class => Some(maybe_method), ScopeKind::TypeParams => { // If the function is generic, the parent scope is an annotation scope. // In this case, we need to go up one level higher to find the class scope. let grandparent = scopes_rev.next()?; if self.scopes[grandparent.file_scope_id].kind() == ScopeKind::Class { - Some(grandparent.file_scope_id) + Some(maybe_method) } else { None } @@ -215,6 +222,32 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> { } } + /// Checks if a symbol name is bound in any intermediate eager scopes + /// between the current scope and the specified method scope. 
+ /// + fn is_symbol_bound_in_intermediate_eager_scopes( + &self, + symbol_name: &str, + method_scope_id: FileScopeId, + ) -> bool { + for scope_info in self.scope_stack.iter().rev() { + let scope_id = scope_info.file_scope_id; + + if scope_id == method_scope_id { + break; + } + + if let Some(symbol_id) = self.place_tables[scope_id].symbol_id(symbol_name) { + let symbol = self.place_tables[scope_id].symbol(symbol_id); + if symbol.is_bound() { + return true; + } + } + } + + false + } + /// Push a new loop, returning the outer loop, if any. fn push_loop(&mut self) -> Option { self.current_scope_info_mut() @@ -281,6 +314,9 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> { // Records snapshots of the place states visible from the current eager scope. fn record_eager_snapshots(&mut self, popped_scope_id: FileScopeId) { + let popped_scope = &self.scopes[popped_scope_id]; + let popped_scope_is_annotation_scope = popped_scope.kind().is_annotation(); + // If the scope that we just popped off is an eager scope, we need to "lock" our view of // which bindings reach each of the uses in the scope. Loop through each enclosing scope, // looking for any that bind each place. 
@@ -295,6 +331,7 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> { // ``` for enclosing_scope_info in self.scope_stack.iter().rev() { let enclosing_scope_id = enclosing_scope_info.file_scope_id; + let is_immediately_enclosing_scope = popped_scope.parent() == Some(enclosing_scope_id); let enclosing_scope_kind = self.scopes[enclosing_scope_id].kind(); let enclosing_place_table = &self.place_tables[enclosing_scope_id]; @@ -322,6 +359,7 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> { enclosing_place_id, enclosing_scope_kind, enclosing_place, + popped_scope_is_annotation_scope && is_immediately_enclosing_scope, ); self.enclosing_snapshots.insert(key, eager_snapshot); } @@ -396,6 +434,7 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> { enclosed_symbol_id.into(), enclosing_scope_kind, enclosing_place.into(), + false, ); self.enclosing_snapshots.insert(key, lazy_snapshot); } @@ -1447,6 +1486,53 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> { self.current_use_def_map_mut() .record_node_reachability(NodeKey::from_node(node)); + // If we see: + // + // * `from .x.y import z` (or `from whatever.thispackage.x.y`) + // * And we are in an `__init__.py(i)` (hereafter `thispackage`) + // * And this is the first time we've seen `from .x` in this module + // * And we're in the global scope + // + // We introduce a local definition `x = ` that occurs + // before the `z = ...` declaration the import introduces. This models the fact + // that the *first* time that you import 'thispackage.x' the python runtime creates + // `x` as a variable in the global scope of `thispackage`. + // + // This is not a perfect simulation of actual runtime behaviour for *various* + // reasons but it works well for most practical purposes. In particular it's nice + // that `x` can be freely overwritten, and that we don't assume that an import + // in one function is visible in another function. 
+ let mut is_self_import = false; + if self.file.is_package(self.db) + && let Ok(module_name) = ModuleName::from_identifier_parts( + self.db, + self.file, + node.module.as_deref(), + node.level, + ) + && let Ok(thispackage) = ModuleName::package_for_file(self.db, self.file) + { + // Record whether this is equivalent to `from . import ...` + is_self_import = module_name == thispackage; + + if node.module.is_some() + && let Some(relative_submodule) = module_name.relative_to(&thispackage) + && let Some(direct_submodule) = relative_submodule.components().next() + && !self.seen_submodule_imports.contains(direct_submodule) + && self.current_scope().is_global() + { + self.seen_submodule_imports + .insert(direct_submodule.to_owned()); + + let direct_submodule_name = Name::new(direct_submodule); + let symbol = self.add_symbol(direct_submodule_name); + self.add_definition( + symbol.into(), + ImportFromSubmoduleDefinitionNodeRef { node }, + ); + } + } + let mut found_star = false; for (alias_index, alias) in node.names.iter().enumerate() { if &alias.name == "*" { @@ -1553,9 +1639,13 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> { } let (symbol_name, is_reexported) = if let Some(asname) = &alias.asname { + // It's re-exported if it's `from ... import x as x` (&asname.id, asname.id == alias.name.id) } else { - (&alias.name.id, false) + // As a non-standard rule to handle stubs in the wild, we consider + // `from . 
import x` and `from whatever.thispackage import x` in an + // `__init__.pyi` to re-export `x` (as long as it wasn't renamed) + (&alias.name.id, is_self_import) }; // Look for imports `from __future__ import annotations`, ignore `as ...` @@ -1647,7 +1737,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> { self.visit_expr(&node.annotation); if let Some(value) = &node.value { self.visit_expr(value); - if self.is_method_of_class().is_some() { + if self.is_method_or_eagerly_executed_in_method().is_some() { // Record the right-hand side of the assignment as a standalone expression // if we're inside a method. This allows type inference to infer the type // of the value for annotated assignments like `self.CONSTANT: Final = 1`, @@ -2319,14 +2409,21 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> { | ast::Expr::Attribute(ast::ExprAttribute { ctx, .. }) | ast::Expr::Subscript(ast::ExprSubscript { ctx, .. }) => { if let Some(mut place_expr) = PlaceExpr::try_from_expr(expr) { - if self.is_method_of_class().is_some() { + if let Some(method_scope_id) = self.is_method_or_eagerly_executed_in_method() { if let PlaceExpr::Member(member) = &mut place_expr { if member.is_instance_attribute_candidate() { // We specifically mark attribute assignments to the first parameter of a method, // i.e. typically `self` or `cls`. - let accessed_object_refers_to_first_parameter = self - .current_first_parameter_name - .is_some_and(|first| member.symbol_name() == first); + // However, we must check that the symbol hasn't been shadowed by an intermediate + // scope (e.g., a comprehension variable: `for self in [...]`). 
+ let accessed_object_refers_to_first_parameter = + self.current_first_parameter_name.is_some_and(|first| { + member.symbol_name() == first + && !self.is_symbol_bound_in_intermediate_eager_scopes( + first, + method_scope_id, + ) + }); if accessed_object_refers_to_first_parameter { member.mark_instance_attribute(); @@ -2697,6 +2794,12 @@ impl SemanticSyntaxContext for SemanticIndexBuilder<'_, '_> { None } + // We handle the one syntax error that relies on this method (`NonlocalWithoutBinding`) directly + // in `TypeInferenceBuilder::infer_nonlocal_statement`, so this just returns `true`. + fn has_nonlocal_binding(&self, _name: &str) -> bool { + true + } + fn in_async_context(&self) -> bool { for scope_info in self.scope_stack.iter().rev() { let scope = &self.scopes[scope_info.file_scope_id]; diff --git a/crates/ty_python_semantic/src/semantic_index/definition.rs b/crates/ty_python_semantic/src/semantic_index/definition.rs index 81af22d314..70a6039fd1 100644 --- a/crates/ty_python_semantic/src/semantic_index/definition.rs +++ b/crates/ty_python_semantic/src/semantic_index/definition.rs @@ -209,6 +209,7 @@ impl<'db> DefinitionState<'db> { pub(crate) enum DefinitionNodeRef<'ast, 'db> { Import(ImportDefinitionNodeRef<'ast>), ImportFrom(ImportFromDefinitionNodeRef<'ast>), + ImportFromSubmodule(ImportFromSubmoduleDefinitionNodeRef<'ast>), ImportStar(StarImportDefinitionNodeRef<'ast>), For(ForStmtDefinitionNodeRef<'ast, 'db>), Function(&'ast ast::StmtFunctionDef), @@ -290,6 +291,12 @@ impl<'ast> From> for DefinitionNodeRef<'ast, ' } } +impl<'ast> From> for DefinitionNodeRef<'ast, '_> { + fn from(node_ref: ImportFromSubmoduleDefinitionNodeRef<'ast>) -> Self { + Self::ImportFromSubmodule(node_ref) + } +} + impl<'ast, 'db> From> for DefinitionNodeRef<'ast, 'db> { fn from(value: ForStmtDefinitionNodeRef<'ast, 'db>) -> Self { Self::For(value) @@ -357,7 +364,10 @@ pub(crate) struct ImportFromDefinitionNodeRef<'ast> { pub(crate) alias_index: usize, pub(crate) is_reexported: bool, 
} - +#[derive(Copy, Clone, Debug)] +pub(crate) struct ImportFromSubmoduleDefinitionNodeRef<'ast> { + pub(crate) node: &'ast ast::StmtImportFrom, +} #[derive(Copy, Clone, Debug)] pub(crate) struct AssignmentDefinitionNodeRef<'ast, 'db> { pub(crate) unpack: Option<(UnpackPosition, Unpack<'db>)>, @@ -427,7 +437,6 @@ impl<'db> DefinitionNodeRef<'_, 'db> { alias_index, is_reexported, }), - DefinitionNodeRef::ImportFrom(ImportFromDefinitionNodeRef { node, alias_index, @@ -437,6 +446,11 @@ impl<'db> DefinitionNodeRef<'_, 'db> { alias_index, is_reexported, }), + DefinitionNodeRef::ImportFromSubmodule(ImportFromSubmoduleDefinitionNodeRef { + node, + }) => DefinitionKind::ImportFromSubmodule(ImportFromSubmoduleDefinitionKind { + node: AstNodeRef::new(parsed, node), + }), DefinitionNodeRef::ImportStar(star_import) => { let StarImportDefinitionNodeRef { node, symbol_id } = star_import; DefinitionKind::StarImport(StarImportDefinitionKind { @@ -562,7 +576,7 @@ impl<'db> DefinitionNodeRef<'_, 'db> { alias_index, is_reexported: _, }) => (&node.names[alias_index]).into(), - + Self::ImportFromSubmodule(ImportFromSubmoduleDefinitionNodeRef { node }) => node.into(), // INVARIANT: for an invalid-syntax statement such as `from foo import *, bar, *`, // we only create a `StarImportDefinitionKind` for the *first* `*` alias in the names list. 
Self::ImportStar(StarImportDefinitionNodeRef { node, symbol_id: _ }) => node @@ -661,6 +675,7 @@ impl DefinitionCategory { pub enum DefinitionKind<'db> { Import(ImportDefinitionKind), ImportFrom(ImportFromDefinitionKind), + ImportFromSubmodule(ImportFromSubmoduleDefinitionKind), StarImport(StarImportDefinitionKind), Function(AstNodeRef), Class(AstNodeRef), @@ -687,6 +702,7 @@ impl DefinitionKind<'_> { match self { DefinitionKind::Import(import) => import.is_reexported(), DefinitionKind::ImportFrom(import) => import.is_reexported(), + DefinitionKind::ImportFromSubmodule(_) => false, _ => true, } } @@ -704,6 +720,7 @@ impl DefinitionKind<'_> { DefinitionKind::Import(_) | DefinitionKind::ImportFrom(_) | DefinitionKind::StarImport(_) + | DefinitionKind::ImportFromSubmodule(_) ) } @@ -719,6 +736,7 @@ impl DefinitionKind<'_> { match self { DefinitionKind::Import(import) => import.alias(module).range(), DefinitionKind::ImportFrom(import) => import.alias(module).range(), + DefinitionKind::ImportFromSubmodule(import) => import.import(module).range(), DefinitionKind::StarImport(import) => import.alias(module).range(), DefinitionKind::Function(function) => function.node(module).name.range(), DefinitionKind::Class(class) => class.node(module).name.range(), @@ -756,6 +774,7 @@ impl DefinitionKind<'_> { match self { DefinitionKind::Import(import) => import.alias(module).range(), DefinitionKind::ImportFrom(import) => import.alias(module).range(), + DefinitionKind::ImportFromSubmodule(import) => import.import(module).range(), DefinitionKind::StarImport(import) => import.import(module).range(), DefinitionKind::Function(function) => function.node(module).range(), DefinitionKind::Class(class) => class.node(module).range(), @@ -846,6 +865,7 @@ impl DefinitionKind<'_> { | DefinitionKind::Comprehension(_) | DefinitionKind::WithItem(_) | DefinitionKind::MatchPattern(_) + | DefinitionKind::ImportFromSubmodule(_) | DefinitionKind::ExceptHandler(_) => DefinitionCategory::Binding, } } @@ 
-991,6 +1011,16 @@ impl ImportFromDefinitionKind { self.is_reexported } } +#[derive(Clone, Debug, get_size2::GetSize)] +pub struct ImportFromSubmoduleDefinitionKind { + node: AstNodeRef, +} + +impl ImportFromSubmoduleDefinitionKind { + pub fn import<'ast>(&self, module: &'ast ParsedModuleRef) -> &'ast ast::StmtImportFrom { + self.node.node(module) + } +} #[derive(Clone, Debug, get_size2::GetSize)] pub struct AssignmentDefinitionKind<'db> { @@ -1004,7 +1034,7 @@ impl<'db> AssignmentDefinitionKind<'db> { self.target_kind } - pub(crate) fn value<'ast>(&self, module: &'ast ParsedModuleRef) -> &'ast ast::Expr { + pub fn value<'ast>(&self, module: &'ast ParsedModuleRef) -> &'ast ast::Expr { self.value.node(module) } @@ -1121,6 +1151,12 @@ impl From<&ast::Alias> for DefinitionNodeKey { } } +impl From<&ast::StmtImportFrom> for DefinitionNodeKey { + fn from(node: &ast::StmtImportFrom) -> Self { + Self(NodeKey::from_node(node)) + } +} + impl From<&ast::StmtFunctionDef> for DefinitionNodeKey { fn from(node: &ast::StmtFunctionDef) -> Self { Self(NodeKey::from_node(node)) diff --git a/crates/ty_python_semantic/src/semantic_index/reachability_constraints.rs b/crates/ty_python_semantic/src/semantic_index/reachability_constraints.rs index af3ef642e3..9e6d60668f 100644 --- a/crates/ty_python_semantic/src/semantic_index/reachability_constraints.rs +++ b/crates/ty_python_semantic/src/semantic_index/reachability_constraints.rs @@ -13,7 +13,7 @@ //! of `test`. When evaluating a constraint, there are three possible outcomes: always true, always //! false, or ambiguous. For a simple constraint like this, always-true and always-false correspond //! to the case in which we can infer that the type of `test` is `Literal[True]` or `Literal[False]`. -//! In any other case, like if the type of `test` is `bool` or `Unknown`, we can not statically +//! In any other case, like if the type of `test` is `bool` or `Unknown`, we cannot statically //! 
determine whether `test` is truthy or falsy, so the outcome would be "ambiguous". //! //! @@ -29,7 +29,7 @@ //! Here, we would accumulate a reachability constraint of `test1 AND test2`. We can statically //! determine that this position is *always* reachable only if both `test1` and `test2` are //! always true. On the other hand, we can statically determine that this position is *never* -//! reachable if *either* `test1` or `test2` is always false. In any other case, we can not +//! reachable if *either* `test1` or `test2` is always false. In any other case, we cannot //! determine whether this position is reachable or not, so the outcome is "ambiguous". This //! corresponds to a ternary *AND* operation in [Kleene] logic: //! @@ -60,7 +60,7 @@ //! The third branch ends in a terminal statement [^1]. When we merge control flow, we need to consider //! the reachability through either the first or the second branch. The current position is only //! *definitely* unreachable if both `test1` and `test2` are always false. It is definitely -//! reachable if *either* `test1` or `test2` is always true. In any other case, we can not statically +//! reachable if *either* `test1` or `test2` is always true. In any other case, we cannot statically //! determine whether it is reachable or not. This operation corresponds to a ternary *OR* operation: //! //! ```text @@ -91,7 +91,7 @@ //! ## Explicit ambiguity //! //! In some cases, we explicitly record an “ambiguous” constraint. We do this when branching on -//! something that we can not (or intentionally do not want to) analyze statically. `for` loops are +//! something that we cannot (or intentionally do not want to) analyze statically. `for` loops are //! one example: //! ```py //! 
def _(): @@ -771,8 +771,9 @@ impl ReachabilityConstraints { truthiness } PatternPredicateKind::Class(class_expr, kind) => { - let class_ty = - infer_expression_type(db, *class_expr, TypeContext::default()).to_instance(db); + let class_ty = infer_expression_type(db, *class_expr, TypeContext::default()) + .as_class_literal() + .map(|class| Type::instance(db, class.top_materialization(db))); class_ty.map_or(Truthiness::Ambiguous, |class_ty| { if subject_ty.is_subtype_of(db, class_ty) { diff --git a/crates/ty_python_semantic/src/semantic_index/use_def.rs b/crates/ty_python_semantic/src/semantic_index/use_def.rs index 39f3a1a8ec..05fa369521 100644 --- a/crates/ty_python_semantic/src/semantic_index/use_def.rs +++ b/crates/ty_python_semantic/src/semantic_index/use_def.rs @@ -233,7 +233,7 @@ //! have two live bindings of `x`: `x = 3` and `x = 4`. //! //! Another piece of information that the `UseDefMap` needs to provide are reachability constraints. -//! See [`reachability_constraints.rs`] for more details, in particular how they apply to bindings. +//! See `reachability_constraints.rs` for more details, in particular how they apply to bindings. //! //! The [`UseDefMapBuilder`] itself just exposes methods for taking a snapshot, resetting to a //! snapshot, and merging a snapshot into the current state. 
The logic using these methods lives in @@ -761,6 +761,7 @@ pub(crate) struct DeclarationsIterator<'map, 'db> { inner: LiveDeclarationsIterator<'map>, } +#[derive(Debug)] pub(crate) struct DeclarationWithConstraint<'db> { pub(crate) declaration: DefinitionState<'db>, pub(crate) reachability_constraint: ScopedReachabilityConstraintId, @@ -1186,17 +1187,21 @@ impl<'db> UseDefMapBuilder<'db> { pub(super) fn snapshot_enclosing_state( &mut self, enclosing_place: ScopedPlaceId, - scope: ScopeKind, + enclosing_scope: ScopeKind, enclosing_place_expr: PlaceExprRef, + is_parent_of_annotation_scope: bool, ) -> ScopedEnclosingSnapshotId { let bindings = match enclosing_place { ScopedPlaceId::Symbol(symbol) => self.symbol_states[symbol].bindings(), ScopedPlaceId::Member(member) => self.member_states[member].bindings(), }; - // Names bound in class scopes are never visible to nested scopes (but attributes/subscripts are visible), - // so we never need to save eager scope bindings in a class scope. - if (scope.is_class() && enclosing_place.is_symbol()) || !enclosing_place_expr.is_bound() { + let is_class_symbol = enclosing_scope.is_class() && enclosing_place.is_symbol(); + // Names bound in class scopes are never visible to nested scopes (but + // attributes/subscripts are visible), so we never need to save eager scope bindings in a + // class scope. There is one exception to this rule: annotation scopes can see names + // defined in an immediately-enclosing class scope. 
+ if (is_class_symbol && !is_parent_of_annotation_scope) || !enclosing_place_expr.is_bound() { self.enclosing_snapshots.push(EnclosingSnapshot::Constraint( bindings.unbound_narrowing_constraint(), )) diff --git a/crates/ty_python_semantic/src/site_packages.rs b/crates/ty_python_semantic/src/site_packages.rs index 9062228363..8418db6124 100644 --- a/crates/ty_python_semantic/src/site_packages.rs +++ b/crates/ty_python_semantic/src/site_packages.rs @@ -62,6 +62,15 @@ impl SitePackagesPaths { self.0.extend(other.0); } + /// Concatenate two instances of [`SitePackagesPaths`]. + #[must_use] + pub fn concatenate(mut self, other: Self) -> Self { + for path in other { + self.0.insert(path); + } + self + } + /// Tries to detect the version from the layout of the `site-packages` directory. pub fn python_version_from_layout(&self) -> Option { if cfg!(windows) { @@ -111,6 +120,12 @@ impl SitePackagesPaths { } } +impl fmt::Display for SitePackagesPaths { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_list().entries(self.0.iter()).finish() + } +} + impl From<[SystemPathBuf; N]> for SitePackagesPaths { fn from(paths: [SystemPathBuf; N]) -> Self { Self(IndexSet::from(paths)) @@ -246,6 +261,13 @@ impl PythonEnvironment { Self::System(env) => env.real_stdlib_directory(system), } } + + pub fn origin(&self) -> &SysPrefixPathOrigin { + match self { + Self::Virtual(env) => &env.root_path.origin, + Self::System(env) => &env.root_path.origin, + } + } } /// Enumeration of the subdirectories of `sys.prefix` that could contain a @@ -543,7 +565,7 @@ System site-packages will not be used for module resolution.", } tracing::debug!( - "Resolved site-packages directories for this virtual environment are: {site_packages_directories:?}" + "Resolved site-packages directories for this virtual environment are: {site_packages_directories}" ); Ok(site_packages_directories) } @@ -823,7 +845,7 @@ impl SystemEnvironment { )?; tracing::debug!( - "Resolved site-packages directories 
for this environment are: {site_packages_directories:?}" + "Resolved site-packages directories for this environment are: {site_packages_directories}" ); Ok(site_packages_directories) } @@ -1387,15 +1409,15 @@ impl SysPrefixPath { ) -> SitePackagesDiscoveryResult { let sys_prefix = if !origin.must_point_directly_to_sys_prefix() && system.is_file(unvalidated_path) - && unvalidated_path - .file_name() - .is_some_and(|name| name.starts_with("python")) - { - // It looks like they passed us a path to a Python executable, e.g. `.venv/bin/python3`. - // Try to figure out the `sys.prefix` value from the Python executable. + && unvalidated_path.file_name().is_some_and(|name| { + name.starts_with("python") + || name.eq_ignore_ascii_case(&format!("ty{}", std::env::consts::EXE_SUFFIX)) + }) { + // It looks like they passed us a path to an executable, e.g. `.venv/bin/python3`. Try + // to figure out the `sys.prefix` value from the Python executable. let sys_prefix = if cfg!(windows) { - // On Windows, the relative path to the Python executable from `sys.prefix` - // is different depending on whether it's a virtual environment or a system installation. + // On Windows, the relative path to the executable from `sys.prefix` is different + // depending on whether it's a virtual environment or a system installation. // System installations have their executable at `/python.exe`, // whereas virtual environments have their executable at `/Scripts/python.exe`. unvalidated_path.parent().and_then(|parent| { @@ -1567,8 +1589,8 @@ pub enum SysPrefixPathOrigin { ConfigFileSetting(Arc, Option), /// The `sys.prefix` path came from a `--python` CLI flag PythonCliFlag, - /// The selected interpreter in the VS Code's Python extension. - PythonVSCodeExtension, + /// The selected interpreter in the user's editor. 
+ Editor, /// The `sys.prefix` path came from the `VIRTUAL_ENV` environment variable VirtualEnvVar, /// The `sys.prefix` path came from the `CONDA_PREFIX` environment variable @@ -1580,6 +1602,8 @@ pub enum SysPrefixPathOrigin { /// A `.venv` directory was found in the current working directory, /// and the `sys.prefix` path is the path to that virtual environment. LocalVenv, + /// The `sys.prefix` path came from the environment ty is installed in. + SelfEnvironment, } impl SysPrefixPathOrigin { @@ -1590,9 +1614,16 @@ impl SysPrefixPathOrigin { Self::LocalVenv | Self::VirtualEnvVar => true, Self::ConfigFileSetting(..) | Self::PythonCliFlag - | Self::PythonVSCodeExtension + | Self::Editor | Self::DerivedFromPyvenvCfg | Self::CondaPrefixVar => false, + // It's not strictly true that the self environment must be virtual, e.g., ty could be + // installed in a system Python environment and users may expect us to respect + // dependencies installed alongside it. However, we're intentionally excluding support + // for this to start. Note a change here has downstream implications, i.e., we probably + // don't want the packages in a system environment to take precedence over those in a + // virtual environment and would need to reverse the ordering in that case. + Self::SelfEnvironment => true, } } @@ -1602,15 +1633,31 @@ impl SysPrefixPathOrigin { /// the `sys.prefix` directory, e.g. the `--python` CLI flag. pub(crate) const fn must_point_directly_to_sys_prefix(&self) -> bool { match self { - Self::PythonCliFlag | Self::ConfigFileSetting(..) | Self::PythonVSCodeExtension => { - false - } + Self::PythonCliFlag + | Self::ConfigFileSetting(..) + | Self::Editor + | Self::SelfEnvironment => false, Self::VirtualEnvVar | Self::CondaPrefixVar | Self::DerivedFromPyvenvCfg | Self::LocalVenv => true, } } + + /// Whether paths with this origin should allow combination with paths with a + /// [`SysPrefixPathOrigin::SelfEnvironment`] origin. 
+ pub const fn allows_concatenation_with_self_environment(&self) -> bool { + match self { + Self::SelfEnvironment + | Self::CondaPrefixVar + | Self::VirtualEnvVar + | Self::Editor + | Self::DerivedFromPyvenvCfg + | Self::ConfigFileSetting(..) + | Self::PythonCliFlag => false, + Self::LocalVenv => true, + } + } } impl std::fmt::Display for SysPrefixPathOrigin { @@ -1622,9 +1669,8 @@ impl std::fmt::Display for SysPrefixPathOrigin { Self::CondaPrefixVar => f.write_str("`CONDA_PREFIX` environment variable"), Self::DerivedFromPyvenvCfg => f.write_str("derived `sys.prefix` path"), Self::LocalVenv => f.write_str("local virtual environment"), - Self::PythonVSCodeExtension => { - f.write_str("selected interpreter in the VS Code Python extension") - } + Self::Editor => f.write_str("selected interpreter in your editor"), + Self::SelfEnvironment => f.write_str("ty environment"), } } } @@ -2377,4 +2423,15 @@ mod tests { assert_eq!(&pyvenv_cfg[version.1], version.0); assert_eq!(parsed.implementation, PythonImplementation::PyPy); } + + #[test] + fn site_packages_paths_display() { + let paths = SitePackagesPaths::default(); + assert_eq!(paths.to_string(), "[]"); + + let mut paths = SitePackagesPaths::default(); + paths.insert(SystemPathBuf::from("/path/to/site/packages")); + + assert_eq!(paths.to_string(), r#"["/path/to/site/packages"]"#); + } } diff --git a/crates/ty_python_semantic/src/subscript.rs b/crates/ty_python_semantic/src/subscript.rs index b7ea13db10..b51a9e597b 100644 --- a/crates/ty_python_semantic/src/subscript.rs +++ b/crates/ty_python_semantic/src/subscript.rs @@ -27,7 +27,7 @@ fn from_negative_i32(index: i32) -> usize { static_assertions::const_assert!(usize::BITS >= 32); index.checked_neg().map(from_nonnegative_i32).unwrap_or({ - // 'checked_neg' only fails for i32::MIN. We can not + // 'checked_neg' only fails for i32::MIN. We cannot // represent -i32::MIN as a i32, but we can represent // it as a usize, since usize is at least 32 bits. 
from_nonnegative_i32(i32::MAX) + 1 diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs index 6b48499e9b..4284b15278 100644 --- a/crates/ty_python_semantic/src/types.rs +++ b/crates/ty_python_semantic/src/types.rs @@ -64,6 +64,7 @@ use crate::types::generics::{ use crate::types::infer::infer_unpack_types; use crate::types::mro::{Mro, MroError, MroIterator}; pub(crate) use crate::types::narrow::infer_narrowing_constraint; +use crate::types::newtype::NewType; use crate::types::signatures::{ParameterForm, walk_signature}; use crate::types::tuple::{TupleSpec, TupleSpecBuilder}; pub(crate) use crate::types::typed_dict::{TypedDictParams, TypedDictType, walk_typed_dict_type}; @@ -96,6 +97,7 @@ mod instance; mod member; mod mro; mod narrow; +mod newtype; mod protocol_class; mod signatures; mod special_form; @@ -295,7 +297,7 @@ impl AttributeKind { /// When invoked on a class object, the fallback type (a class attribute) can shadow a /// non-data descriptor of the meta-type (the class's metaclass). However, this is not /// true for instances. When invoked on an instance, the fallback type (an attribute on -/// the instance) can not completely shadow a non-data descriptor of the meta-type (the +/// the instance) cannot completely shadow a non-data descriptor of the meta-type (the /// class), because we do not currently attempt to statically infer if an instance /// attribute is definitely defined (i.e. to check whether a particular method has been /// called). @@ -781,6 +783,13 @@ pub enum Type<'db> { TypedDict(TypedDictType<'db>), /// An aliased type (lazily not-yet-unpacked to its value type). TypeAlias(TypeAliasType<'db>), + /// The set of Python objects that belong to a `typing.NewType` subtype. 
Note that + /// `typing.NewType` itself is a `Type::ClassLiteral` with `KnownClass::NewType`, and the + /// identity callables it returns (which behave like subtypes in type expressions) are of + /// `Type::KnownInstance` with `KnownInstanceType::NewType`. This `Type` refers to the objects + /// wrapped/returned by a specific one of those identity callables, or by another that inherits + /// from it. + NewTypeInstance(NewType<'db>), } #[salsa::tracked] @@ -815,13 +824,11 @@ impl<'db> Type<'db> { } fn is_none(&self, db: &'db dyn Db) -> bool { - self.as_nominal_instance() - .is_some_and(|instance| instance.has_known_class(db, KnownClass::NoneType)) + self.is_instance_of(db, KnownClass::NoneType) } fn is_bool(&self, db: &'db dyn Db) -> bool { - self.as_nominal_instance() - .is_some_and(|instance| instance.has_known_class(db, KnownClass::Bool)) + self.is_instance_of(db, KnownClass::Bool) } fn is_enum(&self, db: &'db dyn Db) -> bool { @@ -855,15 +862,14 @@ impl<'db> Type<'db> { } pub(crate) fn is_notimplemented(&self, db: &'db dyn Db) -> bool { - self.as_nominal_instance() - .is_some_and(|instance| instance.has_known_class(db, KnownClass::NotImplementedType)) + self.is_instance_of(db, KnownClass::NotImplementedType) } pub(crate) const fn is_todo(&self) -> bool { matches!(self, Type::Dynamic(DynamicType::Todo(_))) } - pub(crate) const fn is_generic_alias(&self) -> bool { + pub const fn is_generic_alias(&self) -> bool { matches!(self, Type::GenericAlias(_)) } @@ -871,6 +877,10 @@ impl<'db> Type<'db> { matches!(self, Type::Dynamic(_)) } + const fn is_non_divergent_dynamic(&self) -> bool { + self.is_dynamic() && !self.is_divergent() + } + /// Is a value of this type only usable in typing contexts? pub(crate) fn is_type_check_only(&self, db: &'db dyn Db) -> bool { match self { @@ -882,20 +892,31 @@ impl<'db> Type<'db> { } } - // If the type is a specialized instance of the given `KnownClass`, returns the specialization. 
+ /// If the type is a specialized instance of the given `KnownClass`, returns the specialization. pub(crate) fn known_specialization( &self, db: &'db dyn Db, known_class: KnownClass, ) -> Option> { let class_literal = known_class.try_to_class_literal(db)?; - self.specialization_of(db, Some(class_literal)) + self.specialization_of(db, class_literal) } - // If the type is a specialized instance of the given class, returns the specialization. - // - // If no class is provided, returns the specialization of any class instance. + /// If this type is a class instance, returns its specialization. + pub(crate) fn class_specialization(self, db: &'db dyn Db) -> Option> { + self.specialization_of_optional(db, None) + } + + /// If the type is a specialized instance of the given class, returns the specialization. pub(crate) fn specialization_of( + self, + db: &'db dyn Db, + expected_class: ClassLiteral<'_>, + ) -> Option> { + self.specialization_of_optional(db, Some(expected_class)) + } + + fn specialization_of_optional( self, db: &'db dyn Db, expected_class: Option>, @@ -1007,6 +1028,13 @@ impl<'db> Type<'db> { any_over_type(db, self, &|ty| matches!(ty, Type::TypeVar(_)), false) } + pub(crate) const fn as_special_form(self) -> Option { + match self { + Type::SpecialForm(special_form) => Some(special_form), + _ => None, + } + } + pub(crate) const fn as_class_literal(self) -> Option> { match self { Type::ClassLiteral(class_type) => Some(class_type), @@ -1052,12 +1080,11 @@ impl<'db> Type<'db> { .expect("Expected a Type::ClassLiteral variant") } - pub(crate) const fn is_subclass_of(&self) -> bool { + pub const fn is_subclass_of(&self) -> bool { matches!(self, Type::SubclassOf(..)) } - #[cfg(test)] - pub(crate) const fn is_class_literal(&self) -> bool { + pub const fn is_class_literal(&self) -> bool { matches!(self, Type::ClassLiteral(..)) } @@ -1097,6 +1124,22 @@ impl<'db> Type<'db> { } } + /// If the type is a generic class constructor, returns the class instance type. 
+ pub(crate) fn synthesized_constructor_return_ty(self, db: &'db dyn Db) -> Option> { + // TODO: This does not correctly handle unions or intersections. It also does not handle + // constructors that are not represented as bound methods, e.g. `__new__`, or synthesized + // dataclass initializers. + if let Type::BoundMethod(method) = self + && let Type::NominalInstance(instance) = method.self_instance(db) + && method.function(db).name(db).as_str() == "__init__" + { + let class_ty = instance.class_literal(db).identity_specialization(db); + Some(Type::instance(db, class_ty)) + } else { + None + } + } + pub const fn is_property_instance(&self) -> bool { matches!(self, Type::PropertyInstance(..)) } @@ -1120,6 +1163,10 @@ impl<'db> Type<'db> { } } + pub(crate) const fn is_union(&self) -> bool { + matches!(self, Type::Union(_)) + } + pub(crate) const fn as_union(self) -> Option> { match self { Type::Union(union_type) => Some(union_type), @@ -1127,7 +1174,6 @@ impl<'db> Type<'db> { } } - #[cfg(test)] #[track_caller] pub(crate) const fn expect_union(self) -> UnionType<'db> { self.as_union().expect("Expected a Type::Union variant") @@ -1151,6 +1197,23 @@ impl<'db> Type<'db> { matches!(self, Type::FunctionLiteral(..)) } + /// Detects types which are valid to appear inside a `Literal[…]` type annotation. 
+ pub(crate) fn is_literal_or_union_of_literals(&self, db: &'db dyn Db) -> bool { + match self { + Type::Union(union) => union + .elements(db) + .iter() + .all(|ty| ty.is_literal_or_union_of_literals(db)), + Type::StringLiteral(_) + | Type::BytesLiteral(_) + | Type::IntLiteral(_) + | Type::BooleanLiteral(_) + | Type::EnumLiteral(_) => true, + Type::NominalInstance(_) => self.is_none(db) || self.is_bool(db) || self.is_enum(db), + _ => false, + } + } + pub(crate) fn is_union_of_single_valued(&self, db: &'db dyn Db) -> bool { self.as_union().is_some_and(|union| { union.elements(db).iter().all(|ty| { @@ -1268,7 +1331,11 @@ impl<'db> Type<'db> { /// /// It also avoids literal promotion if a literal type annotation was provided as type context. pub(crate) fn promote_literals(self, db: &'db dyn Db, tcx: TypeContext<'db>) -> Type<'db> { - self.apply_type_mapping(db, &TypeMapping::PromoteLiterals, tcx) + self.apply_type_mapping( + db, + &TypeMapping::PromoteLiterals(PromoteLiteralsMode::On), + tcx, + ) } /// Like [`Type::promote_literals`], but does not recurse into nested types. 
@@ -1369,6 +1436,13 @@ impl<'db> Type<'db> { self } Type::TypeAlias(alias) => alias.value_type(db).normalized_impl(db, visitor), + Type::NewTypeInstance(newtype) => { + visitor.visit(self, || { + Type::NewTypeInstance(newtype.map_base_class_type(db, |class_type| { + class_type.normalized_impl(db, visitor) + })) + }) + } Type::LiteralString | Type::AlwaysFalsy | Type::AlwaysTruthy @@ -1431,7 +1505,8 @@ impl<'db> Type<'db> { | Type::BoundSuper(_) | Type::TypeIs(_) | Type::TypedDict(_) - | Type::TypeAlias(_) => false, + | Type::TypeAlias(_) + | Type::NewTypeInstance(_) => false, } } @@ -1469,6 +1544,10 @@ impl<'db> Type<'db> { Type::GenericAlias(alias) => Some(ClassType::Generic(alias).into_callable(db)), + Type::NewTypeInstance(newtype) => { + Type::instance(db, newtype.base_class_type(db)).try_upcast_to_callable(db) + } + // TODO: This is unsound so in future we can consider an opt-in option to disable it. Type::SubclassOf(subclass_of_ty) => match subclass_of_ty.subclass_of() { SubclassOfInner::Class(class) => Some(class.into_callable(db)), @@ -1498,6 +1577,15 @@ impl<'db> Type<'db> { false, ))), + Type::KnownInstance(KnownInstanceType::NewType(newtype)) => Some(CallableType::single( + db, + Signature::new( + Parameters::new([Parameter::positional_only(None) + .with_annotated_type(newtype.base(db).instance_type(db))]), + Some(Type::NewTypeInstance(newtype)), + ), + )), + Type::Never | Type::DataclassTransformer(_) | Type::AlwaysTruthy @@ -1672,22 +1760,33 @@ impl<'db> Type<'db> { // holds true if `T` is also a dynamic type or a union that contains a dynamic type. // Similarly, `T <: Any` only holds true if `T` is a dynamic type or an intersection // that contains a dynamic type. 
- (Type::Dynamic(_), _) => ConstraintSet::from(match relation { - TypeRelation::Subtyping => false, - TypeRelation::Assignability => true, - TypeRelation::Redundancy => match target { - Type::Dynamic(_) => true, - Type::Union(union) => union.elements(db).iter().any(Type::is_dynamic), - _ => false, - }, - }), + (Type::Dynamic(dynamic), _) => { + // If a `Divergent` type is involved, it must not be eliminated. + debug_assert!( + !matches!(dynamic, DynamicType::Divergent(_)), + "DynamicType::Divergent should have been handled in an earlier branch" + ); + ConstraintSet::from(match relation { + TypeRelation::Subtyping => false, + TypeRelation::Assignability => true, + TypeRelation::Redundancy => match target { + Type::Dynamic(_) => true, + Type::Union(union) => union.elements(db).iter().any(Type::is_dynamic), + _ => false, + }, + }) + } (_, Type::Dynamic(_)) => ConstraintSet::from(match relation { TypeRelation::Subtyping => false, TypeRelation::Assignability => true, TypeRelation::Redundancy => match self { Type::Dynamic(_) => true, Type::Intersection(intersection) => { - intersection.positive(db).iter().any(Type::is_dynamic) + // If a `Divergent` type is involved, it must not be eliminated. + intersection + .positive(db) + .iter() + .any(Type::is_non_divergent_dynamic) } _ => false, }, @@ -1952,11 +2051,14 @@ impl<'db> Type<'db> { ConstraintSet::from(false) } - (Type::TypedDict(_), _) | (_, Type::TypedDict(_)) => { + (Type::TypedDict(_), _) => { // TODO: Implement assignability and subtyping for TypedDict ConstraintSet::from(relation.is_assignability()) } + // A non-`TypedDict` cannot subtype a `TypedDict` + (_, Type::TypedDict(_)) => ConstraintSet::from(false), + // Note that the definition of `Type::AlwaysFalsy` depends on the return value of `__bool__`. // If `__bool__` always returns True or False, it can be treated as a subtype of `AlwaysTruthy` or `AlwaysFalsy`, respectively. 
(left, Type::AlwaysFalsy) => ConstraintSet::from(left.bool(db).is_always_false()), @@ -2364,6 +2466,22 @@ impl<'db> Type<'db> { }) } + (Type::NewTypeInstance(self_newtype), Type::NewTypeInstance(target_newtype)) => { + self_newtype.has_relation_to_impl(db, target_newtype) + } + + ( + Type::NewTypeInstance(self_newtype), + Type::NominalInstance(target_nominal_instance), + ) => self_newtype.base_class_type(db).has_relation_to_impl( + db, + target_nominal_instance.class(db), + inferable, + relation, + relation_visitor, + disjointness_visitor, + ), + (Type::PropertyInstance(_), _) => { KnownClass::Property.to_instance(db).has_relation_to_impl( db, @@ -2383,14 +2501,15 @@ impl<'db> Type<'db> { disjointness_visitor, ), - // Other than the special cases enumerated above, `Instance` types and typevars are - // never subtypes of any other variants + // Other than the special cases enumerated above, nominal-instance types, + // newtype-instance types, and typevars are never subtypes of any other variants (Type::TypeVar(bound_typevar), _) => { // All inferable cases should have been handled above assert!(!bound_typevar.is_inferable(db, inferable)); ConstraintSet::from(false) } (Type::NominalInstance(_), _) => ConstraintSet::from(false), + (Type::NewTypeInstance(_), _) => ConstraintSet::from(false), } } @@ -2464,6 +2583,10 @@ impl<'db> Type<'db> { }) } + (Type::NewTypeInstance(self_newtype), Type::NewTypeInstance(other_newtype)) => { + ConstraintSet::from(self_newtype.is_equivalent_to_impl(db, other_newtype)) + } + (Type::NominalInstance(first), Type::NominalInstance(second)) => { first.is_equivalent_to_impl(db, second, inferable, visitor) } @@ -3223,6 +3346,19 @@ impl<'db> Type<'db> { ) }), + (Type::NewTypeInstance(left), Type::NewTypeInstance(right)) => { + left.is_disjoint_from_impl(db, right) + } + (Type::NewTypeInstance(newtype), other) | (other, Type::NewTypeInstance(newtype)) => { + Type::instance(db, newtype.base_class_type(db)).is_disjoint_from_impl( + db, + other, + 
inferable, + disjointness_visitor, + relation_visitor, + ) + } + (Type::PropertyInstance(_), other) | (other, Type::PropertyInstance(_)) => { KnownClass::Property.to_instance(db).is_disjoint_from_impl( db, @@ -3367,6 +3503,9 @@ impl<'db> Type<'db> { Type::TypeIs(type_is) => type_is.is_bound(db), Type::TypedDict(_) => false, Type::TypeAlias(alias) => alias.value_type(db).is_singleton(db), + Type::NewTypeInstance(newtype) => { + Type::instance(db, newtype.base_class_type(db)).is_singleton(db) + } } } @@ -3417,6 +3556,9 @@ impl<'db> Type<'db> { } Type::NominalInstance(instance) => instance.is_single_valued(db), + Type::NewTypeInstance(newtype) => { + Type::instance(db, newtype.base_class_type(db)).is_single_valued(db) + } Type::BoundSuper(_) => { // At runtime two super instances never compare equal, even if their arguments are identical. @@ -3580,7 +3722,8 @@ impl<'db> Type<'db> { | Type::ProtocolInstance(_) | Type::PropertyInstance(_) | Type::TypeIs(_) - | Type::TypedDict(_) => None, + | Type::TypedDict(_) + | Type::NewTypeInstance(_) => None, } } @@ -3667,6 +3810,7 @@ impl<'db> Type<'db> { Type::Dynamic(_) | Type::Never => Place::bound(self).into(), Type::NominalInstance(instance) => instance.class(db).instance_member(db, name), + Type::NewTypeInstance(newtype) => newtype.base_class_type(db).instance_member(db, name), Type::ProtocolInstance(protocol) => protocol.instance_member(db, name), @@ -4159,6 +4303,22 @@ impl<'db> Type<'db> { )) .into() } + Type::KnownInstance(KnownInstanceType::ConstraintSet(tracked)) + if name == "satisfies" => + { + Place::bound(Type::KnownBoundMethod( + KnownBoundMethodType::ConstraintSetSatisfies(tracked), + )) + .into() + } + Type::KnownInstance(KnownInstanceType::ConstraintSet(tracked)) + if name == "satisfied_by_all_typevars" => + { + Place::bound(Type::KnownBoundMethod( + KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(tracked), + )) + .into() + } Type::ClassLiteral(class) if name == "__get__" && class.is_known(db, 
KnownClass::FunctionType) => @@ -4304,8 +4464,26 @@ impl<'db> Type<'db> { .into() } + Type::KnownInstance(KnownInstanceType::TypeVar(typevar)) + if typevar.kind(db).is_paramspec() + && matches!(name.as_str(), "args" | "kwargs") => + { + Place::bound(todo_type!("ParamSpecArgs / ParamSpecKwargs")).into() + } + + Type::NominalInstance(instance) + if matches!(name_str, "value" | "_value_") + && is_single_member_enum(db, instance.class(db).class_literal(db).0) => + { + enum_metadata(db, instance.class(db).class_literal(db).0) + .and_then(|metadata| metadata.members.get_index(0).map(|(_, v)| *v)) + .map_or(Place::Undefined, Place::bound) + .into() + } + Type::NominalInstance(..) | Type::ProtocolInstance(..) + | Type::NewTypeInstance(..) | Type::BooleanLiteral(..) | Type::IntLiteral(..) | Type::StringLiteral(..) @@ -4381,7 +4559,7 @@ impl<'db> Type<'db> { }; if result.is_class_var() && self.is_typed_dict() { - // `ClassVar`s on `TypedDictFallback` can not be accessed on inhabitants of `SomeTypedDict`. + // `ClassVar`s on `TypedDictFallback` cannot be accessed on inhabitants of `SomeTypedDict`. // They can only be accessed on `SomeTypedDict` directly. return Place::Undefined.into(); } @@ -4744,6 +4922,8 @@ impl<'db> Type<'db> { .value_type(db) .try_bool_impl(db, allow_short_circuit, visitor) })?, + Type::NewTypeInstance(newtype) => Type::instance(db, newtype.base_class_type(db)) + .try_bool_impl(db, allow_short_circuit, visitor)?, }; Ok(truthiness) @@ -5430,7 +5610,7 @@ impl<'db> Type<'db> { SubclassOfInner::Class(class) => Type::from(class).bindings(db), }, - Type::NominalInstance(_) | Type::ProtocolInstance(_) => { + Type::NominalInstance(_) | Type::ProtocolInstance(_) | Type::NewTypeInstance(_) => { // Note that for objects that have a (possibly not callable!) `__call__` attribute, // we will get the signature of the `__call__` attribute, but will pass in the type // of the original object as the "callable type". 
That ensures that we get errors @@ -5483,6 +5663,16 @@ impl<'db> Type<'db> { Type::EnumLiteral(enum_literal) => enum_literal.enum_class_instance(db).bindings(db), + Type::KnownInstance(KnownInstanceType::NewType(newtype)) => Binding::single( + self, + Signature::new( + Parameters::new([Parameter::positional_only(None) + .with_annotated_type(newtype.base(db).instance_type(db))]), + Some(Type::NewTypeInstance(newtype)), + ), + ) + .into(), + Type::KnownInstance(known_instance) => { known_instance.instance_fallback(db).bindings(db) } @@ -5517,7 +5707,7 @@ impl<'db> Type<'db> { ) -> Result, CallError<'db>> { self.bindings(db) .match_parameters(db, argument_types) - .check_types(db, argument_types, &TypeContext::default(), &[]) + .check_types(db, argument_types, TypeContext::default(), &[]) } /// Look up a dunder method on the meta-type of `self` and call it. @@ -5569,7 +5759,8 @@ impl<'db> Type<'db> { let bindings = dunder_callable .bindings(db) .match_parameters(db, argument_types) - .check_types(db, argument_types, &tcx, &[])?; + .check_types(db, argument_types, tcx, &[])?; + if boundness == Definedness::PossiblyUndefined { return Err(CallDunderError::PossiblyUnbound(Box::new(bindings))); } @@ -5617,6 +5808,7 @@ impl<'db> Type<'db> { match ty { Type::NominalInstance(nominal) => nominal.tuple_spec(db), + Type::NewTypeInstance(newtype) => non_async_special_case(db, Type::instance(db, newtype.base_class_type(db))), Type::GenericAlias(alias) if alias.origin(db).is_tuple(db) => { Some(Cow::Owned(TupleSpec::homogeneous(todo_type!( "*tuple[] annotations" @@ -6247,6 +6439,9 @@ impl<'db> Type<'db> { Type::ClassLiteral(class) => Some(Type::instance(db, class.default_specialization(db))), Type::GenericAlias(alias) => Some(Type::instance(db, ClassType::from(alias))), Type::SubclassOf(subclass_of_ty) => Some(subclass_of_ty.to_instance(db)), + Type::KnownInstance(KnownInstanceType::NewType(newtype)) => { + Some(Type::NewTypeInstance(newtype)) + } Type::Union(union) => 
union.to_instance(db), // If there is no bound or constraints on a typevar `T`, `T: object` implicitly, which // has no instance type. Otherwise, synthesize a typevar with bound or constraints @@ -6277,7 +6472,8 @@ impl<'db> Type<'db> { | Type::AlwaysTruthy | Type::AlwaysFalsy | Type::TypeIs(_) - | Type::TypedDict(_) => None, + | Type::TypedDict(_) + | Type::NewTypeInstance(_) => None, } } @@ -6356,6 +6552,7 @@ impl<'db> Type<'db> { Type::KnownInstance(known_instance) => match known_instance { KnownInstanceType::TypeAliasType(alias) => Ok(Type::TypeAlias(*alias)), + KnownInstanceType::NewType(newtype) => Ok(Type::NewTypeInstance(*newtype)), KnownInstanceType::TypeVar(typevar) => { let index = semantic_index(db, scope_id.file(db)); Ok(bind_typevar( @@ -6390,6 +6587,24 @@ impl<'db> Type<'db> { invalid_expressions: smallvec::smallvec_inline![InvalidTypeExpression::Generic], fallback_type: Type::unknown(), }), + KnownInstanceType::UnionType(list) => { + let mut builder = UnionBuilder::new(db); + let inferred_as = list.inferred_as(db); + for element in list.elements(db) { + builder = builder.add(if inferred_as.type_expression() { + *element + } else { + element.in_type_expression(db, scope_id, typevar_binding_context)? + }); + } + Ok(builder.build()) + } + KnownInstanceType::Literal(ty) => Ok(ty.inner(db)), + KnownInstanceType::Annotated(ty) => { + Ok(ty + .inner(db) + .in_type_expression(db, scope_id, typevar_binding_context)?) 
+ } }, Type::SpecialForm(special_form) => match special_form { @@ -6543,6 +6758,7 @@ impl<'db> Type<'db> { Type::Dynamic(_) => Ok(*self), Type::NominalInstance(instance) => match instance.known_class(db) { + Some(KnownClass::NoneType) => Ok(Type::none(db)), Some(KnownClass::TypeVar) => Ok(todo_type!( "Support for `typing.TypeVar` instances in type expressions" )), @@ -6552,15 +6768,9 @@ impl<'db> Type<'db> { Some(KnownClass::TypeVarTuple) => Ok(todo_type!( "Support for `typing.TypeVarTuple` instances in type expressions" )), - Some(KnownClass::NewType) => Ok(todo_type!( - "Support for `typing.NewType` instances in type expressions" - )), Some(KnownClass::GenericAlias) => Ok(todo_type!( "Support for `typing.GenericAlias` instances in type expressions" )), - Some(KnownClass::UnionType) => Ok(todo_type!( - "Support for `types.UnionType` instances in type expressions" - )), _ => Err(InvalidTypeExpressionError { invalid_expressions: smallvec::smallvec_inline![ InvalidTypeExpression::InvalidType(*self, scope_id) @@ -6576,6 +6786,13 @@ impl<'db> Type<'db> { .value_type(db) .in_type_expression(db, scope_id, typevar_binding_context) } + + Type::NewTypeInstance(_) => Err(InvalidTypeExpressionError { + invalid_expressions: smallvec::smallvec_inline![ + InvalidTypeExpression::InvalidType(*self, scope_id) + ], + fallback_type: Type::unknown(), + }), } } @@ -6650,6 +6867,7 @@ impl<'db> Type<'db> { // understand a more specific meta type in order to correctly handle `__getitem__`. 
Type::TypedDict(typed_dict) => SubclassOfType::from(db, typed_dict.defining_class()), Type::TypeAlias(alias) => alias.value_type(db).to_meta_type(db), + Type::NewTypeInstance(newtype) => Type::from(newtype.base_class_type(db)), } } @@ -6755,12 +6973,12 @@ impl<'db> Type<'db> { self } } - TypeMapping::PromoteLiterals + TypeMapping::PromoteLiterals(_) | TypeMapping::ReplaceParameterDefaults | TypeMapping::BindLegacyTypevars(_) => self, TypeMapping::Materialize(materialization_kind) => { - Type::TypeVar(bound_typevar.materialize_impl(db, *materialization_kind, visitor)) - } + Type::TypeVar(bound_typevar.materialize_impl(db, *materialization_kind, visitor)) + } } Type::KnownInstance(KnownInstanceType::TypeVar(typevar)) => match type_mapping { @@ -6769,7 +6987,7 @@ impl<'db> Type<'db> { } TypeMapping::Specialization(_) | TypeMapping::PartialSpecialization(_) | - TypeMapping::PromoteLiterals | + TypeMapping::PromoteLiterals(_) | TypeMapping::BindSelf(_) | TypeMapping::ReplaceSelf { .. } | TypeMapping::Materialize(_) | @@ -6780,7 +6998,7 @@ impl<'db> Type<'db> { let function = Type::FunctionLiteral(function.apply_type_mapping_impl(db, type_mapping, tcx, visitor)); match type_mapping { - TypeMapping::PromoteLiterals => function.promote_literals_impl(db, tcx), + TypeMapping::PromoteLiterals(PromoteLiteralsMode::On) => function.promote_literals_impl(db, tcx), _ => function } } @@ -6795,6 +7013,12 @@ impl<'db> Type<'db> { instance.apply_type_mapping_impl(db, type_mapping, tcx, visitor) }, + Type::NewTypeInstance(newtype) => visitor.visit(self, || { + Type::NewTypeInstance(newtype.map_base_class_type(db, |class_type| { + class_type.apply_type_mapping_impl(db, type_mapping, tcx, visitor) + })) + }), + Type::ProtocolInstance(instance) => { // TODO: Add tests for materialization once subtyping/assignability is implemented for // protocols. 
It _might_ require changing the logic here because: @@ -6857,13 +7081,9 @@ impl<'db> Type<'db> { builder = builder.add_positive(positive.apply_type_mapping_impl(db, type_mapping, tcx, visitor)); } - let flipped_mapping = match type_mapping { - TypeMapping::Materialize(materialization_kind) => &TypeMapping::Materialize(materialization_kind.flip()), - _ => type_mapping, - }; for negative in intersection.negative(db) { builder = - builder.add_negative(negative.apply_type_mapping_impl(db, flipped_mapping, tcx, visitor)); + builder.add_negative(negative.apply_type_mapping_impl(db, &type_mapping.flip(), tcx, visitor)); } builder.build() } @@ -6892,8 +7112,9 @@ impl<'db> Type<'db> { TypeMapping::BindSelf(_) | TypeMapping::ReplaceSelf { .. } | TypeMapping::Materialize(_) | - TypeMapping::ReplaceParameterDefaults => self, - TypeMapping::PromoteLiterals => self.promote_literals_impl(db, tcx) + TypeMapping::ReplaceParameterDefaults | + TypeMapping::PromoteLiterals(PromoteLiteralsMode::Off) => self, + TypeMapping::PromoteLiterals(PromoteLiteralsMode::On) => self.promote_literals_impl(db, tcx) } Type::Dynamic(_) => match type_mapping { @@ -6902,7 +7123,7 @@ impl<'db> Type<'db> { TypeMapping::BindLegacyTypevars(_) | TypeMapping::BindSelf(_) | TypeMapping::ReplaceSelf { .. 
} | - TypeMapping::PromoteLiterals | + TypeMapping::PromoteLiterals(_) | TypeMapping::ReplaceParameterDefaults => self, TypeMapping::Materialize(materialization_kind) => match materialization_kind { MaterializationKind::Top => Type::object(), @@ -6921,6 +7142,8 @@ impl<'db> Type<'db> { | KnownBoundMethodType::ConstraintSetAlways | KnownBoundMethodType::ConstraintSetNever | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) + | KnownBoundMethodType::ConstraintSetSatisfies(_) + | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_) ) | Type::DataclassDecorator(_) | Type::DataclassTransformer(_) @@ -6962,7 +7185,7 @@ impl<'db> Type<'db> { Type::TypeVar(bound_typevar) => { if matches!( bound_typevar.typevar(db).kind(db), - TypeVarKind::Legacy | TypeVarKind::TypingSelf + TypeVarKind::Legacy | TypeVarKind::TypingSelf | TypeVarKind::ParamSpec ) && binding_context.is_none_or(|binding_context| { bound_typevar.binding_context(db) == BindingContext::Definition(binding_context) }) { @@ -7037,6 +7260,12 @@ impl<'db> Type<'db> { instance.find_legacy_typevars_impl(db, binding_context, typevars, visitor); } + Type::NewTypeInstance(_) => { + // A newtype can never be constructed from an unspecialized generic class, so it is + // impossible that we could ever find any legacy typevars in a newtype instance or + // its underlying class. 
+ } + Type::SubclassOf(subclass_of) => { subclass_of.find_legacy_typevars_impl(db, binding_context, typevars, visitor); } @@ -7072,7 +7301,9 @@ impl<'db> Type<'db> { | KnownBoundMethodType::ConstraintSetRange | KnownBoundMethodType::ConstraintSetAlways | KnownBoundMethodType::ConstraintSetNever - | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_), + | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) + | KnownBoundMethodType::ConstraintSetSatisfies(_) + | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_), ) | Type::DataclassDecorator(_) | Type::DataclassTransformer(_) @@ -7190,6 +7421,7 @@ impl<'db> Type<'db> { }, Self::TypeAlias(alias) => alias.value_type(db).definition(db), + Self::NewTypeInstance(newtype) => Some(TypeDefinition::NewType(newtype.definition(db))), Self::StringLiteral(_) | Self::BooleanLiteral(_) @@ -7328,7 +7560,7 @@ impl<'db> From<&Type<'db>> for Type<'db> { impl<'db> VarianceInferable<'db> for Type<'db> { fn variance_of(self, db: &'db dyn Db, typevar: BoundTypeVarInstance<'db>) -> TypeVarVariance { - tracing::debug!( + tracing::trace!( "Checking variance of '{tvar}' in `{ty:?}`", tvar = typevar.typevar(db).name(db), ty = self.display(db), @@ -7413,10 +7645,11 @@ impl<'db> VarianceInferable<'db> for Type<'db> { | Type::BoundSuper(_) | Type::TypeVar(_) | Type::TypedDict(_) - | Type::TypeAlias(_) => TypeVarVariance::Bivariant, + | Type::TypeAlias(_) + | Type::NewTypeInstance(_) => TypeVarVariance::Bivariant, }; - tracing::debug!( + tracing::trace!( "Result of variance of '{tvar}' in `{ty:?}` is `{v:?}`", tvar = typevar.typevar(db).name(db), ty = self.display(db), @@ -7444,6 +7677,21 @@ fn apply_specialization_cycle_initial<'db>( Type::Never } +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, get_size2::GetSize)] +pub enum PromoteLiteralsMode { + On, + Off, +} + +impl PromoteLiteralsMode { + const fn flip(self) -> Self { + match self { + PromoteLiteralsMode::On => PromoteLiteralsMode::Off, + PromoteLiteralsMode::Off => 
PromoteLiteralsMode::On, + } + } +} + /// A mapping that can be applied to a type, producing another type. This is applied inductively to /// the components of complex types. /// @@ -7458,7 +7706,7 @@ pub enum TypeMapping<'a, 'db> { PartialSpecialization(PartialSpecialization<'a, 'db>), /// Replaces any literal types with their corresponding promoted type form (e.g. `Literal["string"]` /// to `str`, or `def _() -> int` to `Callable[[], int]`). - PromoteLiterals, + PromoteLiterals(PromoteLiteralsMode), /// Binds a legacy typevar with the generic context (class, function, type alias) that it is /// being used in. BindLegacyTypevars(BindingContext<'db>), @@ -7483,7 +7731,7 @@ impl<'db> TypeMapping<'_, 'db> { match self { TypeMapping::Specialization(_) | TypeMapping::PartialSpecialization(_) - | TypeMapping::PromoteLiterals + | TypeMapping::PromoteLiterals(_) | TypeMapping::BindLegacyTypevars(_) | TypeMapping::Materialize(_) | TypeMapping::ReplaceParameterDefaults => context, @@ -7509,6 +7757,22 @@ impl<'db> TypeMapping<'_, 'db> { ), } } + + /// Returns a new `TypeMapping` that should be applied in contravariant positions. + pub(crate) fn flip(&self) -> Self { + match self { + TypeMapping::Materialize(materialization_kind) => { + TypeMapping::Materialize(materialization_kind.flip()) + } + TypeMapping::PromoteLiterals(mode) => TypeMapping::PromoteLiterals(mode.flip()), + TypeMapping::Specialization(_) + | TypeMapping::PartialSpecialization(_) + | TypeMapping::BindLegacyTypevars(_) + | TypeMapping::BindSelf(_) + | TypeMapping::ReplaceSelf { .. } + | TypeMapping::ReplaceParameterDefaults => self.clone(), + } + } } /// A Salsa-tracked constraint set. This is only needed to have something appropriately small to @@ -7570,6 +7834,20 @@ pub enum KnownInstanceType<'db> { /// A constraint set, which is exposed in mdtests as an instance of /// `ty_extensions.ConstraintSet`. 
ConstraintSet(TrackedConstraintSet<'db>), + + /// A single instance of `types.UnionType`, which stores the left- and + /// right-hand sides of a PEP 604 union. + UnionType(InternedTypes<'db>), + + /// A single instance of `typing.Literal` + Literal(InternedType<'db>), + + /// A single instance of `typing.Annotated` + Annotated(InternedType<'db>), + + /// An identity callable created with `typing.NewType(name, base)`, which behaves like a + /// subtype of `base` in type expressions. See the `struct NewType` payload for an example. + NewType(NewType<'db>), } fn walk_known_instance_type<'db, V: visitor::TypeVisitor<'db> + ?Sized>( @@ -7596,6 +7874,19 @@ fn walk_known_instance_type<'db, V: visitor::TypeVisitor<'db> + ?Sized>( visitor.visit_type(db, default_ty); } } + KnownInstanceType::UnionType(list) => { + for element in list.elements(db) { + visitor.visit_type(db, *element); + } + } + KnownInstanceType::Literal(ty) | KnownInstanceType::Annotated(ty) => { + visitor.visit_type(db, ty.inner(db)); + } + KnownInstanceType::NewType(newtype) => { + if let ClassType::Generic(generic_alias) = newtype.base_class_type(db) { + visitor.visit_generic_alias_type(db, generic_alias); + } + } } } @@ -7632,12 +7923,22 @@ impl<'db> KnownInstanceType<'db> { // Nothing to normalize Self::ConstraintSet(set) } + Self::UnionType(list) => Self::UnionType(list.normalized_impl(db, visitor)), + Self::Literal(ty) => Self::Literal(ty.normalized_impl(db, visitor)), + Self::Annotated(ty) => Self::Annotated(ty.normalized_impl(db, visitor)), + Self::NewType(newtype) => Self::NewType( + newtype + .map_base_class_type(db, |class_type| class_type.normalized_impl(db, visitor)), + ), } } fn class(self, db: &'db dyn Db) -> KnownClass { match self { Self::SubscriptedProtocol(_) | Self::SubscriptedGeneric(_) => KnownClass::SpecialForm, + Self::TypeVar(typevar_instance) if typevar_instance.kind(db).is_paramspec() => { + KnownClass::ParamSpec + } Self::TypeVar(_) => KnownClass::TypeVar, 
Self::TypeAliasType(TypeAliasType::PEP695(alias)) if alias.is_specialized(db) => { KnownClass::GenericAlias @@ -7646,6 +7947,10 @@ impl<'db> KnownInstanceType<'db> { Self::Deprecated(_) => KnownClass::Deprecated, Self::Field(_) => KnownClass::Field, Self::ConstraintSet(_) => KnownClass::ConstraintSet, + Self::UnionType(_) => KnownClass::UnionType, + Self::Literal(_) => KnownClass::GenericAlias, + Self::Annotated(_) => KnownClass::GenericAlias, + Self::NewType(_) => KnownClass::NewType, } } @@ -7702,7 +8007,13 @@ impl<'db> KnownInstanceType<'db> { // This is a legacy `TypeVar` _outside_ of any generic class or function, so we render // it as an instance of `typing.TypeVar`. Inside of a generic class or function, we'll // have a `Type::TypeVar(_)`, which is rendered as the typevar's name. - KnownInstanceType::TypeVar(_) => f.write_str("typing.TypeVar"), + KnownInstanceType::TypeVar(typevar_instance) => { + if typevar_instance.kind(self.db).is_paramspec() { + f.write_str("typing.ParamSpec") + } else { + f.write_str("typing.TypeVar") + } + } KnownInstanceType::Deprecated(_) => f.write_str("warnings.deprecated"), KnownInstanceType::Field(field) => { f.write_str("dataclasses.Field")?; @@ -7719,6 +8030,14 @@ impl<'db> KnownInstanceType<'db> { constraints.display(self.db) ) } + KnownInstanceType::UnionType(_) => f.write_str("types.UnionType"), + KnownInstanceType::Literal(_) => f.write_str(""), + KnownInstanceType::Annotated(_) => { + f.write_str("") + } + KnownInstanceType::NewType(declaration) => { + write!(f, "", declaration.name(self.db)) + } } } } @@ -7757,9 +8076,6 @@ pub enum DynamicType<'db> { /// /// This variant should be created with the `todo_type!` macro. Todo(TodoType), - /// A special Todo-variant for PEP-695 `ParamSpec` types. A temporary variant to detect and special- - /// case the handling of these types in `Callable` annotations. - TodoPEP695ParamSpec, /// A special Todo-variant for type aliases declared using `typing.TypeAlias`. 
/// A temporary variant to detect and special-case the handling of these aliases in autocomplete suggestions. TodoTypeAlias, @@ -7787,13 +8103,6 @@ impl std::fmt::Display for DynamicType<'_> { // `DynamicType::Todo`'s display should be explicit that is not a valid display of // any other type DynamicType::Todo(todo) => write!(f, "@Todo{todo}"), - DynamicType::TodoPEP695ParamSpec => { - if cfg!(debug_assertions) { - f.write_str("@Todo(ParamSpec)") - } else { - f.write_str("@Todo") - } - } DynamicType::TodoUnpack => { if cfg!(debug_assertions) { f.write_str("@Todo(typing.Unpack)") @@ -7833,6 +8142,10 @@ bitflags! { /// instance attributes that are only implicitly defined via `self.x = …` in /// the body of a class method. const IMPLICIT_INSTANCE_ATTRIBUTE = 1 << 6; + /// A non-standard type qualifier that marks a type returned from a module-level + /// `__getattr__` function. We need this in order to implement precedence of submodules + /// over module-level `__getattr__`, for compatibility with other type checkers. + const FROM_MODULE_GETATTR = 1 << 7; } } @@ -7921,7 +8234,7 @@ impl<'db> InvalidTypeExpressionError<'db> { fn into_fallback_type( self, context: &InferContext, - node: &ast::Expr, + node: &impl Ranged, is_reachable: bool, ) -> Type<'db> { let InvalidTypeExpressionError { @@ -8128,12 +8441,20 @@ pub enum TypeVarKind { Pep695, /// `typing.Self` TypingSelf, + /// `P = ParamSpec("P")` + ParamSpec, + /// `def foo[**P]() -> None: ...` + Pep695ParamSpec, } impl TypeVarKind { const fn is_self(self) -> bool { matches!(self, Self::TypingSelf) } + + const fn is_paramspec(self) -> bool { + matches!(self, Self::ParamSpec | Self::Pep695ParamSpec) + } } /// The identity of a type variable. 
@@ -8267,7 +8588,7 @@ impl<'db> TypeVarInstance<'db> { self.identity(db).definition(db) } - pub(crate) fn kind(self, db: &'db dyn Db) -> TypeVarKind { + pub fn kind(self, db: &'db dyn Db) -> TypeVarKind { self.identity(db).kind(db) } @@ -8486,6 +8807,15 @@ impl<'db> TypeVarInstance<'db> { let expr = &call_expr.arguments.find_keyword("default")?.value; Some(definition_expression_type(db, definition, expr)) } + // PEP 695 ParamSpec + DefinitionKind::ParamSpec(paramspec) => { + let paramspec_node = paramspec.node(&module); + Some(definition_expression_type( + db, + definition, + paramspec_node.default.as_ref()?, + )) + } _ => None, } } @@ -8838,6 +9168,76 @@ impl<'db> TypeVarBoundOrConstraints<'db> { } } +/// Whether a given type originates from value expression inference or type expression inference. +/// For example, the symbol `int` would be inferred as `` in value expression context, +/// and as `int` (i.e. an instance of the class `int`) in type expression context. +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, get_size2::GetSize, salsa::Update)] +pub enum InferredAs { + ValueExpression, + TypeExpression, +} + +impl InferredAs { + pub const fn type_expression(self) -> bool { + matches!(self, InferredAs::TypeExpression) + } +} + +/// A salsa-interned list of types. +/// +/// # Ordering +/// Ordering is based on the context's salsa-assigned id and not on its values. +/// The id may change between runs, or when the context was garbage collected and recreated. 
+#[salsa::interned(debug, heap_size=ruff_memory_usage::heap_size)] +#[derive(PartialOrd, Ord)] +pub struct InternedTypes<'db> { + #[returns(deref)] + elements: Box<[Type<'db>]>, + inferred_as: InferredAs, +} + +impl get_size2::GetSize for InternedTypes<'_> {} + +impl<'db> InternedTypes<'db> { + pub(crate) fn from_elements( + db: &'db dyn Db, + elements: impl IntoIterator>, + inferred_as: InferredAs, + ) -> InternedTypes<'db> { + InternedTypes::new(db, elements.into_iter().collect::>(), inferred_as) + } + + pub(crate) fn normalized_impl(self, db: &'db dyn Db, visitor: &NormalizedVisitor<'db>) -> Self { + InternedTypes::new( + db, + self.elements(db) + .iter() + .map(|ty| ty.normalized_impl(db, visitor)) + .collect::>(), + self.inferred_as(db), + ) + } +} + +/// A salsa-interned `Type` +/// +/// # Ordering +/// Ordering is based on the context's salsa-assigned id and not on its values. +/// The id may change between runs, or when the context was garbage collected and recreated. +#[salsa::interned(debug, heap_size=ruff_memory_usage::heap_size)] +#[derive(PartialOrd, Ord)] +pub struct InternedType<'db> { + inner: Type<'db>, +} + +impl get_size2::GetSize for InternedType<'_> {} + +impl<'db> InternedType<'db> { + pub(crate) fn normalized_impl(self, db: &'db dyn Db, visitor: &NormalizedVisitor<'db>) -> Self { + InternedType::new(db, self.inner(db).normalized_impl(db, visitor)) + } +} + /// Error returned if a type is not awaitable. #[derive(Debug)] enum AwaitError<'db> { @@ -9930,6 +10330,10 @@ pub(crate) enum TypeRelation { /// materialization of `Any` and `int | Any` may be the same type (`object`), but the /// two differ in their bottom materializations (`Never` and `int`, respectively). /// + /// Despite the above principles, there is one exceptional type that should never be union-simplified: the `Divergent` type. + /// This is a kind of dynamic type, but it acts as a marker to track recursive type structures. 
+ /// If this type is accidentally eliminated by simplification, the fixed-point iteration will not converge. + /// /// [fully static]: https://typing.python.org/en/latest/spec/glossary.html#term-fully-static-type /// [materializations]: https://typing.python.org/en/latest/spec/glossary.html#term-materialize Redundancy, @@ -10337,6 +10741,8 @@ pub enum KnownBoundMethodType<'db> { ConstraintSetAlways, ConstraintSetNever, ConstraintSetImpliesSubtypeOf(TrackedConstraintSet<'db>), + ConstraintSetSatisfies(TrackedConstraintSet<'db>), + ConstraintSetSatisfiedByAllTypeVars(TrackedConstraintSet<'db>), } pub(super) fn walk_method_wrapper_type<'db, V: visitor::TypeVisitor<'db> + ?Sized>( @@ -10364,7 +10770,9 @@ pub(super) fn walk_method_wrapper_type<'db, V: visitor::TypeVisitor<'db> + ?Size | KnownBoundMethodType::ConstraintSetRange | KnownBoundMethodType::ConstraintSetAlways | KnownBoundMethodType::ConstraintSetNever - | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) => {} + | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) + | KnownBoundMethodType::ConstraintSetSatisfies(_) + | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_) => {} } } @@ -10432,6 +10840,14 @@ impl<'db> KnownBoundMethodType<'db> { | ( KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_), KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_), + ) + | ( + KnownBoundMethodType::ConstraintSetSatisfies(_), + KnownBoundMethodType::ConstraintSetSatisfies(_), + ) + | ( + KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_), + KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_), ) => ConstraintSet::from(true), ( @@ -10444,7 +10860,9 @@ impl<'db> KnownBoundMethodType<'db> { | KnownBoundMethodType::ConstraintSetRange | KnownBoundMethodType::ConstraintSetAlways | KnownBoundMethodType::ConstraintSetNever - | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_), + | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) + | KnownBoundMethodType::ConstraintSetSatisfies(_) 
+ | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_), KnownBoundMethodType::FunctionTypeDunderGet(_) | KnownBoundMethodType::FunctionTypeDunderCall(_) | KnownBoundMethodType::PropertyDunderGet(_) @@ -10454,7 +10872,9 @@ impl<'db> KnownBoundMethodType<'db> { | KnownBoundMethodType::ConstraintSetRange | KnownBoundMethodType::ConstraintSetAlways | KnownBoundMethodType::ConstraintSetNever - | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_), + | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) + | KnownBoundMethodType::ConstraintSetSatisfies(_) + | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_), ) => ConstraintSet::from(false), } } @@ -10507,6 +10927,14 @@ impl<'db> KnownBoundMethodType<'db> { ( KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(left_constraints), KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(right_constraints), + ) + | ( + KnownBoundMethodType::ConstraintSetSatisfies(left_constraints), + KnownBoundMethodType::ConstraintSetSatisfies(right_constraints), + ) + | ( + KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(left_constraints), + KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(right_constraints), ) => left_constraints .constraints(db) .iff(db, right_constraints.constraints(db)), @@ -10521,7 +10949,9 @@ impl<'db> KnownBoundMethodType<'db> { | KnownBoundMethodType::ConstraintSetRange | KnownBoundMethodType::ConstraintSetAlways | KnownBoundMethodType::ConstraintSetNever - | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_), + | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) + | KnownBoundMethodType::ConstraintSetSatisfies(_) + | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_), KnownBoundMethodType::FunctionTypeDunderGet(_) | KnownBoundMethodType::FunctionTypeDunderCall(_) | KnownBoundMethodType::PropertyDunderGet(_) @@ -10531,7 +10961,9 @@ impl<'db> KnownBoundMethodType<'db> { | KnownBoundMethodType::ConstraintSetRange | KnownBoundMethodType::ConstraintSetAlways | 
KnownBoundMethodType::ConstraintSetNever - | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_), + | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) + | KnownBoundMethodType::ConstraintSetSatisfies(_) + | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_), ) => ConstraintSet::from(false), } } @@ -10555,7 +10987,9 @@ impl<'db> KnownBoundMethodType<'db> { | KnownBoundMethodType::ConstraintSetRange | KnownBoundMethodType::ConstraintSetAlways | KnownBoundMethodType::ConstraintSetNever - | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) => self, + | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) + | KnownBoundMethodType::ConstraintSetSatisfies(_) + | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_) => self, } } @@ -10571,7 +11005,11 @@ impl<'db> KnownBoundMethodType<'db> { KnownBoundMethodType::ConstraintSetRange | KnownBoundMethodType::ConstraintSetAlways | KnownBoundMethodType::ConstraintSetNever - | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) => KnownClass::ConstraintSet, + | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) + | KnownBoundMethodType::ConstraintSetSatisfies(_) + | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_) => { + KnownClass::ConstraintSet + } } } @@ -10710,6 +11148,27 @@ impl<'db> KnownBoundMethodType<'db> { Some(KnownClass::ConstraintSet.to_instance(db)), ))) } + + KnownBoundMethodType::ConstraintSetSatisfies(_) => { + Either::Right(std::iter::once(Signature::new( + Parameters::new([Parameter::positional_only(Some(Name::new_static("other"))) + .with_annotated_type(KnownClass::ConstraintSet.to_instance(db))]), + Some(KnownClass::ConstraintSet.to_instance(db)), + ))) + } + + KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_) => { + Either::Right(std::iter::once(Signature::new( + Parameters::new([Parameter::keyword_only(Name::new_static("inferable")) + .type_form() + .with_annotated_type(UnionType::from_elements( + db, + [Type::homogeneous_tuple(db, Type::any()), 
Type::none(db)], + )) + .with_default_type(Type::none(db))]), + Some(KnownClass::Bool.to_instance(db)), + ))) + } } } } @@ -10830,6 +11289,57 @@ impl<'db> ModuleLiteralType<'db> { self._importing_file(db) } + /// Get the submodule attributes we believe to be defined on this module. + /// + /// Note that `ModuleLiteralType` is per-importing-file, so this analysis + /// includes "imports the importing file has performed". + /// + /// + /// # Danger! Powerful Hammer! + /// + /// These results immediately make the attribute always defined in the importing file, + /// shadowing any other attribute in the module with the same name, even if the + /// non-submodule-attribute is in fact always the one defined in practice. + /// + /// Intuitively this means `available_submodule_attributes` "win all tie-breaks", + /// with the idea that if we're ever confused about complicated code then usually + /// the import is the thing people want in scope. + /// + /// However this "always defined, always shadows" rule if applied too aggressively + /// creates VERY confusing conclusions that break perfectly reasonable code. + /// + /// For instance, consider a package which has a `myfunc` submodule which defines a + /// `myfunc` function (a common idiom). If the package "re-exports" this function + /// (`from .myfunc import myfunc`), then at runtime in python + /// `from mypackage import myfunc` should import the function and not the submodule. + /// + /// However, if we were to consider `from mypackage import myfunc` as introducing + /// the attribute `mypackage.myfunc` in `available_submodule_attributes`, we would + /// fail to ever resolve the function. This is because `available_submodule_attributes` + /// is *so early* and *so powerful* in our analysis that **this conclusion would be + /// used when actually resolving `from mypackage import myfunc`**! 
+ /// + /// This currently cannot be fixed by considering the actual symbols defined in `mypackage`, + /// because `available_submodule_attributes` is an *input* to that analysis. + /// + /// We should therefore avoid marking something as an `available_submodule_attribute` + /// when the import could be importing a non-submodule (a function, class, or value). + /// + /// + /// # Rules + /// + /// Because of the excessive power and danger of this method, we currently have only one rule: + /// + /// * If the importing file includes `import x.y` then `x.y` is defined in the importing file. + /// This is an easy rule to justify because `import` can only ever import a module, and the + /// only reason to do it is to explicitly introduce those submodules and attributes, so it + /// *should* shadow any non-submodule of the same name. + /// + /// `from x.y import z` instances are currently ignored because the `x.y` part may not be a + /// side-effect the user actually cares about, and the `z` component may not be a submodule. + /// + /// We instead prefer handling most other import effects as definitions in the scope of + /// the current file (i.e. [`crate::semantic_index::definition::ImportFromDefinitionNodeRef`]). fn available_submodule_attributes(&self, db: &'db dyn Db) -> impl Iterator { self.importing_file(db) .into_iter() @@ -10858,7 +11368,10 @@ impl<'db> ModuleLiteralType<'db> { db, &CallArguments::positional([Type::string_literal(db, name)]), ) { - return Place::Defined(outcome.return_type(db), origin, boundness).into(); + return PlaceAndQualifiers { + place: Place::Defined(outcome.return_type(db), origin, boundness), + qualifiers: TypeQualifiers::FROM_MODULE_GETATTR, + }; } } } @@ -11901,7 +12414,7 @@ pub(crate) mod tests { assert!(todo1.is_assignable_to(&db, int)); // We lose information when combining several `Todo` types. This is an - // acknowledged limitation of the current implementation. 
We can not + // acknowledged limitation of the current implementation. We cannot // easily store the meta information of several `Todo`s in a single // variant, as `TodoType` needs to implement `Copy`, meaning it can't // contain `Vec`/`Box`/etc., and can't be boxed itself. @@ -11954,6 +12467,27 @@ pub(crate) mod tests { assert!(div.is_equivalent_to(&db, div)); assert!(!div.is_equivalent_to(&db, Type::unknown())); assert!(!Type::unknown().is_equivalent_to(&db, div)); + assert!(!div.is_redundant_with(&db, Type::unknown())); + assert!(!Type::unknown().is_redundant_with(&db, div)); + + let truthy_div = IntersectionBuilder::new(&db) + .add_positive(div) + .add_negative(Type::AlwaysFalsy) + .build(); + + let union = UnionType::from_elements(&db, [Type::unknown(), truthy_div]); + assert!(!truthy_div.is_redundant_with(&db, Type::unknown())); + assert_eq!( + union.display(&db).to_string(), + "Unknown | (Divergent & ~AlwaysFalsy)" + ); + + let union = UnionType::from_elements(&db, [truthy_div, Type::unknown()]); + assert!(!Type::unknown().is_redundant_with(&db, truthy_div)); + assert_eq!( + union.display(&db).to_string(), + "(Divergent & ~AlwaysFalsy) | Unknown" + ); // The `object` type has a good convergence property, that is, its union with all other types is `object`. // (e.g. 
`object | tuple[Divergent] == object`, `object | tuple[object] == object`) diff --git a/crates/ty_python_semantic/src/types/bound_super.rs b/crates/ty_python_semantic/src/types/bound_super.rs index 011318db51..24d6573c28 100644 --- a/crates/ty_python_semantic/src/types/bound_super.rs +++ b/crates/ty_python_semantic/src/types/bound_super.rs @@ -404,6 +404,9 @@ impl<'db> BoundSuperType<'db> { .to_specialized_instance(db, [key_builder.build(), value_builder.build()]), ); } + Type::NewTypeInstance(newtype) => { + return delegate_to(Type::instance(db, newtype.base_class_type(db))); + } Type::Callable(callable) if callable.is_function_like(db) => { return delegate_to(KnownClass::FunctionType.to_instance(db)); } diff --git a/crates/ty_python_semantic/src/types/builder.rs b/crates/ty_python_semantic/src/types/builder.rs index 6b555b6fdb..11017d1571 100644 --- a/crates/ty_python_semantic/src/types/builder.rs +++ b/crates/ty_python_semantic/src/types/builder.rs @@ -781,37 +781,6 @@ impl<'db> IntersectionBuilder<'db> { seen_aliases, ) } - Type::EnumLiteral(enum_literal) => { - let enum_class = enum_literal.enum_class(self.db); - let metadata = - enum_metadata(self.db, enum_class).expect("Class of enum literal is an enum"); - - let enum_members_in_negative_part = self - .intersections - .iter() - .flat_map(|intersection| &intersection.negative) - .filter_map(|ty| ty.as_enum_literal()) - .filter(|lit| lit.enum_class(self.db) == enum_class) - .map(|lit| lit.name(self.db)) - .chain(std::iter::once(enum_literal.name(self.db))) - .collect::>(); - - let all_members_are_in_negative_part = metadata - .members - .keys() - .all(|name| enum_members_in_negative_part.contains(name)); - - if all_members_are_in_negative_part { - for inner in &mut self.intersections { - inner.add_negative(self.db, enum_literal.enum_class_instance(self.db)); - } - } else { - for inner in &mut self.intersections { - inner.add_negative(self.db, ty); - } - } - self - } _ => { for inner in &mut self.intersections 
{ inner.add_negative(self.db, ty); @@ -1177,6 +1146,39 @@ impl<'db> InnerIntersectionBuilder<'db> { fn build(mut self, db: &'db dyn Db) -> Type<'db> { self.simplify_constrained_typevars(db); + + // If any typevars are in `self.positive`, speculatively solve all bounded type variables + // to their upper bound and all constrained type variables to the union of their constraints. + // If that speculative intersection simplifies to `Never`, this intersection must also simplify + // to `Never`. + if self.positive.iter().any(|ty| ty.is_type_var()) { + let mut speculative = IntersectionBuilder::new(db); + for pos in &self.positive { + match pos { + Type::TypeVar(type_var) => { + match type_var.typevar(db).bound_or_constraints(db) { + Some(TypeVarBoundOrConstraints::UpperBound(bound)) => { + speculative = speculative.add_positive(bound); + } + Some(TypeVarBoundOrConstraints::Constraints(constraints)) => { + speculative = speculative.add_positive(Type::Union(constraints)); + } + // TypeVars without a bound or constraint implicitly have `object` as their + // upper bound, and it is always a no-op to add `object` to an intersection. 
+ None => {} + } + } + _ => speculative = speculative.add_positive(*pos), + } + } + for neg in &self.negative { + speculative = speculative.add_negative(*neg); + } + if speculative.build().is_never() { + return Type::Never; + } + } + match (self.positive.len(), self.negative.len()) { (0, 0) => Type::object(), (1, 0) => self.positive[0], diff --git a/crates/ty_python_semantic/src/types/call.rs b/crates/ty_python_semantic/src/types/call.rs index e2fb7dac96..084fdbcfbd 100644 --- a/crates/ty_python_semantic/src/types/call.rs +++ b/crates/ty_python_semantic/src/types/call.rs @@ -1,8 +1,8 @@ use super::context::InferContext; use super::{Signature, Type, TypeContext}; use crate::Db; -use crate::types::PropertyInstanceType; use crate::types::call::bind::BindingError; +use crate::types::{MemberLookupPolicy, PropertyInstanceType}; use ruff_python_ast as ast; mod arguments; @@ -16,6 +16,16 @@ impl<'db> Type<'db> { left_ty: Type<'db>, op: ast::Operator, right_ty: Type<'db>, + ) -> Result, CallBinOpError> { + Self::try_call_bin_op_with_policy(db, left_ty, op, right_ty, MemberLookupPolicy::default()) + } + + pub(crate) fn try_call_bin_op_with_policy( + db: &'db dyn Db, + left_ty: Type<'db>, + op: ast::Operator, + right_ty: Type<'db>, + policy: MemberLookupPolicy, ) -> Result, CallBinOpError> { // We either want to call lhs.__op__ or rhs.__rop__. 
The full decision tree from // the Python spec [1] is: @@ -43,39 +53,43 @@ impl<'db> Type<'db> { && rhs_reflected != left_class.member(db, reflected_dunder).place { return Ok(right_ty - .try_call_dunder( + .try_call_dunder_with_policy( db, reflected_dunder, - CallArguments::positional([left_ty]), + &mut CallArguments::positional([left_ty]), TypeContext::default(), + policy, ) .or_else(|_| { - left_ty.try_call_dunder( + left_ty.try_call_dunder_with_policy( db, op.dunder(), - CallArguments::positional([right_ty]), + &mut CallArguments::positional([right_ty]), TypeContext::default(), + policy, ) })?); } } - let call_on_left_instance = left_ty.try_call_dunder( + let call_on_left_instance = left_ty.try_call_dunder_with_policy( db, op.dunder(), - CallArguments::positional([right_ty]), + &mut CallArguments::positional([right_ty]), TypeContext::default(), + policy, ); call_on_left_instance.or_else(|_| { if left_ty == right_ty { Err(CallBinOpError::NotSupported) } else { - Ok(right_ty.try_call_dunder( + Ok(right_ty.try_call_dunder_with_policy( db, op.reflected_dunder(), - CallArguments::positional([left_ty]), + &mut CallArguments::positional([left_ty]), TypeContext::default(), + policy, )?) 
} }) diff --git a/crates/ty_python_semantic/src/types/call/bind.rs b/crates/ty_python_semantic/src/types/call/bind.rs index 1b4629b301..ef2f892200 100644 --- a/crates/ty_python_semantic/src/types/call/bind.rs +++ b/crates/ty_python_semantic/src/types/call/bind.rs @@ -9,6 +9,7 @@ use std::fmt; use itertools::{Either, Itertools}; use ruff_db::parsed::parsed_module; use ruff_python_ast::name::Name; +use rustc_hash::FxHashSet; use smallvec::{SmallVec, smallvec, smallvec_inline}; use super::{Argument, CallArguments, CallError, CallErrorKind, InferContext, Signature, Type}; @@ -34,10 +35,11 @@ use crate::types::generics::{ use crate::types::signatures::{Parameter, ParameterForm, ParameterKind, Parameters}; use crate::types::tuple::{TupleLength, TupleType}; use crate::types::{ - BoundMethodType, ClassLiteral, DataclassFlags, DataclassParams, FieldInstance, - KnownBoundMethodType, KnownClass, KnownInstanceType, MemberLookupPolicy, PropertyInstanceType, - SpecialFormType, TrackedConstraintSet, TypeAliasType, TypeContext, UnionBuilder, UnionType, - WrapperDescriptorKind, enums, ide_support, infer_isolated_expression, todo_type, + BoundMethodType, BoundTypeVarIdentity, ClassLiteral, DataclassFlags, DataclassParams, + FieldInstance, KnownBoundMethodType, KnownClass, KnownInstanceType, MemberLookupPolicy, + NominalInstanceType, PropertyInstanceType, SpecialFormType, TrackedConstraintSet, + TypeAliasType, TypeContext, UnionBuilder, UnionType, WrapperDescriptorKind, enums, ide_support, + infer_isolated_expression, todo_type, }; use ruff_db::diagnostic::{Annotation, Diagnostic, SubDiagnostic, SubDiagnosticSeverity}; use ruff_python_ast::{self as ast, ArgOrKeyword, PythonVersion}; @@ -46,7 +48,7 @@ use ruff_python_ast::{self as ast, ArgOrKeyword, PythonVersion}; /// compatible with _all_ of the types in the union for the call to be valid. /// /// It's guaranteed that the wrapped bindings have no errors. 
-#[derive(Debug)] +#[derive(Debug, Clone)] pub(crate) struct Bindings<'db> { /// The type that is (hopefully) callable. callable_type: Type<'db>, @@ -148,9 +150,27 @@ impl<'db> Bindings<'db> { mut self, db: &'db dyn Db, argument_types: &CallArguments<'_, 'db>, - call_expression_tcx: &TypeContext<'db>, + call_expression_tcx: TypeContext<'db>, dataclass_field_specifiers: &[Type<'db>], ) -> Result> { + match self.check_types_impl( + db, + argument_types, + call_expression_tcx, + dataclass_field_specifiers, + ) { + Ok(()) => Ok(self), + Err(err) => Err(CallError(err, Box::new(self))), + } + } + + pub(crate) fn check_types_impl( + &mut self, + db: &'db dyn Db, + argument_types: &CallArguments<'_, 'db>, + call_expression_tcx: TypeContext<'db>, + dataclass_field_specifiers: &[Type<'db>], + ) -> Result<(), CallErrorKind> { for element in &mut self.elements { if let Some(mut updated_argument_forms) = element.check_types(db, argument_types, call_expression_tcx) @@ -195,16 +215,13 @@ impl<'db> Bindings<'db> { } if all_ok { - Ok(self) + Ok(()) } else if any_binding_error { - Err(CallError(CallErrorKind::BindingError, Box::new(self))) + Err(CallErrorKind::BindingError) } else if all_not_callable { - Err(CallError(CallErrorKind::NotCallable, Box::new(self))) + Err(CallErrorKind::NotCallable) } else { - Err(CallError( - CallErrorKind::PossiblyNotCallable, - Box::new(self), - )) + Err(CallErrorKind::PossiblyNotCallable) } } @@ -1174,6 +1191,62 @@ impl<'db> Bindings<'db> { )); } + Type::KnownBoundMethod(KnownBoundMethodType::ConstraintSetSatisfies( + tracked, + )) => { + let [Some(other)] = overload.parameter_types() else { + continue; + }; + let Type::KnownInstance(KnownInstanceType::ConstraintSet(other)) = other + else { + continue; + }; + + let result = tracked + .constraints(db) + .implies(db, || other.constraints(db)); + let tracked = TrackedConstraintSet::new(db, result); + overload.set_return_type(Type::KnownInstance( + KnownInstanceType::ConstraintSet(tracked), + )); + } + 
+ Type::KnownBoundMethod( + KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(tracked), + ) => { + let extract_inferable = |instance: &NominalInstanceType<'db>| { + if instance.has_known_class(db, KnownClass::NoneType) { + // Caller explicitly passed None, so no typevars are inferable. + return Some(FxHashSet::default()); + } + instance + .tuple_spec(db)? + .fixed_elements() + .map(|ty| { + ty.as_typevar() + .map(|bound_typevar| bound_typevar.identity(db)) + }) + .collect() + }; + + let inferable = match overload.parameter_types() { + // Caller did not provide argument, so no typevars are inferable. + [None] => FxHashSet::default(), + [Some(Type::NominalInstance(instance))] => { + match extract_inferable(instance) { + Some(inferable) => inferable, + None => continue, + } + } + _ => continue, + }; + + let result = tracked + .constraints(db) + .satisfied_by_all_typevars(db, InferableTypeVars::One(&inferable)); + overload.set_return_type(Type::BooleanLiteral(result)); + } + Type::ClassLiteral(class) => match class.known(db) { Some(KnownClass::Bool) => match overload.parameter_types() { [Some(arg)] => overload.set_return_type(arg.bool(db).into_type(db)), @@ -1307,7 +1380,7 @@ impl<'db> From> for Bindings<'db> { /// If the arguments cannot be matched to formal parameters, we store information about the /// specific errors that occurred when trying to match them up. If the callable has multiple /// overloads, we store this error information for each overload. -#[derive(Debug)] +#[derive(Debug, Clone)] pub(crate) struct CallableBinding<'db> { /// The type that is (hopefully) callable. pub(crate) callable_type: Type<'db>, @@ -1428,7 +1501,7 @@ impl<'db> CallableBinding<'db> { &mut self, db: &'db dyn Db, argument_types: &CallArguments<'_, 'db>, - call_expression_tcx: &TypeContext<'db>, + call_expression_tcx: TypeContext<'db>, ) -> Option { // If this callable is a bound method, prepend the self instance onto the arguments list // before checking. 
@@ -2209,7 +2282,7 @@ pub(crate) enum MatchingOverloadIndex { Multiple(Vec), } -#[derive(Default, Debug)] +#[derive(Default, Debug, Clone)] struct ArgumentForms { values: Vec>, conflicting: Vec, @@ -2614,7 +2687,8 @@ struct ArgumentTypeChecker<'a, 'db> { arguments: &'a CallArguments<'a, 'db>, argument_matches: &'a [MatchedArgument<'db>], parameter_tys: &'a mut [Option>], - call_expression_tcx: &'a TypeContext<'db>, + callable_type: Type<'db>, + call_expression_tcx: TypeContext<'db>, return_ty: Type<'db>, errors: &'a mut Vec>, @@ -2630,7 +2704,8 @@ impl<'a, 'db> ArgumentTypeChecker<'a, 'db> { arguments: &'a CallArguments<'a, 'db>, argument_matches: &'a [MatchedArgument<'db>], parameter_tys: &'a mut [Option>], - call_expression_tcx: &'a TypeContext<'db>, + callable_type: Type<'db>, + call_expression_tcx: TypeContext<'db>, return_ty: Type<'db>, errors: &'a mut Vec>, ) -> Self { @@ -2640,6 +2715,7 @@ impl<'a, 'db> ArgumentTypeChecker<'a, 'db> { arguments, argument_matches, parameter_tys, + callable_type, call_expression_tcx, return_ty, errors, @@ -2680,9 +2756,24 @@ impl<'a, 'db> ArgumentTypeChecker<'a, 'db> { return; }; + let return_with_tcx = self + .callable_type + .synthesized_constructor_return_ty(self.db) + .or(self.signature.return_ty) + .zip(self.call_expression_tcx.annotation); + self.inferable_typevars = generic_context.inferable_typevars(self.db); let mut builder = SpecializationBuilder::new(self.db, self.inferable_typevars); + // Prefer the declared type of generic classes. 
+ let preferred_type_mappings = return_with_tcx.and_then(|(return_ty, tcx)| { + tcx.filter_union(self.db, |ty| ty.class_specialization(self.db).is_some()) + .class_specialization(self.db)?; + + builder.infer(return_ty, tcx).ok()?; + Some(builder.type_mappings().clone()) + }); + let parameters = self.signature.parameters(); for (argument_index, adjusted_argument_index, _, argument_type) in self.enumerate_argument_types() @@ -2695,9 +2786,21 @@ impl<'a, 'db> ArgumentTypeChecker<'a, 'db> { continue; }; - if let Err(error) = builder.infer( + let filter = |declared_ty: BoundTypeVarIdentity<'_>, inferred_ty: Type<'_>| { + // Avoid widening the inferred type if it is already assignable to the + // preferred declared type. + preferred_type_mappings + .as_ref() + .and_then(|types| types.get(&declared_ty)) + .is_none_or(|preferred_ty| { + !inferred_ty.is_assignable_to(self.db, *preferred_ty) + }) + }; + + if let Err(error) = builder.infer_filter( expected_type, variadic_argument_type.unwrap_or(argument_type), + filter, ) { self.errors.push(BindingError::SpecializationError { error, @@ -2707,15 +2810,14 @@ impl<'a, 'db> ArgumentTypeChecker<'a, 'db> { } } - // Build the specialization first without inferring the type context. - let isolated_specialization = builder.build(generic_context, *self.call_expression_tcx); + // Build the specialization first without inferring the complete type context. + let isolated_specialization = builder.build(generic_context, self.call_expression_tcx); let isolated_return_ty = self .return_ty .apply_specialization(self.db, isolated_specialization); let mut try_infer_tcx = || { - let return_ty = self.signature.return_ty?; - let call_expression_tcx = self.call_expression_tcx.annotation?; + let (return_ty, call_expression_tcx) = return_with_tcx?; // A type variable is not a useful type-context for expression inference, and applying it // to the return type can lead to confusing unions in nested generic calls. 
@@ -2723,8 +2825,8 @@ impl<'a, 'db> ArgumentTypeChecker<'a, 'db> { return None; } - // If the return type is already assignable to the annotated type, we can ignore the - // type context and prefer the narrower inferred type. + // If the return type is already assignable to the annotated type, we ignore the rest of + // the type context and prefer the narrower inferred type. if isolated_return_ty.is_assignable_to(self.db, call_expression_tcx) { return None; } @@ -2733,8 +2835,8 @@ impl<'a, 'db> ArgumentTypeChecker<'a, 'db> { // annotated assignment, to closer match the order of any unions written in the type annotation. builder.infer(return_ty, call_expression_tcx).ok()?; - // Otherwise, build the specialization again after inferring the type context. - let specialization = builder.build(generic_context, *self.call_expression_tcx); + // Otherwise, build the specialization again after inferring the complete type context. + let specialization = builder.build(generic_context, self.call_expression_tcx); let return_ty = return_ty.apply_specialization(self.db, specialization); Some((Some(specialization), return_ty)) @@ -2993,7 +3095,7 @@ impl<'db> MatchedArgument<'db> { pub(crate) struct UnknownParameterNameError; /// Binding information for one of the overloads of a callable. 
-#[derive(Debug)] +#[derive(Debug, Clone)] pub(crate) struct Binding<'db> { pub(crate) signature: Signature<'db>, @@ -3092,7 +3194,7 @@ impl<'db> Binding<'db> { &mut self, db: &'db dyn Db, arguments: &CallArguments<'_, 'db>, - call_expression_tcx: &TypeContext<'db>, + call_expression_tcx: TypeContext<'db>, ) { let mut checker = ArgumentTypeChecker::new( db, @@ -3100,6 +3202,7 @@ impl<'db> Binding<'db> { arguments, &self.argument_matches, &mut self.parameter_tys, + self.callable_type, call_expression_tcx, self.return_ty, &mut self.errors, @@ -3544,6 +3647,36 @@ impl<'db> BindingError<'db> { expected_ty, provided_ty, } => { + // Certain special forms in the typing module are aliases for classes + // elsewhere in the standard library. These special forms are not instances of `type`, + // and you cannot use them in place of their aliased classes in *all* situations: + // for example, `dict()` succeeds at runtime, but `typing.Dict()` fails. However, + // they *can* all be used as the second argument to `isinstance` and `issubclass`. + // We model that specific aspect of their behaviour here. + // + // This is implemented as a special case in call-binding machinery because overriding + // typeshed's signatures for `isinstance()` and `issubclass()` would be complex and + // error-prone, due to the fact that they are annotated with recursive type aliases. + if parameter.index == 1 + && *argument_index == Some(1) + && matches!( + callable_ty + .as_function_literal() + .and_then(|function| function.known(context.db())), + Some(KnownFunction::IsInstance | KnownFunction::IsSubclass) + ) + && provided_ty + .as_special_form() + .is_some_and(SpecialFormType::is_valid_isinstance_target) + { + return; + } + + // TODO: Ideally we would not emit diagnostics for `TypedDict` literal arguments + // here (see `diagnostic::is_invalid_typed_dict_literal`). 
However, we may have + // silenced diagnostics during overload evaluation, and rely on the assignability + // diagnostic being emitted here. + let range = Self::get_node(node, *argument_index); let Some(builder) = context.report_lint(&INVALID_ARGUMENT_TYPE, range) else { return; diff --git a/crates/ty_python_semantic/src/types/class.rs b/crates/ty_python_semantic/src/types/class.rs index 75190a3c3a..d939dabb01 100644 --- a/crates/ty_python_semantic/src/types/class.rs +++ b/crates/ty_python_semantic/src/types/class.rs @@ -30,7 +30,7 @@ use crate::types::member::{Member, class_member}; use crate::types::signatures::{CallableSignature, Parameter, Parameters, Signature}; use crate::types::tuple::{TupleSpec, TupleType}; use crate::types::typed_dict::typed_dict_params_from_class_def; -use crate::types::visitor::{NonAtomicType, TypeKind, TypeVisitor, walk_non_atomic_type}; +use crate::types::visitor::{TypeCollector, TypeVisitor, walk_type_with_recursion_guard}; use crate::types::{ ApplyTypeMappingVisitor, Binding, BoundSuperType, CallableType, DataclassFlags, DataclassParams, DeprecatedInstance, FindLegacyTypeVarsVisitor, HasRelationToVisitor, @@ -258,7 +258,7 @@ impl<'db> GenericAlias<'db> { ) -> Self { let tcx = tcx .annotation - .and_then(|ty| ty.specialization_of(db, Some(self.origin(db)))) + .and_then(|ty| ty.specialization_of(db, self.origin(db))) .map(|specialization| specialization.types(db)) .unwrap_or(&[]); @@ -294,7 +294,7 @@ impl<'db> From> for Type<'db> { #[salsa::tracked] impl<'db> VarianceInferable<'db> for GenericAlias<'db> { - #[salsa::tracked] + #[salsa::tracked(heap_size=ruff_memory_usage::heap_size)] fn variance_of(self, db: &'db dyn Db, typevar: BoundTypeVarInstance<'db>) -> TypeVarVariance { let origin = self.origin(db); @@ -358,6 +358,14 @@ pub enum ClassType<'db> { #[salsa::tracked] impl<'db> ClassType<'db> { + /// Return a `ClassType` representing the class `builtins.object` + pub(super) fn object(db: &'db dyn Db) -> Self { + KnownClass::Object + 
.to_class_literal(db) + .to_class_type(db) + .unwrap() + } + pub(super) const fn is_generic(self) -> bool { matches!(self, Self::Generic(_)) } @@ -637,12 +645,17 @@ impl<'db> ClassType<'db> { return true; } - // Optimisation: if either class is `@final`, we only need to do one `is_subclass_of` call. if self.is_final(db) { - return self.is_subclass_of(db, other); + return self + .iter_mro(db) + .filter_map(ClassBase::into_class) + .any(|class| class.class_literal(db).0 == other.class_literal(db).0); } if other.is_final(db) { - return other.is_subclass_of(db, self); + return other + .iter_mro(db) + .filter_map(ClassBase::into_class) + .any(|class| class.class_literal(db).0 == self.class_literal(db).0); } // Two disjoint bases can only coexist in an MRO if one is a subclass of the other. @@ -1302,9 +1315,7 @@ impl<'db> Field<'db> { /// Returns true if this field is a `dataclasses.KW_ONLY` sentinel. /// pub(crate) fn is_kw_only_sentinel(&self, db: &'db dyn Db) -> bool { - self.declared_ty - .as_nominal_instance() - .is_some_and(|instance| instance.has_known_class(db, KnownClass::KwOnly)) + self.declared_ty.is_instance_of(db, KnownClass::KwOnly) } } @@ -1434,7 +1445,7 @@ impl<'db> ClassLiteral<'db> { #[derive(Default)] struct CollectTypeVars<'db> { typevars: RefCell>>, - seen_types: RefCell>>, + recursion_guard: TypeCollector<'db>, } impl<'db> TypeVisitor<'db> for CollectTypeVars<'db> { @@ -1451,16 +1462,7 @@ impl<'db> ClassLiteral<'db> { } fn visit_type(&self, db: &'db dyn Db, ty: Type<'db>) { - match TypeKind::from(ty) { - TypeKind::Atomic => {} - TypeKind::NonAtomic(non_atomic_type) => { - if !self.seen_types.borrow_mut().insert(non_atomic_type) { - // If we have already seen this type, we can skip it. 
- return; - } - walk_non_atomic_type(db, non_atomic_type, self); - } - } + walk_type_with_recursion_guard(db, ty, self, &self.recursion_guard); } } @@ -2176,7 +2178,8 @@ impl<'db> ClassLiteral<'db> { }); if member.is_undefined() { - if let Some(synthesized_member) = self.own_synthesized_member(db, specialization, name) + if let Some(synthesized_member) = + self.own_synthesized_member(db, specialization, inherited_generic_context, name) { return Member::definitely_declared(synthesized_member); } @@ -2192,6 +2195,7 @@ impl<'db> ClassLiteral<'db> { self, db: &'db dyn Db, specialization: Option>, + inherited_generic_context: Option>, name: &str, ) -> Option> { let dataclass_params = self.dataclass_params(db); @@ -2320,7 +2324,7 @@ impl<'db> ClassLiteral<'db> { let signature = match name { "__new__" | "__init__" => Signature::new_generic( - self.inherited_generic_context(db), + inherited_generic_context.or_else(|| self.inherited_generic_context(db)), Parameters::new(parameters), return_ty, ), @@ -2702,7 +2706,7 @@ impl<'db> ClassLiteral<'db> { name: &str, policy: MemberLookupPolicy, ) -> PlaceAndQualifiers<'db> { - if let Some(member) = self.own_synthesized_member(db, specialization, name) { + if let Some(member) = self.own_synthesized_member(db, specialization, None, name) { Place::bound(member).into() } else { KnownClass::TypedDictFallback @@ -3115,30 +3119,47 @@ impl<'db> ClassLiteral<'db> { union_of_inferred_types = union_of_inferred_types.add(Type::unknown()); } - for (attribute_assignments, method_scope_id) in + for (attribute_assignments, attribute_binding_scope_id) in attribute_assignments(db, class_body_scope, &name) { - let method_scope = index.scope(method_scope_id); - if !is_valid_scope(method_scope) { + let binding_scope = index.scope(attribute_binding_scope_id); + if !is_valid_scope(binding_scope) { continue; } - // The attribute assignment inherits the reachability of the method which contains it - let is_method_reachable = if let Some(method_def) = 
method_scope.node().as_function() { - let method = index.expect_single_definition(method_def); - let method_place = class_table - .symbol_id(&method_def.node(&module).name) - .unwrap(); - class_map - .all_reachable_symbol_bindings(method_place) - .find_map(|bind| { - (bind.binding.is_defined_and(|def| def == method)) - .then(|| class_map.binding_reachability(db, &bind)) - }) - .unwrap_or(Truthiness::AlwaysFalse) - } else { - Truthiness::AlwaysFalse + let scope_for_reachability_analysis = { + if binding_scope.node().as_function().is_some() { + binding_scope + } else if binding_scope.is_eager() { + let mut eager_scope_parent = binding_scope; + while eager_scope_parent.is_eager() + && let Some(parent) = eager_scope_parent.parent() + { + eager_scope_parent = index.scope(parent); + } + eager_scope_parent + } else { + binding_scope + } }; + + // The attribute assignment inherits the reachability of the method which contains it + let is_method_reachable = + if let Some(method_def) = scope_for_reachability_analysis.node().as_function() { + let method = index.expect_single_definition(method_def); + let method_place = class_table + .symbol_id(&method_def.node(&module).name) + .unwrap(); + class_map + .all_reachable_symbol_bindings(method_place) + .find_map(|bind| { + (bind.binding.is_defined_and(|def| def == method)) + .then(|| class_map.binding_reachability(db, &bind)) + }) + .unwrap_or(Truthiness::AlwaysFalse) + } else { + Truthiness::AlwaysFalse + }; if is_method_reachable.is_always_false() { continue; } @@ -3542,7 +3563,7 @@ impl<'db> From> for ClassType<'db> { #[salsa::tracked] impl<'db> VarianceInferable<'db> for ClassLiteral<'db> { - #[salsa::tracked(cycle_initial=crate::types::variance_cycle_initial)] + #[salsa::tracked(cycle_initial=crate::types::variance_cycle_initial, heap_size=ruff_memory_usage::heap_size)] fn variance_of(self, db: &'db dyn Db, typevar: BoundTypeVarInstance<'db>) -> TypeVarVariance { let typevar_in_generic_context = self .generic_context(db) diff 
--git a/crates/ty_python_semantic/src/types/class_base.rs b/crates/ty_python_semantic/src/types/class_base.rs index 4d43b58d06..9cc09acc0f 100644 --- a/crates/ty_python_semantic/src/types/class_base.rs +++ b/crates/ty_python_semantic/src/types/class_base.rs @@ -49,10 +49,7 @@ impl<'db> ClassBase<'db> { ClassBase::Dynamic(DynamicType::Any) => "Any", ClassBase::Dynamic(DynamicType::Unknown) => "Unknown", ClassBase::Dynamic( - DynamicType::Todo(_) - | DynamicType::TodoPEP695ParamSpec - | DynamicType::TodoTypeAlias - | DynamicType::TodoUnpack, + DynamicType::Todo(_) | DynamicType::TodoTypeAlias | DynamicType::TodoUnpack, ) => "@Todo", ClassBase::Dynamic(DynamicType::Divergent(_)) => "Divergent", ClassBase::Protocol => "Protocol", @@ -140,6 +137,12 @@ impl<'db> ClassBase<'db> { Type::TypeAlias(alias) => Self::try_from_type(db, alias.value_type(db), subclass), + Type::NewTypeInstance(newtype) => ClassBase::try_from_type( + db, + Type::instance(db, newtype.base_class_type(db)), + subclass, + ), + Type::PropertyInstance(_) | Type::BooleanLiteral(_) | Type::FunctionLiteral(_) @@ -170,7 +173,14 @@ impl<'db> ClassBase<'db> { | KnownInstanceType::TypeVar(_) | KnownInstanceType::Deprecated(_) | KnownInstanceType::Field(_) - | KnownInstanceType::ConstraintSet(_) => None, + | KnownInstanceType::ConstraintSet(_) + | KnownInstanceType::UnionType(_) + | KnownInstanceType::Literal(_) + // A class inheriting from a newtype would make intuitive sense, but newtype + // wrappers are just identity callables at runtime, so this sort of inheritance + // doesn't work and isn't allowed. 
+ | KnownInstanceType::NewType(_) => None, + KnownInstanceType::Annotated(ty) => Self::try_from_type(db, ty.inner(db), subclass), }, Type::SpecialForm(special_form) => match special_form { diff --git a/crates/ty_python_semantic/src/types/constraints.rs b/crates/ty_python_semantic/src/types/constraints.rs index ef7632ff2e..d2520deb08 100644 --- a/crates/ty_python_semantic/src/types/constraints.rs +++ b/crates/ty_python_semantic/src/types/constraints.rs @@ -65,7 +65,10 @@ use salsa::plumbing::AsId; use crate::Db; use crate::types::generics::InferableTypeVars; -use crate::types::{BoundTypeVarInstance, IntersectionType, Type, TypeRelation, UnionType}; +use crate::types::{ + BoundTypeVarInstance, IntersectionType, Type, TypeRelation, TypeVarBoundOrConstraints, + UnionType, +}; /// An extension trait for building constraint sets from [`Option`] values. pub(crate) trait OptionConstraintsExtension { @@ -256,6 +259,28 @@ impl<'db> ConstraintSet<'db> { } } + /// Returns whether this constraint set is satisfied by all of the typevars that it mentions. + /// + /// Each typevar has a set of _valid specializations_, which is defined by any upper bound or + /// constraints that the typevar has. + /// + /// Each typevar is also either _inferable_ or _non-inferable_. (You provide a list of the + /// `inferable` typevars; all others are considered non-inferable.) For an inferable typevar, + /// then there must be _some_ valid specialization that satisfies the constraint set. For a + /// non-inferable typevar, then _all_ valid specializations must satisfy it. + /// + /// Note that we don't have to consider typevars that aren't mentioned in the constraint set, + /// since the constraint set cannot be affected by any typevars that it does not mention. That + /// means that those additional typevars trivially satisfy the constraint set, regardless of + /// whether they are inferable or not. 
+ pub(crate) fn satisfied_by_all_typevars( + self, + db: &'db dyn Db, + inferable: InferableTypeVars<'_, 'db>, + ) -> bool { + self.node.satisfied_by_all_typevars(db, inferable) + } + /// Updates this constraint set to hold the union of itself and another constraint set. pub(crate) fn union(&mut self, db: &'db dyn Db, other: Self) -> Self { self.node = self.node.or(db, other.node); @@ -295,6 +320,11 @@ impl<'db> ConstraintSet<'db> { self } + /// Returns a constraint set encoding that this constraint set implies another. + pub(crate) fn implies(self, db: &'db dyn Db, other: impl FnOnce() -> Self) -> Self { + self.negate(db).or(db, other) + } + pub(crate) fn iff(self, db: &'db dyn Db, other: Self) -> Self { ConstraintSet { node: self.node.iff(db, other.node), @@ -356,12 +386,58 @@ impl<'db> ConstrainedTypeVar<'db> { fn new_node( db: &'db dyn Db, typevar: BoundTypeVarInstance<'db>, - lower: Type<'db>, - upper: Type<'db>, + mut lower: Type<'db>, + mut upper: Type<'db>, ) -> Node<'db> { debug_assert_eq!(lower, lower.bottom_materialization(db)); debug_assert_eq!(upper, upper.top_materialization(db)); + // Two identical typevars must always solve to the same type, so it is not useful to have + // an upper or lower bound that is the typevar being constrained. 
+ match lower { + Type::TypeVar(lower_bound_typevar) + if typevar.is_same_typevar_as(db, lower_bound_typevar) => + { + lower = Type::Never; + } + Type::Intersection(intersection) + if intersection.positive(db).iter().any(|element| { + element.as_typevar().is_some_and(|element_bound_typevar| { + typevar.is_same_typevar_as(db, element_bound_typevar) + }) + }) => + { + lower = Type::Never; + } + Type::Intersection(intersection) + if intersection.negative(db).iter().any(|element| { + element.as_typevar().is_some_and(|element_bound_typevar| { + typevar.is_same_typevar_as(db, element_bound_typevar) + }) + }) => + { + return Node::AlwaysFalse; + } + _ => {} + } + match upper { + Type::TypeVar(upper_bound_typevar) + if typevar.is_same_typevar_as(db, upper_bound_typevar) => + { + upper = Type::object(); + } + Type::Union(union) + if union.elements(db).iter().any(|element| { + element.as_typevar().is_some_and(|element_bound_typevar| { + typevar.is_same_typevar_as(db, element_bound_typevar) + }) + }) => + { + upper = Type::object(); + } + _ => {} + } + // If `lower ≰ upper`, then the constraint cannot be satisfied, since there is no type that // is both greater than `lower`, and less than `upper`. 
if !lower.is_subtype_of(db, upper) { @@ -746,6 +822,13 @@ impl<'db> Node<'db> { .or(db, self.negate(db).and(db, else_node)) } + fn satisfies(self, db: &'db dyn Db, other: Self) -> Self { + let simplified_self = self.simplify(db); + let implication = simplified_self.implies(db, other); + let (simplified, domain) = implication.simplify_and_domain(db); + simplified.and(db, domain) + } + fn when_subtype_of_given( self, db: &'db dyn Db, @@ -767,10 +850,79 @@ impl<'db> Node<'db> { _ => return lhs.when_subtype_of(db, rhs, inferable).node, }; - let simplified_self = self.simplify(db); - let implication = simplified_self.implies(db, constraint); - let (simplified, domain) = implication.simplify_and_domain(db); - simplified.and(db, domain) + self.satisfies(db, constraint) + } + + fn satisfied_by_all_typevars( + self, + db: &'db dyn Db, + inferable: InferableTypeVars<'_, 'db>, + ) -> bool { + match self { + Node::AlwaysTrue => return true, + Node::AlwaysFalse => return false, + Node::Interior(_) => {} + } + + let mut typevars = FxHashSet::default(); + self.for_each_constraint(db, &mut |constraint| { + typevars.insert(constraint.typevar(db)); + }); + + // Returns if some specialization satisfies this constraint set. + let some_specialization_satisfies = move |specializations: Node<'db>| { + let when_satisfied = specializations + .satisfies(db, self) + .and(db, specializations) + .simplify(db); + !when_satisfied.is_never_satisfied() + }; + + // Returns if all specializations satisfy this constraint set. + let all_specializations_satisfy = move |specializations: Node<'db>| { + let when_satisfied = specializations + .satisfies(db, self) + .and(db, specializations) + .simplify(db); + when_satisfied + .iff(db, specializations) + .is_always_satisfied(db) + }; + + for typevar in typevars { + if typevar.is_inferable(db, inferable) { + // If the typevar is in inferable position, we need to verify that some valid + // specialization satisfies the constraint set. 
+ let valid_specializations = typevar.valid_specializations(db); + if !some_specialization_satisfies(valid_specializations) { + return false; + } + } else { + // If the typevar is in non-inferable position, we need to verify that all required + // specializations satisfy the constraint set. Complicating things, the typevar + // might have gradual constraints. For those, we need to know the range of valid + // materializations, but we only need some materialization to satisfy the + // constraint set. + // + // NB: We could also model this by introducing a synthetic typevar for the gradual + // constraint, treating that synthetic typevar as always inferable (so that we only + // need to verify for some materialization), and then update this typevar's + // constraint to refer to the synthetic typevar instead of the original gradual + // constraint. + let (static_specializations, gradual_constraints) = + typevar.required_specializations(db); + if !all_specializations_satisfy(static_specializations) { + return false; + } + for gradual_constraint in gradual_constraints { + if !some_specialization_satisfies(gradual_constraint) { + return false; + } + } + } + } + + true } /// Returns a new BDD that returns the same results as `self`, but with some inputs fixed to @@ -1861,6 +2013,93 @@ impl<'db> SatisfiedClauses<'db> { } } +impl<'db> BoundTypeVarInstance<'db> { + /// Returns the valid specializations of a typevar. This is used when checking a constraint set + /// when this typevar is in inferable position, where we only need _some_ specialization to + /// satisfy the constraint set. + fn valid_specializations(self, db: &'db dyn Db) -> Node<'db> { + // For gradual upper bounds and constraints, we are free to choose any materialization that + // makes the check succeed. In inferable positions, it is most helpful to choose a + // materialization that is as permissive as possible, since that maximizes the number of + // valid specializations that might satisfy the check. 
We therefore take the top + // materialization of the bound or constraints. + // + // Moreover, for a gradual constraint, we don't need to worry that typevar constraints are + // _equality_ comparisons, not _subtyping_ comparisons — since we are only going to check + // that _some_ valid specialization satisfies the constraint set, it's correct for us to + // return the range of valid materializations that we can choose from. + match self.typevar(db).bound_or_constraints(db) { + None => Node::AlwaysTrue, + Some(TypeVarBoundOrConstraints::UpperBound(bound)) => { + let bound = bound.top_materialization(db); + ConstrainedTypeVar::new_node(db, self, Type::Never, bound) + } + Some(TypeVarBoundOrConstraints::Constraints(constraints)) => { + let mut specializations = Node::AlwaysFalse; + for constraint in constraints.elements(db) { + let constraint_lower = constraint.bottom_materialization(db); + let constraint_upper = constraint.top_materialization(db); + specializations = specializations.or( + db, + ConstrainedTypeVar::new_node(db, self, constraint_lower, constraint_upper), + ); + } + specializations + } + } + } + + /// Returns the required specializations of a typevar. This is used when checking a constraint + /// set when this typevar is in non-inferable position, where we need _all_ specializations to + /// satisfy the constraint set. + /// + /// That causes complications if this is a constrained typevar, where one of the constraints is + /// gradual. In that case, we need to return the range of valid materializations, but we don't + /// want to require that all of those materializations satisfy the constraint set. + /// + /// To handle this, we return a "primary" result, and an iterator of any gradual constraints. + /// For an unbounded/unconstrained typevar or a bounded typevar, the primary result fully + /// specifies the required specializations, and the iterator will be empty. 
For a constrained + /// typevar, the primary result will include the fully static constraints, and the iterator + /// will include an entry for each non-fully-static constraint. + fn required_specializations( + self, + db: &'db dyn Db, + ) -> (Node<'db>, impl IntoIterator>) { + // For upper bounds and constraints, we are free to choose any materialization that makes + // the check succeed. In non-inferable positions, it is most helpful to choose a + // materialization that is as restrictive as possible, since that minimizes the number of + // valid specializations that must satisfy the check. We therefore take the bottom + // materialization of the bound or constraints. + match self.typevar(db).bound_or_constraints(db) { + None => (Node::AlwaysTrue, Vec::new()), + Some(TypeVarBoundOrConstraints::UpperBound(bound)) => { + let bound = bound.bottom_materialization(db); + ( + ConstrainedTypeVar::new_node(db, self, Type::Never, bound), + Vec::new(), + ) + } + Some(TypeVarBoundOrConstraints::Constraints(constraints)) => { + let mut non_gradual_constraints = Node::AlwaysFalse; + let mut gradual_constraints = Vec::new(); + for constraint in constraints.elements(db) { + let constraint_lower = constraint.bottom_materialization(db); + let constraint_upper = constraint.top_materialization(db); + let constraint = + ConstrainedTypeVar::new_node(db, self, constraint_lower, constraint_upper); + if constraint_lower == constraint_upper { + non_gradual_constraints = non_gradual_constraints.or(db, constraint); + } else { + gradual_constraints.push(constraint); + } + } + (non_gradual_constraints, gradual_constraints) + } + } + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/ty_python_semantic/src/types/definition.rs b/crates/ty_python_semantic/src/types/definition.rs index f98d47ba93..9095dcea44 100644 --- a/crates/ty_python_semantic/src/types/definition.rs +++ b/crates/ty_python_semantic/src/types/definition.rs @@ -12,6 +12,7 @@ pub enum TypeDefinition<'db> { 
Function(Definition<'db>), TypeVar(Definition<'db>), TypeAlias(Definition<'db>), + NewType(Definition<'db>), } impl TypeDefinition<'_> { @@ -21,7 +22,8 @@ impl TypeDefinition<'_> { Self::Class(definition) | Self::Function(definition) | Self::TypeVar(definition) - | Self::TypeAlias(definition) => { + | Self::TypeAlias(definition) + | Self::NewType(definition) => { let module = parsed_module(db, definition.file(db)).load(db); Some(definition.focus_range(db, &module)) } @@ -38,7 +40,8 @@ impl TypeDefinition<'_> { Self::Class(definition) | Self::Function(definition) | Self::TypeVar(definition) - | Self::TypeAlias(definition) => { + | Self::TypeAlias(definition) + | Self::NewType(definition) => { let module = parsed_module(db, definition.file(db)).load(db); Some(definition.full_range(db, &module)) } diff --git a/crates/ty_python_semantic/src/types/diagnostic.rs b/crates/ty_python_semantic/src/types/diagnostic.rs index 7db83b9b88..25c1efa27d 100644 --- a/crates/ty_python_semantic/src/types/diagnostic.rs +++ b/crates/ty_python_semantic/src/types/diagnostic.rs @@ -12,6 +12,7 @@ use crate::semantic_index::definition::{Definition, DefinitionKind}; use crate::semantic_index::place::{PlaceTable, ScopedPlaceId}; use crate::semantic_index::{global_scope, place_table}; use crate::suppression::FileSuppressionId; +use crate::types::KnownInstanceType; use crate::types::call::CallError; use crate::types::class::{DisjointBase, DisjointBaseKind, Field}; use crate::types::function::KnownFunction; @@ -63,7 +64,9 @@ pub(crate) fn register_lints(registry: &mut LintRegistryBuilder) { registry.register_lint(&INVALID_EXCEPTION_CAUGHT); registry.register_lint(&INVALID_GENERIC_CLASS); registry.register_lint(&INVALID_LEGACY_TYPE_VARIABLE); + registry.register_lint(&INVALID_PARAMSPEC); registry.register_lint(&INVALID_TYPE_ALIAS_TYPE); + registry.register_lint(&INVALID_NEWTYPE); registry.register_lint(&INVALID_METACLASS); registry.register_lint(&INVALID_OVERLOAD); 
registry.register_lint(&USELESS_OVERLOAD_BODY); @@ -572,10 +575,14 @@ declare_lint! { // Added in #19763. declare_lint! { /// ## What it does - /// Checks for subscript accesses with invalid keys. + /// Checks for subscript accesses with invalid keys and `TypedDict` construction with an + /// unknown key. /// /// ## Why is this bad? - /// Using an invalid key will raise a `KeyError` at runtime. + /// Subscripting with an invalid key will raise a `KeyError` at runtime. + /// + /// Creating a `TypedDict` with an unknown key is likely a mistake; if the `TypedDict` is + /// `closed=true` it also violates the expectations of the type. /// /// ## Examples /// ```python @@ -587,9 +594,13 @@ declare_lint! { /// /// alice = Person(name="Alice", age=30) /// alice["height"] # KeyError: 'height' + /// + /// bob: Person = { "name": "Bob", "age": 30 } # typo! + /// + /// carol = Person(name="Carol", age=25) # typo! /// ``` pub(crate) static INVALID_KEY = { - summary: "detects invalid subscript accesses", + summary: "detects invalid subscript accesses or TypedDict literal keys", status: LintStatus::stable("0.0.1-alpha.17"), default_level: Level::Error, } @@ -872,6 +883,30 @@ declare_lint! { } } +declare_lint! { + /// ## What it does + /// Checks for the creation of invalid `ParamSpec`s + /// + /// ## Why is this bad? + /// There are several requirements that you must follow when creating a `ParamSpec`. + /// + /// ## Examples + /// ```python + /// from typing import ParamSpec + /// + /// P1 = ParamSpec("P1") # okay + /// P2 = ParamSpec("S2") # error: ParamSpec name must match the variable it's assigned to + /// ``` + /// + /// ## References + /// - [Typing spec: ParamSpec](https://typing.python.org/en/latest/spec/generics.html#paramspec) + pub(crate) static INVALID_PARAMSPEC = { + summary: "detects invalid ParamSpec usage", + status: LintStatus::stable("0.0.1-alpha.1"), + default_level: Level::Error, + } +} + declare_lint! 
{ /// ## What it does /// Checks for the creation of invalid `TypeAliasType`s @@ -893,6 +928,30 @@ declare_lint! { } } +declare_lint! { + /// ## What it does + /// Checks for the creation of invalid `NewType`s + /// + /// ## Why is this bad? + /// There are several requirements that you must follow when creating a `NewType`. + /// + /// ## Examples + /// ```python + /// from typing import NewType + /// + /// def get_name() -> str: ... + /// + /// Foo = NewType("Foo", int) # okay + /// Bar = NewType(get_name(), int) # error: The first argument to `NewType` must be a string literal + /// Baz = NewType("Baz", int | str) # error: invalid base for `typing.NewType` + /// ``` + pub(crate) static INVALID_NEWTYPE = { + summary: "detects invalid NewType definitions", + status: LintStatus::preview("1.0.0"), + default_level: Level::Error, + } +} + declare_lint! { /// ## What it does /// Checks for arguments to `metaclass=` that are invalid. @@ -1992,7 +2051,21 @@ pub(super) fn report_slice_step_size_zero(context: &InferContext, node: AnyNodeR let Some(builder) = context.report_lint(&ZERO_STEPSIZE_IN_SLICE, node) else { return; }; - builder.into_diagnostic("Slice step size can not be zero"); + builder.into_diagnostic("Slice step size cannot be zero"); +} + +// We avoid emitting invalid assignment diagnostic for literal assignments to a `TypedDict`, as +// they can only occur if we already failed to validate the dict (and emitted some diagnostic). 
+pub(crate) fn is_invalid_typed_dict_literal( + db: &dyn Db, + target_ty: Type, + source: AnyNodeRef<'_>, +) -> bool { + target_ty + .filter_union(db, Type::is_typed_dict) + .as_typed_dict() + .is_some() + && matches!(source, AnyNodeRef::ExprDict(_)) } fn report_invalid_assignment_with_message( @@ -2032,15 +2105,27 @@ pub(super) fn report_invalid_assignment<'db>( target_ty: Type, mut source_ty: Type<'db>, ) { + let value_expr = match definition.kind(context.db()) { + DefinitionKind::Assignment(def) => Some(def.value(context.module())), + DefinitionKind::AnnotatedAssignment(def) => def.value(context.module()), + DefinitionKind::NamedExpression(def) => Some(&*def.node(context.module()).value), + _ => None, + }; + + if let Some(value_expr) = value_expr + && is_invalid_typed_dict_literal(context.db(), target_ty, value_expr.into()) + { + return; + } + let settings = DisplaySettings::from_possibly_ambiguous_type_pair(context.db(), target_ty, source_ty); - if let DefinitionKind::AnnotatedAssignment(annotated_assignment) = definition.kind(context.db()) - && let Some(value) = annotated_assignment.value(context.module()) - { + if let Some(value_expr) = value_expr { // Re-infer the RHS of the annotated assignment, ignoring the type context for more precise // error messages. - source_ty = infer_isolated_expression(context.db(), definition.scope(context.db()), value); + source_ty = + infer_isolated_expression(context.db(), definition.scope(context.db()), value_expr); } report_invalid_assignment_with_message( @@ -2062,6 +2147,11 @@ pub(super) fn report_invalid_attribute_assignment( source_ty: Type, attribute_name: &'_ str, ) { + // TODO: Ideally we would not emit diagnostics for `TypedDict` literal arguments + // here (see `diagnostic::is_invalid_typed_dict_literal`). However, we may have + // silenced diagnostics during attribute resolution, and rely on the assignability + // diagnostic being emitted here. 
+ report_invalid_assignment_with_message( context, node, @@ -2834,6 +2924,24 @@ pub(crate) fn report_invalid_or_unsupported_base( return; } + if let Type::KnownInstance(KnownInstanceType::NewType(newtype)) = base_type { + let Some(builder) = context.report_lint(&INVALID_BASE, base_node) else { + return; + }; + let mut diagnostic = builder.into_diagnostic("Cannot subclass an instance of NewType"); + diagnostic.info(format_args!( + "Perhaps you were looking for: `{} = NewType('{}', {})`", + class.name(context.db()), + class.name(context.db()), + newtype.name(context.db()), + )); + diagnostic.info(format_args!( + "Definition of class `{}` will raise `TypeError` at runtime", + class.name(context.db()) + )); + return; + } + let tuple_of_types = Type::homogeneous_tuple(db, instance_of_type); let explain_mro_entries = |diagnostic: &mut LintDiagnosticGuard| { @@ -2955,6 +3063,7 @@ pub(crate) fn report_invalid_key_on_typed_dict<'db>( typed_dict_node: AnyNodeRef, key_node: AnyNodeRef, typed_dict_ty: Type<'db>, + full_object_ty: Option>, key_ty: Type<'db>, items: &FxOrderMap>, ) { @@ -2966,14 +3075,24 @@ pub(crate) fn report_invalid_key_on_typed_dict<'db>( let typed_dict_name = typed_dict_ty.display(db); let mut diagnostic = builder.into_diagnostic(format_args!( - "Invalid key access on TypedDict `{typed_dict_name}`", + "Invalid key for TypedDict `{typed_dict_name}`", )); - diagnostic.annotate( + diagnostic.annotate(if let Some(full_object_ty) = full_object_ty { + context.secondary(typed_dict_node).message(format_args!( + "TypedDict `{typed_dict_name}` in {kind} type `{full_object_ty}`", + kind = if full_object_ty.is_union() { + "union" + } else { + "intersection" + }, + full_object_ty = full_object_ty.display(db) + )) + } else { context .secondary(typed_dict_node) - .message(format_args!("TypedDict `{typed_dict_name}`")), - ); + .message(format_args!("TypedDict `{typed_dict_name}`")) + }); let existing_keys = items.iter().map(|(name, _)| name.as_str()); @@ -2985,15 +3104,22 
@@ pub(crate) fn report_invalid_key_on_typed_dict<'db>( String::new() } )); - - diagnostic } - _ => builder.into_diagnostic(format_args!( - "TypedDict `{}` cannot be indexed with a key of type `{}`", - typed_dict_ty.display(db), - key_ty.display(db), - )), - }; + _ => { + let mut diagnostic = builder.into_diagnostic(format_args!( + "Invalid key of type `{}` for TypedDict `{}`", + key_ty.display(db), + typed_dict_ty.display(db), + )); + + if let Some(full_object_ty) = full_object_ty { + diagnostic.info(format_args!( + "The full type of the subscripted object is `{}`", + full_object_ty.display(db) + )); + } + } + } } } diff --git a/crates/ty_python_semantic/src/types/display.rs b/crates/ty_python_semantic/src/types/display.rs index 7748dd3ab5..b8a8a05ac4 100644 --- a/crates/ty_python_semantic/src/types/display.rs +++ b/crates/ty_python_semantic/src/types/display.rs @@ -535,6 +535,12 @@ impl Display for DisplayRepresentation<'_> { Type::KnownBoundMethod(KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_)) => { f.write_str("bound method `ConstraintSet.implies_subtype_of`") } + Type::KnownBoundMethod(KnownBoundMethodType::ConstraintSetSatisfies(_)) => { + f.write_str("bound method `ConstraintSet.satisfies`") + } + Type::KnownBoundMethod(KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars( + _, + )) => f.write_str("bound method `ConstraintSet.satisfied_by_all_typevars`"), Type::WrapperDescriptor(kind) => { let (method, object) = match kind { WrapperDescriptorKind::FunctionTypeDunderGet => ("__get__", "function"), @@ -612,6 +618,7 @@ impl Display for DisplayRepresentation<'_> { .fmt(f), } } + Type::NewTypeInstance(newtype) => f.write_str(newtype.name(self.db)), } } } diff --git a/crates/ty_python_semantic/src/types/enums.rs b/crates/ty_python_semantic/src/types/enums.rs index 671b919929..dbde339221 100644 --- a/crates/ty_python_semantic/src/types/enums.rs +++ b/crates/ty_python_semantic/src/types/enums.rs @@ -6,7 +6,7 @@ use crate::{ place::{Place, 
PlaceAndQualifiers, place_from_bindings, place_from_declarations}, semantic_index::{place_table, use_def_map}, types::{ - ClassLiteral, DynamicType, EnumLiteralType, KnownClass, MemberLookupPolicy, + ClassBase, ClassLiteral, DynamicType, EnumLiteralType, KnownClass, MemberLookupPolicy, StringLiteralType, Type, TypeQualifiers, }, }; @@ -68,9 +68,6 @@ pub(crate) fn enum_metadata<'db>( return None; } - let is_str_enum = - Type::ClassLiteral(class).is_subtype_of(db, KnownClass::StrEnum.to_subclass_of(db)); - let scope_id = class.body_scope(db); let use_def_map = use_def_map(db, scope_id); let table = place_table(db, scope_id); @@ -141,14 +138,48 @@ pub(crate) fn enum_metadata<'db>( // enum.auto Some(KnownClass::Auto) => { auto_counter += 1; - Some(if is_str_enum { + + // `StrEnum`s have different `auto()` behaviour to enums inheriting from `(str, Enum)` + let auto_value_ty = if Type::ClassLiteral(class) + .is_subtype_of(db, KnownClass::StrEnum.to_subclass_of(db)) + { Type::StringLiteral(StringLiteralType::new( db, name.to_lowercase().as_str(), )) } else { - Type::IntLiteral(auto_counter) - }) + let custom_mixins: smallvec::SmallVec<[Option; 1]> = + class + .iter_mro(db, None) + .skip(1) + .filter_map(ClassBase::into_class) + .filter(|class| { + !Type::from(*class).is_subtype_of( + db, + KnownClass::Enum.to_subclass_of(db), + ) + }) + .map(|class| class.known(db)) + .filter(|class| { + !matches!(class, Some(KnownClass::Object)) + }) + .collect(); + + // `IntEnum`s have the same `auto()` behaviour to enums inheriting from `(int, Enum)`, + // and `IntEnum`s also have `int` in their MROs, so both cases are handled here. + // + // In general, the `auto()` behaviour for enums with non-`int` mixins is hard to predict, + // so we fall back to `Any` in those cases. 
+ if matches!( + custom_mixins.as_slice(), + [] | [Some(KnownClass::Int)] + ) { + Type::IntLiteral(auto_counter) + } else { + Type::any() + } + }; + Some(auto_value_ty) } _ => None, diff --git a/crates/ty_python_semantic/src/types/function.rs b/crates/ty_python_semantic/src/types/function.rs index 0f5797ae7a..737a5218e4 100644 --- a/crates/ty_python_semantic/src/types/function.rs +++ b/crates/ty_python_semantic/src/types/function.rs @@ -81,9 +81,9 @@ use crate::types::visitor::any_over_type; use crate::types::{ ApplyTypeMappingVisitor, BoundMethodType, BoundTypeVarInstance, CallableType, ClassBase, ClassLiteral, ClassType, DeprecatedInstance, DynamicType, FindLegacyTypeVarsVisitor, - HasRelationToVisitor, IsDisjointVisitor, IsEquivalentVisitor, KnownClass, NormalizedVisitor, - SpecialFormType, Truthiness, Type, TypeContext, TypeMapping, TypeRelation, UnionBuilder, - binding_type, todo_type, walk_signature, + HasRelationToVisitor, IsDisjointVisitor, IsEquivalentVisitor, KnownClass, KnownInstanceType, + NormalizedVisitor, SpecialFormType, Truthiness, Type, TypeContext, TypeMapping, TypeRelation, + UnionBuilder, binding_type, todo_type, walk_signature, }; use crate::{Db, FxOrderSet, ModuleName, resolve_module}; @@ -534,6 +534,14 @@ pub struct FunctionLiteral<'db> { // The Salsa heap is tracked separately. 
impl get_size2::GetSize for FunctionLiteral<'_> {} +fn overloads_and_implementation_cycle_initial<'db>( + _db: &'db dyn Db, + _id: salsa::Id, + _self: FunctionLiteral<'db>, +) -> (Box<[OverloadLiteral<'db>]>, Option>) { + (Box::new([]), None) +} + #[salsa::tracked] impl<'db> FunctionLiteral<'db> { fn name(self, db: &'db dyn Db) -> &'db ast::name::Name { @@ -576,7 +584,7 @@ impl<'db> FunctionLiteral<'db> { self.last_definition(db).spans(db) } - #[salsa::tracked(returns(ref), heap_size=ruff_memory_usage::heap_size)] + #[salsa::tracked(returns(ref), heap_size=ruff_memory_usage::heap_size, cycle_initial=overloads_and_implementation_cycle_initial)] fn overloads_and_implementation( self, db: &'db dyn Db, @@ -1093,6 +1101,11 @@ fn is_instance_truthiness<'db>( Type::NominalInstance(..) => always_true_if(is_instance(&ty)), + Type::NewTypeInstance(newtype) => always_true_if(is_instance(&Type::instance( + db, + newtype.base_class_type(db), + ))), + Type::BooleanLiteral(..) | Type::BytesLiteral(..) | Type::IntLiteral(..) 
@@ -1747,6 +1760,82 @@ impl KnownFunction { diagnostic .set_primary_message("This call will raise `TypeError` at runtime"); } + + Type::KnownInstance(KnownInstanceType::UnionType(_)) => { + fn find_invalid_elements<'db>( + db: &'db dyn Db, + function: KnownFunction, + ty: Type<'db>, + invalid_elements: &mut Vec>, + ) { + match ty { + Type::ClassLiteral(_) => {} + Type::NominalInstance(instance) + if instance.has_known_class(db, KnownClass::NoneType) => {} + Type::SpecialForm(special_form) + if special_form.is_valid_isinstance_target() => {} + // `Any` can be used in `issubclass()` calls but not `isinstance()` calls + Type::SpecialForm(SpecialFormType::Any) + if function == KnownFunction::IsSubclass => {} + Type::KnownInstance(KnownInstanceType::UnionType(union)) => { + for element in union.elements(db) { + find_invalid_elements( + db, + function, + *element, + invalid_elements, + ); + } + } + _ => invalid_elements.push(ty), + } + } + + let mut invalid_elements = vec![]; + find_invalid_elements(db, self, *second_argument, &mut invalid_elements); + + let Some((first_invalid_element, other_invalid_elements)) = + invalid_elements.split_first() + else { + return; + }; + + let Some(builder) = + context.report_lint(&INVALID_ARGUMENT_TYPE, call_expression) + else { + return; + }; + + let function_name: &str = self.into(); + + let mut diagnostic = builder.into_diagnostic(format_args!( + "Invalid second argument to `{function_name}`" + )); + diagnostic.info(format_args!( + "A `UnionType` instance can only be used as the second argument to \ + `{function_name}` if all elements are class objects" + )); + diagnostic.annotate( + Annotation::secondary(context.span(&call_expression.arguments.args[1])) + .message("This `UnionType` instance contains non-class elements"), + ); + match other_invalid_elements { + [] => diagnostic.info(format_args!( + "Element `{}` in the union is not a class object", + first_invalid_element.display(db) + )), + [single] => diagnostic.info(format_args!( 
+ "Elements `{}` and `{}` in the union are not class objects", + first_invalid_element.display(db), + single.display(db), + )), + _ => diagnostic.info(format_args!( + "Element `{}` in the union, and {} more elements, are not class objects", + first_invalid_element.display(db), + other_invalid_elements.len(), + )) + } + } _ => {} } } diff --git a/crates/ty_python_semantic/src/types/generics.rs b/crates/ty_python_semantic/src/types/generics.rs index 59216ca607..8be33c95fc 100644 --- a/crates/ty_python_semantic/src/types/generics.rs +++ b/crates/ty_python_semantic/src/types/generics.rs @@ -1,4 +1,5 @@ use std::cell::RefCell; +use std::collections::hash_map::Entry; use std::fmt::Display; use itertools::Itertools; @@ -14,7 +15,7 @@ use crate::types::constraints::ConstraintSet; use crate::types::instance::{Protocol, ProtocolInstanceType}; use crate::types::signatures::{Parameter, Parameters, Signature}; use crate::types::tuple::{TupleSpec, TupleType, walk_tuple_type}; -use crate::types::visitor::{NonAtomicType, TypeKind, TypeVisitor, walk_non_atomic_type}; +use crate::types::visitor::{TypeCollector, TypeVisitor, walk_type_with_recursion_guard}; use crate::types::{ ApplyTypeMappingVisitor, BoundTypeVarIdentity, BoundTypeVarInstance, ClassLiteral, FindLegacyTypeVarsVisitor, HasRelationToVisitor, IsDisjointVisitor, IsEquivalentVisitor, @@ -22,7 +23,7 @@ use crate::types::{ TypeMapping, TypeRelation, TypeVarBoundOrConstraints, TypeVarIdentity, TypeVarInstance, TypeVarKind, TypeVarVariance, UnionType, declaration_type, walk_bound_type_var_type, }; -use crate::{Db, FxIndexSet, FxOrderMap, FxOrderSet}; +use crate::{Db, FxOrderMap, FxOrderSet}; /// Returns an iterator of any generic context introduced by the given scope or any enclosing /// scope. 
@@ -288,7 +289,7 @@ impl<'db> GenericContext<'db> { #[derive(Default)] struct CollectTypeVars<'db> { typevars: RefCell>>, - seen_types: RefCell>>, + recursion_guard: TypeCollector<'db>, } impl<'db> TypeVisitor<'db> for CollectTypeVars<'db> { @@ -308,16 +309,7 @@ impl<'db> GenericContext<'db> { } fn visit_type(&self, db: &'db dyn Db, ty: Type<'db>) { - match TypeKind::from(ty) { - TypeKind::Atomic => {} - TypeKind::NonAtomic(non_atomic_type) => { - if !self.seen_types.borrow_mut().insert(non_atomic_type) { - // If we have already seen this type, we can skip it. - return; - } - walk_non_atomic_type(db, non_atomic_type, self); - } - } + walk_type_with_recursion_guard(db, ty, self, &self.recursion_guard); } } @@ -714,7 +706,7 @@ fn is_subtype_in_invariant_position<'db>( // TODO: // This should be removed and properly handled in the respective // `(Type::TypeVar(_), _) | (_, Type::TypeVar(_))` branch of - // `Type::has_relation_to_impl`. Right now, we can not generally + // `Type::has_relation_to_impl`. Right now, we cannot generally // return `ConstraintSet::from(true)` from that branch, as that // leads to union simplification, which means that we lose track // of type variables without recording the constraints under which @@ -969,10 +961,15 @@ impl<'db> Specialization<'db> { let types: Box<[_]> = self .types(db) .iter() + .zip(self.generic_context(db).variables(db)) .enumerate() - .map(|(i, ty)| { + .map(|(i, (ty, typevar))| { let tcx = TypeContext::new(tcx.get(i).copied()); - ty.apply_type_mapping_impl(db, type_mapping, tcx, visitor) + if typevar.variance(db).is_covariant() { + ty.apply_type_mapping_impl(db, type_mapping, tcx, visitor) + } else { + ty.apply_type_mapping_impl(db, &type_mapping.flip(), tcx, visitor) + } }) .collect(); @@ -1319,6 +1316,11 @@ impl<'db> SpecializationBuilder<'db> { } } + /// Returns the current set of type mappings for this specialization. 
+ pub(crate) fn type_mappings(&self) -> &FxHashMap, Type<'db>> { + &self.types + } + pub(crate) fn build( &mut self, generic_context: GenericContext<'db>, @@ -1326,7 +1328,7 @@ impl<'db> SpecializationBuilder<'db> { ) -> Specialization<'db> { let tcx_specialization = tcx .annotation - .and_then(|annotation| annotation.specialization_of(self.db, None)); + .and_then(|annotation| annotation.class_specialization(self.db)); let types = (generic_context.variables_inner(self.db).iter()).map(|(identity, variable)| { @@ -1349,54 +1351,55 @@ impl<'db> SpecializationBuilder<'db> { generic_context.specialize_partial(self.db, types) } - fn add_type_mapping(&mut self, bound_typevar: BoundTypeVarInstance<'db>, ty: Type<'db>) { - self.types - .entry(bound_typevar.identity(self.db)) - .and_modify(|existing| { - *existing = UnionType::from_elements(self.db, [*existing, ty]); - }) - .or_insert(ty); + fn add_type_mapping( + &mut self, + bound_typevar: BoundTypeVarInstance<'db>, + ty: Type<'db>, + filter: impl Fn(BoundTypeVarIdentity<'db>, Type<'db>) -> bool, + ) { + let identity = bound_typevar.identity(self.db); + match self.types.entry(identity) { + Entry::Occupied(mut entry) => { + if filter(identity, ty) { + *entry.get_mut() = UnionType::from_elements(self.db, [*entry.get(), ty]); + } + } + Entry::Vacant(entry) => { + entry.insert(ty); + } + } } + /// Infer type mappings for the specialization based on a given type and its declared type. pub(crate) fn infer( &mut self, formal: Type<'db>, actual: Type<'db>, + ) -> Result<(), SpecializationError<'db>> { + self.infer_filter(formal, actual, |_, _| true) + } + + /// Infer type mappings for the specialization based on a given type and its declared type. + /// + /// The filter predicate is provided with a type variable and the type being mapped to it. Type + /// mappings to which the predicate returns `false` will be ignored. 
+ pub(crate) fn infer_filter( + &mut self, + formal: Type<'db>, + actual: Type<'db>, + filter: impl Fn(BoundTypeVarIdentity<'db>, Type<'db>) -> bool, ) -> Result<(), SpecializationError<'db>> { if formal == actual { return Ok(()); } - // If the actual type is a subtype of the formal type, then return without adding any new - // type mappings. (Note that if the formal type contains any typevars, this check will - // fail, since no non-typevar types are assignable to a typevar. Also note that we are - // checking _subtyping_, not _assignability_, so that we do specialize typevars to dynamic - // argument types; and we have a special case for `Never`, which is a subtype of all types, - // but which we also do want as a specialization candidate.) + // Remove the union elements from `actual` that are not related to `formal`, and vice + // versa. // - // In particular, this handles a case like - // - // ```py - // def f[T](t: T | None): ... - // - // f(None) - // ``` - // - // without specializing `T` to `None`. - if !matches!(formal, Type::ProtocolInstance(_)) - && !actual.is_never() - && actual - .when_subtype_of(self.db, formal, self.inferable) - .is_always_satisfied(self.db) - { - return Ok(()); - } - - // Remove the union elements that are not related to `formal`. - // - // For example, if `formal` is `list[T]` and `actual` is `list[int] | None`, we want to specialize `T` - // to `int`. + // For example, if `formal` is `list[T]` and `actual` is `list[int] | None`, we want to + // specialize `T` to `int`, and so ignore the `None`. let actual = actual.filter_disjoint_elements(self.db, formal, self.inferable); + let formal = formal.filter_disjoint_elements(self.db, actual, self.inferable); match (formal, actual) { // TODO: We haven't implemented a full unification solver yet. 
If typevars appear in @@ -1442,17 +1445,37 @@ impl<'db> SpecializationBuilder<'db> { if remaining_actual.is_never() { return Ok(()); } - self.add_type_mapping(*formal_bound_typevar, remaining_actual); + self.add_type_mapping(*formal_bound_typevar, remaining_actual, filter); } (Type::Union(formal), _) => { - // Second, if the formal is a union, and precisely one union element _is_ a typevar (not - // _contains_ a typevar), then we add a mapping between that typevar and the actual - // type. (Note that we've already handled above the case where the actual is - // assignable to any _non-typevar_ union element.) + // Second, if the formal is a union, and precisely one union element is assignable + // from the actual type, then we don't add any type mapping. This handles a case like + // + // ```py + // def f[T](t: T | None): ... + // + // f(None) + // ``` + // + // without specializing `T` to `None`. + // + // Otherwise, if precisely one union element _is_ a typevar (not _contains_ a + // typevar), then we add a mapping between that typevar and the actual type. + if !actual.is_never() { + let assignable_elements = (formal.elements(self.db).iter()).filter(|ty| { + actual + .when_subtype_of(self.db, **ty, self.inferable) + .is_always_satisfied(self.db) + }); + if assignable_elements.exactly_one().is_ok() { + return Ok(()); + } + } + let bound_typevars = (formal.elements(self.db).iter()).filter_map(|ty| ty.as_typevar()); if let Ok(bound_typevar) = bound_typevars.exactly_one() { - self.add_type_mapping(bound_typevar, actual); + self.add_type_mapping(bound_typevar, actual, filter); } } @@ -1480,15 +1503,23 @@ impl<'db> SpecializationBuilder<'db> { argument: ty, }); } - self.add_type_mapping(bound_typevar, ty); + self.add_type_mapping(bound_typevar, ty, filter); } Some(TypeVarBoundOrConstraints::Constraints(constraints)) => { + // Prefer an exact match first. 
+ for constraint in constraints.elements(self.db) { + if ty == *constraint { + self.add_type_mapping(bound_typevar, ty, filter); + return Ok(()); + } + } + for constraint in constraints.elements(self.db) { if ty .when_assignable_to(self.db, *constraint, self.inferable) .is_always_satisfied(self.db) { - self.add_type_mapping(bound_typevar, *constraint); + self.add_type_mapping(bound_typevar, *constraint, filter); return Ok(()); } } @@ -1498,7 +1529,7 @@ impl<'db> SpecializationBuilder<'db> { }); } _ => { - self.add_type_mapping(bound_typevar, ty); + self.add_type_mapping(bound_typevar, ty, filter); } } } diff --git a/crates/ty_python_semantic/src/types/ide_support.rs b/crates/ty_python_semantic/src/types/ide_support.rs index 02a9330299..475c3017c7 100644 --- a/crates/ty_python_semantic/src/types/ide_support.rs +++ b/crates/ty_python_semantic/src/types/ide_support.rs @@ -10,9 +10,9 @@ use crate::semantic_index::scope::ScopeId; use crate::semantic_index::{ attribute_scopes, global_scope, place_table, semantic_index, use_def_map, }; -use crate::types::CallDunderError; use crate::types::call::{CallArguments, MatchedArgument}; use crate::types::signatures::Signature; +use crate::types::{CallDunderError, UnionType}; use crate::types::{ ClassBase, ClassLiteral, DynamicType, KnownClass, KnownInstanceType, Type, TypeContext, TypeVarBoundOrConstraints, class::CodeGeneratorKind, @@ -128,6 +128,10 @@ impl<'db> AllMembers<'db> { } } + Type::NewTypeInstance(newtype) => { + self.extend_with_type(db, Type::instance(db, newtype.base_class_type(db))); + } + Type::ClassLiteral(class_literal) if class_literal.is_typed_dict(db) => { self.extend_with_type(db, KnownClass::TypedDictFallback.to_class_literal(db)); } @@ -290,7 +294,9 @@ impl<'db> AllMembers<'db> { } Type::ClassLiteral(class) if class.is_protocol(db) => continue, Type::KnownInstance( - KnownInstanceType::TypeVar(_) | KnownInstanceType::TypeAliasType(_), + KnownInstanceType::TypeVar(_) + | KnownInstanceType::TypeAliasType(_) + 
| KnownInstanceType::UnionType(_), ) => continue, Type::Dynamic(DynamicType::TodoTypeAlias) => continue, _ => {} @@ -471,32 +477,17 @@ pub fn all_members<'db>(db: &'db dyn Db, ty: Type<'db>) -> FxHashSet /// Get the primary definition kind for a name expression within a specific file. /// Returns the first definition kind that is reachable for this name in its scope. /// This is useful for IDE features like semantic tokens. -pub fn definition_kind_for_name<'db>( +pub fn definition_for_name<'db>( db: &'db dyn Db, file: File, name: &ast::ExprName, -) -> Option> { - let index = semantic_index(db, file); - let name_str = name.id.as_str(); - - // Get the scope for this name expression - let file_scope = index.expression_scope_id(&ast::ExprRef::from(name)); - - // Get the place table for this scope - let place_table = index.place_table(file_scope); - - // Look up the place by name - let symbol_id = place_table.symbol_id(name_str)?; - - // Get the use-def map and look up definitions for this place - let declarations = index - .use_def_map(file_scope) - .all_reachable_symbol_declarations(symbol_id); +) -> Option> { + let definitions = definitions_for_name(db, file, name); // Find the first valid definition and return its kind - for declaration in declarations { - if let Some(def) = declaration.declaration.definition() { - return Some(def.kind(db).clone()); + for declaration in definitions { + if let Some(def) = declaration.definition() { + return Some(def); } } @@ -611,8 +602,34 @@ pub fn definitions_for_name<'db>( // If we didn't find any definitions in scopes, fallback to builtins if resolved_definitions.is_empty() { let Some(builtins_scope) = builtins_module_scope(db) else { - return Vec::new(); + return resolved_definitions; }; + + // Special cases for `float` and `complex` in type annotation positions. 
+ // We don't know whether we're in a type annotation position, so we'll just ask `Name`'s type, + // which resolves to `int | float` or `int | float | complex` if `float` or `complex` is used in + // a type annotation position and `float` or `complex` otherwise. + // + // https://typing.python.org/en/latest/spec/special-types.html#special-cases-for-float-and-complex + if matches!(name_str, "float" | "complex") + && let Some(union) = name.inferred_type(&SemanticModel::new(db, file)).as_union() + && is_float_or_complex_annotation(db, union, name_str) + { + return union + .elements(db) + .iter() + // Use `rev` so that `complex` and `float` come first. + // This is required for hover to pick up the docstring of `complex` and `float` + // instead of `int` (hover only shows the docstring of the first definition). + .rev() + .filter_map(|ty| ty.as_nominal_instance()) + .map(|instance| { + let definition = instance.class_literal(db).definition(db); + ResolvedDefinition::Definition(definition) + }) + .collect(); + } + find_symbol_in_scope(db, builtins_scope, name_str) .into_iter() .filter(|def| def.is_reexported(db)) @@ -630,6 +647,30 @@ pub fn definitions_for_name<'db>( } } +fn is_float_or_complex_annotation(db: &dyn Db, ty: UnionType, name: &str) -> bool { + let float_or_complex_ty = match name { + "float" => UnionType::from_elements( + db, + [ + KnownClass::Int.to_instance(db), + KnownClass::Float.to_instance(db), + ], + ), + "complex" => UnionType::from_elements( + db, + [ + KnownClass::Int.to_instance(db), + KnownClass::Float.to_instance(db), + KnownClass::Complex.to_instance(db), + ], + ), + _ => return false, + } + .expect_union(); + + ty == float_or_complex_ty +} + /// Returns all resolved definitions for an attribute expression `x.y`. 
/// This function duplicates much of the functionality in the semantic /// analyzer, but it has somewhat different behavior so we've decided @@ -1190,6 +1231,14 @@ mod resolve_definition { } impl<'db> ResolvedDefinition<'db> { + pub(crate) fn definition(&self) -> Option> { + match self { + ResolvedDefinition::Definition(definition) => Some(*definition), + ResolvedDefinition::Module(_) => None, + ResolvedDefinition::FileWithRange(_) => None, + } + } + fn file(&self, db: &'db dyn Db) -> File { match self { ResolvedDefinition::Definition(definition) => definition.file(db), @@ -1279,7 +1328,7 @@ mod resolve_definition { let file = definition.file(db); let module = parsed_module(db, file).load(db); let import_node = import_from_def.import(&module); - let alias = import_from_def.alias(&module); + let name = &import_from_def.alias(&module).name; // For `ImportFrom`, we need to resolve the original imported symbol name // (alias.name), not the local alias (symbol_name) @@ -1287,7 +1336,7 @@ mod resolve_definition { db, file, import_node, - &alias.name, + name, visited, alias_resolution, ) @@ -1619,6 +1668,7 @@ mod resolve_definition { DefinitionKind::TypeAlias(_) | DefinitionKind::Import(_) | DefinitionKind::ImportFrom(_) + | DefinitionKind::ImportFromSubmodule(_) | DefinitionKind::StarImport(_) | DefinitionKind::NamedExpression(_) | DefinitionKind::Assignment(_) diff --git a/crates/ty_python_semantic/src/types/infer.rs b/crates/ty_python_semantic/src/types/infer.rs index 78e91f5883..f8ccfc05ae 100644 --- a/crates/ty_python_semantic/src/types/infer.rs +++ b/crates/ty_python_semantic/src/types/infer.rs @@ -114,17 +114,15 @@ pub(crate) fn infer_definition_types<'db>( fn definition_cycle_recover<'db>( db: &'db dyn Db, _id: salsa::Id, - _last_provisional_value: &DefinitionInference<'db>, - _value: &DefinitionInference<'db>, + last_provisional_value: &DefinitionInference<'db>, + value: DefinitionInference<'db>, count: u32, definition: Definition<'db>, -) -> 
salsa::CycleRecoveryAction> { - if count == ITERATIONS_BEFORE_FALLBACK { - salsa::CycleRecoveryAction::Fallback(DefinitionInference::cycle_fallback( - definition.scope(db), - )) +) -> DefinitionInference<'db> { + if &value == last_provisional_value || count != ITERATIONS_BEFORE_FALLBACK { + value } else { - salsa::CycleRecoveryAction::Iterate + DefinitionInference::cycle_fallback(definition.scope(db)) } } @@ -230,17 +228,15 @@ pub(crate) fn infer_isolated_expression<'db>( fn expression_cycle_recover<'db>( db: &'db dyn Db, _id: salsa::Id, - _last_provisional_value: &ExpressionInference<'db>, - _value: &ExpressionInference<'db>, + last_provisional_value: &ExpressionInference<'db>, + value: ExpressionInference<'db>, count: u32, input: InferExpression<'db>, -) -> salsa::CycleRecoveryAction> { - if count == ITERATIONS_BEFORE_FALLBACK { - salsa::CycleRecoveryAction::Fallback(ExpressionInference::cycle_fallback( - input.expression(db).scope(db), - )) +) -> ExpressionInference<'db> { + if &value == last_provisional_value || count != ITERATIONS_BEFORE_FALLBACK { + value } else { - salsa::CycleRecoveryAction::Iterate + ExpressionInference::cycle_fallback(input.expression(db).scope(db)) } } diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index edf8581bcd..2c445f92ea 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -1,11 +1,13 @@ -use std::{iter, mem}; +use std::iter; use itertools::{Either, Itertools}; use ruff_db::diagnostic::{Annotation, DiagnosticId, Severity}; use ruff_db::files::File; use ruff_db::parsed::ParsedModuleRef; use ruff_python_ast::visitor::{Visitor, walk_expr}; -use ruff_python_ast::{self as ast, AnyNodeRef, ExprContext, PythonVersion}; +use ruff_python_ast::{ + self as ast, AnyNodeRef, ExprContext, HasNodeIndex, NodeIndex, PythonVersion, +}; use ruff_python_stdlib::builtins::version_builtin_was_added; use 
ruff_text_size::{Ranged, TextRange}; use rustc_hash::{FxHashMap, FxHashSet}; @@ -57,21 +59,22 @@ use crate::types::diagnostic::{ DIVISION_BY_ZERO, DUPLICATE_KW_ONLY, INCONSISTENT_MRO, INVALID_ARGUMENT_TYPE, INVALID_ASSIGNMENT, INVALID_ATTRIBUTE_ACCESS, INVALID_BASE, INVALID_DECLARATION, INVALID_GENERIC_CLASS, INVALID_KEY, INVALID_LEGACY_TYPE_VARIABLE, INVALID_METACLASS, - INVALID_NAMED_TUPLE, INVALID_OVERLOAD, INVALID_PARAMETER_DEFAULT, INVALID_PROTOCOL, - INVALID_TYPE_FORM, INVALID_TYPE_GUARD_CALL, INVALID_TYPE_VARIABLE_CONSTRAINTS, - IncompatibleBases, NON_SUBSCRIPTABLE, POSSIBLY_MISSING_IMPLICIT_CALL, POSSIBLY_MISSING_IMPORT, - SUBCLASS_OF_FINAL_CLASS, UNDEFINED_REVEAL, UNRESOLVED_ATTRIBUTE, UNRESOLVED_GLOBAL, - UNRESOLVED_IMPORT, UNRESOLVED_REFERENCE, UNSUPPORTED_OPERATOR, USELESS_OVERLOAD_BODY, + INVALID_NAMED_TUPLE, INVALID_NEWTYPE, INVALID_OVERLOAD, INVALID_PARAMETER_DEFAULT, + INVALID_PARAMSPEC, INVALID_PROTOCOL, INVALID_TYPE_FORM, INVALID_TYPE_GUARD_CALL, + INVALID_TYPE_VARIABLE_CONSTRAINTS, IncompatibleBases, NON_SUBSCRIPTABLE, + POSSIBLY_MISSING_IMPLICIT_CALL, POSSIBLY_MISSING_IMPORT, SUBCLASS_OF_FINAL_CLASS, + UNDEFINED_REVEAL, UNRESOLVED_ATTRIBUTE, UNRESOLVED_GLOBAL, UNRESOLVED_IMPORT, + UNRESOLVED_REFERENCE, UNSUPPORTED_OPERATOR, USELESS_OVERLOAD_BODY, hint_if_stdlib_attribute_exists_on_other_versions, hint_if_stdlib_submodule_exists_on_other_versions, report_attempted_protocol_instantiation, report_bad_dunder_set_call, report_cannot_pop_required_field_on_typed_dict, report_duplicate_bases, report_implicit_return_type, report_index_out_of_bounds, - report_instance_layout_conflict, report_invalid_assignment, - report_invalid_attribute_assignment, report_invalid_exception_caught, - report_invalid_exception_cause, report_invalid_exception_raised, - report_invalid_generator_function_return_type, report_invalid_key_on_typed_dict, - report_invalid_or_unsupported_base, report_invalid_return_type, - report_invalid_type_checking_constant, + 
report_instance_layout_conflict, report_invalid_arguments_to_annotated, + report_invalid_assignment, report_invalid_attribute_assignment, + report_invalid_exception_caught, report_invalid_exception_cause, + report_invalid_exception_raised, report_invalid_generator_function_return_type, + report_invalid_key_on_typed_dict, report_invalid_or_unsupported_base, + report_invalid_return_type, report_invalid_type_checking_constant, report_namedtuple_field_without_default_after_field_with_default, report_non_subscriptable, report_possibly_missing_attribute, report_possibly_unresolved_reference, report_rebound_typevar, report_slice_step_size_zero, @@ -87,6 +90,7 @@ use crate::types::generics::{ use crate::types::infer::nearest_enclosing_function; use crate::types::instance::SliceLiteral; use crate::types::mro::MroErrorKind; +use crate::types::newtype::NewType; use crate::types::signatures::Signature; use crate::types::subclass_of::SubclassOfInner; use crate::types::tuple::{Tuple, TupleLength, TupleSpec, TupleType}; @@ -97,11 +101,11 @@ use crate::types::typed_dict::{ use crate::types::visitor::any_over_type; use crate::types::{ CallDunderError, CallableBinding, CallableType, ClassLiteral, ClassType, DataclassParams, - DynamicType, IntersectionBuilder, IntersectionType, KnownClass, KnownInstanceType, - MemberLookupPolicy, MetaclassCandidate, PEP695TypeAliasType, Parameter, ParameterForm, - Parameters, SpecialFormType, SubclassOfType, TrackedConstraintSet, Truthiness, Type, - TypeAliasType, TypeAndQualifiers, TypeContext, TypeQualifiers, - TypeVarBoundOrConstraintsEvaluation, TypeVarDefaultEvaluation, TypeVarIdentity, + DynamicType, InferredAs, InternedType, InternedTypes, IntersectionBuilder, IntersectionType, + KnownClass, KnownInstanceType, LintDiagnosticGuard, MemberLookupPolicy, MetaclassCandidate, + PEP695TypeAliasType, Parameter, ParameterForm, Parameters, SpecialFormType, SubclassOfType, + TrackedConstraintSet, Truthiness, Type, TypeAliasType, TypeAndQualifiers, 
TypeContext, + TypeQualifiers, TypeVarBoundOrConstraintsEvaluation, TypeVarDefaultEvaluation, TypeVarIdentity, TypeVarInstance, TypeVarKind, TypeVarVariance, TypedDictType, UnionBuilder, UnionType, binding_type, todo_type, }; @@ -208,6 +212,7 @@ const NUM_FIELD_SPECIFIERS_INLINE: usize = 1; /// don't infer its types more than once. pub(super) struct TypeInferenceBuilder<'db, 'ast> { context: InferContext<'db, 'ast>, + index: &'db SemanticIndex<'db>, region: InferenceRegion<'db>, @@ -346,16 +351,19 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { assert_eq!(self.scope, inference.scope); self.expressions.extend(inference.expressions.iter()); - self.declarations.extend(inference.declarations()); + self.declarations + .extend(inference.declarations(), self.multi_inference_state); if !matches!(self.region, InferenceRegion::Scope(..)) { - self.bindings.extend(inference.bindings()); + self.bindings + .extend(inference.bindings(), self.multi_inference_state); } if let Some(extra) = &inference.extra { self.extend_cycle_recovery(extra.cycle_recovery); self.context.extend(&extra.diagnostics); - self.deferred.extend(extra.deferred.iter().copied()); + self.deferred + .extend(extra.deferred.iter().copied(), self.multi_inference_state); } } @@ -374,7 +382,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { self.extend_cycle_recovery(extra.cycle_recovery); if !matches!(self.region, InferenceRegion::Scope(..)) { - self.bindings.extend(extra.bindings.iter().copied()); + self.bindings + .extend(extra.bindings.iter().copied(), self.multi_inference_state); } } } @@ -395,6 +404,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { self.scope } + /// Set the multi-inference state, returning the previous value. + fn set_multi_inference_state(&mut self, state: MultiInferenceState) -> MultiInferenceState { + std::mem::replace(&mut self.multi_inference_state, state) + } + /// Are we currently inferring types in file with deferred types? 
/// This is true for stub files, for files with `__future__.annotations`, and /// by default for all source files in Python 3.14 and later. @@ -1200,6 +1214,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { definition, ); } + DefinitionKind::ImportFromSubmodule(import_from) => { + self.infer_import_from_submodule_definition( + import_from.import(self.module()), + definition, + ); + } DefinitionKind::StarImport(import) => { self.infer_import_from_definition( import.import(self.module()), @@ -1294,6 +1314,9 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { DefinitionKind::TypeVar(typevar) => { self.infer_typevar_deferred(typevar.node(self.module())); } + DefinitionKind::ParamSpec(paramspec) => { + self.infer_paramspec_deferred(paramspec.node(self.module())); + } DefinitionKind::Assignment(assignment) => { self.infer_assignment_deferred(assignment.value(self.module())); } @@ -1631,7 +1654,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } } - self.bindings.insert(binding, bound_ty); + self.bindings + .insert(binding, bound_ty, self.multi_inference_state); inferred_ty } @@ -1698,7 +1722,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } TypeAndQualifiers::declared(Type::unknown()) }; - self.declarations.insert(declaration, ty); + self.declarations + .insert(declaration, ty, self.multi_inference_state); } fn add_declaration_with_binding( @@ -1772,8 +1797,10 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } } }; - self.declarations.insert(definition, declared_ty); - self.bindings.insert(definition, inferred_ty); + self.declarations + .insert(definition, declared_ty, self.multi_inference_state); + self.bindings + .insert(definition, inferred_ty, self.multi_inference_state); } fn add_unknown_declaration_with_binding( @@ -2192,7 +2219,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { // `infer_function_type_params`, rather than here. 
if type_params.is_none() { if self.defer_annotations() { - self.deferred.insert(definition); + self.deferred.insert(definition, self.multi_inference_state); } else { let previous_typevar_binding_context = self.typevar_binding_context.replace(definition); @@ -2423,15 +2450,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { /// /// The declared type is the annotated type, if any, or `Unknown`. /// - /// The inferred type is the annotated type, unioned with the type of the default value, if - /// any. If both types are fully static, this union is a no-op (it should simplify to just the - /// annotated type.) But in a case like `f(x=None)` with no annotated type, we want to infer - /// the type `Unknown | None` for `x`, not just `Unknown`, so that we can error on usage of `x` - /// that would not be valid for `None`. - /// - /// If the default-value type is not assignable to the declared (annotated) type, we ignore the - /// default-value type and just infer the annotated type; this is the same way we handle - /// assignments, and allows an explicit annotation to override a bad inference. + /// The inferred type is the annotated type, if any. If there is no annotation, it is the union + /// of `Unknown` and the type of the default value, if any. 
/// /// Parameter definitions are odd in that they define a symbol in the function-body scope, so /// the Definition belongs to the function body scope, but the expressions (annotation and @@ -2460,23 +2480,17 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { .map(|default| self.file_expression_type(default)); if let Some(annotation) = parameter.annotation.as_ref() { let declared_ty = self.file_expression_type(annotation); - let declared_and_inferred_ty = if let Some(default_ty) = default_ty { - if default_ty.is_assignable_to(self.db(), declared_ty) { - DeclaredAndInferredType::MightBeDifferent { - declared_ty: TypeAndQualifiers::declared(declared_ty), - inferred_ty: UnionType::from_elements(self.db(), [declared_ty, default_ty]), - } - } else if (self.in_stub() - || self.in_function_overload_or_abstractmethod() - || self - .class_context_of_current_method() - .is_some_and(|class| class.is_protocol(self.db()))) - && default - .as_ref() - .is_some_and(|d| d.is_ellipsis_literal_expr()) + if let Some(default_ty) = default_ty { + if !default_ty.is_assignable_to(self.db(), declared_ty) + && !((self.in_stub() + || self.in_function_overload_or_abstractmethod() + || self + .class_context_of_current_method() + .is_some_and(|class| class.is_protocol(self.db()))) + && default + .as_ref() + .is_some_and(|d| d.is_ellipsis_literal_expr())) { - DeclaredAndInferredType::are_the_same_type(declared_ty) - } else { if let Some(builder) = self .context .report_lint(&INVALID_PARAMETER_DEFAULT, parameter_with_default) @@ -2488,15 +2502,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { declared_ty.display(self.db()) )); } - DeclaredAndInferredType::are_the_same_type(declared_ty) } - } else { - DeclaredAndInferredType::are_the_same_type(declared_ty) - }; + } self.add_declaration_with_binding( parameter.into(), definition, - &declared_and_inferred_ty, + &DeclaredAndInferredType::are_the_same_type(declared_ty), ); } else { let ty = if let Some(default_ty) = default_ty { @@ -2766,7 
+2777,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { // Inference of bases deferred in stubs, or if any are string literals. if self.in_stub() || class_node.bases().iter().any(contains_string_literal) { - self.deferred.insert(definition); + self.deferred.insert(definition, self.multi_inference_state); } else { let previous_typevar_binding_context = self.typevar_binding_context.replace(definition); @@ -2924,12 +2935,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { for item in items { let target = item.optional_vars.as_deref(); if let Some(target) = target { - self.infer_target(target, &item.context_expr, |builder| { + self.infer_target(target, &item.context_expr, |builder, tcx| { // TODO: `infer_with_statement_definition` reports a diagnostic if `ctx_manager_ty` isn't a context manager // but only if the target is a name. We should report a diagnostic here if the target isn't a name: // `with not_context_manager as a.x: ... builder - .infer_standalone_expression(&item.context_expr, TypeContext::default()) + .infer_standalone_expression(&item.context_expr, tcx) .enter(builder.db()) }); } else { @@ -3136,7 +3147,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { None => None, }; if bound_or_constraint.is_some() || default.is_some() { - self.deferred.insert(definition); + self.deferred.insert(definition, self.multi_inference_state); } let identity = TypeVarIdentity::new(self.db(), &name.id, Some(definition), TypeVarKind::Pep695); @@ -3196,18 +3207,120 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { let ast::TypeParamParamSpec { range: _, node_index: _, - name: _, + name, default, } = node; - self.infer_optional_expression(default.as_deref(), TypeContext::default()); - let pep_695_todo = Type::Dynamic(DynamicType::TodoPEP695ParamSpec); + if default.is_some() { + self.deferred.insert(definition, self.multi_inference_state); + } + let identity = TypeVarIdentity::new( + self.db(), + &name.id, + Some(definition), + TypeVarKind::Pep695ParamSpec, + ); 
+ let ty = Type::KnownInstance(KnownInstanceType::TypeVar(TypeVarInstance::new( + self.db(), + identity, + None, // ParamSpec, when declared using PEP 695 syntax, has no bounds or constraints + None, // explicit_variance + default.as_deref().map(|_| TypeVarDefaultEvaluation::Lazy), + ))); self.add_declaration_with_binding( node.into(), definition, - &DeclaredAndInferredType::are_the_same_type(pep_695_todo), + &DeclaredAndInferredType::are_the_same_type(ty), ); } + fn infer_paramspec_deferred(&mut self, node: &ast::TypeParamParamSpec) { + let ast::TypeParamParamSpec { + range: _, + node_index: _, + name: _, + default: Some(default), + } = node + else { + return; + }; + let previous_deferred_state = + std::mem::replace(&mut self.deferred_state, DeferredExpressionState::Deferred); + let default_ty = self.infer_paramspec_default(default); + self.store_expression_type(default, default_ty); + self.deferred_state = previous_deferred_state; + } + + fn infer_paramspec_default(&mut self, default: &ast::Expr) -> Type<'db> { + // This is the same logic as `TypeInferenceBuilder::infer_callable_parameter_types` except + // for the subscript branch which is required for `Concatenate` but that cannot be + // specified in this context. + match default { + ast::Expr::EllipsisLiteral(_) => { + CallableType::single(self.db(), Signature::new(Parameters::gradual_form(), None)) + } + ast::Expr::List(ast::ExprList { elts, .. }) => { + let mut parameter_types = Vec::with_capacity(elts.len()); + + // Whether to infer `Todo` for the parameters + let mut return_todo = false; + + for param in elts { + let param_type = self.infer_type_expression(param); + // This is similar to what we currently do for inferring tuple type expression. + // We currently infer `Todo` for the parameters to avoid invalid diagnostics + // when trying to check for assignability or any other relation. For example, + // `*tuple[int, str]`, `Unpack[]`, etc. are not yet supported. 
+ return_todo |= param_type.is_todo() + && matches!(param, ast::Expr::Starred(_) | ast::Expr::Subscript(_)); + parameter_types.push(param_type); + } + + let parameters = if return_todo { + // TODO: `Unpack` + Parameters::todo() + } else { + Parameters::new(parameter_types.iter().map(|param_type| { + Parameter::positional_only(None).with_annotated_type(*param_type) + })) + }; + + CallableType::single(self.db(), Signature::new(parameters, None)) + } + ast::Expr::Name(name) => { + let name_ty = self.infer_name_load(name); + let is_paramspec = match name_ty { + Type::KnownInstance(known_instance) => { + known_instance.class(self.db()) == KnownClass::ParamSpec + } + Type::NominalInstance(nominal) => { + nominal.has_known_class(self.db(), KnownClass::ParamSpec) + } + _ => false, + }; + if is_paramspec { + name_ty + } else { + if let Some(builder) = self.context.report_lint(&INVALID_PARAMSPEC, default) { + builder.into_diagnostic( + "The default value to `ParamSpec` must be either a list of types, \ + `ParamSpec`, or `...`", + ); + } + Type::unknown() + } + } + _ => { + if let Some(builder) = self.context.report_lint(&INVALID_PARAMSPEC, default) { + builder.into_diagnostic( + "The default value to `ParamSpec` must be either a list of types, \ + `ParamSpec`, or `...`", + ); + } + Type::unknown() + } + } + } + fn infer_typevartuple_definition( &mut self, node: &ast::TypeParamTypeVarTuple, @@ -3393,8 +3506,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } = assignment; for target in targets { - self.infer_target(target, value, |builder| { - builder.infer_standalone_expression(value, TypeContext::default()) + self.infer_target(target, value, |builder, tcx| { + builder.infer_standalone_expression(value, tcx) }); } } @@ -3410,151 +3523,320 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { /// `target`. 
fn infer_target(&mut self, target: &ast::Expr, value: &ast::Expr, infer_value_expr: F) where - F: Fn(&mut Self) -> Type<'db>, + F: Fn(&mut Self, TypeContext<'db>) -> Type<'db>, { - let assigned_ty = match target { - ast::Expr::Name(_) => None, - _ => Some(infer_value_expr(self)), - }; - self.infer_target_impl(target, value, assigned_ty); + match target { + ast::Expr::Name(_) => { + self.infer_target_impl(target, value, None); + } + + _ => self.infer_target_impl( + target, + value, + Some(&|builder, tcx| infer_value_expr(builder, tcx)), + ), + } } - /// Make sure that the subscript assignment `obj[slice] = value` is valid. + /// Validate a subscript assignment of the form `object[key] = rhs_value`. fn validate_subscript_assignment( &mut self, target: &ast::ExprSubscript, - rhs: &ast::Expr, - assigned_ty: Type<'db>, + rhs_value: &ast::Expr, + rhs_value_ty: Type<'db>, ) -> bool { let ast::ExprSubscript { range: _, node_index: _, - value, + value: object, slice, ctx: _, } = target; - let value_ty = self.infer_expression(value, TypeContext::default()); + let object_ty = self.infer_expression(object, TypeContext::default()); let slice_ty = self.infer_expression(slice, TypeContext::default()); + self.validate_subscript_assignment_impl( + object.as_ref(), + None, + object_ty, + slice.as_ref(), + slice_ty, + rhs_value, + rhs_value_ty, + true, + ) + } + + #[expect(clippy::too_many_arguments)] + fn validate_subscript_assignment_impl( + &self, + object_node: &'ast ast::Expr, + full_object_ty: Option>, + object_ty: Type<'db>, + slice_node: &'ast ast::Expr, + slice_ty: Type<'db>, + rhs_value_node: &'ast ast::Expr, + rhs_value_ty: Type<'db>, + emit_diagnostic: bool, + ) -> bool { + /// Given a string literal or a union of string literals, return an iterator over the contained + /// strings, or `None`, if the type is neither. 
+ fn key_literals<'db>( + db: &'db dyn Db, + slice_ty: Type<'db>, + ) -> Option + 'db> { + if let Some(literal) = slice_ty.as_string_literal() { + Some(Either::Left(std::iter::once(literal.value(db)))) + } else { + slice_ty.as_union().map(|union| { + Either::Right( + union + .elements(db) + .iter() + .filter_map(|ty| ty.as_string_literal().map(|lit| lit.value(db))), + ) + }) + } + } + let db = self.db(); - let context = &self.context; - match value_ty.try_call_dunder( - db, - "__setitem__", - CallArguments::positional([slice_ty, assigned_ty]), - TypeContext::default(), - ) { - Ok(_) => true, - Err(err) => match err { - CallDunderError::PossiblyUnbound { .. } => { - if let Some(builder) = - context.report_lint(&POSSIBLY_MISSING_IMPLICIT_CALL, &**value) - { - builder.into_diagnostic(format_args!( - "Method `__setitem__` of type `{}` may be missing", - value_ty.display(db), - )); - } - false + let attach_original_type_info = |mut diagnostic: LintDiagnosticGuard| { + if let Some(full_object_ty) = full_object_ty { + diagnostic.info(format_args!( + "The full type of the subscripted object is `{}`", + full_object_ty.display(db) + )); + } + }; + + match object_ty { + Type::Union(union) => { + // Note that we use a loop here instead of .all(…) to avoid short-circuiting. + // We need to keep iterating to emit all diagnostics. 
+ let mut valid = true; + for element_ty in union.elements(db) { + valid &= self.validate_subscript_assignment_impl( + object_node, + full_object_ty.or(Some(object_ty)), + *element_ty, + slice_node, + slice_ty, + rhs_value_node, + rhs_value_ty, + emit_diagnostic, + ); } - CallDunderError::CallError(call_error_kind, bindings) => { - match call_error_kind { - CallErrorKind::NotCallable => { - if let Some(builder) = context.report_lint(&CALL_NON_CALLABLE, &**value) - { - builder.into_diagnostic(format_args!( - "Method `__setitem__` of type `{}` is not callable \ - on object of type `{}`", - bindings.callable_type().display(db), - value_ty.display(db), - )); - } - } - CallErrorKind::BindingError => { - let assigned_d = assigned_ty.display(db); - let value_d = value_ty.display(db); + valid + } - if let Some(typed_dict) = value_ty.as_typed_dict() { - if let Some(key) = slice_ty.as_string_literal() { - let key = key.value(self.db()); - validate_typed_dict_key_assignment( - &self.context, - typed_dict, - key, - assigned_ty, - value.as_ref(), - slice.as_ref(), - rhs, - TypedDictAssignmentKind::Subscript, - ); - } else { - // Check if the key has a valid type. We only allow string literals, a union of string literals, - // or a dynamic type like `Any`. We can do this by checking assignability to `LiteralString`, - // but we need to exclude `LiteralString` itself. This check would technically allow weird key - // types like `LiteralString & Any` to pass, but it does not need to be perfect. We would just - // fail to provide the "Only string literals are allowed" hint in that case. 
- if slice_ty.is_assignable_to(db, Type::LiteralString) - && !slice_ty.is_equivalent_to(db, Type::LiteralString) + Type::Intersection(intersection) => { + let check_positive_elements = |emit_diagnostic_and_short_circuit| { + let mut valid = false; + for element_ty in intersection.positive(db) { + valid |= self.validate_subscript_assignment_impl( + object_node, + full_object_ty.or(Some(object_ty)), + *element_ty, + slice_node, + slice_ty, + rhs_value_node, + rhs_value_ty, + emit_diagnostic_and_short_circuit, + ); + + if !valid && emit_diagnostic_and_short_circuit { + break; + } + } + + valid + }; + + // Perform an initial check of all elements. If the assignment is valid + // for at least one element, we do not emit any diagnostics. Otherwise, + // we re-run the check and emit a diagnostic on the first failing element. + let valid = check_positive_elements(false); + + if !valid { + check_positive_elements(true); + } + + valid + } + + Type::TypedDict(typed_dict) => { + // As an optimization, prevent calling `__setitem__` on (unions of) large `TypedDict`s, and + // validate the assignment ourselves. This also allows us to emit better diagnostics. + + let mut valid = true; + let Some(keys) = key_literals(db, slice_ty) else { + // Check if the key has a valid type. We only allow string literals, a union of string literals, + // or a dynamic type like `Any`. We can do this by checking assignability to `LiteralString`, + // but we need to exclude `LiteralString` itself. This check would technically allow weird key + // types like `LiteralString & Any` to pass, but it does not need to be perfect. We would just + // fail to provide the "Only string literals are allowed" hint in that case. 
+ + if slice_ty.is_dynamic() { + return true; + } + + let assigned_d = rhs_value_ty.display(db); + let value_d = object_ty.display(db); + + if slice_ty.is_assignable_to(db, Type::LiteralString) + && !slice_ty.is_equivalent_to(db, Type::LiteralString) + { + if let Some(builder) = + self.context.report_lint(&INVALID_ASSIGNMENT, slice_node) + { + let diagnostic = builder.into_diagnostic(format_args!( + "Cannot assign value of type `{assigned_d}` to key of type `{}` on TypedDict `{value_d}`", + slice_ty.display(db) + )); + attach_original_type_info(diagnostic); + } + } else { + if let Some(builder) = self.context.report_lint(&INVALID_KEY, slice_node) { + let diagnostic = builder.into_diagnostic(format_args!( + "Cannot access `{value_d}` with a key of type `{}`. Only string literals are allowed as keys on TypedDicts.", + slice_ty.display(db) + )); + attach_original_type_info(diagnostic); + } + } + + return false; + }; + + for key in keys { + valid &= validate_typed_dict_key_assignment( + &self.context, + typed_dict, + full_object_ty, + key, + rhs_value_ty, + object_node, + slice_node, + rhs_value_node, + TypedDictAssignmentKind::Subscript, + emit_diagnostic, + ); + } + + valid + } + + _ => { + match object_ty.try_call_dunder( + db, + "__setitem__", + CallArguments::positional([slice_ty, rhs_value_ty]), + TypeContext::default(), + ) { + Ok(_) => true, + Err(err) => match err { + CallDunderError::PossiblyUnbound { .. 
} => { + if emit_diagnostic + && let Some(builder) = self + .context + .report_lint(&POSSIBLY_MISSING_IMPLICIT_CALL, rhs_value_node) + { + let diagnostic = builder.into_diagnostic(format_args!( + "Method `__setitem__` of type `{}` may be missing", + object_ty.display(db), + )); + attach_original_type_info(diagnostic); + } + false + } + CallDunderError::CallError(call_error_kind, bindings) => { + match call_error_kind { + CallErrorKind::NotCallable => { + if emit_diagnostic + && let Some(builder) = self + .context + .report_lint(&CALL_NON_CALLABLE, object_node) { - if let Some(builder) = - context.report_lint(&INVALID_ASSIGNMENT, &**slice) - { - builder.into_diagnostic(format_args!( - "Cannot assign value of type `{assigned_d}` to key of type `{}` on TypedDict `{value_d}`", - slice_ty.display(db) - )); + let diagnostic = builder.into_diagnostic(format_args!( + "Method `__setitem__` of type `{}` is not callable \ + on object of type `{}`", + bindings.callable_type().display(db), + object_ty.display(db), + )); + attach_original_type_info(diagnostic); + } + } + CallErrorKind::BindingError => { + if let Some(typed_dict) = object_ty.as_typed_dict() { + if let Some(key) = slice_ty.as_string_literal() { + let key = key.value(db); + validate_typed_dict_key_assignment( + &self.context, + typed_dict, + full_object_ty, + key, + rhs_value_ty, + object_node, + slice_node, + rhs_value_node, + TypedDictAssignmentKind::Subscript, + true, + ); } } else { - if let Some(builder) = - context.report_lint(&INVALID_KEY, &**slice) + if emit_diagnostic + && let Some(builder) = self + .context + .report_lint(&INVALID_ASSIGNMENT, object_node) { - builder.into_diagnostic(format_args!( - "Cannot access `{value_d}` with a key of type `{}`. 
Only string literals are allowed as keys on TypedDicts.", - slice_ty.display(db) + let assigned_d = rhs_value_ty.display(db); + let value_d = object_ty.display(db); + + let diagnostic = builder.into_diagnostic(format_args!( + "Method `__setitem__` of type `{}` cannot be called with \ + a key of type `{}` and a value of type `{assigned_d}` on object of type `{value_d}`", + bindings.callable_type().display(db), + slice_ty.display(db), )); + attach_original_type_info(diagnostic); } } } - } else { - if let Some(builder) = - context.report_lint(&INVALID_ASSIGNMENT, &**value) - { - builder.into_diagnostic(format_args!( - "Method `__setitem__` of type `{}` cannot be called with \ - a key of type `{}` and a value of type `{assigned_d}` on object of type `{value_d}`", - bindings.callable_type().display(db), - slice_ty.display(db), - )); + CallErrorKind::PossiblyNotCallable => { + if emit_diagnostic + && let Some(builder) = self + .context + .report_lint(&CALL_NON_CALLABLE, object_node) + { + let diagnostic = builder.into_diagnostic(format_args!( + "Method `__setitem__` of type `{}` may not be callable on object of type `{}`", + bindings.callable_type().display(db), + object_ty.display(db), + )); + attach_original_type_info(diagnostic); + } } } + false } - CallErrorKind::PossiblyNotCallable => { - if let Some(builder) = context.report_lint(&CALL_NON_CALLABLE, &**value) + CallDunderError::MethodNotAvailable => { + if emit_diagnostic + && let Some(builder) = + self.context.report_lint(&INVALID_ASSIGNMENT, object_node) { - builder.into_diagnostic(format_args!( - "Method `__setitem__` of type `{}` may not be \ - callable on object of type `{}`", - bindings.callable_type().display(db), - value_ty.display(db), + let diagnostic = builder.into_diagnostic(format_args!( + "Cannot assign to a subscript on an object of type `{}` with no `__setitem__` method", + object_ty.display(db), )); + attach_original_type_info(diagnostic); } + false } - } - false + }, } - 
CallDunderError::MethodNotAvailable => { - if let Some(builder) = context.report_lint(&INVALID_ASSIGNMENT, &**value) { - builder.into_diagnostic(format_args!( - "Cannot assign to object of type `{}` with no `__setitem__` method", - value_ty.display(db), - )); - } - - false - } - }, + } } } @@ -3568,47 +3850,147 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { target: &ast::ExprAttribute, object_ty: Type<'db>, attribute: &str, - value_ty: Type<'db>, + infer_value_ty: &mut dyn FnMut(&mut Self, TypeContext<'db>) -> Type<'db>, emit_diagnostics: bool, ) -> bool { let db = self.db(); - let ensure_assignable_to = |attr_ty| -> bool { - let assignable = value_ty.is_assignable_to(db, attr_ty); - if !assignable && emit_diagnostics { - report_invalid_attribute_assignment( - &self.context, - target.into(), - attr_ty, - value_ty, - attribute, - ); + let mut first_tcx = None; + + // A wrapper over `infer_value_ty` that allows inferring the value type multiple times + // during attribute resolution. + let pure_infer_value_ty = infer_value_ty; + let mut infer_value_ty = |builder: &mut Self, tcx: TypeContext<'db>| -> Type<'db> { + // Overwrite the previously inferred value, preferring later inferences, which are + // likely more precise. Note that we still ensure each inference is assignable to + // its declared type, so this mainly affects the IDE hover type. + let prev_multi_inference_state = + builder.set_multi_inference_state(MultiInferenceState::Overwrite); + + // If we are inferring the argument multiple times, silence diagnostics to avoid duplicated warnings. + let was_in_multi_inference = if let Some(first_tcx) = first_tcx { + // The first time we infer an argument during multi-inference must be without type context, + // to avoid leaking diagnostics for bidirectional inference attempts. 
+ debug_assert_eq!(first_tcx, TypeContext::default()); + + builder.context.set_multi_inference(true) + } else { + builder.context.is_in_multi_inference() + }; + + let value_ty = pure_infer_value_ty(builder, tcx); + + // Reset the multi-inference state. + first_tcx.get_or_insert(tcx); + builder.multi_inference_state = prev_multi_inference_state; + builder.context.set_multi_inference(was_in_multi_inference); + + value_ty + }; + + // This closure should only be called if `value_ty` was inferred with `attr_ty` as type context. + let ensure_assignable_to = + |builder: &Self, value_ty: Type<'db>, attr_ty: Type<'db>| -> bool { + let assignable = value_ty.is_assignable_to(db, attr_ty); + if !assignable && emit_diagnostics { + report_invalid_attribute_assignment( + &builder.context, + target.into(), + attr_ty, + value_ty, + attribute, + ); + } + assignable + }; + + let emit_invalid_final = |builder: &Self| { + if emit_diagnostics { + if let Some(builder) = builder.context.report_lint(&INVALID_ASSIGNMENT, target) { + builder.into_diagnostic(format_args!( + "Cannot assign to final attribute `{attribute}` on type `{}`", + object_ty.display(db) + )); + } } - assignable }; // Return true (and emit a diagnostic) if this is an invalid assignment to a `Final` attribute. - let invalid_assignment_to_final = |qualifiers: TypeQualifiers| -> bool { - if qualifiers.contains(TypeQualifiers::FINAL) { - if emit_diagnostics { - if let Some(builder) = self.context.report_lint(&INVALID_ASSIGNMENT, target) { - builder.into_diagnostic(format_args!( - "Cannot assign to final attribute `{attribute}` \ - on type `{}`", - object_ty.display(db) - )); + // Per PEP 591 and the typing conformance suite, Final instance attributes can be assigned + // in __init__ methods. Multiple assignments within __init__ are allowed (matching mypy + // and pyright behavior), as long as the attribute doesn't have a class-level value. 
+ let invalid_assignment_to_final = |builder: &Self, qualifiers: TypeQualifiers| -> bool { + // Check if it's a Final attribute + if !qualifiers.contains(TypeQualifiers::FINAL) { + return false; + } + + // Check if we're in an __init__ method (where Final attributes can be initialized). + let is_in_init = builder + .current_function_definition() + .is_some_and(|func| func.name.id == "__init__"); + + // Not in __init__ - always disallow + if !is_in_init { + emit_invalid_final(builder); + return true; + } + + // We're in __init__ - verify we're in a method of the class being mutated + let Some(class_ty) = builder.class_context_of_current_method() else { + // Not a method (standalone function named __init__) + emit_invalid_final(builder); + return true; + }; + + // Check that object_ty is an instance of the class we're in + if !object_ty.is_subtype_of(builder.db(), Type::instance(builder.db(), class_ty)) { + // Assigning to a different class's Final attribute + emit_invalid_final(builder); + return true; + } + + // Check if class-level attribute already has a value + { + let class_definition = class_ty.class_literal(db).0; + let class_scope_id = class_definition.body_scope(db).file_scope_id(db); + let place_table = builder.index.place_table(class_scope_id); + + if let Some(symbol) = place_table.symbol_by_name(attribute) { + if symbol.is_bound() { + if emit_diagnostics { + if let Some(diag_builder) = + builder.context.report_lint(&INVALID_ASSIGNMENT, target) + { + diag_builder.into_diagnostic(format_args!( + "Cannot assign to final attribute `{attribute}` in `__init__` \ + because it already has a value at class level" + )); + } + } + return true; } } - true - } else { - false } + + // In __init__ and no class-level value - allow + false }; match object_ty { Type::Union(union) => { + // First infer the value without type context, and then again for each union element. 
+ let value_ty = infer_value_ty(self, TypeContext::default()); + if union.elements(self.db()).iter().all(|elem| { - self.validate_attribute_assignment(target, *elem, attribute, value_ty, false) + self.validate_attribute_assignment( + target, + *elem, + attribute, + // Note that `infer_value_ty` silences diagnostics after the first inference. + &mut infer_value_ty, + false, + ) }) { true } else { @@ -3631,9 +4013,19 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } Type::Intersection(intersection) => { + // First infer the value without type context, and then again for each union element. + let value_ty = infer_value_ty(self, TypeContext::default()); + // TODO: Handle negative intersection elements if intersection.positive(db).iter().any(|elem| { - self.validate_attribute_assignment(target, *elem, attribute, value_ty, false) + self.validate_attribute_assignment( + target, + *elem, + attribute, + // Note that `infer_value_ty` silences diagnostics after the first inference. + &mut infer_value_ty, + false, + ) }) { true } else { @@ -3657,12 +4049,14 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { target, alias.value_type(self.db()), attribute, - value_ty, + pure_infer_value_ty, emit_diagnostics, ), // Super instances do not allow attribute assignment Type::NominalInstance(instance) if instance.has_known_class(db, KnownClass::Super) => { + infer_value_ty(self, TypeContext::default()); + if emit_diagnostics { if let Some(builder) = self.context.report_lint(&INVALID_ASSIGNMENT, target) { builder.into_diagnostic(format_args!( @@ -3674,6 +4068,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { false } Type::BoundSuper(_) => { + infer_value_ty(self, TypeContext::default()); + if emit_diagnostics { if let Some(builder) = self.context.report_lint(&INVALID_ASSIGNMENT, target) { builder.into_diagnostic(format_args!( @@ -3685,7 +4081,10 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { false } - Type::Dynamic(..) | Type::Never => true, + Type::Dynamic(..) 
| Type::Never => { + infer_value_ty(self, TypeContext::default()); + true + } Type::NominalInstance(..) | Type::ProtocolInstance(_) @@ -3709,7 +4108,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { | Type::AlwaysTruthy | Type::AlwaysFalsy | Type::TypeIs(_) - | Type::TypedDict(_) => { + | Type::TypedDict(_) + | Type::NewTypeInstance(_) => { + // TODO: We could use the annotated parameter type of `__setattr__` as type context here. + // However, we would still have to perform the first inference without type context. + let value_ty = infer_value_ty(self, TypeContext::default()); + // First, try to call the `__setattr__` dunder method. If this is present/defined, overrides // assigning the attributed by the normal mechanism. let setattr_dunder_call_result = object_ty.try_call_dunder_with_policy( @@ -3745,7 +4149,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { let msg = if !member_exists { format!( - "Can not assign to unresolved attribute `{attribute}` on type `{}`", + "Cannot assign to unresolved attribute `{attribute}` on type `{}`", object_ty.display(db) ) } else if is_setattr_synthesized { @@ -3781,7 +4185,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { self.context.report_lint(&UNRESOLVED_ATTRIBUTE, target) { builder.into_diagnostic(format_args!( - "Can not assign object of type `{}` to attribute \ + "Cannot assign object of type `{}` to attribute \ `{attribute}` on type `{}` with \ custom `__setattr__` method.", value_ty.display(db), @@ -3811,7 +4215,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { place: Place::Defined(meta_attr_ty, _, meta_attr_boundness), qualifiers, } => { - if invalid_assignment_to_final(qualifiers) { + if invalid_assignment_to_final(self, qualifiers) { return false; } @@ -3819,6 +4223,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { if let Place::Defined(meta_dunder_set, _, _) = meta_attr_ty.class_member(db, "__set__".into()).place { + // TODO: We could use the annotated parameter type of `__set__` as + // 
type context here. let dunder_set_result = meta_dunder_set.try_call( db, &CallArguments::positional([ @@ -3844,7 +4250,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { dunder_set_result.is_ok() } else { - ensure_assignable_to(meta_attr_ty) + let value_ty = infer_value_ty( + self, + TypeContext::new(Some(meta_attr_ty)), + ); + + ensure_assignable_to(self, value_ty, meta_attr_ty) }; let assignable_to_instance_attribute = if meta_attr_boundness @@ -3857,12 +4268,16 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } = object_ty.instance_member(db, attribute) { - if invalid_assignment_to_final(qualifiers) { + let value_ty = infer_value_ty( + self, + TypeContext::new(Some(instance_attr_ty)), + ); + if invalid_assignment_to_final(self, qualifiers) { return false; } ( - ensure_assignable_to(instance_attr_ty), + ensure_assignable_to(self, value_ty, instance_attr_ty), instance_attr_boundness, ) } else { @@ -3896,7 +4311,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { qualifiers, } = object_ty.instance_member(db, attribute) { - if invalid_assignment_to_final(qualifiers) { + let value_ty = infer_value_ty( + self, + TypeContext::new(Some(instance_attr_ty)), + ); + if invalid_assignment_to_final(self, qualifiers) { return false; } @@ -3909,7 +4328,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { ); } - ensure_assignable_to(instance_attr_ty) + ensure_assignable_to(self, value_ty, instance_attr_ty) } else { if emit_diagnostics { if let Some(builder) = @@ -3937,13 +4356,19 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { place: Place::Defined(meta_attr_ty, _, meta_attr_boundness), qualifiers, } => { - if invalid_assignment_to_final(qualifiers) { + // We may have to perform multi-inference if the meta attribute is possibly unbound. + // However, we are required to perform the first inference without type context. 
+ let value_ty = infer_value_ty(self, TypeContext::default()); + + if invalid_assignment_to_final(self, qualifiers) { return false; } let assignable_to_meta_attr = if let Place::Defined(meta_dunder_set, _, _) = meta_attr_ty.class_member(db, "__set__".into()).place { + // TODO: We could use the annotated parameter type of `__set__` as + // type context here. let dunder_set_result = meta_dunder_set.try_call( db, &CallArguments::positional([meta_attr_ty, object_ty, value_ty]), @@ -3963,7 +4388,9 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { dunder_set_result.is_ok() } else { - ensure_assignable_to(meta_attr_ty) + let value_ty = + infer_value_ty(self, TypeContext::new(Some(meta_attr_ty))); + ensure_assignable_to(self, value_ty, meta_attr_ty) }; let assignable_to_class_attr = if meta_attr_boundness @@ -3976,7 +4403,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { .expect("called on Type::ClassLiteral or Type::SubclassOf") .place { - (ensure_assignable_to(class_attr_ty), class_attr_boundness) + let value_ty = + infer_value_ty(self, TypeContext::new(Some(class_attr_ty))); + ( + ensure_assignable_to(self, value_ty, class_attr_ty), + class_attr_boundness, + ) } else { (true, Definedness::PossiblyUndefined) }; @@ -4008,7 +4440,9 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { .find_name_in_mro(db, attribute) .expect("called on Type::ClassLiteral or Type::SubclassOf") { - if invalid_assignment_to_final(qualifiers) { + let value_ty = + infer_value_ty(self, TypeContext::new(Some(class_attr_ty))); + if invalid_assignment_to_final(self, qualifiers) { return false; } @@ -4021,8 +4455,10 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { ); } - ensure_assignable_to(class_attr_ty) + ensure_assignable_to(self, value_ty, class_attr_ty) } else { + infer_value_ty(self, TypeContext::default()); + let attribute_is_bound_on_instance = object_ty.to_instance(self.db()).is_some_and(|instance| { !instance @@ -4064,6 +4500,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 
'ast> { Type::ModuleLiteral(module) => { if let Place::Defined(attr_ty, _, _) = module.static_member(db, attribute).place { + let value_ty = infer_value_ty(self, TypeContext::new(Some(attr_ty))); + let assignable = value_ty.is_assignable_to(db, attr_ty); if assignable { true @@ -4080,6 +4518,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { false } } else { + infer_value_ty(self, TypeContext::default()); + if emit_diagnostics { if let Some(builder) = self.context.report_lint(&UNRESOLVED_ATTRIBUTE, target) @@ -4098,22 +4538,35 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } } + #[expect(clippy::type_complexity)] fn infer_target_impl( &mut self, target: &ast::Expr, value: &ast::Expr, - assigned_ty: Option>, + infer_assigned_ty: Option<&dyn Fn(&mut Self, TypeContext<'db>) -> Type<'db>>, ) { match target { - ast::Expr::Name(name) => self.infer_definition(name), + ast::Expr::Name(name) => { + if let Some(infer_assigned_ty) = infer_assigned_ty { + infer_assigned_ty(self, TypeContext::default()); + } + + self.infer_definition(name); + } ast::Expr::List(ast::ExprList { elts, .. }) | ast::Expr::Tuple(ast::ExprTuple { elts, .. }) => { + let assigned_ty = infer_assigned_ty.map(|f| f(self, TypeContext::default())); + if let Some(tuple_spec) = assigned_ty.and_then(|ty| ty.tuple_instance_spec(self.db())) { - let mut assigned_tys = tuple_spec.all_elements(); - for element in elts { - self.infer_target_impl(element, value, assigned_tys.next().copied()); + let assigned_tys = tuple_spec.all_elements().copied().collect::>(); + + for (i, element) in elts.iter().enumerate() { + match assigned_tys.get(i).copied() { + None => self.infer_target_impl(element, value, None), + Some(ty) => self.infer_target_impl(element, value, Some(&|_, _| ty)), + } } } else { for element in elts { @@ -4129,29 +4582,39 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { .. 
}, ) => { - self.store_expression_type(target, assigned_ty.unwrap_or(Type::unknown())); - let object_ty = self.infer_expression(object, TypeContext::default()); - if let Some(assigned_ty) = assigned_ty { + if let Some(infer_assigned_ty) = infer_assigned_ty { + let infer_assigned_ty = &mut |builder: &mut Self, tcx| { + let assigned_ty = infer_assigned_ty(builder, tcx); + builder.store_expression_type(target, assigned_ty); + assigned_ty + }; + self.validate_attribute_assignment( attr_expr, object_ty, attr.id(), - assigned_ty, + infer_assigned_ty, true, ); } } ast::Expr::Subscript(subscript_expr) => { + let assigned_ty = infer_assigned_ty.map(|f| f(self, TypeContext::default())); self.store_expression_type(target, assigned_ty.unwrap_or(Type::unknown())); if let Some(assigned_ty) = assigned_ty { self.validate_subscript_assignment(subscript_expr, value, assigned_ty); } } + + // TODO: Remove this once we handle all possible assignment targets. _ => { - // TODO: Remove this once we handle all possible assignment targets. 
+ if let Some(infer_assigned_ty) = infer_assigned_ty { + infer_assigned_ty(self, TypeContext::default()); + } + self.infer_expression(target, TypeContext::default()); } } @@ -4204,17 +4667,24 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { TypeContext::default(), ); - let typevar_class = callable_type + let ty = match callable_type .as_class_literal() .and_then(|cls| cls.known(self.db())) - .filter(|cls| { - matches!(cls, KnownClass::TypeVar | KnownClass::ExtensionsTypeVar) - }); - - let ty = if let Some(typevar_class) = typevar_class { - self.infer_legacy_typevar(target, call_expr, definition, typevar_class) - } else { - self.infer_call_expression_impl(call_expr, callable_type, tcx) + { + Some( + typevar_class @ (KnownClass::TypeVar | KnownClass::ExtensionsTypeVar), + ) => { + self.infer_legacy_typevar(target, call_expr, definition, typevar_class) + } + Some(KnownClass::ParamSpec) => { + self.infer_paramspec(target, call_expr, definition) + } + Some(KnownClass::NewType) => { + self.infer_newtype_expression(target, call_expr, definition) + } + Some(_) | None => { + self.infer_call_expression_impl(call_expr, callable_type, tcx) + } }; self.store_expression_type(value, ty); @@ -4251,6 +4721,160 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { target_ty } + fn infer_paramspec( + &mut self, + target: &ast::Expr, + call_expr: &ast::ExprCall, + definition: Definition<'db>, + ) -> Type<'db> { + fn error<'db>( + context: &InferContext<'db, '_>, + message: impl std::fmt::Display, + node: impl Ranged, + ) -> Type<'db> { + if let Some(builder) = context.report_lint(&INVALID_PARAMSPEC, node) { + builder.into_diagnostic(message); + } + // If the call doesn't create a valid paramspec, we'll emit diagnostics and fall back to + // just creating a regular instance of `typing.ParamSpec`. 
+ KnownClass::ParamSpec.to_instance(context.db()) + } + + let db = self.db(); + let arguments = &call_expr.arguments; + let assume_all_features = self.in_stub(); + let python_version = Program::get(db).python_version(db); + let have_features_from = + |version: PythonVersion| assume_all_features || python_version >= version; + + let mut default = None; + let mut name_param_ty = None; + + if arguments.args.len() > 1 { + return error( + &self.context, + "`ParamSpec` can only have one positional argument", + call_expr, + ); + } + + if let Some(starred) = arguments.args.iter().find(|arg| arg.is_starred_expr()) { + return error( + &self.context, + "Starred arguments are not supported in `ParamSpec` creation", + starred, + ); + } + + for kwarg in &arguments.keywords { + let Some(identifier) = kwarg.arg.as_ref() else { + return error( + &self.context, + "Starred arguments are not supported in `ParamSpec` creation", + kwarg, + ); + }; + match identifier.id().as_str() { + "name" => { + // Duplicate keyword argument is a syntax error, so we don't have to check if + // `name_param_ty.is_some()` here. + if !arguments.args.is_empty() { + return error( + &self.context, + "The `name` parameter of `ParamSpec` can only be provided once", + kwarg, + ); + } + name_param_ty = + Some(self.infer_expression(&kwarg.value, TypeContext::default())); + } + "bound" | "covariant" | "contravariant" | "infer_variance" => { + return error( + &self.context, + "The variance and bound arguments for `ParamSpec` do not have defined semantics yet", + call_expr, + ); + } + "default" => { + if !have_features_from(PythonVersion::PY313) { + // We don't return here; this error is informational since this will error + // at runtime, but the user's intent is plain, we may as well respect it. 
+ error( + &self.context, + "The `default` parameter of `typing.ParamSpec` was added in Python 3.13", + kwarg, + ); + } + default = Some(TypeVarDefaultEvaluation::Lazy); + } + name => { + // We don't return here; this error is informational since this will error + // at runtime, but it will likely cause fewer cascading errors if we just + // ignore the unknown keyword and still understand as much of the typevar as we + // can. + error( + &self.context, + format_args!("Unknown keyword argument `{name}` in `ParamSpec` creation"), + kwarg, + ); + self.infer_expression(&kwarg.value, TypeContext::default()); + } + } + } + + let Some(name_param_ty) = name_param_ty.or_else(|| { + arguments + .find_positional(0) + .map(|arg| self.infer_expression(arg, TypeContext::default())) + }) else { + return error( + &self.context, + "The `name` parameter of `ParamSpec` is required.", + call_expr, + ); + }; + + let Some(name_param) = name_param_ty.as_string_literal().map(|name| name.value(db)) else { + return error( + &self.context, + "The first argument to `ParamSpec` must be a string literal", + call_expr, + ); + }; + + let ast::Expr::Name(ast::ExprName { + id: target_name, .. 
+ }) = target + else { + return error( + &self.context, + "A `ParamSpec` definition must be a simple variable assignment", + target, + ); + }; + + if name_param != target_name { + return error( + &self.context, + format_args!( + "The name of a `ParamSpec` (`{name_param}`) must match \ + the name of the variable it is assigned to (`{target_name}`)" + ), + target, + ); + } + + if default.is_some() { + self.deferred.insert(definition, self.multi_inference_state); + } + + let identity = + TypeVarIdentity::new(db, target_name, Some(definition), TypeVarKind::ParamSpec); + Type::KnownInstance(KnownInstanceType::TypeVar(TypeVarInstance::new( + db, identity, None, None, default, + ))) + } + fn infer_legacy_typevar( &mut self, target: &ast::Expr, @@ -4483,7 +5107,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { }; if bound_or_constraints.is_some() || default.is_some() { - self.deferred.insert(definition); + self.deferred.insert(definition, self.multi_inference_state); } let identity = TypeVarIdentity::new(db, target_name, Some(definition), TypeVarKind::Legacy); @@ -4496,11 +5120,114 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { ))) } + fn infer_newtype_expression( + &mut self, + target: &ast::Expr, + call_expr: &ast::ExprCall, + definition: Definition<'db>, + ) -> Type<'db> { + fn error<'db>( + context: &InferContext<'db, '_>, + message: impl std::fmt::Display, + node: impl Ranged, + ) -> Type<'db> { + if let Some(builder) = context.report_lint(&INVALID_NEWTYPE, node) { + builder.into_diagnostic(message); + } + Type::unknown() + } + + let db = self.db(); + let arguments = &call_expr.arguments; + + if !arguments.keywords.is_empty() { + return error( + &self.context, + "Keyword arguments are not supported in `NewType` creation", + call_expr, + ); + } + + if let Some(starred) = arguments.args.iter().find(|arg| arg.is_starred_expr()) { + return error( + &self.context, + "Starred arguments are not supported in `NewType` creation", + starred, + ); + } + + if 
arguments.args.len() != 2 { + return error( + &self.context, + format!( + "Wrong number of arguments in `NewType` creation, expected 2, found {}", + arguments.args.len() + ), + call_expr, + ); + } + + let name_param_ty = self.infer_expression(&arguments.args[0], TypeContext::default()); + + let Some(name) = name_param_ty.as_string_literal().map(|name| name.value(db)) else { + return error( + &self.context, + "The first argument to `NewType` must be a string literal", + call_expr, + ); + }; + + let ast::Expr::Name(ast::ExprName { + id: target_name, .. + }) = target + else { + return error( + &self.context, + "A `NewType` definition must be a simple variable assignment", + target, + ); + }; + + if name != target_name { + return error( + &self.context, + format_args!( + "The name of a `NewType` (`{name}`) must match \ + the name of the variable it is assigned to (`{target_name}`)" + ), + target, + ); + } + + // Inference of `tp` must be deferred, to avoid cycles. + self.deferred.insert(definition, self.multi_inference_state); + + Type::KnownInstance(KnownInstanceType::NewType(NewType::new( + db, + ast::name::Name::from(name), + definition, + None, + ))) + } + fn infer_assignment_deferred(&mut self, value: &ast::Expr) { - // Infer deferred bounds/constraints/defaults of a legacy TypeVar. - let ast::Expr::Call(ast::ExprCall { arguments, .. }) = value else { + // Infer deferred bounds/constraints/defaults of a legacy TypeVar / ParamSpec / NewType. + let ast::Expr::Call(ast::ExprCall { + func, arguments, .. 
+ }) = value + else { return; }; + let func_ty = self + .try_expression_type(func) + .unwrap_or_else(|| self.infer_expression(func, TypeContext::default())); + let known_class = func_ty + .as_class_literal() + .and_then(|cls| cls.known(self.db())); + if let Some(KnownClass::NewType) = known_class { + self.infer_newtype_assignment_deferred(arguments); + return; + } for arg in arguments.args.iter().skip(1) { self.infer_type_expression(arg); } @@ -4508,7 +5235,39 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { self.infer_type_expression(&bound.value); } if let Some(default) = arguments.find_keyword("default") { - self.infer_type_expression(&default.value); + if let Some(KnownClass::ParamSpec) = known_class { + self.infer_paramspec_default(&default.value); + } else { + self.infer_type_expression(&default.value); + } + } + } + + // Infer the deferred base type of a NewType. + fn infer_newtype_assignment_deferred(&mut self, arguments: &ast::Arguments) { + match self.infer_type_expression(&arguments.args[1]) { + Type::NominalInstance(_) | Type::NewTypeInstance(_) => {} + // `Unknown` is likely to be the result of an unresolved import or a typo, which will + // already get a diagnostic, so don't pile on an extra diagnostic here. 
+ Type::Dynamic(DynamicType::Unknown) => {} + other_type => { + if let Some(builder) = self + .context + .report_lint(&INVALID_NEWTYPE, &arguments.args[1]) + { + let mut diag = builder.into_diagnostic("invalid base for `typing.NewType`"); + diag.set_primary_message(format!("type `{}`", other_type.display(self.db()))); + if matches!(other_type, Type::ProtocolInstance(_)) { + diag.info("The base of a `NewType` is not allowed to be a protocol class."); + } else if matches!(other_type, Type::TypedDict(_)) { + diag.info("The base of a `NewType` is not allowed to be a `TypedDict`."); + } else { + diag.info( + "The base of a `NewType` must be a class type or another `NewType`.", + ); + } + } + } } } @@ -4836,12 +5595,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { is_async: _, } = for_statement; - self.infer_target(target, iter, |builder| { + self.infer_target(target, iter, |builder, tcx| { // TODO: `infer_for_statement_definition` reports a diagnostic if `iter_ty` isn't iterable // but only if the target is a name. We should report a diagnostic here if the target isn't a name: // `for a.x in not_iterable: ... builder - .infer_standalone_expression(iter, TypeContext::default()) + .infer_standalone_expression(iter, tcx) .iterate(builder.db()) .homogeneous_element_type(builder.db()) }); @@ -5229,6 +5988,10 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { .as_module_literal() .is_some_and(|module| Some(self.file()) == module.module(self.db()).file(self.db())); + // Although it isn't the runtime semantics, we go to some trouble to prioritize a submodule + // over module `__getattr__`, because that's what other type checkers do. + let mut from_module_getattr = None; + // First try loading the requested attribute from the module. 
if !import_is_self_referential { if let PlaceAndQualifiers { @@ -5248,19 +6011,23 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { )); } } - self.add_declaration_with_binding( - alias.into(), - definition, - &DeclaredAndInferredType::MightBeDifferent { - declared_ty: TypeAndQualifiers { - inner: ty, - origin: TypeOrigin::Declared, - qualifiers, + if qualifiers.contains(TypeQualifiers::FROM_MODULE_GETATTR) { + from_module_getattr = Some((ty, qualifiers)); + } else { + self.add_declaration_with_binding( + alias.into(), + definition, + &DeclaredAndInferredType::MightBeDifferent { + declared_ty: TypeAndQualifiers { + inner: ty, + origin: TypeOrigin::Declared, + qualifiers, + }, + inferred_ty: ty, }, - inferred_ty: ty, - }, - ); - return; + ); + return; + } } } @@ -5300,6 +6067,24 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { return; } + // We've checked for a submodule, so now we can go ahead and use a type from module + // `__getattr__`. + if let Some((ty, qualifiers)) = from_module_getattr { + self.add_declaration_with_binding( + alias.into(), + definition, + &DeclaredAndInferredType::MightBeDifferent { + declared_ty: TypeAndQualifiers { + inner: ty, + origin: TypeOrigin::Declared, + qualifiers, + }, + inferred_ty: ty, + }, + ); + return; + } + self.add_unknown_declaration_with_binding(alias.into(), definition); if &alias.name == "*" { @@ -5331,6 +6116,99 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } } + /// Infer the implicit local definition `x = ` that + /// `from .x.y import z` or `from whatever.thispackage.x.y` can introduce in `__init__.py(i)`. + /// + /// For the definition `z`, see [`TypeInferenceBuilder::infer_import_from_definition`]. + /// + /// The runtime semantic of this kind of statement is to introduce a variable in the global + /// scope of this module *the first time it's imported in the entire program*. 
This + /// implementation just blindly introduces a local variable wherever the `from..import` is + /// (if the imports actually resolve). + /// + /// That gap between the semantics and implementation are currently the responsibility of the + /// code that actually creates these kinds of Definitions (so blindly introducing a local + /// is all we need to be doing here). + fn infer_import_from_submodule_definition( + &mut self, + import_from: &ast::StmtImportFrom, + definition: Definition<'db>, + ) { + // Get this package's absolute module name by resolving `.`, and make sure it exists + let Ok(thispackage_name) = ModuleName::package_for_file(self.db(), self.file()) else { + self.add_binding(import_from.into(), definition, |_, _| Type::unknown()); + return; + }; + let Some(module) = resolve_module(self.db(), &thispackage_name) else { + self.add_binding(import_from.into(), definition, |_, _| Type::unknown()); + return; + }; + + // We have `from whatever.thispackage.x.y ...` or `from .x.y ...` + // and we want to extract `x` (to ultimately construct `whatever.thispackage.x`): + + // First we normalize to `whatever.thispackage.x.y` + let Some(final_part) = ModuleName::from_identifier_parts( + self.db(), + self.file(), + import_from.module.as_deref(), + import_from.level, + ) + .ok() + // `whatever.thispackage.x.y` => `x.y` + .and_then(|submodule_name| submodule_name.relative_to(&thispackage_name)) + // `x.y` => `x` + .and_then(|relative_submodule_name| { + relative_submodule_name + .components() + .next() + .and_then(ModuleName::new) + }) else { + self.add_binding(import_from.into(), definition, |_, _| Type::unknown()); + return; + }; + + // `x` => `whatever.thispackage.x` + let mut full_submodule_name = thispackage_name.clone(); + full_submodule_name.extend(&final_part); + + // Try to actually resolve the import `whatever.thispackage.x` + if let Some(submodule_type) = self.module_type_from_name(&full_submodule_name) { + // Success, introduce a binding! 
+ // + // We explicitly don't introduce a *declaration* because it's actual ok + // (and fairly common) to overwrite this import with a function or class + // and we don't want it to be a type error to do so. + self.add_binding(import_from.into(), definition, |_, _| submodule_type); + return; + } + + // That didn't work, try to produce diagnostics + self.add_binding(import_from.into(), definition, |_, _| Type::unknown()); + + if !self.is_reachable(import_from) { + return; + } + + let Some(builder) = self + .context + .report_lint(&UNRESOLVED_IMPORT, AnyNodeRef::StmtImportFrom(import_from)) + else { + return; + }; + + let diagnostic = builder.into_diagnostic(format_args!( + "Module `{thispackage_name}` has no submodule `{final_part}`" + )); + + hint_if_stdlib_submodule_exists_on_other_versions( + self.db(), + diagnostic, + &full_submodule_name, + module, + ); + } + fn infer_return_statement(&mut self, ret: &ast::StmtReturn) { let tcx = if ret.value.is_some() { nearest_enclosing_function(self.db(), self.index, self.scope()) @@ -5539,27 +6417,160 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } } - /// Infer the argument types for multiple potential bindings and overloads. - fn infer_all_argument_types<'a>( + fn infer_and_check_argument_types( &mut self, ast_arguments: &ast::Arguments, - arguments: &mut CallArguments<'a, 'db>, - bindings: &Bindings<'db>, - ) { - debug_assert!( - ast_arguments.len() == arguments.len() - && arguments.len() == bindings.argument_forms().len() + argument_types: &mut CallArguments<'_, 'db>, + bindings: &mut Bindings<'db>, + call_expression_tcx: TypeContext<'db>, + ) -> Result<(), CallErrorKind> { + let db = self.db(); + + // If the type context is a union, attempt to narrow to a specific element. + let narrow_targets: &[_] = match call_expression_tcx.annotation { + // TODO: We could theoretically attempt to narrow to every element of + // the power set of this union. 
However, this leads to an exponential + // explosion of inference attempts, and is rarely needed in practice. + Some(Type::Union(union)) => union.elements(db), + _ => &[], + }; + + // We silence diagnostics until we successfully narrow to a specific type. + let mut speculated_bindings = bindings.clone(); + let was_in_multi_inference = self.context.set_multi_inference(true); + + let mut try_narrow = |narrowed_ty| { + let narrowed_tcx = TypeContext::new(Some(narrowed_ty)); + + // Attempt to infer the argument types using the narrowed type context. + self.infer_all_argument_types( + ast_arguments, + argument_types, + bindings, + narrowed_tcx, + MultiInferenceState::Ignore, + ); + + // Ensure the argument types match their annotated types. + if speculated_bindings + .check_types_impl( + db, + argument_types, + narrowed_tcx, + &self.dataclass_field_specifiers, + ) + .is_err() + { + return None; + } + + // Ensure the inferred return type is assignable to the (narrowed) declared type. + // + // TODO: Checking assignability against the full declared type could help avoid + // cases where the constraint solver is not smart enough to solve complex unions. + // We should see revisit this after the new constraint solver is implemented. + if speculated_bindings + .callable_type() + .synthesized_constructor_return_ty(db) + .is_none() + && !speculated_bindings + .return_type(db) + .is_assignable_to(db, narrowed_ty) + { + return None; + } + + // Successfully narrowed to an element of the union. + // + // If necessary, infer the argument types again with diagnostics enabled. + if !was_in_multi_inference { + self.context.set_multi_inference(was_in_multi_inference); + + self.infer_all_argument_types( + ast_arguments, + argument_types, + bindings, + narrowed_tcx, + MultiInferenceState::Intersect, + ); + } + + Some(bindings.check_types_impl( + db, + argument_types, + narrowed_tcx, + &self.dataclass_field_specifiers, + )) + }; + + // Prefer the declared type of generic classes. 
+ for narrowed_ty in narrow_targets + .iter() + .filter(|ty| ty.class_specialization(db).is_some()) + { + if let Some(result) = try_narrow(*narrowed_ty) { + return result; + } + } + + // Try the remaining elements of the union. + // + // TODO: We could also attempt an inference without type context, but this + // leads to similar performance issues. + for narrowed_ty in narrow_targets + .iter() + .filter(|ty| ty.class_specialization(db).is_none()) + { + if let Some(result) = try_narrow(*narrowed_ty) { + return result; + } + } + + // Re-enable diagnostics, and infer against the entire union as a fallback. + self.context.set_multi_inference(was_in_multi_inference); + + self.infer_all_argument_types( + ast_arguments, + argument_types, + bindings, + call_expression_tcx, + MultiInferenceState::Intersect, ); + bindings.check_types_impl( + db, + argument_types, + call_expression_tcx, + &self.dataclass_field_specifiers, + ) + } + + /// Infer the argument types for all bindings. + /// + /// Note that this method may infer the type of a given argument expression multiple times with + /// distinct type context. The provided `MultiInferenceState` can be used to dictate multi-inference + /// behavior. 
+ fn infer_all_argument_types( + &mut self, + ast_arguments: &ast::Arguments, + arguments_types: &mut CallArguments<'_, 'db>, + bindings: &Bindings<'db>, + call_expression_tcx: TypeContext<'db>, + multi_inference_state: MultiInferenceState, + ) { + debug_assert_eq!(ast_arguments.len(), arguments_types.len()); + debug_assert_eq!(arguments_types.len(), bindings.argument_forms().len()); + + let db = self.db(); let iter = itertools::izip!( 0.., - arguments.iter_mut(), + arguments_types.iter_mut(), bindings.argument_forms().iter().copied(), ast_arguments.arguments_source_order() ); let overloads_with_binding = bindings - .into_iter() + .iter() .filter_map(|binding| { match binding.matching_overload_index() { MatchingOverloadIndex::Single(_) | MatchingOverloadIndex::Multiple(_) => { @@ -5578,7 +6589,10 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { }, } }) - .flatten(); + .flatten() + .collect::>(); + + let old_multi_inference_state = self.set_multi_inference_state(multi_inference_state); for (argument_index, (_, argument_type), argument_form, ast_argument) in iter { let ast_argument = match ast_argument { @@ -5600,7 +6614,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } // Retrieve the parameter type for the current argument in a given overload and its binding. - let db = self.db(); let parameter_type = |overload: &Binding<'db>, binding: &CallableBinding<'db>| { let argument_index = if binding.bound_type.is_some() { argument_index + 1 @@ -5613,10 +6626,25 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { return None; }; - let parameter_type = + let mut parameter_type = overload.signature.parameters()[*parameter_index].annotated_type()?; - // TODO: For now, skip any parameter annotations that mention any typevars. There + // If this is a generic call, attempt to specialize the parameter type using the + // declared type context, if provided. 
+ if let Some(generic_context) = overload.signature.generic_context + && let Some(return_ty) = overload.signature.return_ty + && let Some(declared_return_ty) = call_expression_tcx.annotation + { + let mut builder = + SpecializationBuilder::new(db, generic_context.inferable_typevars(db)); + + let _ = builder.infer(return_ty, declared_return_ty); + let specialization = builder.build(generic_context, call_expression_tcx); + + parameter_type = parameter_type.apply_specialization(db, specialization); + } + + // TODO: For now, skip any parameter annotations that still mention any typevars. There // are two issues: // // First, if we include those typevars in the type context that we use to infer the @@ -5647,26 +6675,15 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { // If there is only a single binding and overload, we can infer the argument directly with // the unique parameter type annotation. - if let Ok((overload, binding)) = overloads_with_binding.clone().exactly_one() { - self.infer_expression_impl( + if let Ok((overload, binding)) = overloads_with_binding.iter().exactly_one() { + *argument_type = Some(self.infer_expression( ast_argument, TypeContext::new(parameter_type(overload, binding)), - ); + )); } else { - // Otherwise, each type is a valid independent inference of the given argument, and we may - // require different permutations of argument types to correctly perform argument expansion - // during overload evaluation, so we take the intersection of all the types we inferred for - // each argument. - // - // Note that this applies to all nested expressions within each argument. - let old_multi_inference_state = mem::replace( - &mut self.multi_inference_state, - MultiInferenceState::Intersect, - ); - // We perform inference once without any type context, emitting any diagnostics that are unrelated // to bidirectional type inference. 
- self.infer_expression_impl(ast_argument, TypeContext::default()); + *argument_type = Some(self.infer_expression(ast_argument, TypeContext::default())); // We then silence any diagnostics emitted during multi-inference, as the type context is only // used as a hint to infer a more assignable argument type, and should not lead to diagnostics @@ -5675,24 +6692,28 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { // Infer the type of each argument once with each distinct parameter type as type context. let parameter_types = overloads_with_binding - .clone() + .iter() .filter_map(|(overload, binding)| parameter_type(overload, binding)) .collect::>(); for parameter_type in parameter_types { - self.infer_expression_impl( - ast_argument, - TypeContext::new(Some(parameter_type)), - ); + let inferred_ty = + self.infer_expression(ast_argument, TypeContext::new(Some(parameter_type))); + + // Each type is a valid independent inference of the given argument, and we may require different + // permutations of argument types to correctly perform argument expansion during overload evaluation, + // so we take the intersection of all the types we inferred for each argument. + *argument_type = argument_type + .map(|current| IntersectionType::from_elements(db, [inferred_ty, current])) + .or(Some(inferred_ty)); } - // Restore the multi-inference state. - self.multi_inference_state = old_multi_inference_state; + // Re-enable diagnostics. 
self.context.set_multi_inference(was_in_multi_inference); } - - *argument_type = self.try_expression_type(ast_argument); } + + self.set_multi_inference_state(old_multi_inference_state); } fn infer_argument_type( @@ -5715,15 +6736,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { expression.map(|expr| self.infer_expression(expr, tcx)) } - fn get_or_infer_expression( - &mut self, - expression: &ast::Expr, - tcx: TypeContext<'db>, - ) -> Type<'db> { - self.try_expression_type(expression) - .unwrap_or_else(|| self.infer_expression(expression, tcx)) - } - #[track_caller] fn infer_expression(&mut self, expression: &ast::Expr, tcx: TypeContext<'db>) -> Type<'db> { debug_assert!( @@ -5809,9 +6821,13 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { ast::Expr::Set(set) => self.infer_set_expression(set, tcx), ast::Expr::Dict(dict) => self.infer_dict_expression(dict, tcx), ast::Expr::Generator(generator) => self.infer_generator_expression(generator), - ast::Expr::ListComp(listcomp) => self.infer_list_comprehension_expression(listcomp), - ast::Expr::DictComp(dictcomp) => self.infer_dict_comprehension_expression(dictcomp), - ast::Expr::SetComp(setcomp) => self.infer_set_comprehension_expression(setcomp), + ast::Expr::ListComp(listcomp) => { + self.infer_list_comprehension_expression(listcomp, tcx) + } + ast::Expr::DictComp(dictcomp) => { + self.infer_dict_comprehension_expression(dictcomp, tcx) + } + ast::Expr::SetComp(setcomp) => self.infer_set_comprehension_expression(setcomp, tcx), ast::Expr::Name(name) => self.infer_name_expression(name), ast::Expr::Attribute(attribute) => self.infer_attribute_expression(attribute), ast::Expr::UnaryOp(unary_op) => self.infer_unary_expression(unary_op), @@ -5858,11 +6874,17 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { let db = self.db(); match self.multi_inference_state { + MultiInferenceState::Ignore => {} + MultiInferenceState::Panic => { let previous = self.expressions.insert(expression.into(), ty); assert_eq!(previous, 
None); } + MultiInferenceState::Overwrite => { + self.expressions.insert(expression.into(), ty); + } + MultiInferenceState::Intersect => { self.expressions .entry(expression.into()) @@ -6071,7 +7093,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } = list; let elts = elts.iter().map(|elt| [Some(elt)]); - self.infer_collection_literal(elts, tcx, KnownClass::List) + let infer_elt_ty = |builder: &mut Self, elt, tcx| builder.infer_expression(elt, tcx); + self.infer_collection_literal(elts, tcx, infer_elt_ty, KnownClass::List) .unwrap_or_else(|| { KnownClass::List.to_specialized_instance(self.db(), [Type::unknown()]) }) @@ -6085,7 +7108,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } = set; let elts = elts.iter().map(|elt| [Some(elt)]); - self.infer_collection_literal(elts, tcx, KnownClass::Set) + let infer_elt_ty = |builder: &mut Self, elt, tcx| builder.infer_expression(elt, tcx); + self.infer_collection_literal(elts, tcx, infer_elt_ty, KnownClass::Set) .unwrap_or_else(|| { KnownClass::Set.to_specialized_instance(self.db(), [Type::unknown()]) }) @@ -6098,12 +7122,14 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { items, } = dict; + let mut item_types = FxHashMap::default(); + // Validate `TypedDict` dictionary literal assignments. if let Some(tcx) = tcx.annotation && let Some(typed_dict) = tcx .filter_union(self.db(), Type::is_typed_dict) .as_typed_dict() - && let Some(ty) = self.infer_typed_dict_expression(dict, typed_dict) + && let Some(ty) = self.infer_typed_dict_expression(dict, typed_dict, &mut item_types) { return ty; } @@ -6119,7 +7145,17 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { .iter() .map(|item| [item.key.as_ref(), Some(&item.value)]); - self.infer_collection_literal(items, tcx, KnownClass::Dict) + // Avoid inferring the items multiple times if we already attempted to infer the + // dictionary literal as a `TypedDict`. This also allows us to infer using the + // type context of the expected `TypedDict` field. 
+ let infer_elt_ty = |builder: &mut Self, elt: &ast::Expr, tcx| { + item_types + .get(&elt.node_index().load()) + .copied() + .unwrap_or_else(|| builder.infer_expression(elt, tcx)) + }; + + self.infer_collection_literal(items, tcx, infer_elt_ty, KnownClass::Dict) .unwrap_or_else(|| { KnownClass::Dict .to_specialized_instance(self.db(), [Type::unknown(), Type::unknown()]) @@ -6130,6 +7166,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { &mut self, dict: &ast::ExprDict, typed_dict: TypedDictType<'db>, + item_types: &mut FxHashMap>, ) -> Option> { let ast::ExprDict { range: _, @@ -6141,30 +7178,43 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { for item in items { let key_ty = self.infer_optional_expression(item.key.as_ref(), TypeContext::default()); + if let Some((key, key_ty)) = item.key.as_ref().zip(key_ty) { + item_types.insert(key.node_index().load(), key_ty); + } - if let Some(Type::StringLiteral(key)) = key_ty + let value_ty = if let Some(Type::StringLiteral(key)) = key_ty && let Some(field) = typed_dict_items.get(key.value(self.db())) { - self.infer_expression(&item.value, TypeContext::new(Some(field.declared_ty))); + self.infer_expression(&item.value, TypeContext::new(Some(field.declared_ty))) } else { - self.infer_expression(&item.value, TypeContext::default()); - } + self.infer_expression(&item.value, TypeContext::default()) + }; + + item_types.insert(item.value.node_index().load(), value_ty); } validate_typed_dict_dict_literal(&self.context, typed_dict, dict, dict.into(), |expr| { - self.expression_type(expr) + item_types + .get(&expr.node_index().load()) + .copied() + .unwrap_or(Type::unknown()) }) .ok() .map(|_| Type::TypedDict(typed_dict)) } // Infer the type of a collection literal expression. 
- fn infer_collection_literal<'expr, const N: usize>( + fn infer_collection_literal<'expr, const N: usize, F, I>( &mut self, - elts: impl Iterator; N]>, + elts: I, tcx: TypeContext<'db>, + mut infer_elt_expression: F, collection_class: KnownClass, - ) -> Option> { + ) -> Option> + where + I: Iterator; N]>, + F: FnMut(&mut Self, &'expr ast::Expr, TypeContext<'db>) -> Type<'db>, + { // Extract the type variable `T` from `list[T]` in typeshed. let elt_tys = |collection_class: KnownClass| { let class_literal = collection_class.try_to_class_literal(self.db())?; @@ -6180,7 +7230,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { // Infer the element types without type context, and fallback to unknown for // custom typesheds. for elt in elts.flatten().flatten() { - self.get_or_infer_expression(elt, TypeContext::default()); + infer_elt_expression(self, elt, TypeContext::default()); } return None; @@ -6235,7 +7285,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { for elts in elts { // An unpacking expression for a dictionary. if let &[None, Some(value)] = elts.as_slice() { - let inferred_value_ty = self.get_or_infer_expression(value, TypeContext::default()); + let inferred_value_ty = infer_elt_expression(self, value, TypeContext::default()); // Merge the inferred type of the nested dictionary. if let Some(specialization) = @@ -6258,7 +7308,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { { let Some(elt) = elt else { continue }; - let inferred_elt_ty = self.get_or_infer_expression(elt, elt_tcx); + let inferred_elt_ty = infer_elt_expression(self, elt, elt_tcx); // Simplify the inference based on the declared type of the element. 
if let Some(elt_tcx) = elt_tcx.annotation { @@ -6312,52 +7362,121 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { ) } - fn infer_list_comprehension_expression(&mut self, listcomp: &ast::ExprListComp) -> Type<'db> { + /// Return a specialization of the collection class (list, dict, set) based on the type context and the inferred + /// element / key-value types from the comprehension expression. + fn infer_comprehension_specialization( + &self, + collection_class: KnownClass, + inferred_element_types: &[Type<'db>], + tcx: TypeContext<'db>, + ) -> Type<'db> { + // Remove any union elements of that are unrelated to the collection type. + let tcx = tcx.map(|annotation| { + annotation.filter_disjoint_elements( + self.db(), + collection_class.to_instance(self.db()), + InferableTypeVars::None, + ) + }); + + if let Some(annotated_element_types) = tcx + .known_specialization(self.db(), collection_class) + .map(|specialization| specialization.types(self.db())) + && annotated_element_types + .iter() + .zip(inferred_element_types.iter()) + .all(|(annotated, inferred)| inferred.is_assignable_to(self.db(), *annotated)) + { + collection_class + .to_specialized_instance(self.db(), annotated_element_types.iter().copied()) + } else { + collection_class.to_specialized_instance( + self.db(), + inferred_element_types.iter().map(|ty| { + UnionType::from_elements( + self.db(), + [ + ty.promote_literals(self.db(), TypeContext::default()), + Type::unknown(), + ], + ) + }), + ) + } + } + + fn infer_list_comprehension_expression( + &mut self, + listcomp: &ast::ExprListComp, + tcx: TypeContext<'db>, + ) -> Type<'db> { let ast::ExprListComp { range: _, node_index: _, - elt: _, + elt, generators, } = listcomp; self.infer_first_comprehension_iter(generators); - KnownClass::List - .to_specialized_instance(self.db(), [todo_type!("list comprehension element type")]) + let scope_id = self + .index + .node_scope(NodeWithScopeRef::ListComprehension(listcomp)); + let scope = 
scope_id.to_scope_id(self.db(), self.file()); + let inference = infer_scope_types(self.db(), scope); + let element_type = inference.expression_type(elt.as_ref()); + + self.infer_comprehension_specialization(KnownClass::List, &[element_type], tcx) } - fn infer_dict_comprehension_expression(&mut self, dictcomp: &ast::ExprDictComp) -> Type<'db> { + fn infer_dict_comprehension_expression( + &mut self, + dictcomp: &ast::ExprDictComp, + tcx: TypeContext<'db>, + ) -> Type<'db> { let ast::ExprDictComp { range: _, node_index: _, - key: _, - value: _, + key, + value, generators, } = dictcomp; self.infer_first_comprehension_iter(generators); - KnownClass::Dict.to_specialized_instance( - self.db(), - [ - todo_type!("dict comprehension key type"), - todo_type!("dict comprehension value type"), - ], - ) + let scope_id = self + .index + .node_scope(NodeWithScopeRef::DictComprehension(dictcomp)); + let scope = scope_id.to_scope_id(self.db(), self.file()); + let inference = infer_scope_types(self.db(), scope); + let key_type = inference.expression_type(key.as_ref()); + let value_type = inference.expression_type(value.as_ref()); + + self.infer_comprehension_specialization(KnownClass::Dict, &[key_type, value_type], tcx) } - fn infer_set_comprehension_expression(&mut self, setcomp: &ast::ExprSetComp) -> Type<'db> { + fn infer_set_comprehension_expression( + &mut self, + setcomp: &ast::ExprSetComp, + tcx: TypeContext<'db>, + ) -> Type<'db> { let ast::ExprSetComp { range: _, node_index: _, - elt: _, + elt, generators, } = setcomp; self.infer_first_comprehension_iter(generators); - KnownClass::Set - .to_specialized_instance(self.db(), [todo_type!("set comprehension element type")]) + let scope_id = self + .index + .node_scope(NodeWithScopeRef::SetComprehension(setcomp)); + let scope = scope_id.to_scope_id(self.db(), self.file()); + let inference = infer_scope_types(self.db(), scope); + let element_type = inference.expression_type(elt.as_ref()); + + 
self.infer_comprehension_specialization(KnownClass::Set, &[element_type], tcx) } fn infer_generator_expression_scope(&mut self, generator: &ast::ExprGenerator) { @@ -6432,7 +7551,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { is_async: _, } = comprehension; - self.infer_target(target, iter, |builder| { + self.infer_target(target, iter, |builder, tcx| { // TODO: `infer_comprehension_definition` reports a diagnostic if `iter_ty` isn't iterable // but only if the target is a name. We should report a diagnostic here if the target isn't a name: // `[... for a.x in not_iterable] @@ -6440,11 +7559,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { infer_same_file_expression_type( builder.db(), builder.index.expression(iter), - TypeContext::default(), + tcx, builder.module(), ) } else { - builder.infer_standalone_expression(iter, TypeContext::default()) + builder.infer_standalone_expression(iter, tcx) } .iterate(builder.db()) .homogeneous_element_type(builder.db()) @@ -6726,6 +7845,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { first_arg.into(), first_arg.into(), Type::TypedDict(typed_dict_ty), + None, key_ty, &items, ); @@ -6808,29 +7928,55 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { .to_class_type(self.db()) .is_none_or(|enum_class| !class.is_subclass_of(self.db(), enum_class)) { - if matches!( - class.known(self.db()), - Some(KnownClass::TypeVar | KnownClass::ExtensionsTypeVar) - ) { - // Inference of correctly-placed `TypeVar` definitions is done in - // `TypeInferenceBuilder::infer_legacy_typevar`, and doesn't use the full - // call-binding machinery. If we reach here, it means that someone is trying to - // instantiate a `typing.TypeVar` in an invalid context. 
- if let Some(builder) = self - .context - .report_lint(&INVALID_LEGACY_TYPE_VARIABLE, call_expression) - { - builder.into_diagnostic( - "A `TypeVar` definition must be a simple variable assignment", - ); + // Inference of correctly-placed `TypeVar`, `ParamSpec`, and `NewType` definitions + // is done in `infer_legacy_typevar`, `infer_paramspec`, and + // `infer_newtype_expression`, and doesn't use the full call-binding machinery. If + // we reach here, it means that someone is trying to instantiate one of these in an + // invalid context. + match class.known(self.db()) { + Some(KnownClass::TypeVar | KnownClass::ExtensionsTypeVar) => { + if let Some(builder) = self + .context + .report_lint(&INVALID_LEGACY_TYPE_VARIABLE, call_expression) + { + builder.into_diagnostic( + "A `TypeVar` definition must be a simple variable assignment", + ); + } } + Some(KnownClass::ParamSpec) => { + if let Some(builder) = self + .context + .report_lint(&INVALID_PARAMSPEC, call_expression) + { + builder.into_diagnostic( + "A `ParamSpec` definition must be a simple variable assignment", + ); + } + } + Some(KnownClass::NewType) => { + if let Some(builder) = + self.context.report_lint(&INVALID_NEWTYPE, call_expression) + { + builder.into_diagnostic( + "A `NewType` definition must be a simple variable assignment", + ); + } + } + _ => {} } let db = self.db(); let infer_call_arguments = |bindings: Option>| { if let Some(bindings) = bindings { let bindings = bindings.match_parameters(self.db(), &call_arguments); - self.infer_all_argument_types(arguments, &mut call_arguments, &bindings); + self.infer_all_argument_types( + arguments, + &mut call_arguments, + &bindings, + tcx, + MultiInferenceState::Intersect, + ); } else { let argument_forms = vec![Some(ParameterForm::Value); call_arguments.len()]; self.infer_argument_types(arguments, &mut call_arguments, &argument_forms); @@ -6848,10 +7994,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } } - let bindings = callable_type + let mut 
bindings = callable_type .bindings(self.db()) .match_parameters(self.db(), &call_arguments); - self.infer_all_argument_types(arguments, &mut call_arguments, &bindings); + + let bindings_result = + self.infer_and_check_argument_types(arguments, &mut call_arguments, &mut bindings, tcx); // Validate `TypedDict` constructor calls after argument type inference if let Some(class_literal) = callable_type.as_class_literal() { @@ -6869,14 +8017,9 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } } - let mut bindings = match bindings.check_types( - self.db(), - &call_arguments, - &tcx, - &self.dataclass_field_specifiers[..], - ) { - Ok(bindings) => bindings, - Err(CallError(_, bindings)) => { + let mut bindings = match bindings_result { + Ok(()) => bindings, + Err(_) => { bindings.report_diagnostics(&self.context, call_expression.into()); return bindings.return_type(self.db()); } @@ -7340,6 +8483,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { let mut nonlocal_union_builder = UnionBuilder::new(db); let mut found_some_definition = false; for (enclosing_scope_file_id, _) in self.index.ancestor_scopes(file_scope_id).skip(1) { + // If the current enclosing scope is global, no place lookup is performed here, + // instead falling back to the module's explicit global lookup below. + if enclosing_scope_file_id.is_global() { + break; + } + // Class scopes are not visible to nested scopes, and we need to handle global // scope differently (because an unbound name there falls back to builtins), so // check only function-like scopes. @@ -7370,6 +8519,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { // registering eager bindings for nested scopes that are actually eager, and for // enclosing scopes that actually contain bindings that we should use when // resolving the reference.) 
+ let mut eagerly_resolved_place = None; if !self.is_deferred() { match self.index.enclosing_snapshot( enclosing_scope_file_id, @@ -7381,6 +8531,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { enclosing_scope_file_id, ConstraintKey::NarrowingConstraint(constraint), )); + // If the current scope is eager, it is certain that the place is undefined in the current scope. + // Do not call the `place` query below as a fallback. + if scope.scope(db).is_eager() { + eagerly_resolved_place = Some(Place::Undefined.into()); + } } EnclosingSnapshotResult::FoundBindings(bindings) => { let place = place_from_bindings(db, bindings).map_type(|ty| { @@ -7442,18 +8597,20 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { // `nonlocal` variable, but we don't enforce that here. See the // `ast::Stmt::AnnAssign` handling in `SemanticIndexBuilder::visit_stmt`.) if enclosing_place.is_bound() || enclosing_place.is_declared() { - let local_place_and_qualifiers = place( - db, - enclosing_scope_id, - place_expr, - ConsideredDefinitions::AllReachable, - ) - .map_type(|ty| { - self.narrow_place_with_applicable_constraints( + let local_place_and_qualifiers = eagerly_resolved_place.unwrap_or_else(|| { + place( + db, + enclosing_scope_id, place_expr, - ty, - &constraint_keys, + ConsideredDefinitions::AllReachable, ) + .map_type(|ty| { + self.narrow_place_with_applicable_constraints( + place_expr, + ty, + &constraint_keys, + ) + }) }); // We could have `Place::Undefined` here, despite the checks above, for example if // this scope contains a `del` statement but no binding or declaration. @@ -7496,6 +8653,9 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { FileScopeId::global(), ConstraintKey::NarrowingConstraint(constraint), )); + // Reaching here means that no bindings are found in any scope. + // Since `explicit_global_symbol` may return a cycle initial value, we return `Place::Undefined` here. 
+ return Place::Undefined.into(); } EnclosingSnapshotResult::FoundBindings(bindings) => { let place = place_from_bindings(db, bindings).map_type(|ty| { @@ -7888,7 +9048,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { | Type::BoundSuper(_) | Type::TypeVar(_) | Type::TypeIs(_) - | Type::TypedDict(_), + | Type::TypedDict(_) + | Type::NewTypeInstance(_), ) => { let unary_dunder_method = match op { ast::UnaryOp::Invert => "__invert__", @@ -7982,6 +9143,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { emitted_division_by_zero_diagnostic = self.check_division_by_zero(node, op, left_ty); } + let pep_604_unions_allowed = || { + Program::get(self.db()).python_version(self.db()) >= PythonVersion::PY310 + || self.file().is_stub(self.db()) + || self.scope().scope(self.db()).in_type_checking_block() + }; + match (left_ty, right_ty, op) { (Type::Union(lhs_union), rhs, _) => lhs_union.try_map(self.db(), |lhs_element| { self.infer_binary_expression_type( @@ -8031,10 +9198,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { ( todo @ Type::Dynamic( - DynamicType::Todo(_) - | DynamicType::TodoPEP695ParamSpec - | DynamicType::TodoUnpack - | DynamicType::TodoTypeAlias, + DynamicType::Todo(_) | DynamicType::TodoUnpack | DynamicType::TodoTypeAlias, ), _, _, @@ -8042,10 +9206,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { | ( _, todo @ Type::Dynamic( - DynamicType::Todo(_) - | DynamicType::TodoPEP695ParamSpec - | DynamicType::TodoUnpack - | DynamicType::TodoTypeAlias, + DynamicType::Todo(_) | DynamicType::TodoUnpack | DynamicType::TodoTypeAlias, ), _, ) => Some(todo), @@ -8228,20 +9389,94 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { ))) } - // Special-case `X | Y` with `X` and `Y` instances of `type` to produce a `types.UnionType` instance, in order to - // overwrite the typeshed return type for `type.__or__`, which would result in `types.UnionType | X`. 
We currently - // do this to avoid false positives when a legacy type alias like `IntOrStr = int | str` is later used in a type - // expression, because `types.UnionType` will result in a `@Todo` type, while `types.UnionType | ` does - // not. - // - // TODO: Remove this special case once we add support for legacy type aliases. + // PEP 604-style union types using the `|` operator. ( - Type::ClassLiteral(..) | Type::SubclassOf(..) | Type::GenericAlias(..), - Type::ClassLiteral(..) | Type::SubclassOf(..) | Type::GenericAlias(..), + Type::ClassLiteral(..) + | Type::SubclassOf(..) + | Type::GenericAlias(..) + | Type::SpecialForm(_) + | Type::KnownInstance( + KnownInstanceType::UnionType(_) + | KnownInstanceType::Literal(_) + | KnownInstanceType::Annotated(_), + ), + Type::ClassLiteral(..) + | Type::SubclassOf(..) + | Type::GenericAlias(..) + | Type::SpecialForm(_) + | Type::KnownInstance( + KnownInstanceType::UnionType(_) + | KnownInstanceType::Literal(_) + | KnownInstanceType::Annotated(_), + ), ast::Operator::BitOr, - ) if Program::get(self.db()).python_version(self.db()) >= PythonVersion::PY310 => { - Some(KnownClass::UnionType.to_instance(self.db())) + ) if pep_604_unions_allowed() => { + if left_ty.is_equivalent_to(self.db(), right_ty) { + Some(left_ty) + } else { + Some(Type::KnownInstance(KnownInstanceType::UnionType( + InternedTypes::from_elements( + self.db(), + [left_ty, right_ty], + InferredAs::ValueExpression, + ), + ))) + } } + ( + Type::ClassLiteral(..) + | Type::SubclassOf(..) + | Type::GenericAlias(..) + | Type::KnownInstance(..) + | Type::SpecialForm(..), + Type::NominalInstance(instance), + ast::Operator::BitOr, + ) + | ( + Type::NominalInstance(instance), + Type::ClassLiteral(..) + | Type::SubclassOf(..) + | Type::GenericAlias(..) + | Type::KnownInstance(..) 
+ | Type::SpecialForm(..), + ast::Operator::BitOr, + ) if pep_604_unions_allowed() + && instance.has_known_class(self.db(), KnownClass::NoneType) => + { + Some(Type::KnownInstance(KnownInstanceType::UnionType( + InternedTypes::from_elements( + self.db(), + [left_ty, right_ty], + InferredAs::ValueExpression, + ), + ))) + } + + // We avoid calling `type.__(r)or__`, as typeshed annotates these methods as + // accepting `Any` (since typeforms are inexpressable in the type system currently). + // This means that many common errors would not be caught if we fell back to typeshed's stubs here. + // + // Note that if a class had a custom metaclass that overrode `__(r)or__`, we would also ignore + // that custom method as we'd take one of the earlier branches. + // This seems like it's probably rare enough that it's acceptable, however. + ( + Type::ClassLiteral(..) | Type::GenericAlias(..) | Type::SubclassOf(..), + _, + ast::Operator::BitOr, + ) + | ( + _, + Type::ClassLiteral(..) | Type::GenericAlias(..) | Type::SubclassOf(..), + ast::Operator::BitOr, + ) if pep_604_unions_allowed() => Type::try_call_bin_op_with_policy( + self.db(), + left_ty, + ast::Operator::BitOr, + right_ty, + MemberLookupPolicy::META_CLASS_NO_TYPE_FALLBACK, + ) + .ok() + .map(|binding| binding.return_type(self.db())), // We've handled all of the special cases that we support for literals, so we need to // fall back on looking for dunder methods on one of the operand types. @@ -8274,7 +9509,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { | Type::BoundSuper(_) | Type::TypeVar(_) | Type::TypeIs(_) - | Type::TypedDict(_), + | Type::TypedDict(_) + | Type::NewTypeInstance(_), Type::FunctionLiteral(_) | Type::BooleanLiteral(_) | Type::Callable(..) 
@@ -8303,7 +9539,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { | Type::BoundSuper(_) | Type::TypeVar(_) | Type::TypeIs(_) - | Type::TypedDict(_), + | Type::TypedDict(_) + | Type::NewTypeInstance(_), op, ) => Type::try_call_bin_op(self.db(), left_ty, op, right_ty) .map(|outcome| outcome.return_type(self.db())) @@ -9279,14 +10516,23 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } fn infer_subscript_load(&mut self, subscript: &ast::ExprSubscript) -> Type<'db> { + let value_ty = self.infer_expression(&subscript.value, TypeContext::default()); + self.infer_subscript_load_impl(value_ty, subscript) + } + + fn infer_subscript_load_impl( + &mut self, + value_ty: Type<'db>, + subscript: &ast::ExprSubscript, + ) -> Type<'db> { let ast::ExprSubscript { range: _, node_index: _, - value, + value: _, slice, ctx, } = subscript; - let value_ty = self.infer_expression(value, TypeContext::default()); + let mut constraint_keys = vec![]; // If `value` is a valid reference, we attempt type narrowing by assignment. @@ -9312,37 +10558,154 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { Type::from(tuple.to_class_type(db)) }; - // HACK ALERT: If we are subscripting a generic class, short-circuit the rest of the - // subscript inference logic and treat this as an explicit specialization. - // TODO: Move this logic into a custom callable, and update `find_name_in_mro` to return - // this callable as the `__class_getitem__` method on `type`. That probably requires - // updating all of the subscript logic below to use custom callables for all of the _other_ - // special cases, too. - if let Type::ClassLiteral(class) = value_ty { - if class.is_tuple(self.db()) { + match value_ty { + Type::ClassLiteral(class) => { + // HACK ALERT: If we are subscripting a generic class, short-circuit the rest of the + // subscript inference logic and treat this as an explicit specialization. 
+ // TODO: Move this logic into a custom callable, and update `find_name_in_mro` to return + // this callable as the `__class_getitem__` method on `type`. That probably requires + // updating all of the subscript logic below to use custom callables for all of the _other_ + // special cases, too. + if class.is_tuple(self.db()) { + return tuple_generic_alias(self.db(), self.infer_tuple_type_expression(slice)); + } + if let Some(generic_context) = class.generic_context(self.db()) { + return self.infer_explicit_class_specialization( + subscript, + value_ty, + class, + generic_context, + ); + } + } + Type::KnownInstance(KnownInstanceType::TypeAliasType(type_alias)) => { + if let Some(generic_context) = type_alias.generic_context(self.db()) { + return self.infer_explicit_type_alias_specialization( + subscript, + value_ty, + type_alias, + generic_context, + ); + } + } + Type::SpecialForm(SpecialFormType::Tuple) => { return tuple_generic_alias(self.db(), self.infer_tuple_type_expression(slice)); } - if let Some(generic_context) = class.generic_context(self.db()) { - return self.infer_explicit_class_specialization( - subscript, - value_ty, - class, - generic_context, - ); + Type::SpecialForm(SpecialFormType::Literal) => { + match self.infer_literal_parameter_type(slice) { + Ok(result) => { + return Type::KnownInstance(KnownInstanceType::Literal(InternedType::new( + self.db(), + result, + ))); + } + Err(nodes) => { + for node in nodes { + let Some(builder) = self.context.report_lint(&INVALID_TYPE_FORM, node) + else { + continue; + }; + builder.into_diagnostic( + "Type arguments for `Literal` must be `None`, \ + a literal value (int, bool, str, or bytes), or an enum member", + ); + } + return Type::unknown(); + } + } } - } - if let Type::SpecialForm(SpecialFormType::Tuple) = value_ty { - return tuple_generic_alias(self.db(), self.infer_tuple_type_expression(slice)); - } - if let Type::KnownInstance(KnownInstanceType::TypeAliasType(type_alias)) = value_ty { - if let 
Some(generic_context) = type_alias.generic_context(self.db()) { - return self.infer_explicit_type_alias_specialization( - subscript, - value_ty, - type_alias, - generic_context, - ); + Type::SpecialForm(SpecialFormType::Annotated) => { + let ast::Expr::Tuple(ast::ExprTuple { + elts: ref arguments, + .. + }) = **slice + else { + report_invalid_arguments_to_annotated(&self.context, subscript); + + return self.infer_expression(slice, TypeContext::default()); + }; + + if arguments.len() < 2 { + report_invalid_arguments_to_annotated(&self.context, subscript); + } + + let [type_expr, metadata @ ..] = &arguments[..] else { + for argument in arguments { + self.infer_expression(argument, TypeContext::default()); + } + self.store_expression_type(slice, Type::unknown()); + return Type::unknown(); + }; + + for element in metadata { + self.infer_expression(element, TypeContext::default()); + } + + let ty = self.infer_expression(type_expr, TypeContext::default()); + + return Type::KnownInstance(KnownInstanceType::Annotated(InternedType::new( + self.db(), + ty, + ))); } + Type::SpecialForm(SpecialFormType::Optional) => { + if matches!(**slice, ast::Expr::Tuple(_)) { + if let Some(builder) = self.context.report_lint(&INVALID_TYPE_FORM, subscript) { + builder.into_diagnostic(format_args!( + "`typing.Optional` requires exactly one argument" + )); + } + } + + let ty = self.infer_expression(slice, TypeContext::default()); + + // `Optional[None]` is equivalent to `None`: + if ty.is_none(self.db()) { + return ty; + } + + return Type::KnownInstance(KnownInstanceType::UnionType( + InternedTypes::from_elements( + self.db(), + [ty, Type::none(self.db())], + InferredAs::ValueExpression, + ), + )); + } + Type::SpecialForm(SpecialFormType::Union) => { + let db = self.db(); + + match **slice { + ast::Expr::Tuple(ref tuple) => { + let mut elements = tuple + .elts + .iter() + .map(|elt| self.infer_type_expression(elt)) + .peekable(); + + let is_empty = elements.peek().is_none(); + let union_type 
= Type::KnownInstance(KnownInstanceType::UnionType( + InternedTypes::from_elements(db, elements, InferredAs::TypeExpression), + )); + + if is_empty { + if let Some(builder) = + self.context.report_lint(&INVALID_TYPE_FORM, subscript) + { + builder.into_diagnostic( + "`typing.Union` requires at least one type argument", + ); + } + } + + return union_type; + } + _ => { + return self.infer_expression(slice, TypeContext::default()); + } + } + } + _ => {} } let slice_ty = self.infer_expression(slice, TypeContext::default()); @@ -9423,8 +10786,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { .check_types( self.db(), &call_argument_types, - &TypeContext::default(), - &self.dataclass_field_specifiers[..], + TypeContext::default(), + &self.dataclass_field_specifiers, ) { Ok(bindings) => bindings, Err(CallError(_, bindings)) => { @@ -9709,6 +11072,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { value_node.into(), slice_node.into(), value_ty, + None, slice_ty, &typed_dict.items(db), ); @@ -10155,16 +11519,22 @@ enum MultiInferenceState { #[default] Panic, + /// Overwrite the previously inferred value. + /// + /// Note that `Overwrite` does not interact well with nested inferences: + /// it overwrites values that were written with `MultiInferenceState::Intersect`. + Overwrite, + + /// Ignore the newly inferred value. + Ignore, + /// Store the intersection of all types inferred for the expression. 
Intersect, } impl MultiInferenceState { - fn is_panic(self) -> bool { - match self { - MultiInferenceState::Panic => true, - MultiInferenceState::Intersect => false, - } + const fn is_panic(self) -> bool { + matches!(self, MultiInferenceState::Panic) } } @@ -10401,7 +11771,11 @@ where self.0.iter().map(|(k, v)| (k, v)) } - fn insert(&mut self, key: K, value: V) { + fn insert(&mut self, key: K, value: V, multi_inference_state: MultiInferenceState) { + if matches!(multi_inference_state, MultiInferenceState::Ignore) { + return; + } + debug_assert!( !self.0.iter().any(|(existing, _)| existing == &key), "An existing entry already exists for key {key:?}", @@ -10415,17 +11789,21 @@ where } } -impl Extend<(K, V)> for VecMap +impl VecMap where K: Eq, K: std::fmt::Debug, V: std::fmt::Debug, { #[inline] - fn extend>(&mut self, iter: T) { + fn extend>( + &mut self, + iter: T, + multi_inference_state: MultiInferenceState, + ) { if cfg!(debug_assertions) { for (key, value) in iter { - self.insert(key, value); + self.insert(key, value, multi_inference_state); } } else { self.0.extend(iter); @@ -10463,7 +11841,11 @@ where V: Eq, V: std::fmt::Debug, { - fn insert(&mut self, value: V) { + fn insert(&mut self, value: V, multi_inference_state: MultiInferenceState) { + if matches!(multi_inference_state, MultiInferenceState::Ignore) { + return; + } + debug_assert!( !self.0.iter().any(|existing| existing == &value), "An existing entry already exists for {value:?}", @@ -10473,16 +11855,20 @@ where } } -impl Extend for VecSet +impl VecSet where V: Eq, V: std::fmt::Debug, { #[inline] - fn extend>(&mut self, iter: T) { + fn extend>( + &mut self, + iter: T, + multi_inference_state: MultiInferenceState, + ) { if cfg!(debug_assertions) { for value in iter { - self.insert(value); + self.insert(value, multi_inference_state); } } else { self.0.extend(iter); diff --git a/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs 
b/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs index 3c7bdb5464..9fc1f35b2a 100644 --- a/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs +++ b/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs @@ -4,9 +4,8 @@ use ruff_python_ast as ast; use super::{DeferredExpressionState, TypeInferenceBuilder}; use crate::types::diagnostic::{ self, INVALID_TYPE_FORM, NON_SUBSCRIPTABLE, report_invalid_argument_number_to_special_form, - report_invalid_arguments_to_annotated, report_invalid_arguments_to_callable, + report_invalid_arguments_to_callable, }; -use crate::types::enums::is_enum_class; use crate::types::signatures::Signature; use crate::types::string_annotation::parse_string_annotation; use crate::types::tuple::{TupleSpecBuilder, TupleType}; @@ -346,7 +345,7 @@ impl<'db> TypeInferenceBuilder<'db, '_> { } ast::Expr::DictComp(dictcomp) => { - self.infer_dict_comprehension_expression(dictcomp); + self.infer_dict_comprehension_expression(dictcomp, TypeContext::default()); self.report_invalid_type_expression( expression, format_args!("Dict comprehensions are not allowed in type expressions"), @@ -355,7 +354,7 @@ impl<'db> TypeInferenceBuilder<'db, '_> { } ast::Expr::ListComp(listcomp) => { - self.infer_list_comprehension_expression(listcomp); + self.infer_list_comprehension_expression(listcomp, TypeContext::default()); self.report_invalid_type_expression( expression, format_args!("List comprehensions are not allowed in type expressions"), @@ -364,7 +363,7 @@ impl<'db> TypeInferenceBuilder<'db, '_> { } ast::Expr::SetComp(setcomp) => { - self.infer_set_comprehension_expression(setcomp); + self.infer_set_comprehension_expression(setcomp, TypeContext::default()); self.report_invalid_type_expression( expression, format_args!("Set comprehensions are not allowed in type expressions"), @@ -811,6 +810,34 @@ impl<'db> TypeInferenceBuilder<'db, '_> { self.infer_type_expression(slice); todo_type!("Generic manual PEP-695 type 
alias") } + KnownInstanceType::UnionType(_) => { + self.infer_type_expression(slice); + todo_type!("Generic specialization of types.UnionType") + } + KnownInstanceType::Literal(ty) => { + self.infer_type_expression(slice); + if let Some(builder) = self.context.report_lint(&INVALID_TYPE_FORM, subscript) { + builder.into_diagnostic(format_args!( + "`{ty}` is not a generic class", + ty = ty.inner(self.db()).display(self.db()) + )); + } + Type::unknown() + } + KnownInstanceType::Annotated(_) => { + self.infer_type_expression(slice); + todo_type!("Generic specialization of typing.Annotated") + } + KnownInstanceType::NewType(newtype) => { + self.infer_type_expression(&subscript.slice); + if let Some(builder) = self.context.report_lint(&INVALID_TYPE_FORM, subscript) { + builder.into_diagnostic(format_args!( + "`{}` is a `NewType` and cannot be specialized", + newtype.name(self.db()) + )); + } + Type::unknown() + } }, Type::Dynamic(DynamicType::Todo(_)) => { self.infer_type_expression(slice); @@ -887,7 +914,7 @@ impl<'db> TypeInferenceBuilder<'db, '_> { ty } - fn infer_parameterized_special_form_type_expression( + pub(crate) fn infer_parameterized_special_form_type_expression( &mut self, subscript: &ast::ExprSubscript, special_form: SpecialFormType, @@ -896,36 +923,13 @@ impl<'db> TypeInferenceBuilder<'db, '_> { let arguments_slice = &*subscript.slice; match special_form { SpecialFormType::Annotated => { - let ast::Expr::Tuple(ast::ExprTuple { - elts: arguments, .. - }) = arguments_slice - else { - report_invalid_arguments_to_annotated(&self.context, subscript); - - // `Annotated[]` with less than two arguments is an error at runtime. - // However, we still treat `Annotated[T]` as `T` here for the purpose of - // giving better diagnostics later on. - // Pyright also does this. Mypy doesn't; it falls back to `Any` instead. 
- return self.infer_type_expression(arguments_slice); - }; - - if arguments.len() < 2 { - report_invalid_arguments_to_annotated(&self.context, subscript); - } - - let [type_expr, metadata @ ..] = &arguments[..] else { - for argument in arguments { - self.infer_expression(argument, TypeContext::default()); - } - self.store_expression_type(arguments_slice, Type::unknown()); - return Type::unknown(); - }; - - for element in metadata { - self.infer_expression(element, TypeContext::default()); - } - - let ty = self.infer_type_expression(type_expr); + let ty = self + .infer_subscript_load_impl( + Type::SpecialForm(SpecialFormType::Annotated), + subscript, + ) + .in_type_expression(db, self.scope(), None) + .unwrap_or_else(|err| err.into_fallback_type(&self.context, subscript, true)); self.store_expression_type(arguments_slice, ty); ty } @@ -1364,12 +1368,11 @@ impl<'db> TypeInferenceBuilder<'db, '_> { } } - fn infer_literal_parameter_type<'param>( + pub(crate) fn infer_literal_parameter_type<'param>( &mut self, parameters: &'param ast::Expr, ) -> Result, Vec<&'param ast::Expr>> { Ok(match parameters { - // TODO handle type aliases ast::Expr::Subscript(ast::ExprSubscript { value, slice, .. }) => { let value_ty = self.infer_expression(value, TypeContext::default()); if matches!(value_ty, Type::SpecialForm(SpecialFormType::Literal)) { @@ -1421,27 +1424,6 @@ impl<'db> TypeInferenceBuilder<'db, '_> { literal @ ast::Expr::NumberLiteral(number) if number.value.is_int() => { self.infer_expression(literal, TypeContext::default()) } - // For enum values - ast::Expr::Attribute(ast::ExprAttribute { value, attr, .. 
}) => { - let value_ty = self.infer_expression(value, TypeContext::default()); - - if is_enum_class(self.db(), value_ty) { - let ty = value_ty - .member(self.db(), &attr.id) - .place - .ignore_possibly_undefined() - .unwrap_or(Type::unknown()); - self.store_expression_type(parameters, ty); - ty - } else { - self.store_expression_type(parameters, Type::unknown()); - if value_ty.is_todo() { - value_ty - } else { - return Err(vec![parameters]); - } - } - } // for negative and positive numbers ast::Expr::UnaryOp(u) if matches!(u.op, ast::UnaryOp::USub | ast::UnaryOp::UAdd) @@ -1451,6 +1433,37 @@ impl<'db> TypeInferenceBuilder<'db, '_> { self.store_expression_type(parameters, ty); ty } + // enum members and aliases to literal types + ast::Expr::Name(_) | ast::Expr::Attribute(_) => { + let subscript_ty = self.infer_expression(parameters, TypeContext::default()); + match subscript_ty { + // type aliases to literal types + Type::KnownInstance(KnownInstanceType::TypeAliasType(type_alias)) => { + let value_ty = type_alias.value_type(self.db()); + if value_ty.is_literal_or_union_of_literals(self.db()) { + return Ok(value_ty); + } + } + Type::KnownInstance(KnownInstanceType::Literal(ty)) => { + return Ok(ty.inner(self.db())); + } + // `Literal[SomeEnum.Member]` + Type::EnumLiteral(_) => { + return Ok(subscript_ty); + } + // `Literal[SingletonEnum.Member]`, where `SingletonEnum.Member` simplifies to + // just `SingletonEnum`. + Type::NominalInstance(_) if subscript_ty.is_enum(self.db()) => { + return Ok(subscript_ty); + } + // suppress false positives for e.g. 
members of functional-syntax enums + Type::Dynamic(DynamicType::Todo(_)) => { + return Ok(subscript_ty); + } + _ => {} + } + return Err(vec![parameters]); + } _ => { self.infer_expression(parameters, TypeContext::default()); return Err(vec![parameters]); @@ -1514,7 +1527,9 @@ impl<'db> TypeInferenceBuilder<'db, '_> { self.db(), self.infer_name_load(name), &|ty| match ty { - Type::Dynamic(DynamicType::TodoPEP695ParamSpec) => true, + Type::KnownInstance(known_instance) => { + known_instance.class(self.db()) == KnownClass::ParamSpec + } Type::NominalInstance(nominal) => { nominal.has_known_class(self.db(), KnownClass::ParamSpec) } diff --git a/crates/ty_python_semantic/src/types/instance.rs b/crates/ty_python_semantic/src/types/instance.rs index f6c7b8406d..8c5adc9e0d 100644 --- a/crates/ty_python_semantic/src/types/instance.rs +++ b/crates/ty_python_semantic/src/types/instance.rs @@ -95,6 +95,14 @@ impl<'db> Type<'db> { } } + /// Return `true` if `self` is a nominal instance of the given known class. + pub(crate) fn is_instance_of(self, db: &'db dyn Db, known_class: KnownClass) -> bool { + match self { + Type::NominalInstance(instance) => instance.class(db).is_known(db, known_class), + _ => false, + } + } + /// Synthesize a protocol instance type with a given set of read-only property members. 
pub(super) fn protocol_with_readonly_members<'a, M>(db: &'db dyn Db, members: M) -> Self where diff --git a/crates/ty_python_semantic/src/types/narrow.rs b/crates/ty_python_semantic/src/types/narrow.rs index 736272cb4a..e89822b182 100644 --- a/crates/ty_python_semantic/src/types/narrow.rs +++ b/crates/ty_python_semantic/src/types/narrow.rs @@ -11,8 +11,9 @@ use crate::types::enums::{enum_member_literals, enum_metadata}; use crate::types::function::KnownFunction; use crate::types::infer::infer_same_file_expression_type; use crate::types::{ - ClassLiteral, ClassType, IntersectionBuilder, KnownClass, SubclassOfInner, SubclassOfType, - Truthiness, Type, TypeContext, TypeVarBoundOrConstraints, UnionBuilder, infer_expression_types, + CallableType, ClassLiteral, ClassType, IntersectionBuilder, KnownClass, KnownInstanceType, + SpecialFormType, SubclassOfInner, SubclassOfType, Truthiness, Type, TypeContext, + TypeVarBoundOrConstraints, UnionBuilder, infer_expression_types, }; use ruff_db::parsed::{ParsedModuleRef, parsed_module}; @@ -211,6 +212,35 @@ impl ClassInfoConstraintFunction { ) }), + Type::KnownInstance(KnownInstanceType::UnionType(elements)) => { + UnionType::try_from_elements( + db, + elements.elements(db).iter().map(|element| { + // A special case is made for `None` at runtime + // (it's implicitly converted to `NoneType` in `int | None`) + // which means that `isinstance(x, int | None)` works even though + // `None` is not a class literal. 
+ if element.is_none(db) { + self.generate_constraint(db, KnownClass::NoneType.to_class_literal(db)) + } else { + self.generate_constraint(db, *element) + } + }), + ) + } + + // We don't have a good meta-type for `Callable`s right now, + // so only apply `isinstance()` narrowing, not `issubclass()` + Type::SpecialForm(SpecialFormType::Callable) + if self == ClassInfoConstraintFunction::IsInstance => + { + Some(CallableType::unknown(db).top_materialization(db)) + } + + Type::SpecialForm(special_form) => special_form + .aliased_stdlib_class() + .and_then(|class| self.generate_constraint(db, class.to_class_literal(db))), + Type::AlwaysFalsy | Type::AlwaysTruthy | Type::BooleanLiteral(_) @@ -226,7 +256,6 @@ impl ClassInfoConstraintFunction { | Type::FunctionLiteral(_) | Type::ProtocolInstance(_) | Type::PropertyInstance(_) - | Type::SpecialForm(_) | Type::LiteralString | Type::StringLiteral(_) | Type::IntLiteral(_) @@ -234,7 +263,8 @@ impl ClassInfoConstraintFunction { | Type::TypeIs(_) | Type::WrapperDescriptor(_) | Type::DataclassTransformer(_) - | Type::TypedDict(_) => None, + | Type::TypedDict(_) + | Type::NewTypeInstance(_) => None, } } } @@ -962,11 +992,20 @@ impl<'db, 'ast> NarrowingConstraintsBuilder<'db, 'ast> { let subject = place_expr(subject.node_ref(self.db, self.module))?; let place = self.expect_place(&subject); - let ty = infer_same_file_expression_type(self.db, cls, TypeContext::default(), self.module) - .to_instance(self.db)?; + let class_type = + infer_same_file_expression_type(self.db, cls, TypeContext::default(), self.module); - let ty = ty.negate_if(self.db, !is_positive); - Some(NarrowingConstraints::from_iter([(place, ty)])) + let narrowed_type = match class_type { + Type::ClassLiteral(class) => { + Type::instance(self.db, class.top_materialization(self.db)) + .negate_if(self.db, !is_positive) + } + dynamic @ Type::Dynamic(_) => dynamic, + Type::SpecialForm(SpecialFormType::Any) => Type::any(), + _ => return None, + }; + + 
Some(NarrowingConstraints::from_iter([(place, narrowed_type)])) } fn evaluate_match_pattern_value( diff --git a/crates/ty_python_semantic/src/types/newtype.rs b/crates/ty_python_semantic/src/types/newtype.rs new file mode 100644 index 0000000000..fe08fa7bee --- /dev/null +++ b/crates/ty_python_semantic/src/types/newtype.rs @@ -0,0 +1,266 @@ +use std::collections::BTreeSet; + +use crate::Db; +use crate::semantic_index::definition::{Definition, DefinitionKind}; +use crate::types::constraints::ConstraintSet; +use crate::types::{ClassType, Type, definition_expression_type, visitor}; +use ruff_db::parsed::parsed_module; +use ruff_python_ast as ast; + +/// A `typing.NewType` declaration, either from the perspective of the +/// identity-callable-that-acts-like-a-subtype-in-type-expressions returned by the call to +/// `typing.NewType(...)`, or from the perspective of instances of that subtype returned by the +/// identity callable. For example: +/// +/// ```py +/// import typing +/// Foo = typing.NewType("Foo", int) +/// x = Foo(42) +/// ``` +/// +/// The revealed types there are: +/// - `typing.NewType`: `Type::ClassLiteral(ClassLiteral)` with `KnownClass::NewType`. +/// - `Foo`: `Type::KnownInstance(KnownInstanceType::NewType(NewType { .. }))` +/// - `x`: `Type::NewTypeInstance(NewType { .. })` +/// +/// # Ordering +/// Ordering is based on the newtype's salsa-assigned id and not on its values. +/// The id may change between runs, or when the newtype was garbage collected and recreated. +#[salsa::interned(debug, heap_size=ruff_memory_usage::heap_size)] +#[derive(PartialOrd, Ord)] +pub struct NewType<'db> { + /// The name of this NewType (e.g. `"Foo"`) + #[returns(ref)] + pub name: ast::name::Name, + + /// The binding where this NewType is first created. + pub definition: Definition<'db>, + + // The base type of this NewType, if it's eagerly specified. 
This is typically `None` when a + // `NewType` is first encountered, because the base type is lazy/deferred to avoid panics in + // the recursive case. This becomes `Some` when a `NewType` is modified by methods like + // `.normalize()`. Callers should use the `base` method instead of accessing this field + // directly. + eager_base: Option>, +} + +impl get_size2::GetSize for NewType<'_> {} + +#[salsa::tracked] +impl<'db> NewType<'db> { + pub fn base(self, db: &'db dyn Db) -> NewTypeBase<'db> { + match self.eager_base(db) { + Some(base) => base, + None => self.lazy_base(db), + } + } + + #[salsa::tracked( + cycle_initial=lazy_base_cycle_initial, + heap_size=ruff_memory_usage::heap_size + )] + fn lazy_base(self, db: &'db dyn Db) -> NewTypeBase<'db> { + // `TypeInferenceBuilder` emits diagnostics for invalid `NewType` definitions that show up + // in assignments, but invalid definitions still get here, and also `NewType` might show up + // in places that aren't definitions at all. Fall back to `object` in all error cases. + let object_fallback = NewTypeBase::ClassType(ClassType::object(db)); + let definition = self.definition(db); + let module = parsed_module(db, definition.file(db)).load(db); + let DefinitionKind::Assignment(assignment) = definition.kind(db) else { + return object_fallback; + }; + let Some(call_expr) = assignment.value(&module).as_call_expr() else { + return object_fallback; + }; + let Some(second_arg) = call_expr.arguments.args.get(1) else { + return object_fallback; + }; + match definition_expression_type(db, definition, second_arg) { + Type::NominalInstance(nominal_instance_type) => { + NewTypeBase::ClassType(nominal_instance_type.class(db)) + } + Type::NewTypeInstance(newtype) => NewTypeBase::NewType(newtype), + // This branch includes bases that are other typing constructs besides classes and + // other newtypes, for example unions. `NewType("Foo", int | str)` is not allowed. 
+ _ => object_fallback, + } + } + + fn iter_bases(self, db: &'db dyn Db) -> NewTypeBaseIter<'db> { + NewTypeBaseIter { + current: Some(self), + seen_before: BTreeSet::new(), + db, + } + } + + // Walk the `NewTypeBase` chain to find the underlying `ClassType`. There might not be a + // `ClassType` if this `NewType` is cyclical, and we fall back to `object` in that case. + pub fn base_class_type(self, db: &'db dyn Db) -> ClassType<'db> { + for base in self.iter_bases(db) { + if let NewTypeBase::ClassType(class_type) = base { + return class_type; + } + } + ClassType::object(db) + } + + pub(crate) fn is_equivalent_to_impl(self, db: &'db dyn Db, other: Self) -> bool { + // Two instances of the "same" `NewType` won't compare == if one of them has an eagerly + // evaluated base (or a normalized base, etc.) and the other doesn't, so we only check for + // equality of the `definition`. + self.definition(db) == other.definition(db) + } + + // Since a regular class can't inherit from a newtype, the only way for one newtype to be a + // subtype of another is to have the other in its chain of newtype bases. Once we reach the + // base class, we don't have to keep looking. + pub(crate) fn has_relation_to_impl(self, db: &'db dyn Db, other: Self) -> ConstraintSet<'db> { + if self.is_equivalent_to_impl(db, other) { + return ConstraintSet::from(true); + } + for base in self.iter_bases(db) { + if let NewTypeBase::NewType(base_newtype) = base { + if base_newtype.is_equivalent_to_impl(db, other) { + return ConstraintSet::from(true); + } + } + } + ConstraintSet::from(false) + } + + pub(crate) fn is_disjoint_from_impl(self, db: &'db dyn Db, other: Self) -> ConstraintSet<'db> { + // Two NewTypes are disjoint if they're not equal and neither inherits from the other. + // NewTypes have single inheritance, and a regular class can't inherit from a NewType, so + // it's not possible for some third type to multiply-inherit from both. 
+ let mut self_not_subtype_of_other = self.has_relation_to_impl(db, other).negate(db); + let other_not_subtype_of_self = other.has_relation_to_impl(db, self).negate(db); + self_not_subtype_of_other.intersect(db, other_not_subtype_of_self) + } + + /// Create a new `NewType` by mapping the underlying `ClassType`. This descends through any + /// number of nested `NewType` layers and rebuilds the whole chain. In the rare case of cyclic + /// `NewType`s with no underlying `ClassType`, this has no effect and does not call `f`. + pub(crate) fn map_base_class_type( + self, + db: &'db dyn Db, + f: impl FnOnce(ClassType<'db>) -> ClassType<'db>, + ) -> Self { + // Modifying the base class type requires unwrapping and re-wrapping however many base + // newtypes there are between here and there. Normally recursion would be natural for this, + // but the bases iterator does cycle detection, and I think using that with a stack is a + // little cleaner than conjuring up yet another `CycleDetector` visitor and yet another + // layer of "*_impl" nesting. Also if there is no base class type, returning `self` + // unmodified seems more correct than injecting some default type like `object` into the + // cycle, which is what `CycleDetector` would do if we used it here. + let mut inner_newtype_stack = Vec::new(); + for base in self.iter_bases(db) { + match base { + // Build up the stack of intermediate newtypes that we'll need to re-wrap after + // we've mapped the `ClassType`. + NewTypeBase::NewType(base_newtype) => inner_newtype_stack.push(base_newtype), + // We've reached the `ClassType`. + NewTypeBase::ClassType(base_class_type) => { + // Call `f`. + let mut mapped_base = NewTypeBase::ClassType(f(base_class_type)); + // Re-wrap the mapped base class in however many newtypes we unwrapped. 
+ for inner_newtype in inner_newtype_stack.into_iter().rev() { + mapped_base = NewTypeBase::NewType(NewType::new( + db, + inner_newtype.name(db).clone(), + inner_newtype.definition(db), + Some(mapped_base), + )); + } + return NewType::new( + db, + self.name(db).clone(), + self.definition(db), + Some(mapped_base), + ); + } + } + } + // If we get here, there is no `ClassType` (because this newtype is cyclic), and we don't + // call `f` at all. + self + } +} + +pub(crate) fn walk_newtype_instance_type<'db, V: visitor::TypeVisitor<'db> + ?Sized>( + db: &'db dyn Db, + newtype: NewType<'db>, + visitor: &V, +) { + visitor.visit_type(db, newtype.base(db).instance_type(db)); +} + +/// `typing.NewType` typically wraps a class type, but it can also wrap another newtype. +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, get_size2::GetSize, salsa::Update)] +pub enum NewTypeBase<'db> { + ClassType(ClassType<'db>), + NewType(NewType<'db>), +} + +impl<'db> NewTypeBase<'db> { + pub fn instance_type(self, db: &'db dyn Db) -> Type<'db> { + match self { + NewTypeBase::ClassType(class_type) => Type::instance(db, class_type), + NewTypeBase::NewType(newtype) => Type::NewTypeInstance(newtype), + } + } +} + +/// An iterator over the transitive bases of a `NewType`. In the most common case, e.g. +/// `Foo = NewType("Foo", int)`, this yields the one `NewTypeBase::ClassType` (e.g. `int`). For +/// newtypes that wrap other newtypes, this iterator yields the `NewTypeBase::NewType`s (not +/// including `self`) before finally yielding the `NewTypeBase::ClassType`. In the pathological +/// case of cyclic newtypes like `Foo = NewType("Foo", "Foo")`, this iterator yields the unique +/// `NewTypeBase::NewType`s (not including `self`), detects the cycle, and then stops. 
+/// +/// Note that this does *not* detect indirect cycles that go through a proper class, like this: +/// ```py +/// Foo = NewType("Foo", list["Foo"]) +/// ``` +/// As far as this iterator is concerned, that's the "common case", and it yields the one +/// `NewTypeBase::ClassType` for `list[Foo]`. Functions like `normalize` that continue recursing +/// over the base class need to pass down a cycle-detecting visitor as usual. +struct NewTypeBaseIter<'db> { + current: Option>, + seen_before: BTreeSet>, + db: &'db dyn Db, +} + +impl<'db> Iterator for NewTypeBaseIter<'db> { + type Item = NewTypeBase<'db>; + + fn next(&mut self) -> Option { + let current = self.current?; + match current.base(self.db) { + NewTypeBase::ClassType(base_class_type) => { + self.current = None; + Some(NewTypeBase::ClassType(base_class_type)) + } + NewTypeBase::NewType(base_newtype) => { + // Doing the insertion only in this branch avoids allocating in the common case. + self.seen_before.insert(current); + if self.seen_before.contains(&base_newtype) { + // Cycle detected. Stop iterating. 
+ self.current = None; + None + } else { + self.current = Some(base_newtype); + Some(NewTypeBase::NewType(base_newtype)) + } + } + } + } +} + +fn lazy_base_cycle_initial<'db>( + db: &'db dyn Db, + _id: salsa::Id, + _self: NewType<'db>, +) -> NewTypeBase<'db> { + NewTypeBase::ClassType(ClassType::object(db)) +} diff --git a/crates/ty_python_semantic/src/types/signatures.rs b/crates/ty_python_semantic/src/types/signatures.rs index b0ff205e48..7c48b4c289 100644 --- a/crates/ty_python_semantic/src/types/signatures.rs +++ b/crates/ty_python_semantic/src/types/signatures.rs @@ -13,6 +13,7 @@ use std::{collections::HashMap, slice::Iter}; use itertools::{EitherOrBoth, Itertools}; +use ruff_db::parsed::parsed_module; use ruff_python_ast::ParameterWithDefault; use smallvec::{SmallVec, smallvec_inline}; @@ -20,9 +21,9 @@ use super::{ DynamicType, Type, TypeVarVariance, definition_expression_type, infer_definition_types, semantic_index, }; -use crate::semantic_index::definition::Definition; +use crate::semantic_index::definition::{Definition, DefinitionKind}; use crate::types::constraints::{ConstraintSet, IteratorConstraintsExtension}; -use crate::types::function::FunctionType; +use crate::types::function::{is_implicit_classmethod, is_implicit_staticmethod}; use crate::types::generics::{ GenericContext, InferableTypeVars, typing_self, walk_generic_context, }; @@ -36,8 +37,11 @@ use crate::{Db, FxOrderSet}; use ruff_python_ast::{self as ast, name::Name}; #[derive(Clone, Copy, Debug)] +#[expect(clippy::struct_excessive_bools)] struct MethodInformation<'db> { - method: FunctionType<'db>, + is_staticmethod: bool, + is_classmethod: bool, + method_may_be_generic: bool, class_literal: ClassLiteral<'db>, class_is_generic: bool, } @@ -46,17 +50,49 @@ fn infer_method_information<'db>( db: &'db dyn Db, definition: Definition<'db>, ) -> Option> { + let DefinitionKind::Function(function_definition) = definition.kind(db) else { + return None; + }; + let class_scope_id = 
definition.scope(db); let file = class_scope_id.file(db); + let module = parsed_module(db, file).load(db); let index = semantic_index(db, file); let class_scope = index.scope(class_scope_id.file_scope_id(db)); let class_node = class_scope.node().as_class()?; - let method = infer_definition_types(db, definition) - .declaration_type(definition) - .inner_type() - .as_function_literal()?; + let function_node = function_definition.node(&module); + let function_name = &function_node.name; + + let mut is_staticmethod = is_implicit_classmethod(function_name); + let mut is_classmethod = is_implicit_staticmethod(function_name); + + let inference = infer_definition_types(db, definition); + for decorator in &function_node.decorator_list { + let decorator_ty = inference.expression_type(&decorator.expression); + + match decorator_ty + .as_class_literal() + .and_then(|class| class.known(db)) + { + Some(KnownClass::Staticmethod) => { + is_staticmethod = true; + } + Some(KnownClass::Classmethod) => { + is_classmethod = true; + } + _ => {} + } + } + + let method_may_be_generic = match inference.declaration_type(definition).inner_type() { + Type::FunctionLiteral(f) => f.signature(db).overloads.iter().any(|s| { + s.generic_context + .is_some_and(|context| context.variables(db).any(|v| v.typevar(db).is_self(db))) + }), + _ => true, + }; let class_def = index.expect_single_definition(class_node); let (class_literal, class_is_generic) = match infer_definition_types(db, class_def) @@ -71,7 +107,9 @@ fn infer_method_information<'db>( }; Some(MethodInformation { - method, + is_staticmethod, + is_classmethod, + method_may_be_generic, class_literal, class_is_generic, }) @@ -509,20 +547,17 @@ impl<'db> Signature<'db> { tcx: TypeContext<'db>, visitor: &ApplyTypeMappingVisitor<'db>, ) -> Self { - let flipped_mapping = match type_mapping { - TypeMapping::Materialize(materialization_kind) => { - &TypeMapping::Materialize(materialization_kind.flip()) - } - _ => type_mapping, - }; Self { 
generic_context: self .generic_context .map(|context| type_mapping.update_signature_generic_context(db, context)), definition: self.definition, - parameters: self - .parameters - .apply_type_mapping_impl(db, flipped_mapping, tcx, visitor), + parameters: self.parameters.apply_type_mapping_impl( + db, + &type_mapping.flip(), + tcx, + visitor, + ), return_ty: self .return_ty .map(|ty| ty.apply_type_mapping_impl(db, type_mapping, tcx, visitor)), @@ -1114,7 +1149,7 @@ impl<'db> Signature<'db> { impl<'db> VarianceInferable<'db> for &Signature<'db> { fn variance_of(self, db: &'db dyn Db, typevar: BoundTypeVarInstance<'db>) -> TypeVarVariance { - tracing::debug!( + tracing::trace!( "Checking variance of `{tvar}` in `{self:?}`", tvar = typevar.typevar(db).name(db) ); @@ -1273,27 +1308,21 @@ impl<'db> Parameters<'db> { }; let method_info = infer_method_information(db, definition); - let is_static_or_classmethod = method_info - .is_some_and(|f| f.method.is_staticmethod(db) || f.method.is_classmethod(db)); + let is_static_or_classmethod = + method_info.is_some_and(|f| f.is_staticmethod || f.is_classmethod); let inferred_annotation = |arg: &ParameterWithDefault| { if let Some(MethodInformation { - method, + method_may_be_generic, class_literal, class_is_generic, + .. 
}) = method_info && !is_static_or_classmethod && arg.parameter.annotation().is_none() && parameters.index(arg.name().id()) == Some(0) { - let method_has_self_in_generic_context = - method.signature(db).overloads.iter().any(|s| { - s.generic_context.is_some_and(|context| { - context.variables(db).any(|v| v.typevar(db).is_self(db)) - }) - }); - - if method_has_self_in_generic_context + if method_may_be_generic || class_is_generic || class_literal .known(db) diff --git a/crates/ty_python_semantic/src/types/special_form.rs b/crates/ty_python_semantic/src/types/special_form.rs index 721def0dee..54d9640b87 100644 --- a/crates/ty_python_semantic/src/types/special_form.rs +++ b/crates/ty_python_semantic/src/types/special_form.rs @@ -328,6 +328,113 @@ impl SpecialFormType { } } + /// Return `Some(KnownClass)` if this special form is an alias + /// to a standard library class. + pub(super) const fn aliased_stdlib_class(self) -> Option { + match self { + Self::List => Some(KnownClass::List), + Self::Dict => Some(KnownClass::Dict), + Self::Set => Some(KnownClass::Set), + Self::FrozenSet => Some(KnownClass::FrozenSet), + Self::ChainMap => Some(KnownClass::ChainMap), + Self::Counter => Some(KnownClass::Counter), + Self::DefaultDict => Some(KnownClass::DefaultDict), + Self::Deque => Some(KnownClass::Deque), + Self::OrderedDict => Some(KnownClass::OrderedDict), + Self::Tuple => Some(KnownClass::Tuple), + Self::Type => Some(KnownClass::Type), + + Self::AlwaysFalsy + | Self::AlwaysTruthy + | Self::Annotated + | Self::Bottom + | Self::CallableTypeOf + | Self::ClassVar + | Self::Concatenate + | Self::Final + | Self::Intersection + | Self::Literal + | Self::LiteralString + | Self::Never + | Self::NoReturn + | Self::Not + | Self::ReadOnly + | Self::Required + | Self::TypeAlias + | Self::TypeGuard + | Self::NamedTuple + | Self::NotRequired + | Self::Optional + | Self::Top + | Self::TypeIs + | Self::TypedDict + | Self::TypingSelf + | Self::Union + | Self::Unknown + | Self::TypeOf + | 
Self::Any + // `typing.Callable` is an alias to `collections.abc.Callable`, + // but they're both the same `SpecialFormType` in our model, + // and neither is a class in typeshed (even though the `collections.abc` one is at runtime) + | Self::Callable + | Self::Protocol + | Self::Generic + | Self::Unpack => None, + } + } + + /// Return `true` if this special form is valid as the second argument + /// to `issubclass()` and `isinstance()` calls. + pub(super) const fn is_valid_isinstance_target(self) -> bool { + match self { + Self::Callable + | Self::ChainMap + | Self::Counter + | Self::DefaultDict + | Self::Deque + | Self::FrozenSet + | Self::Dict + | Self::List + | Self::OrderedDict + | Self::Set + | Self::Tuple + | Self::Type + | Self::Protocol + | Self::Generic => true, + + Self::AlwaysFalsy + | Self::AlwaysTruthy + | Self::Annotated + | Self::Bottom + | Self::CallableTypeOf + | Self::ClassVar + | Self::Concatenate + | Self::Final + | Self::Intersection + | Self::Literal + | Self::LiteralString + | Self::Never + | Self::NoReturn + | Self::Not + | Self::ReadOnly + | Self::Required + | Self::TypeAlias + | Self::TypeGuard + | Self::NamedTuple + | Self::NotRequired + | Self::Optional + | Self::Top + | Self::TypeIs + | Self::TypedDict + | Self::TypingSelf + | Self::Union + | Self::Unknown + | Self::TypeOf + | Self::Any // can be used in `issubclass()` but not `isinstance()`. 
+ | Self::Unpack => false, + } + } + /// Return the repr of the symbol at runtime pub(super) const fn repr(self) -> &'static str { match self { diff --git a/crates/ty_python_semantic/src/types/type_ordering.rs b/crates/ty_python_semantic/src/types/type_ordering.rs index e45e0c9ba5..946b6173a2 100644 --- a/crates/ty_python_semantic/src/types/type_ordering.rs +++ b/crates/ty_python_semantic/src/types/type_ordering.rs @@ -213,6 +213,10 @@ pub(super) fn union_or_intersection_elements_ordering<'db>( (Type::TypedDict(_), _) => Ordering::Less, (_, Type::TypedDict(_)) => Ordering::Greater, + (Type::NewTypeInstance(left), Type::NewTypeInstance(right)) => left.cmp(right), + (Type::NewTypeInstance(_), _) => Ordering::Less, + (_, Type::NewTypeInstance(_)) => Ordering::Greater, + (Type::Union(_), _) | (_, Type::Union(_)) => { unreachable!("our type representation does not permit nested unions"); } @@ -262,9 +266,6 @@ fn dynamic_elements_ordering(left: DynamicType, right: DynamicType) -> Ordering #[cfg(not(debug_assertions))] (DynamicType::Todo(TodoType), DynamicType::Todo(TodoType)) => Ordering::Equal, - (DynamicType::TodoPEP695ParamSpec, _) => Ordering::Less, - (_, DynamicType::TodoPEP695ParamSpec) => Ordering::Greater, - (DynamicType::TodoUnpack, _) => Ordering::Less, (_, DynamicType::TodoUnpack) => Ordering::Greater, diff --git a/crates/ty_python_semantic/src/types/typed_dict.rs b/crates/ty_python_semantic/src/types/typed_dict.rs index e29b836d8a..e07dbe6e60 100644 --- a/crates/ty_python_semantic/src/types/typed_dict.rs +++ b/crates/ty_python_semantic/src/types/typed_dict.rs @@ -8,7 +8,7 @@ use ruff_text_size::Ranged; use super::class::{ClassType, CodeGeneratorKind, Field}; use super::context::InferContext; use super::diagnostic::{ - INVALID_ARGUMENT_TYPE, INVALID_ASSIGNMENT, report_invalid_key_on_typed_dict, + self, INVALID_ARGUMENT_TYPE, INVALID_ASSIGNMENT, report_invalid_key_on_typed_dict, report_missing_typed_dict_key, }; use super::{ApplyTypeMappingVisitor, Type, 
TypeMapping, visitor}; @@ -143,30 +143,57 @@ impl TypedDictAssignmentKind { pub(super) fn validate_typed_dict_key_assignment<'db, 'ast>( context: &InferContext<'db, 'ast>, typed_dict: TypedDictType<'db>, + full_object_ty: Option>, key: &str, value_ty: Type<'db>, - typed_dict_node: impl Into>, + typed_dict_node: impl Into> + Copy, key_node: impl Into>, value_node: impl Into>, assignment_kind: TypedDictAssignmentKind, + emit_diagnostic: bool, ) -> bool { let db = context.db(); let items = typed_dict.items(db); // Check if key exists in `TypedDict` let Some((_, item)) = items.iter().find(|(name, _)| *name == key) else { - report_invalid_key_on_typed_dict( - context, - typed_dict_node.into(), - key_node.into(), - Type::TypedDict(typed_dict), - Type::string_literal(db, key), - &items, - ); + if emit_diagnostic { + report_invalid_key_on_typed_dict( + context, + typed_dict_node.into(), + key_node.into(), + Type::TypedDict(typed_dict), + full_object_ty, + Type::string_literal(db, key), + &items, + ); + } return false; }; + let add_object_type_annotation = + |diagnostic: &mut Diagnostic| { + if let Some(full_object_ty) = full_object_ty { + diagnostic.annotate(context.secondary(typed_dict_node.into()).message( + format_args!( + "TypedDict `{}` in {kind} type `{}`", + Type::TypedDict(typed_dict).display(db), + full_object_ty.display(db), + kind = if full_object_ty.is_union() { + "union" + } else { + "intersection" + }, + ), + )); + } else { + diagnostic.annotate(context.secondary(typed_dict_node.into()).message( + format_args!("TypedDict `{}`", Type::TypedDict(typed_dict).display(db)), + )); + } + }; + let add_item_definition_subdiagnostic = |diagnostic: &mut Diagnostic, message| { if let Some(declaration) = item.single_declaration { let file = declaration.file(db); @@ -184,8 +211,9 @@ pub(super) fn validate_typed_dict_key_assignment<'db, 'ast>( }; if assignment_kind.is_subscript() && item.is_read_only() { - if let Some(builder) = - 
context.report_lint(assignment_kind.diagnostic_type(), key_node.into()) + if emit_diagnostic + && let Some(builder) = + context.report_lint(assignment_kind.diagnostic_type(), key_node.into()) { let typed_dict_ty = Type::TypedDict(typed_dict); let typed_dict_d = typed_dict_ty.display(db); @@ -195,13 +223,7 @@ pub(super) fn validate_typed_dict_key_assignment<'db, 'ast>( )); diagnostic.set_primary_message(format_args!("key is marked read-only")); - - diagnostic.annotate( - context - .secondary(typed_dict_node.into()) - .message(format_args!("TypedDict `{typed_dict_d}`")), - ); - + add_object_type_annotation(&mut diagnostic); add_item_definition_subdiagnostic(&mut diagnostic, "Read-only item declared here"); } @@ -213,8 +235,14 @@ pub(super) fn validate_typed_dict_key_assignment<'db, 'ast>( return true; } + let value_node = value_node.into(); + if diagnostic::is_invalid_typed_dict_literal(context.db(), item.declared_ty, value_node) { + return false; + } + // Invalid assignment - emit diagnostic - if let Some(builder) = context.report_lint(assignment_kind.diagnostic_type(), value_node.into()) + if emit_diagnostic + && let Some(builder) = context.report_lint(assignment_kind.diagnostic_type(), value_node) { let typed_dict_ty = Type::TypedDict(typed_dict); let typed_dict_d = typed_dict_ty.display(db); @@ -228,12 +256,6 @@ pub(super) fn validate_typed_dict_key_assignment<'db, 'ast>( diagnostic.set_primary_message(format_args!("value of type `{value_d}`")); - diagnostic.annotate( - context - .secondary(typed_dict_node.into()) - .message(format_args!("TypedDict `{typed_dict_d}`")), - ); - diagnostic.annotate( context .secondary(key_node.into()) @@ -241,6 +263,7 @@ pub(super) fn validate_typed_dict_key_assignment<'db, 'ast>( ); add_item_definition_subdiagnostic(&mut diagnostic, "Item declared here"); + add_object_type_annotation(&mut diagnostic); } false @@ -339,12 +362,14 @@ fn validate_from_dict_literal<'db, 'ast>( validate_typed_dict_key_assignment( context, typed_dict, + 
None, key_str, value_type, error_node, key_expr, &dict_item.value, TypedDictAssignmentKind::Constructor, + true, ); } } @@ -376,12 +401,14 @@ fn validate_from_keywords<'db, 'ast>( validate_typed_dict_key_assignment( context, typed_dict, + None, arg_name.as_str(), arg_type, error_node, keyword, &keyword.value, TypedDictAssignmentKind::Constructor, + true, ); } } @@ -414,12 +441,14 @@ pub(super) fn validate_typed_dict_dict_literal<'db>( valid &= validate_typed_dict_key_assignment( context, typed_dict, + None, key_str, value_type, error_node, key_expr, &item.value, TypedDictAssignmentKind::Constructor, + true, ); } } diff --git a/crates/ty_python_semantic/src/types/variance.rs b/crates/ty_python_semantic/src/types/variance.rs index fb9c87d062..5ec1d5a8ff 100644 --- a/crates/ty_python_semantic/src/types/variance.rs +++ b/crates/ty_python_semantic/src/types/variance.rs @@ -85,6 +85,13 @@ impl TypeVarVariance { TypeVarVariance::Bivariant => TypeVarVariance::Bivariant, } } + + pub(crate) const fn is_covariant(self) -> bool { + matches!( + self, + TypeVarVariance::Covariant | TypeVarVariance::Bivariant + ) + } } impl std::iter::FromIterator for TypeVarVariance { diff --git a/crates/ty_python_semantic/src/types/visitor.rs b/crates/ty_python_semantic/src/types/visitor.rs index d58bf046f1..7692c205ff 100644 --- a/crates/ty_python_semantic/src/types/visitor.rs +++ b/crates/ty_python_semantic/src/types/visitor.rs @@ -11,6 +11,7 @@ use crate::{ class::walk_generic_alias, function::{FunctionType, walk_function_type}, instance::{walk_nominal_instance_type, walk_protocol_instance_type}, + newtype::{NewType, walk_newtype_instance_type}, subclass_of::walk_subclass_of_type, walk_bound_method_type, walk_bound_type_var_type, walk_callable_type, walk_intersection_type, walk_known_instance_type, walk_method_wrapper_type, @@ -109,6 +110,10 @@ pub(crate) trait TypeVisitor<'db> { fn visit_typed_dict_type(&self, db: &'db dyn Db, typed_dict: TypedDictType<'db>) { walk_typed_dict_type(db, 
typed_dict, self); } + + fn visit_newtype_instance_type(&self, db: &'db dyn Db, newtype: NewType<'db>) { + walk_newtype_instance_type(db, newtype, self); + } } /// Enumeration of types that may contain other types, such as unions, intersections, and generics. @@ -131,6 +136,7 @@ pub(super) enum NonAtomicType<'db> { ProtocolInstance(ProtocolInstanceType<'db>), TypedDict(TypedDictType<'db>), TypeAlias(TypeAliasType<'db>), + NewTypeInstance(NewType<'db>), } pub(super) enum TypeKind<'db> { @@ -198,6 +204,9 @@ impl<'db> From> for TypeKind<'db> { TypeKind::NonAtomic(NonAtomicType::TypedDict(typed_dict)) } Type::TypeAlias(alias) => TypeKind::NonAtomic(NonAtomicType::TypeAlias(alias)), + Type::NewTypeInstance(newtype) => { + TypeKind::NonAtomic(NonAtomicType::NewTypeInstance(newtype)) + } } } } @@ -239,6 +248,36 @@ pub(super) fn walk_non_atomic_type<'db, V: TypeVisitor<'db> + ?Sized>( NonAtomicType::TypeAlias(alias) => { visitor.visit_type_alias_type(db, alias); } + NonAtomicType::NewTypeInstance(newtype) => { + visitor.visit_newtype_instance_type(db, newtype); + } + } +} + +pub(crate) fn walk_type_with_recursion_guard<'db>( + db: &'db dyn Db, + ty: Type<'db>, + visitor: &impl TypeVisitor<'db>, + recursion_guard: &TypeCollector<'db>, +) { + match TypeKind::from(ty) { + TypeKind::Atomic => {} + TypeKind::NonAtomic(non_atomic_type) => { + if recursion_guard.type_was_already_seen(ty) { + // If we have already seen this type, we can skip it. 
+ return; + } + walk_non_atomic_type(db, non_atomic_type, visitor); + } + } +} + +#[derive(Default, Debug)] +pub(crate) struct TypeCollector<'db>(RefCell>>); + +impl<'db> TypeCollector<'db> { + pub(crate) fn type_was_already_seen(&self, ty: Type<'db>) -> bool { + !self.0.borrow_mut().insert(ty) } } @@ -258,7 +297,7 @@ pub(super) fn any_over_type<'db>( ) -> bool { struct AnyOverTypeVisitor<'db, 'a> { query: &'a dyn Fn(Type<'db>) -> bool, - seen_types: RefCell>>, + recursion_guard: TypeCollector<'db>, found_matching_type: Cell, should_visit_lazy_type_attributes: bool, } @@ -278,22 +317,13 @@ pub(super) fn any_over_type<'db>( if found { return; } - match TypeKind::from(ty) { - TypeKind::Atomic => {} - TypeKind::NonAtomic(non_atomic_type) => { - if !self.seen_types.borrow_mut().insert(non_atomic_type) { - // If we have already seen this type, we can skip it. - return; - } - walk_non_atomic_type(db, non_atomic_type, self); - } - } + walk_type_with_recursion_guard(db, ty, self, &self.recursion_guard); } } let visitor = AnyOverTypeVisitor { query, - seen_types: RefCell::new(FxIndexSet::default()), + recursion_guard: TypeCollector::default(), found_matching_type: Cell::new(false), should_visit_lazy_type_attributes, }; diff --git a/crates/ty_server/src/db.rs b/crates/ty_server/src/db.rs new file mode 100644 index 0000000000..9ddc746cf1 --- /dev/null +++ b/crates/ty_server/src/db.rs @@ -0,0 +1,33 @@ +use crate::NotebookDocument; +use crate::session::index::Document; +use crate::system::LSPSystem; +use ruff_db::Db as _; +use ruff_db::files::{File, FilePath}; +use ty_project::{Db as ProjectDb, ProjectDatabase}; + +#[salsa::db] +pub(crate) trait Db: ProjectDb { + /// Returns the LSP [`Document`] corresponding to `File` or + /// `None` if the file isn't open in the editor. + fn document(&self, file: File) -> Option<&Document>; + + /// Returns the LSP [`NotebookDocument`] corresponding to `File` or + /// `None` if the file isn't open in the editor or if it isn't a notebook. 
+ fn notebook_document(&self, file: File) -> Option<&NotebookDocument> { + self.document(file)?.as_notebook() + } +} + +#[salsa::db] +impl Db for ProjectDatabase { + fn document(&self, file: File) -> Option<&Document> { + self.system() + .as_any() + .downcast_ref::() + .and_then(|system| match file.path(self) { + FilePath::System(path) => system.system_path_to_document(path), + FilePath::SystemVirtual(path) => system.system_virtual_path_to_document(path), + FilePath::Vendored(_) => None, + }) + } +} diff --git a/crates/ty_server/src/document.rs b/crates/ty_server/src/document.rs index fff51d2f49..e2c582475b 100644 --- a/crates/ty_server/src/document.rs +++ b/crates/ty_server/src/document.rs @@ -11,6 +11,7 @@ use lsp_types::{PositionEncodingKind, Url}; use crate::system::AnySystemPath; pub use notebook::NotebookDocument; pub(crate) use range::{FileRangeExt, PositionExt, RangeExt, TextSizeExt, ToRangeExt}; +use ruff_db::system::{SystemPathBuf, SystemVirtualPath}; pub(crate) use text_document::DocumentVersion; pub use text_document::TextDocument; @@ -41,39 +42,75 @@ impl From for ruff_source_file::PositionEncoding { /// A unique document ID, derived from a URL passed as part of an LSP request. /// This document ID can point to either be a standalone Python file, a full notebook, or a cell within a notebook. -#[derive(Clone, Debug)] -pub(crate) enum DocumentKey { - Notebook(AnySystemPath), - NotebookCell { - cell_url: Url, - notebook_path: AnySystemPath, - }, - Text(AnySystemPath), +/// +/// The `DocumentKey` is very similar to `AnySystemPath`. The important distinction is that +/// ty doesn't know about individual notebook cells, instead, ty operates on full notebook documents. +/// ty also doesn't support resolving settings per cell, instead, settings are resolved per file or notebook. +/// +/// Thus, the motivation of `DocumentKey` is to prevent accidental use of Cell keys for operations +/// that expect to work on a file path level. 
That's what [`DocumentHandle::to_file_path`] +/// is for, it returns a file path for any document, taking into account that these methods should +/// return the notebook for cell documents and notebooks. +#[derive(Clone, Debug, Hash, PartialEq, Eq)] +pub(super) enum DocumentKey { + /// A URI using the `file` schema and maps to a valid path. + File(SystemPathBuf), + + /// Any other URI. + /// + /// Used for Notebook-cells, URI's with non-`file` schemes, or invalid `file` URI's. + Opaque(String), } impl DocumentKey { - /// Returns the file path associated with the key. - pub(crate) fn path(&self) -> &AnySystemPath { - match self { - DocumentKey::Notebook(path) | DocumentKey::Text(path) => path, - DocumentKey::NotebookCell { notebook_path, .. } => notebook_path, + /// Converts the given [`Url`] to an [`DocumentKey`]. + /// + /// If the URL scheme is `file`, then the path is converted to a [`SystemPathBuf`] unless + /// the url isn't a valid file path. + /// + /// In all other cases, the URL is kept as an opaque identifier ([`Self::Opaque`]). + pub(crate) fn from_url(url: &Url) -> Self { + if url.scheme() == "file" { + if let Ok(path) = url.to_file_path() { + Self::File(SystemPathBuf::from_path_buf(path).expect("URL to be valid UTF-8")) + } else { + tracing::warn!( + "Treating `file:` url `{url}` as opaque URL as it isn't a valid file path" + ); + Self::Opaque(url.to_string()) + } + } else { + Self::Opaque(url.to_string()) } } - pub(crate) fn from_path(path: AnySystemPath) -> Self { - // For text documents, we assume it's a text document unless it's a notebook file. - match path.extension() { - Some("ipynb") => Self::Notebook(path), - _ => Self::Text(path), + pub(crate) fn as_opaque(&self) -> Option<&str> { + match self { + Self::Opaque(uri) => Some(uri), + Self::File(_) => None, } } - /// Returns the URL for this document key. For notebook cells, returns the cell URL. - /// For other document types, converts the path to a URL. 
- pub(crate) fn to_url(&self) -> Option { + /// Returns the corresponding [`AnySystemPath`] for this document key. + /// + /// Note, calling this method on a `DocumentKey::Opaque` representing a cell document + /// will return a `SystemVirtualPath` corresponding to the cell URI but not the notebook file path. + /// That's most likely not what you want. + pub(super) fn to_file_path(&self) -> AnySystemPath { match self { - DocumentKey::NotebookCell { cell_url, .. } => Some(cell_url.clone()), - DocumentKey::Notebook(path) | DocumentKey::Text(path) => path.to_url(), + Self::File(path) => AnySystemPath::System(path.clone()), + Self::Opaque(uri) => { + AnySystemPath::SystemVirtual(SystemVirtualPath::new(uri).to_path_buf()) + } + } + } +} + +impl From for DocumentKey { + fn from(value: AnySystemPath) -> Self { + match value { + AnySystemPath::System(system_path) => Self::File(system_path), + AnySystemPath::SystemVirtual(virtual_path) => Self::Opaque(virtual_path.to_string()), } } } @@ -81,11 +118,8 @@ impl DocumentKey { impl std::fmt::Display for DocumentKey { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - Self::NotebookCell { cell_url, .. 
} => cell_url.fmt(f), - Self::Notebook(path) | Self::Text(path) => match path { - AnySystemPath::System(system_path) => system_path.fmt(f), - AnySystemPath::SystemVirtual(virtual_path) => virtual_path.fmt(f), - }, + Self::File(path) => path.fmt(f), + Self::Opaque(uri) => uri.fmt(f), } } } diff --git a/crates/ty_server/src/document/location.rs b/crates/ty_server/src/document/location.rs index d5924595b2..67b7d0c659 100644 --- a/crates/ty_server/src/document/location.rs +++ b/crates/ty_server/src/document/location.rs @@ -1,12 +1,10 @@ -use crate::PositionEncoding; -use crate::document::{FileRangeExt, ToRangeExt}; -use crate::system::file_to_url; use lsp_types::Location; use ruff_db::files::FileRange; -use ruff_db::source::{line_index, source_text}; -use ruff_text_size::Ranged; use ty_ide::{NavigationTarget, ReferenceTarget}; -use ty_project::Db; + +use crate::Db; +use crate::PositionEncoding; +use crate::document::{FileRangeExt, ToRangeExt}; pub(crate) trait ToLink { fn to_location(&self, db: &dyn Db, encoding: PositionEncoding) -> Option; @@ -21,7 +19,9 @@ pub(crate) trait ToLink { impl ToLink for NavigationTarget { fn to_location(&self, db: &dyn Db, encoding: PositionEncoding) -> Option { - FileRange::new(self.file(), self.focus_range()).to_location(db, encoding) + FileRange::new(self.file(), self.focus_range()) + .to_lsp_range(db, encoding)? + .to_location() } fn to_link( @@ -31,22 +31,24 @@ impl ToLink for NavigationTarget { encoding: PositionEncoding, ) -> Option { let file = self.file(); - let uri = file_to_url(db, file)?; - let source = source_text(db, file); - let index = line_index(db, file); - let target_range = self.full_range().to_lsp_range(&source, &index, encoding); - let selection_range = self.focus_range().to_lsp_range(&source, &index, encoding); + // Get target_range and URI together to ensure they're consistent (same cell for notebooks) + let target_location = self + .full_range() + .to_lsp_range(db, file, encoding)? 
+ .into_location()?; + let target_range = target_location.range; - let src = src.map(|src| { - let source = source_text(db, src.file()); - let index = line_index(db, src.file()); + // For selection_range, we can use as_local_range since we know it's in the same document/cell + let selection_range = self + .focus_range() + .to_lsp_range(db, file, encoding)? + .local_range(); - src.range().to_lsp_range(&source, &index, encoding) - }); + let src = src.and_then(|src| Some(src.to_lsp_range(db, encoding)?.local_range())); Some(lsp_types::LocationLink { - target_uri: uri, + target_uri: target_location.uri, target_range, target_selection_range: selection_range, origin_selection_range: src, @@ -56,7 +58,9 @@ impl ToLink for NavigationTarget { impl ToLink for ReferenceTarget { fn to_location(&self, db: &dyn Db, encoding: PositionEncoding) -> Option { - self.file_range().to_location(db, encoding) + self.file_range() + .to_lsp_range(db, encoding)? + .into_location() } fn to_link( @@ -65,22 +69,18 @@ impl ToLink for ReferenceTarget { src: Option, encoding: PositionEncoding, ) -> Option { - let uri = file_to_url(db, self.file())?; - let source = source_text(db, self.file()); - let index = line_index(db, self.file()); - - let target_range = self.range().to_lsp_range(&source, &index, encoding); + // Get target_range and URI together to ensure they're consistent (same cell for notebooks) + let target_location = self + .range() + .to_lsp_range(db, self.file(), encoding)? 
+ .into_location()?; + let target_range = target_location.range; let selection_range = target_range; - let src = src.map(|src| { - let source = source_text(db, src.file()); - let index = line_index(db, src.file()); - - src.range().to_lsp_range(&source, &index, encoding) - }); + let src = src.and_then(|src| Some(src.to_lsp_range(db, encoding)?.local_range())); Some(lsp_types::LocationLink { - target_uri: uri, + target_uri: target_location.uri, target_range, target_selection_range: selection_range, origin_selection_range: src, diff --git a/crates/ty_server/src/document/notebook.rs b/crates/ty_server/src/document/notebook.rs index 2616cffd70..d1e07648e2 100644 --- a/crates/ty_server/src/document/notebook.rs +++ b/crates/ty_server/src/document/notebook.rs @@ -3,9 +3,8 @@ use lsp_types::NotebookCellKind; use ruff_notebook::CellMetadata; use rustc_hash::{FxBuildHasher, FxHashMap}; -use crate::{PositionEncoding, TextDocument}; - use super::DocumentVersion; +use crate::{PositionEncoding, TextDocument}; pub(super) type CellId = usize; @@ -13,16 +12,25 @@ pub(super) type CellId = usize; /// contents are internally represented by [`TextDocument`]s. #[derive(Clone, Debug)] pub struct NotebookDocument { + url: lsp_types::Url, cells: Vec, metadata: ruff_notebook::RawNotebookMetadata, version: DocumentVersion, // Used to quickly find the index of a cell for a given URL. - cell_index: FxHashMap, + cell_index: FxHashMap, } /// A single cell within a notebook, which has text contents represented as a `TextDocument`. #[derive(Clone, Debug)] struct NotebookCell { + /// The URL uniquely identifying the cell. + /// + /// > Cell text documents have a URI, but servers should not rely on any + /// > format for this URI, since it is up to the client on how it will + /// > create these URIs. The URIs must be unique across ALL notebook + /// > cells and can therefore be used to uniquely identify a notebook cell + /// > or the cell’s text document. 
+ /// > url: lsp_types::Url, kind: NotebookCellKind, document: TextDocument, @@ -30,32 +38,45 @@ struct NotebookCell { impl NotebookDocument { pub fn new( - version: DocumentVersion, + url: lsp_types::Url, + notebook_version: DocumentVersion, cells: Vec, metadata: serde_json::Map, cell_documents: Vec, ) -> crate::Result { - let mut cell_contents: FxHashMap<_, _> = cell_documents - .into_iter() - .map(|document| (document.uri, document.text)) - .collect(); + let mut cells: Vec<_> = cells.into_iter().map(NotebookCell::empty).collect(); - let cells: Vec<_> = cells - .into_iter() - .map(|cell| { - let contents = cell_contents.remove(&cell.document).unwrap_or_default(); - NotebookCell::new(cell, contents, version) - }) - .collect(); + let cell_index = Self::make_cell_index(&cells); + + for cell_document in cell_documents { + let index = cell_index + .get(cell_document.uri.as_str()) + .copied() + .ok_or_else(|| { + anyhow::anyhow!( + "Received content for cell `{}` that isn't present in the metadata", + cell_document.uri + ) + })?; + + cells[index].document = + TextDocument::new(cell_document.uri, cell_document.text, cell_document.version) + .with_language_id(&cell_document.language_id); + } Ok(Self { - version, - cell_index: Self::make_cell_index(cells.as_slice()), - metadata: serde_json::from_value(serde_json::Value::Object(metadata))?, + url, + version: notebook_version, + cell_index, cells, + metadata: serde_json::from_value(serde_json::Value::Object(metadata))?, }) } + pub(crate) fn url(&self) -> &lsp_types::Url { + &self.url + } + /// Generates a pseudo-representation of a notebook that lacks per-cell metadata and contextual information /// but should still work with Ruff's linter. pub fn make_ruff_notebook(&self) -> ruff_notebook::Notebook { @@ -127,7 +148,7 @@ impl NotebookDocument { // First, delete the cells and remove them from the index. 
if delete > 0 { for cell in self.cells.drain(start..start + delete) { - self.cell_index.remove(&cell.url); + self.cell_index.remove(cell.url.as_str()); deleted_cells.insert(cell.url, cell.document); } } @@ -150,7 +171,7 @@ impl NotebookDocument { // Third, register the new cells in the index and update existing ones that came // after the insertion. for (index, cell) in self.cells.iter().enumerate().skip(start) { - self.cell_index.insert(cell.url.clone(), index); + self.cell_index.insert(cell.url.to_string(), index); } // Finally, update the text document that represents the cell with the actual @@ -158,8 +179,9 @@ impl NotebookDocument { // `cell_index` are updated before we start applying the changes to the cells. if let Some(did_open) = structure.did_open { for cell_text_document in did_open { - if let Some(cell) = self.cell_by_uri_mut(&cell_text_document.uri) { + if let Some(cell) = self.cell_by_uri_mut(cell_text_document.uri.as_str()) { cell.document = TextDocument::new( + cell_text_document.uri, cell_text_document.text, cell_text_document.version, ); @@ -170,7 +192,7 @@ impl NotebookDocument { if let Some(cell_data) = data { for cell in cell_data { - if let Some(existing_cell) = self.cell_by_uri_mut(&cell.document) { + if let Some(existing_cell) = self.cell_by_uri_mut(cell.document.as_str()) { existing_cell.kind = cell.kind; } } @@ -178,7 +200,7 @@ impl NotebookDocument { if let Some(content_changes) = text_content { for content_change in content_changes { - if let Some(cell) = self.cell_by_uri_mut(&content_change.document.uri) { + if let Some(cell) = self.cell_by_uri_mut(content_change.document.uri.as_str()) { cell.document .apply_changes(content_change.changes, version, encoding); } @@ -204,7 +226,8 @@ impl NotebookDocument { } /// Get the text document representing the contents of a cell by the cell URI. 
- pub(crate) fn cell_document_by_uri(&self, uri: &lsp_types::Url) -> Option<&TextDocument> { + #[expect(unused)] + pub(crate) fn cell_document_by_uri(&self, uri: &str) -> Option<&TextDocument> { self.cells .get(*self.cell_index.get(uri)?) .map(|cell| &cell.document) @@ -215,29 +238,41 @@ impl NotebookDocument { self.cells.iter().map(|cell| &cell.url) } - fn cell_by_uri_mut(&mut self, uri: &lsp_types::Url) -> Option<&mut NotebookCell> { + fn cell_by_uri_mut(&mut self, uri: &str) -> Option<&mut NotebookCell> { self.cells.get_mut(*self.cell_index.get(uri)?) } - fn make_cell_index(cells: &[NotebookCell]) -> FxHashMap { + fn make_cell_index(cells: &[NotebookCell]) -> FxHashMap { let mut index = FxHashMap::with_capacity_and_hasher(cells.len(), FxBuildHasher); for (i, cell) in cells.iter().enumerate() { - index.insert(cell.url.clone(), i); + index.insert(cell.url.to_string(), i); } index } } impl NotebookCell { + pub(crate) fn empty(cell: lsp_types::NotebookCell) -> Self { + Self { + kind: cell.kind, + document: TextDocument::new( + cell.document.clone(), + String::new(), + DocumentVersion::default(), + ), + url: cell.document, + } + } + pub(crate) fn new( cell: lsp_types::NotebookCell, contents: String, version: DocumentVersion, ) -> Self { Self { + document: TextDocument::new(cell.document.clone(), contents, version), url: cell.document, kind: cell.kind, - document: TextDocument::new(contents, version), } } } @@ -294,7 +329,14 @@ mod tests { } } - NotebookDocument::new(0, cells, serde_json::Map::default(), cell_documents).unwrap() + NotebookDocument::new( + lsp_types::Url::parse("file://test.ipynb").unwrap(), + 0, + cells, + serde_json::Map::default(), + cell_documents, + ) + .unwrap() } /// This test case checks that for a notebook with three code cells, when the client sends a diff --git a/crates/ty_server/src/document/range.rs b/crates/ty_server/src/document/range.rs index 1d107e5a30..894ef9ba09 100644 --- a/crates/ty_server/src/document/range.rs +++ 
b/crates/ty_server/src/document/range.rs @@ -1,154 +1,266 @@ use super::PositionEncoding; -use super::notebook; +use crate::Db; use crate::system::file_to_url; -use lsp_types as types; -use lsp_types::Location; - -use ruff_db::files::FileRange; +use ruff_db::files::{File, FileRange}; use ruff_db::source::{line_index, source_text}; -use ruff_notebook::NotebookIndex; use ruff_source_file::LineIndex; use ruff_source_file::{OneIndexed, SourceLocation}; use ruff_text_size::{Ranged, TextRange, TextSize}; -use ty_python_semantic::Db; -#[expect(dead_code)] -pub(crate) struct NotebookRange { - pub(crate) cell: notebook::CellId, - pub(crate) range: types::Range, +/// A range in an LSP text document (cell or a regular document). +#[derive(Clone, Debug, Default)] +pub(crate) struct LspRange { + range: lsp_types::Range, + + /// The URI of this range's text document + uri: Option, +} + +impl LspRange { + /// Returns the range within this document. + /// + /// Only use `range` when you already have a URI context and this range is guaranteed + /// to be within the same document/cell: + /// - Selection ranges within a `LocationLink` (where `target_uri` provides context) + /// - Additional ranges in the same cell (e.g., `selection_range` when you already have `target_range`) + /// + /// Do NOT use this for standalone ranges - use [`Self::to_location`] instead to ensure + /// the URI and range are consistent. + pub(crate) fn local_range(&self) -> lsp_types::Range { + self.range + } + + /// Converts this range into an LSP location. + /// + /// Returns `None` if the URI for this file couldn't be resolved. + pub(crate) fn to_location(&self) -> Option { + Some(lsp_types::Location { + uri: self.uri.clone()?, + range: self.range, + }) + } + + pub(crate) fn into_location(self) -> Option { + Some(lsp_types::Location { + uri: self.uri?, + range: self.range, + }) + } +} + +/// A position in an LSP text document (cell or a regular document). 
+#[derive(Clone, Debug, Default)] +pub(crate) struct LspPosition { + position: lsp_types::Position, + + /// The URI of this range's text document + uri: Option, +} + +impl LspPosition { + /// Returns the position within this document. + /// + /// Only use [`Self::local_position`] when you already have a URI context and this position is guaranteed + /// to be within the same document/cell + /// + /// Do NOT use this for standalone positions - use [`Self::to_location`] instead to ensure + /// the URI and position are consistent. + pub(crate) fn local_position(&self) -> lsp_types::Position { + self.position + } + + /// Returns the uri of the text document this position belongs to. + #[expect(unused)] + pub(crate) fn uri(&self) -> Option<&lsp_types::Url> { + self.uri.as_ref() + } } pub(crate) trait RangeExt { - fn to_text_range(&self, text: &str, index: &LineIndex, encoding: PositionEncoding) - -> TextRange; + /// Convert an LSP Range to internal [`TextRange`]. + /// + /// Returns `None` if `file` is a notebook and the + /// cell identified by `url` can't be looked up or if the notebook + /// isn't open in the editor. + fn to_text_range( + &self, + db: &dyn Db, + file: File, + url: &lsp_types::Url, + encoding: PositionEncoding, + ) -> Option; +} + +impl RangeExt for lsp_types::Range { + fn to_text_range( + &self, + db: &dyn Db, + file: File, + url: &lsp_types::Url, + encoding: PositionEncoding, + ) -> Option { + let start = self.start.to_text_size(db, file, url, encoding)?; + let end = self.end.to_text_size(db, file, url, encoding)?; + + Some(TextRange::new(start, end)) + } } pub(crate) trait PositionExt { - fn to_text_size(&self, text: &str, index: &LineIndex, encoding: PositionEncoding) -> TextSize; + /// Convert an LSP Position to internal `TextSize`. + /// + /// Returns `None` if `file` is a notebook and the + /// cell identified by `url` can't be looked up or if the notebook + /// isn't open in the editor. 
+ fn to_text_size( + &self, + db: &dyn Db, + file: File, + url: &lsp_types::Url, + encoding: PositionEncoding, + ) -> Option; +} + +impl PositionExt for lsp_types::Position { + fn to_text_size( + &self, + db: &dyn Db, + file: File, + _url: &lsp_types::Url, + encoding: PositionEncoding, + ) -> Option { + let source = source_text(db, file); + let index = line_index(db, file); + + Some(lsp_position_to_text_size(*self, &source, &index, encoding)) + } } pub(crate) trait TextSizeExt { - fn to_position( - self, - text: &str, - index: &LineIndex, + /// Converts self into a position into an LSP text document (can be a cell or regular document). + /// + /// Returns `None` if the position can't be converted: + /// + /// * If `file` is a notebook but the notebook isn't open in the editor, + /// preventing us from looking up the corresponding cell. + /// * If `position` is out of bounds. + fn to_lsp_position( + &self, + db: &dyn Db, + file: File, encoding: PositionEncoding, - ) -> types::Position + ) -> Option where Self: Sized; } impl TextSizeExt for TextSize { - fn to_position( - self, - text: &str, - index: &LineIndex, + fn to_lsp_position( + &self, + db: &dyn Db, + file: File, encoding: PositionEncoding, - ) -> types::Position { - let source_location = index.source_location(self, text, encoding.into()); - source_location_to_position(&source_location) + ) -> Option { + let source = source_text(db, file); + let index = line_index(db, file); + + let uri = file_to_url(db, file); + let position = text_size_to_lsp_position(*self, &source, &index, encoding); + + Some(LspPosition { position, uri }) } } pub(crate) trait ToRangeExt { - fn to_lsp_range( - &self, - text: &str, - index: &LineIndex, - encoding: PositionEncoding, - ) -> types::Range; - - #[expect(dead_code)] - fn to_notebook_range( - &self, - text: &str, - source_index: &LineIndex, - notebook_index: &NotebookIndex, - encoding: PositionEncoding, - ) -> NotebookRange; + /// Converts self into a range into an LSP text 
document (can be a cell or regular document). + /// + /// Returns `None` if the range can't be converted: + /// + /// * If `file` is a notebook but the notebook isn't open in the editor, + /// preventing us from looking up the corresponding cell. + /// * If range is out of bounds. + fn to_lsp_range(&self, db: &dyn Db, file: File, encoding: PositionEncoding) + -> Option; } fn u32_index_to_usize(index: u32) -> usize { usize::try_from(index).expect("u32 fits in usize") } -impl PositionExt for lsp_types::Position { - fn to_text_size(&self, text: &str, index: &LineIndex, encoding: PositionEncoding) -> TextSize { - index.offset( - SourceLocation { - line: OneIndexed::from_zero_indexed(u32_index_to_usize(self.line)), - character_offset: OneIndexed::from_zero_indexed(u32_index_to_usize(self.character)), - }, - text, - encoding.into(), - ) +fn text_size_to_lsp_position( + offset: TextSize, + text: &str, + index: &LineIndex, + encoding: PositionEncoding, +) -> lsp_types::Position { + let source_location = index.source_location(offset, text, encoding.into()); + source_location_to_position(&source_location) +} + +fn text_range_to_lsp_range( + range: TextRange, + text: &str, + index: &LineIndex, + encoding: PositionEncoding, +) -> lsp_types::Range { + lsp_types::Range { + start: text_size_to_lsp_position(range.start(), text, index, encoding), + end: text_size_to_lsp_position(range.end(), text, index, encoding), } } -impl RangeExt for lsp_types::Range { - fn to_text_range( - &self, - text: &str, - index: &LineIndex, - encoding: PositionEncoding, - ) -> TextRange { - TextRange::new( - self.start.to_text_size(text, index, encoding), - self.end.to_text_size(text, index, encoding), - ) - } +/// Helper function to convert an LSP Position to internal `TextSize`. +/// This is used internally by the `PositionExt` trait and other helpers. 
+fn lsp_position_to_text_size( + position: lsp_types::Position, + text: &str, + index: &LineIndex, + encoding: PositionEncoding, +) -> TextSize { + index.offset( + SourceLocation { + line: OneIndexed::from_zero_indexed(u32_index_to_usize(position.line)), + character_offset: OneIndexed::from_zero_indexed(u32_index_to_usize(position.character)), + }, + text, + encoding.into(), + ) +} + +/// Helper function to convert an LSP Range to internal `TextRange`. +/// This is used internally by the `RangeExt` trait and in special cases +/// where `db` and `file` are not available (e.g., when applying document changes). +pub(crate) fn lsp_range_to_text_range( + range: lsp_types::Range, + text: &str, + index: &LineIndex, + encoding: PositionEncoding, +) -> TextRange { + TextRange::new( + lsp_position_to_text_size(range.start, text, index, encoding), + lsp_position_to_text_size(range.end, text, index, encoding), + ) } impl ToRangeExt for TextRange { fn to_lsp_range( &self, - text: &str, - index: &LineIndex, + db: &dyn Db, + file: File, encoding: PositionEncoding, - ) -> types::Range { - types::Range { - start: self.start().to_position(text, index, encoding), - end: self.end().to_position(text, index, encoding), - } - } + ) -> Option { + let source = source_text(db, file); + let index = line_index(db, file); + let range = text_range_to_lsp_range(*self, &source, &index, encoding); - fn to_notebook_range( - &self, - text: &str, - source_index: &LineIndex, - notebook_index: &NotebookIndex, - encoding: PositionEncoding, - ) -> NotebookRange { - let start = source_index.source_location(self.start(), text, encoding.into()); - let mut end = source_index.source_location(self.end(), text, encoding.into()); - let starting_cell = notebook_index.cell(start.line); - - // weird edge case here - if the end of the range is where the newline after the cell got added (making it 'out of bounds') - // we need to move it one character back (which should place it at the end of the last line). 
- // we test this by checking if the ending offset is in a different (or nonexistent) cell compared to the cell of the starting offset. - if notebook_index.cell(end.line) != starting_cell { - end.line = end.line.saturating_sub(1); - let offset = self.end().checked_sub(1.into()).unwrap_or_default(); - end.character_offset = source_index - .source_location(offset, text, encoding.into()) - .character_offset; - } - - let start = source_location_to_position(¬ebook_index.translate_source_location(&start)); - let end = source_location_to_position(¬ebook_index.translate_source_location(&end)); - - NotebookRange { - cell: starting_cell - .map(OneIndexed::to_zero_indexed) - .unwrap_or_default(), - range: types::Range { start, end }, - } + let uri = file_to_url(db, file); + Some(LspRange { range, uri }) } } -fn source_location_to_position(location: &SourceLocation) -> types::Position { - types::Position { +fn source_location_to_position(location: &SourceLocation) -> lsp_types::Position { + lsp_types::Position { line: u32::try_from(location.line.to_zero_indexed()).expect("line usize fits in u32"), character: u32::try_from(location.character_offset.to_zero_indexed()) .expect("character usize fits in u32"), @@ -156,17 +268,13 @@ fn source_location_to_position(location: &SourceLocation) -> types::Position { } pub(crate) trait FileRangeExt { - fn to_location(&self, db: &dyn Db, encoding: PositionEncoding) -> Option; + /// Converts this file range to an `LspRange`, which then requires an explicit + /// decision about how to use it (as a local range or as a location). 
+ fn to_lsp_range(&self, db: &dyn Db, encoding: PositionEncoding) -> Option; } impl FileRangeExt for FileRange { - fn to_location(&self, db: &dyn Db, encoding: PositionEncoding) -> Option { - let file = self.file(); - let uri = file_to_url(db, file)?; - let source = source_text(db, file); - let line_index = line_index(db, file); - - let range = self.range().to_lsp_range(&source, &line_index, encoding); - Some(Location { uri, range }) + fn to_lsp_range(&self, db: &dyn Db, encoding: PositionEncoding) -> Option { + self.range().to_lsp_range(db, self.file(), encoding) } } diff --git a/crates/ty_server/src/document/text_document.rs b/crates/ty_server/src/document/text_document.rs index e5d00ff0cf..e6cd4c4e0b 100644 --- a/crates/ty_server/src/document/text_document.rs +++ b/crates/ty_server/src/document/text_document.rs @@ -1,9 +1,9 @@ -use lsp_types::TextDocumentContentChangeEvent; +use lsp_types::{TextDocumentContentChangeEvent, Url}; use ruff_source_file::LineIndex; use crate::PositionEncoding; -use super::RangeExt; +use super::range::lsp_range_to_text_range; pub(crate) type DocumentVersion = i32; @@ -11,6 +11,9 @@ pub(crate) type DocumentVersion = i32; /// with changes made by the user, including unsaved changes. #[derive(Debug, Clone)] pub struct TextDocument { + /// The URL as sent by the client + url: Url, + /// The string contents of the document. contents: String, /// A computed line index for the document. 
This should always reflect @@ -40,9 +43,10 @@ impl From<&str> for LanguageId { } impl TextDocument { - pub fn new(contents: String, version: DocumentVersion) -> Self { + pub fn new(url: Url, contents: String, version: DocumentVersion) -> Self { let index = LineIndex::from_source_text(&contents); Self { + url, contents, index, version, @@ -60,6 +64,10 @@ impl TextDocument { self.contents } + pub(crate) fn url(&self) -> &Url { + &self.url + } + pub fn contents(&self) -> &str { &self.contents } @@ -106,7 +114,7 @@ impl TextDocument { } in changes { if let Some(range) = range { - let range = range.to_text_range(&new_contents, &active_index, encoding); + let range = lsp_range_to_text_range(range, &new_contents, &active_index, encoding); new_contents.replace_range( usize::from(range.start())..usize::from(range.end()), @@ -154,11 +162,12 @@ impl TextDocument { #[cfg(test)] mod tests { use crate::{PositionEncoding, TextDocument}; - use lsp_types::{Position, TextDocumentContentChangeEvent}; + use lsp_types::{Position, TextDocumentContentChangeEvent, Url}; #[test] fn redo_edit() { let mut document = TextDocument::new( + Url::parse("file:///test").unwrap(), r#"""" 测试comment 一些测试内容 diff --git a/crates/ty_server/src/lib.rs b/crates/ty_server/src/lib.rs index a56a95cb38..122f50d277 100644 --- a/crates/ty_server/src/lib.rs +++ b/crates/ty_server/src/lib.rs @@ -4,13 +4,15 @@ use anyhow::Context; use lsp_server::Connection; use ruff_db::system::{OsSystem, SystemPathBuf}; +use crate::db::Db; pub use crate::logging::{LogLevel, init_logging}; pub use crate::server::{PartialWorkspaceProgress, PartialWorkspaceProgressParams, Server}; pub use crate::session::{ClientOptions, DiagnosticMode}; pub use document::{NotebookDocument, PositionEncoding, TextDocument}; -pub(crate) use session::{DocumentQuery, Session}; +pub(crate) use session::Session; mod capabilities; +mod db; mod document; mod logging; mod server; diff --git a/crates/ty_server/src/server/api.rs 
b/crates/ty_server/src/server/api.rs index 6fd1cde43a..a56866791b 100644 --- a/crates/ty_server/src/server/api.rs +++ b/crates/ty_server/src/server/api.rs @@ -1,6 +1,5 @@ use crate::server::schedule::Task; use crate::session::Session; -use crate::system::AnySystemPath; use anyhow::anyhow; use lsp_server as server; use lsp_server::RequestId; @@ -208,7 +207,7 @@ where // SAFETY: The `snapshot` is safe to move across the unwind boundary because it is not used // after unwinding. - let snapshot = AssertUnwindSafe(session.take_session_snapshot()); + let snapshot = AssertUnwindSafe(session.snapshot_session()); Box::new(move |client| { let _span = tracing::debug_span!("request", %id, method = R::METHOD).entered(); @@ -253,10 +252,10 @@ where .cancellation_token(&id) .expect("request should have been tested for cancellation before scheduling"); - let url = R::document_url(¶ms).into_owned(); + let url = R::document_url(¶ms); - let Ok(path) = AnySystemPath::try_from_url(&url) else { - let reason = format!("URL `{url}` isn't a valid system path"); + let Ok(document) = session.snapshot_document(&url) else { + let reason = format!("Document {url} is not open in the session"); tracing::warn!( "Ignoring request id={id} method={} because {reason}", R::METHOD @@ -274,8 +273,8 @@ where }); }; + let path = document.to_file_path(); let db = session.project_db(&path).clone(); - let snapshot = session.take_document_snapshot(url); Box::new(move |client| { let _span = tracing::debug_span!("request", %id, method = R::METHOD).entered(); @@ -294,7 +293,7 @@ where } if let Err(error) = ruff_db::panic::catch_unwind(|| { - R::handle_request(&id, &db, snapshot, client, params); + R::handle_request(&id, &db, document, client, params); }) { panic_response::(&id, client, &error, retry); } @@ -371,7 +370,15 @@ where let (id, params) = cast_notification::(req)?; Ok(Task::background(schedule, move |session: &Session| { let url = N::document_url(¶ms); - let snapshot = 
session.take_document_snapshot((*url).clone()); + let Ok(snapshot) = session.snapshot_document(&url) else { + let reason = format!("Document {url} is not open in the session"); + tracing::warn!( + "Ignoring notification id={id} method={} because {reason}", + N::METHOD + ); + return Box::new(|_| {}); + }; + Box::new(move |client| { let _span = tracing::debug_span!("notification", method = N::METHOD).entered(); diff --git a/crates/ty_server/src/server/api/diagnostics.rs b/crates/ty_server/src/server/api/diagnostics.rs index d43b176a9b..98d927cf2e 100644 --- a/crates/ty_server/src/server/api/diagnostics.rs +++ b/crates/ty_server/src/server/api/diagnostics.rs @@ -9,20 +9,20 @@ use rustc_hash::FxHashMap; use ruff_db::diagnostic::{Annotation, Severity, SubDiagnostic}; use ruff_db::files::FileRange; -use ruff_db::source::{line_index, source_text}; use ruff_db::system::SystemPathBuf; -use ty_project::{Db, ProjectDatabase}; +use ty_project::{Db as _, ProjectDatabase}; -use crate::document::{DocumentKey, FileRangeExt, ToRangeExt}; +use crate::Db; +use crate::document::{FileRangeExt, ToRangeExt}; use crate::session::DocumentSnapshot; use crate::session::client::Client; use crate::system::{AnySystemPath, file_to_url}; -use crate::{DocumentQuery, PositionEncoding, Session}; +use crate::{NotebookDocument, PositionEncoding, Session}; pub(super) struct Diagnostics<'a> { items: Vec, encoding: PositionEncoding, - document: &'a DocumentQuery, + notebook: Option<&'a NotebookDocument>, } impl Diagnostics<'_> { @@ -53,7 +53,7 @@ impl Diagnostics<'_> { } pub(super) fn to_lsp_diagnostics(&self, db: &ProjectDatabase) -> LspDiagnostics { - if let Some(notebook) = self.document.as_notebook() { + if let Some(notebook) = self.notebook { let mut cell_diagnostics: FxHashMap> = FxHashMap::default(); // Populates all relevant URLs with an empty diagnostic list. 
This ensures that documents @@ -115,23 +115,18 @@ impl LspDiagnostics { } } -/// Clears the diagnostics for the document identified by `key`. +/// Clears the diagnostics for the document identified by `uri`. /// /// This is done by notifying the client with an empty list of diagnostics for the document. /// For notebook cells, this clears diagnostics for the specific cell. /// For other document types, this clears diagnostics for the main document. -pub(super) fn clear_diagnostics(session: &Session, key: &DocumentKey, client: &Client) { +pub(super) fn clear_diagnostics(session: &Session, uri: &lsp_types::Url, client: &Client) { if session.client_capabilities().supports_pull_diagnostics() { return; } - let Some(uri) = key.to_url() else { - // If we can't convert to URL, we can't clear diagnostics - return; - }; - client.send_notification::(PublishDiagnosticsParams { - uri, + uri: uri.clone(), diagnostics: vec![], version: None, }); @@ -143,18 +138,12 @@ pub(super) fn clear_diagnostics(session: &Session, key: &DocumentKey, client: &C /// This function is a no-op if the client supports pull diagnostics. 
/// /// [publish diagnostics notification]: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_publishDiagnostics -pub(super) fn publish_diagnostics(session: &Session, key: &DocumentKey, client: &Client) { +pub(super) fn publish_diagnostics(session: &Session, url: &lsp_types::Url, client: &Client) { if session.client_capabilities().supports_pull_diagnostics() { return; } - let Some(url) = key.to_url() else { - return; - }; - - let snapshot = session.take_document_snapshot(url.clone()); - - let document = match snapshot.document() { + let snapshot = match session.snapshot_document(url) { Ok(document) => document, Err(err) => { tracing::debug!("Failed to resolve document for URL `{}`: {}", url, err); @@ -162,7 +151,7 @@ pub(super) fn publish_diagnostics(session: &Session, key: &DocumentKey, client: } }; - let db = session.project_db(key.path()); + let db = session.project_db(&snapshot.to_file_path()); let Some(diagnostics) = compute_diagnostics(db, &snapshot) else { return; @@ -173,13 +162,13 @@ pub(super) fn publish_diagnostics(session: &Session, key: &DocumentKey, client: client.send_notification::(PublishDiagnosticsParams { uri, diagnostics, - version: Some(document.version()), + version: Some(snapshot.document().version()), }); }; match diagnostics.to_lsp_diagnostics(db) { LspDiagnostics::TextDocument(diagnostics) => { - publish_diagnostics_notification(url, diagnostics); + publish_diagnostics_notification(url.clone(), diagnostics); } LspDiagnostics::NotebookDocument(cell_diagnostics) => { for (cell_url, diagnostics) in cell_diagnostics { @@ -264,16 +253,11 @@ pub(super) fn compute_diagnostics<'a>( db: &ProjectDatabase, snapshot: &'a DocumentSnapshot, ) -> Option> { - let document = match snapshot.document() { - Ok(document) => document, - Err(err) => { - tracing::info!("Failed to resolve document for snapshot: {}", err); - return None; - } - }; - - let Some(file) = document.file(db) else { - 
tracing::info!("No file found for snapshot for `{}`", document.file_path()); + let Some(file) = snapshot.to_file(db) else { + tracing::info!( + "No file found for snapshot for `{}`", + snapshot.to_file_path() + ); return None; }; @@ -282,7 +266,7 @@ pub(super) fn compute_diagnostics<'a>( Some(Diagnostics { items: diagnostics, encoding: snapshot.encoding(), - document, + notebook: snapshot.notebook(), }) } @@ -295,12 +279,11 @@ pub(super) fn to_lsp_diagnostic( ) -> Diagnostic { let range = if let Some(span) = diagnostic.primary_span() { let file = span.expect_ty_file(); - let index = line_index(db, file); - let source = source_text(db, file); span.range() - .map(|range| range.to_lsp_range(&source, &index, encoding)) + .and_then(|range| range.to_lsp_range(db, file, encoding)) .unwrap_or_default() + .local_range() } else { Range::default() }; @@ -381,7 +364,7 @@ fn annotation_to_related_information( let annotation_message = annotation.get_message()?; let range = FileRange::try_from(span).ok()?; - let location = range.to_location(db, encoding)?; + let location = range.to_lsp_range(db, encoding)?.into_location()?; Some(DiagnosticRelatedInformation { location, @@ -399,7 +382,7 @@ fn sub_diagnostic_to_related_information( let span = primary_annotation.get_span(); let range = FileRange::try_from(span).ok()?; - let location = range.to_location(db, encoding)?; + let location = range.to_lsp_range(db, encoding)?.into_location()?; Some(DiagnosticRelatedInformation { location, diff --git a/crates/ty_server/src/server/api/notifications/did_change.rs b/crates/ty_server/src/server/api/notifications/did_change.rs index 68f6f883e0..3cb52c3daa 100644 --- a/crates/ty_server/src/server/api/notifications/did_change.rs +++ b/crates/ty_server/src/server/api/notifications/did_change.rs @@ -28,19 +28,16 @@ impl SyncNotificationHandler for DidChangeTextDocumentHandler { content_changes, } = params; - let key = match session.key_from_url(uri) { - Ok(key) => key, - Err(uri) => { - 
tracing::debug!("Failed to create document key from URI: {}", uri); - return Ok(()); - } - }; - - session - .update_text_document(&key, content_changes, version) + let document = session + .document_handle(&uri) .with_failure_code(ErrorCode::InternalError)?; - let changes = match key.path() { + document + .update_text_document(session, content_changes, version) + .with_failure_code(ErrorCode::InternalError)?; + + let path = document.to_file_path(); + let changes = match &*path { AnySystemPath::System(system_path) => { vec![ChangeEvent::file_content_changed(system_path.clone())] } @@ -49,9 +46,9 @@ impl SyncNotificationHandler for DidChangeTextDocumentHandler { } }; - session.apply_changes(key.path(), changes); + session.apply_changes(&path, changes); - publish_diagnostics(session, &key, client); + publish_diagnostics(session, document.url(), client); Ok(()) } diff --git a/crates/ty_server/src/server/api/notifications/did_change_watched_files.rs b/crates/ty_server/src/server/api/notifications/did_change_watched_files.rs index 21285f461f..2d9c308f36 100644 --- a/crates/ty_server/src/server/api/notifications/did_change_watched_files.rs +++ b/crates/ty_server/src/server/api/notifications/did_change_watched_files.rs @@ -1,3 +1,4 @@ +use crate::document::DocumentKey; use crate::server::Result; use crate::server::api::diagnostics::{publish_diagnostics, publish_settings_diagnostics}; use crate::server::api::traits::{NotificationHandler, SyncNotificationHandler}; @@ -7,7 +8,7 @@ use crate::system::AnySystemPath; use lsp_types as types; use lsp_types::{FileChangeType, notification as notif}; use rustc_hash::FxHashMap; -use ty_project::Db; +use ty_project::Db as _; use ty_project::watch::{ChangeEvent, ChangedKind, CreatedKind, DeletedKind}; pub(crate) struct DidChangeWatchedFiles; @@ -25,16 +26,8 @@ impl SyncNotificationHandler for DidChangeWatchedFiles { let mut events_by_db: FxHashMap<_, Vec> = FxHashMap::default(); for change in params.changes { - let path = match 
AnySystemPath::try_from_url(&change.uri) { - Ok(path) => path, - Err(err) => { - tracing::warn!( - "Failed to convert URI '{}` to system path: {err:?}", - change.uri - ); - continue; - } - }; + let key = DocumentKey::from_url(&change.uri); + let path = key.to_file_path(); let system_path = match path { AnySystemPath::System(system) => system, @@ -99,8 +92,8 @@ impl SyncNotificationHandler for DidChangeWatchedFiles { |_, ()| {}, ); } else { - for key in session.text_document_keys() { - publish_diagnostics(session, &key, client); + for key in session.text_document_handles() { + publish_diagnostics(session, key.url(), client); } } // TODO: always publish diagnostics for notebook files (since they don't use pull diagnostics) diff --git a/crates/ty_server/src/server/api/notifications/did_close.rs b/crates/ty_server/src/server/api/notifications/did_close.rs index 60097df67b..5c5747ee05 100644 --- a/crates/ty_server/src/server/api/notifications/did_close.rs +++ b/crates/ty_server/src/server/api/notifications/did_close.rs @@ -27,22 +27,20 @@ impl SyncNotificationHandler for DidCloseTextDocumentHandler { text_document: TextDocumentIdentifier { uri }, } = params; - let key = match session.key_from_url(uri) { - Ok(key) => key, - Err(uri) => { - tracing::debug!("Failed to create document key from URI: {}", uri); - return Ok(()); - } - }; - - session - .close_document(&key) + let document = session + .document_handle(&uri) .with_failure_code(ErrorCode::InternalError)?; - let path = key.path(); - let db = session.project_db_mut(path); + let path = document.to_file_path().into_owned(); + let url = document.url().clone(); - match path { + document + .close(session) + .with_failure_code(ErrorCode::InternalError)?; + + let db = session.project_db_mut(&path); + + match &path { AnySystemPath::System(system_path) => { if let Some(file) = db.files().try_system(db, system_path) { db.project().close_file(db, file); @@ -65,7 +63,7 @@ impl SyncNotificationHandler for 
DidCloseTextDocumentHandler { .diagnostic_mode() .is_open_files_only() { - clear_diagnostics(session, &key, client); + clear_diagnostics(session, &url, client); } } AnySystemPath::SystemVirtual(virtual_path) => { @@ -78,7 +76,7 @@ impl SyncNotificationHandler for DidCloseTextDocumentHandler { // Always clear diagnostics for virtual files, as they don't really exist on disk // which means closing them is like deleting the file. - clear_diagnostics(session, &key, client); + clear_diagnostics(session, &url, client); } } diff --git a/crates/ty_server/src/server/api/notifications/did_close_notebook.rs b/crates/ty_server/src/server/api/notifications/did_close_notebook.rs index f934f6832e..9b03651496 100644 --- a/crates/ty_server/src/server/api/notifications/did_close_notebook.rs +++ b/crates/ty_server/src/server/api/notifications/did_close_notebook.rs @@ -26,21 +26,19 @@ impl SyncNotificationHandler for DidCloseNotebookHandler { .. } = params; - let key = match session.key_from_url(uri) { - Ok(key) => key, - Err(uri) => { - tracing::debug!("Failed to create document key from URI: {}", uri); - return Ok(()); - } - }; - - session - .close_document(&key) + let document = session + .document_handle(&uri) .with_failure_code(lsp_server::ErrorCode::InternalError)?; - if let AnySystemPath::SystemVirtual(virtual_path) = key.path() { + let path = document.to_file_path().into_owned(); + + document + .close(session) + .with_failure_code(lsp_server::ErrorCode::InternalError)?; + + if let AnySystemPath::SystemVirtual(virtual_path) = &path { session.apply_changes( - key.path(), + &path, vec![ChangeEvent::DeletedVirtual(virtual_path.clone())], ); } diff --git a/crates/ty_server/src/server/api/notifications/did_open.rs b/crates/ty_server/src/server/api/notifications/did_open.rs index 5647bb2781..b2561e9c6c 100644 --- a/crates/ty_server/src/server/api/notifications/did_open.rs +++ b/crates/ty_server/src/server/api/notifications/did_open.rs @@ -35,30 +35,23 @@ impl SyncNotificationHandler 
for DidOpenTextDocumentHandler { }, } = params; - let key = match session.key_from_url(uri) { - Ok(key) => key, - Err(uri) => { - tracing::debug!("Failed to create document key from URI: {}", uri); - return Ok(()); - } - }; + let document = session.open_text_document( + TextDocument::new(uri, text, version).with_language_id(&language_id), + ); - let document = TextDocument::new(text, version).with_language_id(&language_id); - session.open_text_document(key.path(), document); - - let path = key.path(); + let path = document.to_file_path(); // This is a "maybe" because the `File` might've not been interned yet i.e., the // `try_system` call will return `None` which doesn't mean that the file is new, it's just // that the server didn't need the file yet. let is_maybe_new_system_file = path.as_system().is_some_and(|system_path| { - let db = session.project_db(path); + let db = session.project_db(&path); db.files() .try_system(db, system_path) .is_none_or(|file| !file.exists(db)) }); - match path { + match &*path { AnySystemPath::System(system_path) => { let event = if is_maybe_new_system_file { ChangeEvent::Created { @@ -68,22 +61,22 @@ impl SyncNotificationHandler for DidOpenTextDocumentHandler { } else { ChangeEvent::Opened(system_path.clone()) }; - session.apply_changes(path, vec![event]); + session.apply_changes(&path, vec![event]); - let db = session.project_db_mut(path); + let db = session.project_db_mut(&path); match system_path_to_file(db, system_path) { Ok(file) => db.project().open_file(db, file), Err(err) => tracing::warn!("Failed to open file {system_path}: {err}"), } } AnySystemPath::SystemVirtual(virtual_path) => { - let db = session.project_db_mut(path); + let db = session.project_db_mut(&path); let virtual_file = db.files().virtual_file(db, virtual_path); db.project().open_file(db, virtual_file.file()); } } - publish_diagnostics(session, &key, client); + publish_diagnostics(session, document.url(), client); Ok(()) } diff --git 
a/crates/ty_server/src/server/api/notifications/did_open_notebook.rs b/crates/ty_server/src/server/api/notifications/did_open_notebook.rs index 201add9587..b61f2aeef6 100644 --- a/crates/ty_server/src/server/api/notifications/did_open_notebook.rs +++ b/crates/ty_server/src/server/api/notifications/did_open_notebook.rs @@ -25,20 +25,27 @@ impl SyncNotificationHandler for DidOpenNotebookHandler { _client: &Client, params: DidOpenNotebookDocumentParams, ) -> Result<()> { - let Ok(path) = AnySystemPath::try_from_url(¶ms.notebook_document.uri) else { - return Ok(()); - }; + let lsp_types::NotebookDocument { + version, + cells, + metadata, + uri: notebook_uri, + .. + } = params.notebook_document; let notebook = NotebookDocument::new( - params.notebook_document.version, - params.notebook_document.cells, - params.notebook_document.metadata.unwrap_or_default(), + notebook_uri, + version, + cells, + metadata.unwrap_or_default(), params.cell_text_documents, ) .with_failure_code(ErrorCode::InternalError)?; - session.open_notebook_document(&path, notebook); - match &path { + let document = session.open_notebook_document(notebook); + let path = document.to_file_path(); + + match &*path { AnySystemPath::System(system_path) => { session.apply_changes(&path, vec![ChangeEvent::Opened(system_path.clone())]); } diff --git a/crates/ty_server/src/server/api/requests/completion.rs b/crates/ty_server/src/server/api/requests/completion.rs index a3e7d91f94..e99de7fb39 100644 --- a/crates/ty_server/src/server/api/requests/completion.rs +++ b/crates/ty_server/src/server/api/requests/completion.rs @@ -6,7 +6,6 @@ use lsp_types::{ CompletionItem, CompletionItemKind, CompletionItemLabelDetails, CompletionList, CompletionParams, CompletionResponse, Documentation, TextEdit, Url, }; -use ruff_db::source::{line_index, source_text}; use ruff_source_file::OneIndexed; use ruff_text_size::Ranged; use ty_ide::{CompletionKind, CompletionSettings, completion}; @@ -45,17 +44,19 @@ impl 
BackgroundDocumentRequestHandler for CompletionRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; - let source = source_text(db, file); - let line_index = line_index(db, file); - let offset = params.text_document_position.position.to_text_size( - &source, - &line_index, + let Some(offset) = params.text_document_position.position.to_text_size( + db, + file, + snapshot.url(), snapshot.encoding(), - ); + ) else { + return Ok(None); + }; + let settings = CompletionSettings { auto_import: snapshot.global_settings().is_auto_import_enabled(), }; @@ -72,14 +73,15 @@ impl BackgroundDocumentRequestHandler for CompletionRequestHandler { .map(|(i, comp)| { let kind = comp.kind(db).map(ty_kind_to_lsp_kind); let type_display = comp.ty.map(|ty| ty.display(db).to_string()); - let import_edit = comp.import.as_ref().map(|edit| { - let range = - edit.range() - .to_lsp_range(&source, &line_index, snapshot.encoding()); - TextEdit { + let import_edit = comp.import.as_ref().and_then(|edit| { + let range = edit + .range() + .to_lsp_range(db, file, snapshot.encoding())? 
+ .local_range(); + Some(TextEdit { range, new_text: edit.content().map(ToString::to_string).unwrap_or_default(), - } + }) }); let name = comp.name.to_string(); diff --git a/crates/ty_server/src/server/api/requests/doc_highlights.rs b/crates/ty_server/src/server/api/requests/doc_highlights.rs index 9750bdc190..bf10a95310 100644 --- a/crates/ty_server/src/server/api/requests/doc_highlights.rs +++ b/crates/ty_server/src/server/api/requests/doc_highlights.rs @@ -2,7 +2,6 @@ use std::borrow::Cow; use lsp_types::request::DocumentHighlightRequest; use lsp_types::{DocumentHighlight, DocumentHighlightKind, DocumentHighlightParams, Url}; -use ruff_db::source::{line_index, source_text}; use ty_ide::{ReferenceKind, document_highlights}; use ty_project::ProjectDatabase; @@ -37,17 +36,18 @@ impl BackgroundDocumentRequestHandler for DocumentHighlightRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; - let source = source_text(db, file); - let line_index = line_index(db, file); - let offset = params.text_document_position_params.position.to_text_size( - &source, - &line_index, + let Some(offset) = params.text_document_position_params.position.to_text_size( + db, + file, + snapshot.url(), snapshot.encoding(), - ); + ) else { + return Ok(None); + }; let Some(highlights_result) = document_highlights(db, file, offset) else { return Ok(None); @@ -55,10 +55,11 @@ impl BackgroundDocumentRequestHandler for DocumentHighlightRequestHandler { let highlights: Vec<_> = highlights_result .into_iter() - .map(|target| { + .filter_map(|target| { let range = target .range() - .to_lsp_range(&source, &line_index, snapshot.encoding()); + .to_lsp_range(db, file, snapshot.encoding())? 
+ .local_range(); let kind = match target.kind() { ReferenceKind::Read => Some(DocumentHighlightKind::READ), @@ -66,7 +67,7 @@ impl BackgroundDocumentRequestHandler for DocumentHighlightRequestHandler { ReferenceKind::Other => Some(DocumentHighlightKind::TEXT), }; - DocumentHighlight { range, kind } + Some(DocumentHighlight { range, kind }) }) .collect(); diff --git a/crates/ty_server/src/server/api/requests/document_symbols.rs b/crates/ty_server/src/server/api/requests/document_symbols.rs index 46c4c3eb2e..1001d33648 100644 --- a/crates/ty_server/src/server/api/requests/document_symbols.rs +++ b/crates/ty_server/src/server/api/requests/document_symbols.rs @@ -2,11 +2,11 @@ use std::borrow::Cow; use lsp_types::request::DocumentSymbolRequest; use lsp_types::{DocumentSymbol, DocumentSymbolParams, SymbolInformation, Url}; -use ruff_db::source::{line_index, source_text}; -use ruff_source_file::LineIndex; +use ruff_db::files::File; use ty_ide::{HierarchicalSymbols, SymbolId, SymbolInfo, document_symbols}; use ty_project::ProjectDatabase; +use crate::Db; use crate::document::{PositionEncoding, ToRangeExt}; use crate::server::api::symbols::{convert_symbol_kind, convert_to_lsp_symbol_information}; use crate::server::api::traits::{ @@ -30,7 +30,7 @@ impl BackgroundDocumentRequestHandler for DocumentSymbolRequestHandler { db: &ProjectDatabase, snapshot: &DocumentSnapshot, _client: &Client, - params: DocumentSymbolParams, + _params: DocumentSymbolParams, ) -> crate::server::Result> { if snapshot .workspace_settings() @@ -39,13 +39,10 @@ impl BackgroundDocumentRequestHandler for DocumentSymbolRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; - let source = source_text(db, file); - let line_index = line_index(db, file); - // Check if the client supports hierarchical document symbols let supports_hierarchical = snapshot .resolved_client_capabilities() @@ -60,13 +57,13 @@ impl 
BackgroundDocumentRequestHandler for DocumentSymbolRequestHandler { let symbols = symbols.to_hierarchical(); let lsp_symbols: Vec = symbols .iter() - .map(|(id, symbol)| { + .filter_map(|(id, symbol)| { convert_to_lsp_document_symbol( + db, + file, &symbols, id, symbol, - &source, - &line_index, snapshot.encoding(), ) }) @@ -77,14 +74,8 @@ impl BackgroundDocumentRequestHandler for DocumentSymbolRequestHandler { // Return flattened symbols as SymbolInformation let lsp_symbols: Vec = symbols .iter() - .map(|(_, symbol)| { - convert_to_lsp_symbol_information( - symbol, - ¶ms.text_document.uri, - &source, - &line_index, - snapshot.encoding(), - ) + .filter_map(|(_, symbol)| { + convert_to_lsp_symbol_information(db, file, symbol, snapshot.encoding()) }) .collect(); @@ -96,33 +87,37 @@ impl BackgroundDocumentRequestHandler for DocumentSymbolRequestHandler { impl RetriableRequestHandler for DocumentSymbolRequestHandler {} fn convert_to_lsp_document_symbol( + db: &dyn Db, + file: File, symbols: &HierarchicalSymbols, id: SymbolId, symbol: SymbolInfo<'_>, - source: &str, - line_index: &LineIndex, encoding: PositionEncoding, -) -> DocumentSymbol { +) -> Option { let symbol_kind = convert_symbol_kind(symbol.kind); - DocumentSymbol { + Some(DocumentSymbol { name: symbol.name.into_owned(), detail: None, kind: symbol_kind, tags: None, #[allow(deprecated)] deprecated: None, - range: symbol.full_range.to_lsp_range(source, line_index, encoding), - selection_range: symbol.name_range.to_lsp_range(source, line_index, encoding), + range: symbol + .full_range + .to_lsp_range(db, file, encoding)? + .local_range(), + selection_range: symbol + .name_range + .to_lsp_range(db, file, encoding)? 
+ .local_range(), children: Some( symbols .children(id) - .map(|(child_id, child)| { - convert_to_lsp_document_symbol( - symbols, child_id, child, source, line_index, encoding, - ) + .filter_map(|(child_id, child)| { + convert_to_lsp_document_symbol(db, file, symbols, child_id, child, encoding) }) .collect(), ), - } + }) } diff --git a/crates/ty_server/src/server/api/requests/execute_command.rs b/crates/ty_server/src/server/api/requests/execute_command.rs index 8a2fc52fd1..8c0c0f9076 100644 --- a/crates/ty_server/src/server/api/requests/execute_command.rs +++ b/crates/ty_server/src/server/api/requests/execute_command.rs @@ -9,7 +9,7 @@ use lsp_server::ErrorCode; use lsp_types::{self as types, request as req}; use std::fmt::Write; use std::str::FromStr; -use ty_project::Db; +use ty_project::Db as _; pub(crate) struct ExecuteCommand; @@ -52,7 +52,7 @@ fn debug_information(session: &Session) -> crate::Result { writeln!( buffer, "Open text documents: {}", - session.text_document_keys().count() + session.text_document_handles().count() )?; writeln!(buffer)?; diff --git a/crates/ty_server/src/server/api/requests/goto_declaration.rs b/crates/ty_server/src/server/api/requests/goto_declaration.rs index 07444746f7..7d8864ced9 100644 --- a/crates/ty_server/src/server/api/requests/goto_declaration.rs +++ b/crates/ty_server/src/server/api/requests/goto_declaration.rs @@ -2,7 +2,6 @@ use std::borrow::Cow; use lsp_types::request::{GotoDeclaration, GotoDeclarationParams}; use lsp_types::{GotoDefinitionResponse, Url}; -use ruff_db::source::{line_index, source_text}; use ty_ide::goto_declaration; use ty_project::ProjectDatabase; @@ -37,17 +36,18 @@ impl BackgroundDocumentRequestHandler for GotoDeclarationRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; - let source = source_text(db, file); - let line_index = line_index(db, file); - let offset = 
params.text_document_position_params.position.to_text_size( - &source, - &line_index, + let Some(offset) = params.text_document_position_params.position.to_text_size( + db, + file, + snapshot.url(), snapshot.encoding(), - ); + ) else { + return Ok(None); + }; let Some(ranged) = goto_declaration(db, file, offset) else { return Ok(None); diff --git a/crates/ty_server/src/server/api/requests/goto_definition.rs b/crates/ty_server/src/server/api/requests/goto_definition.rs index 793ae54bf1..24dd781032 100644 --- a/crates/ty_server/src/server/api/requests/goto_definition.rs +++ b/crates/ty_server/src/server/api/requests/goto_definition.rs @@ -2,7 +2,6 @@ use std::borrow::Cow; use lsp_types::request::GotoDefinition; use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse, Url}; -use ruff_db::source::{line_index, source_text}; use ty_ide::goto_definition; use ty_project::ProjectDatabase; @@ -37,17 +36,18 @@ impl BackgroundDocumentRequestHandler for GotoDefinitionRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; - let source = source_text(db, file); - let line_index = line_index(db, file); - let offset = params.text_document_position_params.position.to_text_size( - &source, - &line_index, + let Some(offset) = params.text_document_position_params.position.to_text_size( + db, + file, + snapshot.url(), snapshot.encoding(), - ); + ) else { + return Ok(None); + }; let Some(ranged) = goto_definition(db, file, offset) else { return Ok(None); diff --git a/crates/ty_server/src/server/api/requests/goto_references.rs b/crates/ty_server/src/server/api/requests/goto_references.rs index 129afcecdc..a2bba12569 100644 --- a/crates/ty_server/src/server/api/requests/goto_references.rs +++ b/crates/ty_server/src/server/api/requests/goto_references.rs @@ -2,7 +2,6 @@ use std::borrow::Cow; use lsp_types::request::References; use lsp_types::{Location, ReferenceParams, Url}; -use 
ruff_db::source::{line_index, source_text}; use ty_ide::goto_references; use ty_project::ProjectDatabase; @@ -37,17 +36,18 @@ impl BackgroundDocumentRequestHandler for ReferencesRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; - let source = source_text(db, file); - let line_index = line_index(db, file); - let offset = params.text_document_position.position.to_text_size( - &source, - &line_index, + let Some(offset) = params.text_document_position.position.to_text_size( + db, + file, + snapshot.url(), snapshot.encoding(), - ); + ) else { + return Ok(None); + }; let include_declaration = params.context.include_declaration; diff --git a/crates/ty_server/src/server/api/requests/goto_type_definition.rs b/crates/ty_server/src/server/api/requests/goto_type_definition.rs index 5695c5a6ab..31e2816225 100644 --- a/crates/ty_server/src/server/api/requests/goto_type_definition.rs +++ b/crates/ty_server/src/server/api/requests/goto_type_definition.rs @@ -2,7 +2,6 @@ use std::borrow::Cow; use lsp_types::request::{GotoTypeDefinition, GotoTypeDefinitionParams}; use lsp_types::{GotoDefinitionResponse, Url}; -use ruff_db::source::{line_index, source_text}; use ty_ide::goto_type_definition; use ty_project::ProjectDatabase; @@ -37,17 +36,18 @@ impl BackgroundDocumentRequestHandler for GotoTypeDefinitionRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; - let source = source_text(db, file); - let line_index = line_index(db, file); - let offset = params.text_document_position_params.position.to_text_size( - &source, - &line_index, + let Some(offset) = params.text_document_position_params.position.to_text_size( + db, + file, + snapshot.url(), snapshot.encoding(), - ); + ) else { + return Ok(None); + }; let Some(ranged) = goto_type_definition(db, file, offset) else { return Ok(None); diff --git 
a/crates/ty_server/src/server/api/requests/hover.rs b/crates/ty_server/src/server/api/requests/hover.rs index be81eca472..d9e7ec6430 100644 --- a/crates/ty_server/src/server/api/requests/hover.rs +++ b/crates/ty_server/src/server/api/requests/hover.rs @@ -1,6 +1,6 @@ use std::borrow::Cow; -use crate::document::{PositionExt, ToRangeExt}; +use crate::document::{FileRangeExt, PositionExt}; use crate::server::api::traits::{ BackgroundDocumentRequestHandler, RequestHandler, RetriableRequestHandler, }; @@ -8,8 +8,6 @@ use crate::session::DocumentSnapshot; use crate::session::client::Client; use lsp_types::request::HoverRequest; use lsp_types::{HoverContents, HoverParams, MarkupContent, Url}; -use ruff_db::source::{line_index, source_text}; -use ruff_text_size::Ranged; use ty_ide::{MarkupKind, hover}; use ty_project::ProjectDatabase; @@ -37,17 +35,18 @@ impl BackgroundDocumentRequestHandler for HoverRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; - let source = source_text(db, file); - let line_index = line_index(db, file); - let offset = params.text_document_position_params.position.to_text_size( - &source, - &line_index, + let Some(offset) = params.text_document_position_params.position.to_text_size( + db, + file, + snapshot.url(), snapshot.encoding(), - ); + ) else { + return Ok(None); + }; let Some(range_info) = hover(db, file, offset) else { return Ok(None); @@ -69,11 +68,10 @@ impl BackgroundDocumentRequestHandler for HoverRequestHandler { kind: lsp_markup_kind, value: contents, }), - range: Some(range_info.file_range().range().to_lsp_range( - &source, - &line_index, - snapshot.encoding(), - )), + range: range_info + .file_range() + .to_lsp_range(db, snapshot.encoding()) + .map(|lsp_range| lsp_range.local_range()), })) } } diff --git a/crates/ty_server/src/server/api/requests/inlay_hints.rs b/crates/ty_server/src/server/api/requests/inlay_hints.rs index 
ec8464fc6b..2698456970 100644 --- a/crates/ty_server/src/server/api/requests/inlay_hints.rs +++ b/crates/ty_server/src/server/api/requests/inlay_hints.rs @@ -8,7 +8,6 @@ use crate::session::DocumentSnapshot; use crate::session::client::Client; use lsp_types::request::InlayHintRequest; use lsp_types::{InlayHintParams, Url}; -use ruff_db::source::{line_index, source_text}; use ty_ide::{InlayHintKind, InlayHintLabel, inlay_hints}; use ty_project::ProjectDatabase; @@ -36,32 +35,35 @@ impl BackgroundDocumentRequestHandler for InlayHintRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; - let index = line_index(db, file); - let source = source_text(db, file); - - let range = params + let Some(range) = params .range - .to_text_range(&source, &index, snapshot.encoding()); + .to_text_range(db, file, snapshot.url(), snapshot.encoding()) + else { + return Ok(None); + }; let inlay_hints = inlay_hints(db, file, range, workspace_settings.inlay_hints()); let inlay_hints = inlay_hints .into_iter() - .map(|hint| lsp_types::InlayHint { - position: hint - .position - .to_position(&source, &index, snapshot.encoding()), - label: inlay_hint_label(&hint.label), - kind: Some(inlay_hint_kind(&hint.kind)), - tooltip: None, - padding_left: None, - padding_right: None, - data: None, - text_edits: None, + .filter_map(|hint| { + Some(lsp_types::InlayHint { + position: hint + .position + .to_lsp_position(db, file, snapshot.encoding())? 
+ .local_position(), + label: inlay_hint_label(&hint.label), + kind: Some(inlay_hint_kind(&hint.kind)), + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + text_edits: None, + }) }) .collect(); diff --git a/crates/ty_server/src/server/api/requests/prepare_rename.rs b/crates/ty_server/src/server/api/requests/prepare_rename.rs index 7f11961bee..f12dde90b7 100644 --- a/crates/ty_server/src/server/api/requests/prepare_rename.rs +++ b/crates/ty_server/src/server/api/requests/prepare_rename.rs @@ -2,7 +2,6 @@ use std::borrow::Cow; use lsp_types::request::PrepareRenameRequest; use lsp_types::{PrepareRenameResponse, TextDocumentPositionParams, Url}; -use ruff_db::source::{line_index, source_text}; use ty_ide::can_rename; use ty_project::ProjectDatabase; @@ -37,21 +36,28 @@ impl BackgroundDocumentRequestHandler for PrepareRenameRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; - let source = source_text(db, file); - let line_index = line_index(db, file); - let offset = params - .position - .to_text_size(&source, &line_index, snapshot.encoding()); + let Some(offset) = + params + .position + .to_text_size(db, file, snapshot.url(), snapshot.encoding()) + else { + return Ok(None); + }; let Some(range) = can_rename(db, file, offset) else { return Ok(None); }; - let lsp_range = range.to_lsp_range(&source, &line_index, snapshot.encoding()); + let Some(lsp_range) = range + .to_lsp_range(db, file, snapshot.encoding()) + .map(|lsp_range| lsp_range.local_range()) + else { + return Ok(None); + }; Ok(Some(PrepareRenameResponse::Range(lsp_range))) } diff --git a/crates/ty_server/src/server/api/requests/rename.rs b/crates/ty_server/src/server/api/requests/rename.rs index 117891ebba..978e1769df 100644 --- a/crates/ty_server/src/server/api/requests/rename.rs +++ b/crates/ty_server/src/server/api/requests/rename.rs @@ -3,7 +3,6 @@ use std::collections::HashMap; use 
lsp_types::request::Rename; use lsp_types::{RenameParams, TextEdit, Url, WorkspaceEdit}; -use ruff_db::source::{line_index, source_text}; use ty_ide::rename; use ty_project::ProjectDatabase; @@ -38,17 +37,18 @@ impl BackgroundDocumentRequestHandler for RenameRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; - let source = source_text(db, file); - let line_index = line_index(db, file); - let offset = params.text_document_position.position.to_text_size( - &source, - &line_index, + let Some(offset) = params.text_document_position.position.to_text_size( + db, + file, + snapshot.url(), snapshot.encoding(), - ); + ) else { + return Ok(None); + }; let Some(rename_results) = rename(db, file, offset, ¶ms.new_name) else { return Ok(None); diff --git a/crates/ty_server/src/server/api/requests/selection_range.rs b/crates/ty_server/src/server/api/requests/selection_range.rs index 684b230cd3..46518810f6 100644 --- a/crates/ty_server/src/server/api/requests/selection_range.rs +++ b/crates/ty_server/src/server/api/requests/selection_range.rs @@ -2,7 +2,6 @@ use std::borrow::Cow; use lsp_types::request::SelectionRangeRequest; use lsp_types::{SelectionRange as LspSelectionRange, SelectionRangeParams, Url}; -use ruff_db::source::{line_index, source_text}; use ty_ide::selection_range; use ty_project::ProjectDatabase; @@ -37,25 +36,32 @@ impl BackgroundDocumentRequestHandler for SelectionRangeRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; - let source = source_text(db, file); - let line_index = line_index(db, file); - let mut results = Vec::new(); for position in params.positions { - let offset = position.to_text_size(&source, &line_index, snapshot.encoding()); + let Some(offset) = position.to_text_size(db, file, snapshot.url(), snapshot.encoding()) + else { + continue; + }; let ranges = 
selection_range(db, file, offset); if !ranges.is_empty() { // Convert ranges to nested LSP SelectionRange structure let mut lsp_range = None; for &range in &ranges { + let Some(range) = range + .to_lsp_range(db, file, snapshot.encoding()) + .map(|lsp_range| lsp_range.local_range()) + else { + break; + }; + lsp_range = Some(LspSelectionRange { - range: range.to_lsp_range(&source, &line_index, snapshot.encoding()), + range, parent: lsp_range.map(Box::new), }); } diff --git a/crates/ty_server/src/server/api/requests/semantic_tokens.rs b/crates/ty_server/src/server/api/requests/semantic_tokens.rs index 58f245d4ae..adc6142189 100644 --- a/crates/ty_server/src/server/api/requests/semantic_tokens.rs +++ b/crates/ty_server/src/server/api/requests/semantic_tokens.rs @@ -33,7 +33,7 @@ impl BackgroundDocumentRequestHandler for SemanticTokensRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; diff --git a/crates/ty_server/src/server/api/requests/semantic_tokens_range.rs b/crates/ty_server/src/server/api/requests/semantic_tokens_range.rs index 6112405249..1cd0484f14 100644 --- a/crates/ty_server/src/server/api/requests/semantic_tokens_range.rs +++ b/crates/ty_server/src/server/api/requests/semantic_tokens_range.rs @@ -8,7 +8,6 @@ use crate::server::api::traits::{ use crate::session::DocumentSnapshot; use crate::session::client::Client; use lsp_types::{SemanticTokens, SemanticTokensRangeParams, SemanticTokensRangeResult, Url}; -use ruff_db::source::{line_index, source_text}; use ty_project::ProjectDatabase; pub(crate) struct SemanticTokensRangeRequestHandler; @@ -35,17 +34,18 @@ impl BackgroundDocumentRequestHandler for SemanticTokensRangeRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; - let source = source_text(db, file); - let line_index = line_index(db, file); - // Convert LSP range to 
text offsets - let requested_range = params - .range - .to_text_range(&source, &line_index, snapshot.encoding()); + let Some(requested_range) = + params + .range + .to_text_range(db, file, snapshot.url(), snapshot.encoding()) + else { + return Ok(None); + }; let lsp_tokens = generate_semantic_tokens( db, diff --git a/crates/ty_server/src/server/api/requests/signature_help.rs b/crates/ty_server/src/server/api/requests/signature_help.rs index e9b9f160b6..81c31adc1b 100644 --- a/crates/ty_server/src/server/api/requests/signature_help.rs +++ b/crates/ty_server/src/server/api/requests/signature_help.rs @@ -11,7 +11,6 @@ use lsp_types::{ Documentation, ParameterInformation, ParameterLabel, SignatureHelp, SignatureHelpParams, SignatureInformation, Url, }; -use ruff_db::source::{line_index, source_text}; use ty_ide::signature_help; use ty_project::ProjectDatabase; @@ -39,17 +38,18 @@ impl BackgroundDocumentRequestHandler for SignatureHelpRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; - let source = source_text(db, file); - let line_index = line_index(db, file); - let offset = params.text_document_position_params.position.to_text_size( - &source, - &line_index, + let Some(offset) = params.text_document_position_params.position.to_text_size( + db, + file, + snapshot.url(), snapshot.encoding(), - ); + ) else { + return Ok(None); + }; // Extract signature help capabilities from the client let resolved_capabilities = snapshot.resolved_client_capabilities(); diff --git a/crates/ty_server/src/server/api/requests/workspace_diagnostic.rs b/crates/ty_server/src/server/api/requests/workspace_diagnostic.rs index c990d4f4af..87c7e4c77c 100644 --- a/crates/ty_server/src/server/api/requests/workspace_diagnostic.rs +++ b/crates/ty_server/src/server/api/requests/workspace_diagnostic.rs @@ -1,4 +1,5 @@ use crate::PositionEncoding; +use crate::document::DocumentKey; use 
crate::server::api::diagnostics::{Diagnostics, to_lsp_diagnostic}; use crate::server::api::traits::{ BackgroundRequestHandler, RequestHandler, RetriableRequestHandler, @@ -8,7 +9,7 @@ use crate::server::{Action, Result}; use crate::session::client::Client; use crate::session::index::Index; use crate::session::{SessionSnapshot, SuspendedWorkspaceDiagnosticRequest}; -use crate::system::{AnySystemPath, file_to_url}; +use crate::system::file_to_url; use lsp_server::RequestId; use lsp_types::request::WorkspaceDiagnosticRequest; use lsp_types::{ @@ -25,7 +26,7 @@ use serde::{Deserialize, Serialize}; use std::collections::BTreeMap; use std::sync::Mutex; use std::time::{Duration, Instant}; -use ty_project::{Db, ProgressReporter}; +use ty_project::{ProgressReporter, ProjectDatabase}; /// Handler for [Workspace diagnostics](workspace-diagnostics) /// @@ -229,7 +230,7 @@ impl ProgressReporter for WorkspaceDiagnosticsProgressReporter<'_> { state.report_progress(&self.work_done); } - fn report_checked_file(&self, db: &dyn Db, file: File, diagnostics: &[Diagnostic]) { + fn report_checked_file(&self, db: &ProjectDatabase, file: File, diagnostics: &[Diagnostic]) { // Another thread might have panicked at this point because of a salsa cancellation which // poisoned the result. If the response is poisoned, just don't report and wait for our thread // to unwind with a salsa cancellation next. @@ -259,7 +260,7 @@ impl ProgressReporter for WorkspaceDiagnosticsProgressReporter<'_> { state.response.maybe_flush(); } - fn report_diagnostics(&mut self, db: &dyn Db, diagnostics: Vec) { + fn report_diagnostics(&mut self, db: &ProjectDatabase, diagnostics: Vec) { let mut by_file: BTreeMap> = BTreeMap::new(); for diagnostic in diagnostics { @@ -317,7 +318,7 @@ struct ResponseWriter<'a> { // It's important that we use `AnySystemPath` over `Url` here because // `file_to_url` isn't guaranteed to return the exact same URL as the one provided // by the client. 
- previous_result_ids: FxHashMap, + previous_result_ids: FxHashMap, } impl<'a> ResponseWriter<'a> { @@ -346,12 +347,7 @@ impl<'a> ResponseWriter<'a> { let previous_result_ids = previous_result_ids .into_iter() - .filter_map(|prev| { - Some(( - AnySystemPath::try_from_url(&prev.uri).ok()?, - (prev.uri, prev.value), - )) - }) + .map(|prev| (DocumentKey::from_url(&prev.uri), (prev.uri, prev.value))) .collect(); Self { @@ -362,25 +358,26 @@ impl<'a> ResponseWriter<'a> { } } - fn write_diagnostics_for_file(&mut self, db: &dyn Db, file: File, diagnostics: &[Diagnostic]) { + fn write_diagnostics_for_file( + &mut self, + db: &ProjectDatabase, + file: File, + diagnostics: &[Diagnostic], + ) { let Some(url) = file_to_url(db, file) else { tracing::debug!("Failed to convert file path to URL at {}", file.path(db)); return; }; - + let key = DocumentKey::from_url(&url); let version = self .index - .key_from_url(url.clone()) - .ok() - .and_then(|key| self.index.make_document_ref(key).ok()) - .map(|doc| i64::from(doc.version())); + .document_handle(&url) + .map(|doc| i64::from(doc.version())) + .ok(); let result_id = Diagnostics::result_id_from_hash(diagnostics); - let previous_result_id = AnySystemPath::try_from_url(&url) - .ok() - .and_then(|path| self.previous_result_ids.remove(&path)) - .map(|(_url, id)| id); + let previous_result_id = self.previous_result_ids.remove(&key).map(|(_url, id)| id); let report = match result_id { Some(new_id) if Some(&new_id) == previous_result_id.as_ref() => { @@ -444,13 +441,12 @@ impl<'a> ResponseWriter<'a> { // Handle files that had diagnostics in previous request but no longer have any // Any remaining entries in previous_results are files that were fixed - for (previous_url, previous_result_id) in self.previous_result_ids.into_values() { + for (key, (previous_url, previous_result_id)) in self.previous_result_ids { // This file had diagnostics before but doesn't now, so we need to report it as having no diagnostics let version = self .index - 
.key_from_url(previous_url.clone()) + .document(&key) .ok() - .and_then(|key| self.index.make_document_ref(key).ok()) .map(|doc| i64::from(doc.version())); let new_result_id = Diagnostics::result_id_from_hash(&[]); diff --git a/crates/ty_server/src/server/api/requests/workspace_symbols.rs b/crates/ty_server/src/server/api/requests/workspace_symbols.rs index a964954546..252857e7e4 100644 --- a/crates/ty_server/src/server/api/requests/workspace_symbols.rs +++ b/crates/ty_server/src/server/api/requests/workspace_symbols.rs @@ -8,8 +8,6 @@ use crate::server::api::traits::{ }; use crate::session::SessionSnapshot; use crate::session::client::Client; -use crate::system::file_to_url; -use ruff_db::source::{line_index, source_text}; pub(crate) struct WorkspaceSymbolRequestHandler; @@ -41,23 +39,19 @@ impl BackgroundRequestHandler for WorkspaceSymbolRequestHandler { for workspace_symbol_info in workspace_symbol_infos { let WorkspaceSymbolInfo { symbol, file } = workspace_symbol_info; - // Get file information for URL conversion - let source = source_text(db, file); - let line_index = line_index(db, file); - - // Convert file to URL - let Some(url) = file_to_url(db, file) else { - tracing::debug!("Failed to convert file to URL at {}", file.path(db)); - continue; - }; - // Get position encoding from session let encoding = snapshot.position_encoding(); - let lsp_symbol = - convert_to_lsp_symbol_information(symbol, &url, &source, &line_index, encoding); + let Some(symbol) = convert_to_lsp_symbol_information(db, file, symbol, encoding) + else { + tracing::debug!( + "Failed to convert symbol '{}' to LSP symbol information", + file.path(db) + ); + continue; + }; - all_symbols.push(lsp_symbol); + all_symbols.push(symbol); } } diff --git a/crates/ty_server/src/server/api/semantic_tokens.rs b/crates/ty_server/src/server/api/semantic_tokens.rs index b168ef7877..a6208087c0 100644 --- a/crates/ty_server/src/server/api/semantic_tokens.rs +++ 
b/crates/ty_server/src/server/api/semantic_tokens.rs @@ -1,5 +1,5 @@ use lsp_types::SemanticToken; -use ruff_db::source::{line_index, source_text}; +use ruff_db::source::source_text; use ruff_text_size::{Ranged, TextRange}; use ty_ide::semantic_tokens; use ty_project::ProjectDatabase; @@ -16,7 +16,6 @@ pub(crate) fn generate_semantic_tokens( multiline_token_support: bool, ) -> Vec { let source = source_text(db, file); - let line_index = line_index(db, file); let semantic_token_data = semantic_tokens(db, file, range); // Convert semantic tokens to LSP format @@ -25,7 +24,14 @@ pub(crate) fn generate_semantic_tokens( let mut prev_start = 0u32; for token in &*semantic_token_data { - let lsp_range = token.range().to_lsp_range(&source, &line_index, encoding); + let Some(lsp_range) = token + .range() + .to_lsp_range(db, file, encoding) + .map(|lsp_range| lsp_range.local_range()) + else { + continue; + }; + let line = lsp_range.start.line; let character = lsp_range.start.character; diff --git a/crates/ty_server/src/server/api/symbols.rs b/crates/ty_server/src/server/api/symbols.rs index 396f236e8d..dd0dc67dcb 100644 --- a/crates/ty_server/src/server/api/symbols.rs +++ b/crates/ty_server/src/server/api/symbols.rs @@ -1,10 +1,10 @@ //! Utility functions common to language server request handlers //! that return symbol information. 
-use lsp_types::{SymbolInformation, SymbolKind, Url}; -use ruff_source_file::LineIndex; +use lsp_types::{SymbolInformation, SymbolKind}; use ty_ide::SymbolInfo; +use crate::Db; use crate::document::{PositionEncoding, ToRangeExt}; /// Convert `ty_ide` `SymbolKind` to LSP `SymbolKind` @@ -27,24 +27,25 @@ pub(crate) fn convert_symbol_kind(kind: ty_ide::SymbolKind) -> SymbolKind { /// Convert a `ty_ide` `SymbolInfo` to LSP `SymbolInformation` pub(crate) fn convert_to_lsp_symbol_information( + db: &dyn Db, + file: ruff_db::files::File, symbol: SymbolInfo<'_>, - uri: &Url, - source: &str, - line_index: &LineIndex, encoding: PositionEncoding, -) -> SymbolInformation { +) -> Option { let symbol_kind = convert_symbol_kind(symbol.kind); - SymbolInformation { + let location = symbol + .full_range + .to_lsp_range(db, file, encoding)? + .to_location()?; + + Some(SymbolInformation { name: symbol.name.into_owned(), kind: symbol_kind, tags: None, #[allow(deprecated)] deprecated: None, - location: lsp_types::Location { - uri: uri.clone(), - range: symbol.full_range.to_lsp_range(source, line_index, encoding), - }, + location, container_name: None, - } + }) } diff --git a/crates/ty_server/src/session.rs b/crates/ty_server/src/session.rs index 24ad0ef55e..9cc3553342 100644 --- a/crates/ty_server/src/session.rs +++ b/crates/ty_server/src/session.rs @@ -1,7 +1,7 @@ //! Data model, state management, and configuration resolution. 
use anyhow::{Context, anyhow}; -use index::DocumentQueryError; +use index::DocumentError; use lsp_server::{Message, RequestId}; use lsp_types::notification::{DidChangeWatchedFiles, Exit, Notification}; use lsp_types::request::{ @@ -15,8 +15,9 @@ use lsp_types::{ }; use options::GlobalOptions; use ruff_db::Db; -use ruff_db::files::File; +use ruff_db::files::{File, system_path_to_file}; use ruff_db::system::{System, SystemPath, SystemPathBuf}; +use std::borrow::Cow; use std::collections::{BTreeMap, HashSet, VecDeque}; use std::ops::{Deref, DerefMut}; use std::panic::RefUnwindSafe; @@ -26,7 +27,6 @@ use ty_project::metadata::Options; use ty_project::watch::ChangeEvent; use ty_project::{ChangeResult, CheckMode, Db as _, ProjectDatabase, ProjectMetadata}; -pub(crate) use self::index::DocumentQuery; pub(crate) use self::options::InitializationOptions; pub use self::options::{ClientOptions, DiagnosticMode}; pub(crate) use self::settings::{GlobalSettings, WorkspaceSettings}; @@ -439,13 +439,6 @@ impl Session { self.projects.values_mut().chain(default_project) } - /// Returns the [`DocumentKey`] for the given URL. - /// - /// Refer to [`Index::key_from_url`] for more details. - pub(crate) fn key_from_url(&self, url: Url) -> Result { - self.index().key_from_url(url) - } - pub(crate) fn initialize_workspaces( &mut self, workspace_settings: Vec<(Url, ClientOptions)>, @@ -819,25 +812,34 @@ impl Session { } /// Creates a document snapshot with the URL referencing the document to snapshot. 
- pub(crate) fn take_document_snapshot(&self, url: Url) -> DocumentSnapshot { - let key = self - .key_from_url(url) - .map_err(DocumentQueryError::InvalidUrl); - DocumentSnapshot { + pub(crate) fn snapshot_document(&self, url: &Url) -> Result { + let index = self.index(); + let document_handle = index.document_handle(url)?; + + let notebook = if let Some(notebook_path) = &document_handle.notebook_path { + index + .notebook_arc(&DocumentKey::from(notebook_path.clone())) + .ok() + } else { + None + }; + + Ok(DocumentSnapshot { resolved_client_capabilities: self.resolved_client_capabilities, global_settings: self.global_settings.clone(), - workspace_settings: key - .as_ref() - .ok() - .and_then(|key| self.workspaces.settings_for_path(key.path().as_system()?)) + workspace_settings: document_handle + .to_file_path() + .as_system() + .and_then(|path| self.workspaces.settings_for_path(path)) .unwrap_or_else(|| Arc::new(WorkspaceSettings::default())), position_encoding: self.position_encoding, - document_query_result: key.and_then(|key| self.index().make_document_ref(key)), - } + document: document_handle, + notebook, + }) } /// Creates a snapshot of the current state of the [`Session`]. - pub(crate) fn take_session_snapshot(&self) -> SessionSnapshot { + pub(crate) fn snapshot_session(&self) -> SessionSnapshot { SessionSnapshot { projects: self .projects @@ -855,56 +857,49 @@ impl Session { } /// Iterates over the document keys for all open text documents. - pub(super) fn text_document_keys(&self) -> impl Iterator + '_ { + pub(super) fn text_document_handles(&self) -> impl Iterator + '_ { self.index() - .text_document_paths() - .map(|path| DocumentKey::Text(path.clone())) + .text_documents() + .map(|(key, document)| DocumentHandle { + key: key.clone(), + url: document.url().clone(), + version: document.version(), + // TODO: Set notebook path if text document is part of a notebook + notebook_path: None, + }) + } + + /// Returns a handle to the document specified by its URL. 
+ /// + /// # Errors + /// + /// If the document is not found. + pub(crate) fn document_handle( + &self, + url: &lsp_types::Url, + ) -> Result { + self.index().document_handle(url) } /// Registers a notebook document at the provided `path`. /// If a document is already open here, it will be overwritten. - pub(crate) fn open_notebook_document( - &mut self, - path: &AnySystemPath, - document: NotebookDocument, - ) { - self.index_mut().open_notebook_document(path, document); + /// + /// Returns a handle to the opened document. + pub(crate) fn open_notebook_document(&mut self, document: NotebookDocument) -> DocumentHandle { + let handle = self.index_mut().open_notebook_document(document); self.bump_revision(); + handle } /// Registers a text document at the provided `path`. /// If a document is already open here, it will be overwritten. - pub(crate) fn open_text_document(&mut self, path: &AnySystemPath, document: TextDocument) { - self.index_mut().open_text_document(path, document); - self.bump_revision(); - } - - /// Updates a text document at the associated `key`. /// - /// The document key must point to a text document, or this will throw an error. - pub(crate) fn update_text_document( - &mut self, - key: &DocumentKey, - content_changes: Vec, - new_version: DocumentVersion, - ) -> crate::Result<()> { - let position_encoding = self.position_encoding; - self.index_mut().update_text_document( - key, - content_changes, - new_version, - position_encoding, - )?; - self.bump_revision(); - Ok(()) - } + /// Returns a handle to the opened document. + pub(crate) fn open_text_document(&mut self, document: TextDocument) -> DocumentHandle { + let handle = self.index_mut().open_text_document(document); - /// De-registers a document, specified by its key. - /// Calling this multiple times for the same document is a logic error. 
- pub(crate) fn close_document(&mut self, key: &DocumentKey) -> crate::Result<()> { - self.index_mut().close_document(key)?; self.bump_revision(); - Ok(()) + handle } /// Returns a reference to the index. @@ -1003,7 +998,8 @@ pub(crate) struct DocumentSnapshot { global_settings: Arc, workspace_settings: Arc, position_encoding: PositionEncoding, - document_query_result: Result, + document: DocumentHandle, + notebook: Option>, } impl DocumentSnapshot { @@ -1028,27 +1024,33 @@ impl DocumentSnapshot { } /// Returns the result of the document query for this snapshot. - pub(crate) fn document(&self) -> Result<&DocumentQuery, &DocumentQueryError> { - self.document_query_result.as_ref() + pub(crate) fn document(&self) -> &DocumentHandle { + &self.document } - pub(crate) fn file(&self, db: &dyn Db) -> Option { - let document = match self.document() { - Ok(document) => document, - Err(err) => { - tracing::debug!("Failed to resolve file: {}", err); - return None; - } - }; - let file = document.file(db); + /// Returns the URL of the document. + pub(crate) fn url(&self) -> &lsp_types::Url { + self.document.url() + } + + pub(crate) fn notebook(&self) -> Option<&NotebookDocument> { + self.notebook.as_deref() + } + + pub(crate) fn to_file(&self, db: &dyn Db) -> Option { + let file = self.document.to_file(db); if file.is_none() { tracing::debug!( - "Failed to resolve file: file not found for path `{}`", - document.file_path() + "Failed to resolve file: file not found for `{}`", + self.document.url() ); } file } + + pub(crate) fn to_file_path(&self) -> Cow<'_, AnySystemPath> { + self.document.to_file_path() + } } /// An immutable snapshot of the current state of [`Session`]. @@ -1320,3 +1322,90 @@ impl SuspendedWorkspaceDiagnosticRequest { None } } + +/// A handle to a document stored within [`Index`]. +/// +/// Allows identifying the document within the index but it also carries the URL used by the +/// client to reference the document as well as the version of the document. 
+/// +/// It also exposes methods to get the file-path of the corresponding ty-file. +#[derive(Clone, Debug)] +pub(crate) struct DocumentHandle { + /// The key that uniquely identifies this document in the index. + key: DocumentKey, + url: lsp_types::Url, + /// The path to the enclosing notebook file if this document is a notebook or a notebook cell. + notebook_path: Option, + version: DocumentVersion, +} + +impl DocumentHandle { + pub(crate) const fn version(&self) -> DocumentVersion { + self.version + } + + /// The URL as used by the client to reference this document. + pub(crate) fn url(&self) -> &lsp_types::Url { + &self.url + } + + /// The path to the enclosing file for this document. + /// + /// This is the path corresponding to the URL, except for notebook cells where the + /// path corresponds to the notebook file. + pub(crate) fn to_file_path(&self) -> Cow<'_, AnySystemPath> { + if let Some(path) = self.notebook_path.as_ref() { + Cow::Borrowed(path) + } else { + Cow::Owned(self.key.to_file_path()) + } + } + + /// Returns the salsa interned [`File`] for the document selected by this query. 
+ /// + /// It returns [`None`] for the following cases: + /// - For virtual file, if it's not yet opened + /// - For regular file, if it does not exist or is a directory + pub(crate) fn to_file(&self, db: &dyn Db) -> Option<File> { + match &*self.to_file_path() { + AnySystemPath::System(path) => system_path_to_file(db, path).ok(), + AnySystemPath::SystemVirtual(virtual_path) => db + .files() + .try_virtual_file(virtual_path) + .map(|virtual_file| virtual_file.file()), + } + } + + pub(crate) fn update_text_document( + &self, + session: &mut Session, + content_changes: Vec<lsp_types::TextDocumentContentChangeEvent>, + new_version: DocumentVersion, + ) -> crate::Result<()> { + let position_encoding = session.position_encoding(); + let mut index = session.index_mut(); + + let document_mut = index.document_mut(&self.key)?; + + let Some(document) = document_mut.as_text_mut() else { + anyhow::bail!("Text document path does not point to a text document"); + }; + + if content_changes.is_empty() { + document.update_version(new_version); + return Ok(()); + } + + document.apply_changes(content_changes, new_version, position_encoding); + + Ok(()) + } + + /// De-registers a document, specified by its key. + /// Calling this multiple times for the same document is a logic error. 
+ pub(crate) fn close(self, session: &mut Session) -> crate::Result<()> { + session.index_mut().close_document(&self.key)?; + session.bump_revision(); + Ok(()) + } +} diff --git a/crates/ty_server/src/session/index.rs b/crates/ty_server/src/session/index.rs index 89d310f2ab..95cc515a35 100644 --- a/crates/ty_server/src/session/index.rs +++ b/crates/ty_server/src/session/index.rs @@ -1,24 +1,24 @@ use std::sync::Arc; -use lsp_types::Url; -use ruff_db::Db; -use ruff_db::files::{File, system_path_to_file}; -use rustc_hash::FxHashMap; - +use crate::document::DocumentKey; +use crate::session::DocumentHandle; use crate::{ PositionEncoding, TextDocument, - document::{DocumentKey, DocumentVersion, NotebookDocument}, + document::{DocumentVersion, NotebookDocument}, system::AnySystemPath, }; +use ruff_db::system::SystemVirtualPath; +use rustc_hash::FxHashMap; + /// Stores and tracks all open documents in a session, along with their associated settings. #[derive(Debug)] pub(crate) struct Index { /// Maps all document file paths to the associated document controller - documents: FxHashMap, + documents: FxHashMap, /// Maps opaque cell URLs to a notebook path (document) - notebook_cells: FxHashMap, + notebook_cells: FxHashMap, } impl Index { @@ -29,68 +29,55 @@ impl Index { } } - pub(super) fn text_document_paths(&self) -> impl Iterator + '_ { - self.documents - .iter() - .filter_map(|(path, doc)| doc.as_text().and(Some(path))) + pub(super) fn text_documents( + &self, + ) -> impl Iterator + '_ { + self.documents.iter().filter_map(|(key, doc)| { + let text_document = doc.as_text()?; + Some((key, text_document)) + }) + } + + pub(crate) fn document_handle( + &self, + url: &lsp_types::Url, + ) -> Result { + let key = DocumentKey::from_url(url); + let Some(document) = self.documents.get(&key) else { + return Err(DocumentError::NotFound(key)); + }; + + if let Some(path) = key.as_opaque() { + if let Some(notebook_path) = self.notebook_cells.get(path) { + return Ok(DocumentHandle { + 
key: key.clone(), + notebook_path: Some(notebook_path.clone()), + url: url.clone(), + version: document.version(), + }); + } + } + + Ok(DocumentHandle { + key: key.clone(), + notebook_path: None, + url: url.clone(), + version: document.version(), + }) } #[expect(dead_code)] - pub(super) fn notebook_document_paths(&self) -> impl Iterator + '_ { + pub(super) fn notebook_document_keys(&self) -> impl Iterator + '_ { self.documents .iter() .filter(|(_, doc)| doc.as_notebook().is_some()) - .map(|(path, _)| path) - } - - pub(super) fn update_text_document( - &mut self, - key: &DocumentKey, - content_changes: Vec, - new_version: DocumentVersion, - encoding: PositionEncoding, - ) -> crate::Result<()> { - let controller = self.document_controller_for_key(key)?; - let Some(document) = controller.as_text_mut() else { - anyhow::bail!("Text document path does not point to a text document"); - }; - - if content_changes.is_empty() { - document.update_version(new_version); - return Ok(()); - } - - document.apply_changes(content_changes, new_version, encoding); - - Ok(()) - } - - /// Returns the [`DocumentKey`] corresponding to the given URL. - /// - /// It returns [`Err`] with the original URL if it cannot be converted to a [`AnySystemPath`]. - pub(crate) fn key_from_url(&self, url: Url) -> Result { - if let Some(notebook_path) = self.notebook_cells.get(&url) { - Ok(DocumentKey::NotebookCell { - cell_url: url, - notebook_path: notebook_path.clone(), - }) - } else { - let path = AnySystemPath::try_from_url(&url).map_err(|()| url)?; - if path - .extension() - .is_some_and(|ext| ext.eq_ignore_ascii_case("ipynb")) - { - Ok(DocumentKey::Notebook(path)) - } else { - Ok(DocumentKey::Text(path)) - } - } + .map(|(key, _)| key) } #[expect(dead_code)] pub(super) fn update_notebook_document( &mut self, - key: &DocumentKey, + notebook_key: &DocumentKey, cells: Option, metadata: Option>, new_version: DocumentVersion, @@ -102,17 +89,16 @@ impl Index { .. 
}) = cells.as_ref().and_then(|cells| cells.structure.as_ref()) { - let notebook_path = key.path().clone(); - for opened_cell in did_open { + let cell_path = SystemVirtualPath::new(opened_cell.uri.as_str()); self.notebook_cells - .insert(opened_cell.uri.clone(), notebook_path.clone()); + .insert(cell_path.to_string(), notebook_key.to_file_path()); } // deleted notebook cells are closed via textDocument/didClose - we don't close them here. } - let controller = self.document_controller_for_key(key)?; - let Some(notebook) = controller.as_notebook_mut() else { + let document = self.document_mut(notebook_key)?; + let Some(notebook) = document.as_notebook_mut() else { anyhow::bail!("Notebook document path does not point to a notebook document"); }; @@ -123,44 +109,64 @@ impl Index { /// Create a document reference corresponding to the given document key. /// /// Returns an error if the document is not found or if the path cannot be converted to a URL. - pub(crate) fn make_document_ref( + pub(crate) fn document(&self, key: &DocumentKey) -> Result<&Document, DocumentError> { + let Some(document) = self.documents.get(key) else { + return Err(DocumentError::NotFound(key.clone())); + }; + + Ok(document) + } + + pub(crate) fn notebook_arc( &self, - key: DocumentKey, - ) -> Result { - let path = key.path(); - let Some(controller) = self.documents.get(path) else { - return Err(DocumentQueryError::NotFound(key)); + key: &DocumentKey, + ) -> Result, DocumentError> { + let Some(document) = self.documents.get(key) else { + return Err(DocumentError::NotFound(key.clone())); }; - // TODO: The `to_url` conversion shouldn't be an error because the paths themselves are - // constructed from the URLs but the `Index` APIs don't maintain this invariant. 
- let (cell_url, file_path) = match key { - DocumentKey::NotebookCell { - cell_url, - notebook_path, - } => (Some(cell_url), notebook_path), - DocumentKey::Notebook(path) | DocumentKey::Text(path) => (None, path), - }; - Ok(controller.make_ref(cell_url, file_path)) + + if let Document::Notebook(notebook) = document { + Ok(notebook.clone()) + } else { + Err(DocumentError::NotFound(key.clone())) + } } - pub(super) fn open_text_document(&mut self, path: &AnySystemPath, document: TextDocument) { - self.documents - .insert(path.clone(), DocumentController::new_text(document)); + pub(super) fn open_text_document(&mut self, document: TextDocument) -> DocumentHandle { + let key = DocumentKey::from_url(document.url()); + + // TODO: Fix file path for notebook cells + let handle = DocumentHandle { + key: key.clone(), + notebook_path: None, + url: document.url().clone(), + version: document.version(), + }; + + self.documents.insert(key, Document::new_text(document)); + + handle } - pub(super) fn open_notebook_document( - &mut self, - notebook_path: &AnySystemPath, - document: NotebookDocument, - ) { + pub(super) fn open_notebook_document(&mut self, document: NotebookDocument) -> DocumentHandle { + let notebook_key = DocumentKey::from_url(document.url()); + let url = document.url().clone(); + let version = document.version(); + for cell_url in document.cell_urls() { self.notebook_cells - .insert(cell_url.clone(), notebook_path.clone()); + .insert(cell_url.to_string(), notebook_key.to_file_path()); + } + + self.documents + .insert(notebook_key.clone(), Document::new_notebook(document)); + + DocumentHandle { + notebook_path: Some(notebook_key.to_file_path()), + key: notebook_key, + url, + version, } - self.documents.insert( - notebook_path.clone(), - DocumentController::new_notebook(document), - ); } pub(super) fn close_document(&mut self, key: &DocumentKey) -> crate::Result<()> { @@ -169,27 +175,23 @@ impl Index { // is requested to be `closed` by VS Code after the notebook gets 
updated. // This is not documented in the LSP specification explicitly, and this assumption // may need revisiting in the future as we support more editors with notebook support. - if let DocumentKey::NotebookCell { cell_url, .. } = key { - if self.notebook_cells.remove(cell_url).is_none() { - tracing::warn!("Tried to remove a notebook cell that does not exist: {cell_url}"); - } - return Ok(()); + if let DocumentKey::Opaque(uri) = key { + self.notebook_cells.remove(uri); } - let path = key.path(); - let Some(_) = self.documents.remove(path) else { + let Some(_) = self.documents.remove(key) else { anyhow::bail!("tried to close document that didn't exist at {key}") }; + Ok(()) } - fn document_controller_for_key( + pub(super) fn document_mut( &mut self, key: &DocumentKey, - ) -> crate::Result<&mut DocumentController> { - let path = key.path(); - let Some(controller) = self.documents.get_mut(path) else { - anyhow::bail!("Document controller not available at `{key}`"); + ) -> Result<&mut Document, DocumentError> { + let Some(controller) = self.documents.get_mut(key) else { + return Err(DocumentError::NotFound(key.clone())); }; Ok(controller) } @@ -197,31 +199,24 @@ impl Index { /// A mutable handler to an underlying document. 
#[derive(Debug)] -enum DocumentController { +pub(crate) enum Document { Text(Arc), Notebook(Arc), } -impl DocumentController { - fn new_text(document: TextDocument) -> Self { +impl Document { + pub(super) fn new_text(document: TextDocument) -> Self { Self::Text(Arc::new(document)) } - fn new_notebook(document: NotebookDocument) -> Self { + pub(super) fn new_notebook(document: NotebookDocument) -> Self { Self::Notebook(Arc::new(document)) } - fn make_ref(&self, cell_url: Option, file_path: AnySystemPath) -> DocumentQuery { - match &self { - Self::Notebook(notebook) => DocumentQuery::Notebook { - cell_url, - file_path, - notebook: notebook.clone(), - }, - Self::Text(document) => DocumentQuery::Text { - file_path, - document: document.clone(), - }, + pub(crate) fn version(&self) -> DocumentVersion { + match self { + Self::Text(document) => document.version(), + Self::Notebook(notebook) => notebook.version(), } } @@ -254,85 +249,8 @@ impl DocumentController { } } -/// A read-only query to an open document. -/// -/// This query can 'select' a text document, full notebook, or a specific notebook cell. -/// It also includes document settings. -#[derive(Debug, Clone)] -pub(crate) enum DocumentQuery { - Text { - file_path: AnySystemPath, - document: Arc, - }, - Notebook { - /// The selected notebook cell, if it exists. - cell_url: Option, - /// The path to the notebook. - file_path: AnySystemPath, - notebook: Arc, - }, -} - -impl DocumentQuery { - /// Attempts to access the underlying notebook document that this query is selecting. - pub(crate) fn as_notebook(&self) -> Option<&NotebookDocument> { - match self { - Self::Notebook { notebook, .. } => Some(notebook), - Self::Text { .. } => None, - } - } - - /// Get the version of document selected by this query. - pub(crate) fn version(&self) -> DocumentVersion { - match self { - Self::Text { document, .. } => document.version(), - Self::Notebook { notebook, .. 
} => notebook.version(), - } - } - - /// Get the system path for the document selected by this query. - pub(crate) fn file_path(&self) -> &AnySystemPath { - match self { - Self::Text { file_path, .. } | Self::Notebook { file_path, .. } => file_path, - } - } - - /// Attempt to access the single inner text document selected by the query. - /// If this query is selecting an entire notebook document, this will return `None`. - #[expect(dead_code)] - pub(crate) fn as_single_document(&self) -> Option<&TextDocument> { - match self { - Self::Text { document, .. } => Some(document), - Self::Notebook { - notebook, - cell_url: cell_uri, - .. - } => cell_uri - .as_ref() - .and_then(|cell_uri| notebook.cell_document_by_uri(cell_uri)), - } - } - - /// Returns the salsa interned [`File`] for the document selected by this query. - /// - /// It returns [`None`] for the following cases: - /// - For virtual file, if it's not yet opened - /// - For regular file, if it does not exists or is a directory - pub(crate) fn file(&self, db: &dyn Db) -> Option { - match self.file_path() { - AnySystemPath::System(path) => system_path_to_file(db, path).ok(), - AnySystemPath::SystemVirtual(virtual_path) => db - .files() - .try_virtual_file(virtual_path) - .map(|virtual_file| virtual_file.file()), - } - } -} - #[derive(Debug, Clone, thiserror::Error)] -pub(crate) enum DocumentQueryError { - #[error("invalid URL: {0}")] - InvalidUrl(Url), +pub(crate) enum DocumentError { #[error("document not found for key: {0}")] NotFound(DocumentKey), } diff --git a/crates/ty_server/src/session/options.rs b/crates/ty_server/src/session/options.rs index c646c767de..982dccd484 100644 --- a/crates/ty_server/src/session/options.rs +++ b/crates/ty_server/src/session/options.rs @@ -213,7 +213,7 @@ impl WorkspaceOptions { if let Some(python) = &overrides.fallback_python { tracing::debug!( - "Using the Python environment selected in the VS Code Python extension \ + "Using the Python environment selected in your editor \ 
in case the configuration doesn't specify a Python environment: {python}", python = python.path() ); @@ -221,7 +221,7 @@ impl WorkspaceOptions { if let Some(version) = &overrides.fallback_python_version { tracing::debug!( - "Using the Python version selected in the VS Code Python extension: {version} \ + "Using the Python version selected in your editor: {version} \ in case the configuration doesn't specify a Python version", ); } diff --git a/crates/ty_server/src/system.rs b/crates/ty_server/src/system.rs index 323e4a6846..ce93d9636b 100644 --- a/crates/ty_server/src/system.rs +++ b/crates/ty_server/src/system.rs @@ -4,6 +4,9 @@ use std::fmt::Display; use std::panic::RefUnwindSafe; use std::sync::Arc; +use crate::Db; +use crate::document::DocumentKey; +use crate::session::index::{Document, Index}; use lsp_types::Url; use ruff_db::file_revision::FileRevision; use ruff_db::files::{File, FilePath}; @@ -14,11 +17,6 @@ use ruff_db::system::{ }; use ruff_notebook::{Notebook, NotebookError}; use ty_ide::cached_vendored_path; -use ty_python_semantic::Db; - -use crate::DocumentQuery; -use crate::document::DocumentKey; -use crate::session::index::Index; /// Returns a [`Url`] for the given [`File`]. pub(crate) fn file_to_url(db: &dyn Db, file: File) -> Option { @@ -41,26 +39,6 @@ pub(crate) enum AnySystemPath { } impl AnySystemPath { - /// Converts the given [`Url`] to an [`AnySystemPath`]. - /// - /// If the URL scheme is `file`, then the path is converted to a [`SystemPathBuf`]. Otherwise, the - /// URL is converted to a [`SystemVirtualPathBuf`]. - /// - /// This fails in the following cases: - /// * The URL cannot be converted to a file path (refer to [`Url::to_file_path`]). - /// * If the URL is not a valid UTF-8 string. 
- pub(crate) fn try_from_url(url: &Url) -> std::result::Result { - if url.scheme() == "file" { - Ok(AnySystemPath::System( - SystemPathBuf::from_path_buf(url.to_file_path()?).map_err(|_| ())?, - )) - } else { - Ok(AnySystemPath::SystemVirtual( - SystemVirtualPath::new(url.as_str()).to_path_buf(), - )) - } - } - pub(crate) const fn as_system(&self) -> Option<&SystemPathBuf> { match self { AnySystemPath::System(system_path_buf) => Some(system_path_buf), @@ -68,21 +46,11 @@ impl AnySystemPath { } } - /// Returns the extension of the path, if any. - pub(crate) fn extension(&self) -> Option<&str> { + #[expect(unused)] + pub(crate) const fn as_virtual(&self) -> Option<&SystemVirtualPath> { match self { - AnySystemPath::System(system_path) => system_path.extension(), - AnySystemPath::SystemVirtual(virtual_path) => virtual_path.extension(), - } - } - - /// Converts the path to a URL. - pub(crate) fn to_url(&self) -> Option { - match self { - AnySystemPath::System(system_path) => { - Url::from_file_path(system_path.as_std_path()).ok() - } - AnySystemPath::SystemVirtual(virtual_path) => Url::parse(virtual_path.as_str()).ok(), + AnySystemPath::SystemVirtual(path) => Some(path.as_path()), + AnySystemPath::System(_) => None, } } } @@ -144,33 +112,32 @@ impl LSPSystem { self.index.as_ref().unwrap() } - fn make_document_ref(&self, path: AnySystemPath) -> Option { + fn document(&self, path: AnySystemPath) -> Option<&Document> { let index = self.index(); - let key = DocumentKey::from_path(path); - index.make_document_ref(key).ok() + index.document(&DocumentKey::from(path)).ok() } - fn system_path_to_document_ref(&self, path: &SystemPath) -> Option { + pub(crate) fn system_path_to_document(&self, path: &SystemPath) -> Option<&Document> { let any_path = AnySystemPath::System(path.to_path_buf()); - self.make_document_ref(any_path) + self.document(any_path) } - fn system_virtual_path_to_document_ref( + pub(crate) fn system_virtual_path_to_document( &self, path: &SystemVirtualPath, - ) 
-> Option { + ) -> Option<&Document> { let any_path = AnySystemPath::SystemVirtual(path.to_path_buf()); - self.make_document_ref(any_path) + self.document(any_path) } } impl System for LSPSystem { fn path_metadata(&self, path: &SystemPath) -> Result { - let document = self.system_path_to_document_ref(path); + let document = self.system_path_to_document(path); if let Some(document) = document { Ok(Metadata::new( - document_revision(&document), + document_revision(document), None, FileType::File, )) @@ -188,32 +155,30 @@ impl System for LSPSystem { } fn read_to_string(&self, path: &SystemPath) -> Result { - let document = self.system_path_to_document_ref(path); + let document = self.system_path_to_document(path); match document { - Some(DocumentQuery::Text { document, .. }) => Ok(document.contents().to_string()), + Some(Document::Text(document)) => Ok(document.contents().to_string()), _ => self.native_system.read_to_string(path), } } fn read_to_notebook(&self, path: &SystemPath) -> std::result::Result { - let document = self.system_path_to_document_ref(path); + let document = self.system_path_to_document(path); match document { - Some(DocumentQuery::Text { document, .. }) => { - Notebook::from_source_code(document.contents()) - } - Some(DocumentQuery::Notebook { notebook, .. }) => Ok(notebook.make_ruff_notebook()), + Some(Document::Text(document)) => Notebook::from_source_code(document.contents()), + Some(Document::Notebook(notebook)) => Ok(notebook.make_ruff_notebook()), None => self.native_system.read_to_notebook(path), } } fn read_virtual_path_to_string(&self, path: &SystemVirtualPath) -> Result { let document = self - .system_virtual_path_to_document_ref(path) + .system_virtual_path_to_document(path) .ok_or_else(|| virtual_path_not_found(path))?; - if let DocumentQuery::Text { document, .. 
} = &document { + if let Document::Text(document) = &document { Ok(document.contents().to_string()) } else { Err(not_a_text_document(path)) @@ -225,12 +190,12 @@ impl System for LSPSystem { path: &SystemVirtualPath, ) -> std::result::Result { let document = self - .system_virtual_path_to_document_ref(path) + .system_virtual_path_to_document(path) .ok_or_else(|| virtual_path_not_found(path))?; match document { - DocumentQuery::Text { document, .. } => Notebook::from_source_code(document.contents()), - DocumentQuery::Notebook { notebook, .. } => Ok(notebook.make_ruff_notebook()), + Document::Text(document) => Notebook::from_source_code(document.contents()), + Document::Notebook(notebook) => Ok(notebook.make_ruff_notebook()), } } @@ -307,7 +272,7 @@ fn virtual_path_not_found(path: impl Display) -> std::io::Error { } /// Helper function to get the [`FileRevision`] of the given document. -fn document_revision(document: &DocumentQuery) -> FileRevision { +fn document_revision(document: &Document) -> FileRevision { // The file revision is just an opaque number which doesn't have any significant meaning other // than that the file has changed if the revisions are different. #[expect(clippy::cast_sign_loss)] diff --git a/crates/ty_server/tests/e2e/inlay_hints.rs b/crates/ty_server/tests/e2e/inlay_hints.rs index 3dbbbee994..ea7c833b5f 100644 --- a/crates/ty_server/tests/e2e/inlay_hints.rs +++ b/crates/ty_server/tests/e2e/inlay_hints.rs @@ -17,7 +17,7 @@ x = 1 def foo(a: int) -> int: return a + 1 -foo(1) +y = foo(1) "; let mut server = TestServerBuilder::new()? 
@@ -39,7 +39,7 @@ foo(1) [ { "position": { - "line": 0, + "line": 5, "character": 1 }, "label": [ @@ -47,7 +47,7 @@ foo(1) "value": ": " }, { - "value": "Literal[1]" + "value": "int" } ], "kind": 1 @@ -55,7 +55,7 @@ foo(1) { "position": { "line": 5, - "character": 4 + "character": 8 }, "label": [ { diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__commands__debug_command.snap b/crates/ty_server/tests/e2e/snapshots/e2e__commands__debug_command.snap index ba3b75028c..ab96ebf4e8 100644 --- a/crates/ty_server/tests/e2e/snapshots/e2e__commands__debug_command.snap +++ b/crates/ty_server/tests/e2e/snapshots/e2e__commands__debug_command.snap @@ -59,8 +59,10 @@ Settings: Settings { "invalid-legacy-type-variable": Error (Default), "invalid-metaclass": Error (Default), "invalid-named-tuple": Error (Default), + "invalid-newtype": Error (Default), "invalid-overload": Error (Default), "invalid-parameter-default": Error (Default), + "invalid-paramspec": Error (Default), "invalid-protocol": Error (Default), "invalid-raise": Error (Default), "invalid-return-type": Error (Default), diff --git a/crates/ty_vendored/ty_extensions/ty_extensions.pyi b/crates/ty_vendored/ty_extensions/ty_extensions.pyi index 79cda64bef..744bd5af37 100644 --- a/crates/ty_vendored/ty_extensions/ty_extensions.pyi +++ b/crates/ty_vendored/ty_extensions/ty_extensions.pyi @@ -67,6 +67,23 @@ class ConstraintSet: .. _subtype: https://typing.python.org/en/latest/spec/concepts.html#subtype-supertype-and-type-equivalence """ + def satisfies(self, other: Self) -> Self: + """ + Returns whether this constraint set satisfies another — that is, whether + every specialization that satisfies this constraint set also satisfies + `other`. + """ + + def satisfied_by_all_typevars( + self, *, inferable: tuple[Any, ...] | None = None + ) -> bool: + """ + Returns whether this constraint set is satisfied by all of the typevars + that it mentions. 
You must provide a tuple of the typevars that should + be considered `inferable`. All other typevars mentioned in the + constraint set will be considered non-inferable. + """ + def __bool__(self) -> bool: ... def __eq__(self, other: ConstraintSet) -> bool: ... def __ne__(self, other: ConstraintSet) -> bool: ... diff --git a/crates/ty_vendored/vendor/typeshed/README.md b/crates/ty_vendored/vendor/typeshed/README.md index 1467aa20b4..d295b56bc0 100644 --- a/crates/ty_vendored/vendor/typeshed/README.md +++ b/crates/ty_vendored/vendor/typeshed/README.md @@ -7,10 +7,10 @@ ## About Typeshed contains external type annotations for the Python standard library -and Python builtins, as well as third party packages as contributed by +and Python builtins, as well as third-party packages that are contributed by people external to those projects. -This data can e.g. be used for static analysis, type checking, type inference, +This data can, e.g., be used for static analysis, type checking, type inference, and autocompletion. For information on how to use typeshed, read below. Information for @@ -29,8 +29,8 @@ If you're just using a type checker (e.g. [mypy](https://github.com/python/mypy/ [pyright](https://github.com/microsoft/pyright), or PyCharm's built-in type checker), as opposed to developing it, you don't need to interact with the typeshed repo at -all: a copy of standard library part of typeshed is bundled with type checkers. -And type stubs for third party packages and modules you are using can +all: a copy of the standard library part of typeshed is bundled with type checkers. +And type stubs for third-party packages and modules you are using can be installed from PyPI. For example, if you are using `html5lib` and `requests`, you can install the type stubs using @@ -70,7 +70,7 @@ package you're using, each with its own tradeoffs: type checking due to changes in the stubs. Another risk of this strategy is that stubs often lag behind - the package being stubbed. 
You might want to force the package being stubbed + the package that is being stubbed. You might want to force the package being stubbed to a certain minimum version because it fixes a critical bug, but if correspondingly updated stubs have not been released, your type checking results may not be fully accurate. @@ -119,6 +119,6 @@ a review of your type annotations or stubs outside of typeshed, head over to [our discussion forum](https://github.com/python/typing/discussions). For less formal discussion, try the typing chat room on [gitter.im](https://gitter.im/python/typing). Some typeshed maintainers -are almost always present; feel free to find us there and we're happy +are almost always present; feel free to find us there, and we're happy to chat. Substantive technical discussion will be directed to the issue tracker. diff --git a/crates/ty_vendored/vendor/typeshed/source_commit.txt b/crates/ty_vendored/vendor/typeshed/source_commit.txt index 54a8607d25..d0fd6efd8e 100644 --- a/crates/ty_vendored/vendor/typeshed/source_commit.txt +++ b/crates/ty_vendored/vendor/typeshed/source_commit.txt @@ -1 +1 @@ -d6f4a0f7102b1400a21742cf9b7ea93614e2b6ec +bf7214784877c52638844c065360d4814fae4c65 diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/builtins.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/builtins.pyi index bcacb3857b..4859bbe675 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/builtins.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/builtins.pyi @@ -4525,6 +4525,10 @@ class BaseException: def __setstate__(self, state: dict[str, Any] | None, /) -> None: ... def with_traceback(self, tb: TracebackType | None, /) -> Self: """Set self.__traceback__ to tb and return self.""" + # Necessary for security-focused static analyzers (e.g, pysa) + # See https://github.com/python/typeshed/pull/14900 + def __str__(self) -> str: ... # noqa: Y029 + def __repr__(self) -> str: ... 
# noqa: Y029 if sys.version_info >= (3, 11): # only present after add_note() is called __notes__: list[str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/cmath.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/cmath.pyi index 575f2bf95d..659595046b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/cmath.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/cmath.pyi @@ -67,7 +67,7 @@ def isinf(z: _C, /) -> bool: def isnan(z: _C, /) -> bool: """Checks if the real or imaginary part of z not a number (NaN).""" -def log(x: _C, base: _C = ..., /) -> complex: +def log(z: _C, base: _C = ..., /) -> complex: """log(z[, base]) -> the logarithm of z to the given base. If the base is not specified, returns the natural logarithm (base e) of z. diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/contextlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/contextlib.pyi index 2b05511c33..85baf55925 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/contextlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/contextlib.pyi @@ -6,7 +6,7 @@ from _typeshed import FileDescriptorOrPath, Unused from abc import ABC, abstractmethod from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Callable, Generator, Iterator from types import TracebackType -from typing import IO, Any, Generic, Protocol, TypeVar, overload, runtime_checkable, type_check_only +from typing import Any, Generic, Protocol, TypeVar, overload, runtime_checkable, type_check_only from typing_extensions import ParamSpec, Self, TypeAlias __all__ = [ @@ -32,7 +32,6 @@ if sys.version_info >= (3, 11): _T = TypeVar("_T") _T_co = TypeVar("_T_co", covariant=True) -_T_io = TypeVar("_T_io", bound=IO[str] | None) _ExitT_co = TypeVar("_ExitT_co", covariant=True, bound=bool | None, default=bool | None) _F = TypeVar("_F", bound=Callable[..., Any]) _G_co = TypeVar("_G_co", bound=Generator[Any, Any, Any] | AsyncGenerator[Any, Any], covariant=True) @@ -275,13 +274,23 @@ class 
suppress(AbstractContextManager[None, bool]): self, exctype: type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None ) -> bool: ... -class _RedirectStream(AbstractContextManager[_T_io, None]): - def __init__(self, new_target: _T_io) -> None: ... +# This is trying to describe what is needed for (most?) uses +# of `redirect_stdout` and `redirect_stderr`. +# https://github.com/python/typeshed/issues/14903 +@type_check_only +class _SupportsRedirect(Protocol): + def write(self, s: str, /) -> int: ... + def flush(self) -> None: ... + +_SupportsRedirectT = TypeVar("_SupportsRedirectT", bound=_SupportsRedirect | None) + +class _RedirectStream(AbstractContextManager[_SupportsRedirectT, None]): + def __init__(self, new_target: _SupportsRedirectT) -> None: ... def __exit__( self, exctype: type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None ) -> None: ... -class redirect_stdout(_RedirectStream[_T_io]): +class redirect_stdout(_RedirectStream[_SupportsRedirectT]): """Context manager for temporarily redirecting stdout to another file. # How to send help() to stderr @@ -294,7 +303,7 @@ class redirect_stdout(_RedirectStream[_T_io]): help(pow) """ -class redirect_stderr(_RedirectStream[_T_io]): +class redirect_stderr(_RedirectStream[_SupportsRedirectT]): """Context manager for temporarily redirecting stderr to another file.""" class _BaseExitStack(Generic[_ExitT_co]): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/enum.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/enum.pyi index b9933de380..825340e75b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/enum.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/enum.pyi @@ -623,6 +623,8 @@ if sys.version_info >= (3, 11): the module is the last module in case of a multi-module name """ + def show_flag_values(value: int) -> list[int]: ... + if sys.version_info >= (3, 12): # The body of the class is the same, but the base classes are different. 
class IntFlag(int, ReprEnum, Flag, boundary=KEEP): # type: ignore[misc] # complaints about incompatible bases diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi index 88f6a919a1..1ea3e4ea80 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi @@ -752,6 +752,9 @@ environ: _Environ[str] if sys.platform != "win32": environb: _Environ[bytes] +if sys.version_info >= (3, 14): + def reload_environ() -> None: ... + if sys.version_info >= (3, 11) or sys.platform != "win32": EX_OK: Final[int] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi index 21514c7609..0ecc8e2693 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi @@ -578,6 +578,21 @@ def _getframe(depth: int = 0, /) -> FrameType: only. """ +# documented -- see https://docs.python.org/3/library/sys.html#sys._current_exceptions +if sys.version_info >= (3, 12): + def _current_exceptions() -> dict[int, BaseException | None]: + """Return a dict mapping each thread's identifier to its current raised exception. + + This function should be used for specialized purposes only. + """ + +else: + def _current_exceptions() -> dict[int, OptExcInfo]: + """Return a dict mapping each thread's identifier to its current raised exception. + + This function should be used for specialized purposes only. + """ + if sys.version_info >= (3, 12): def _getframemodulename(depth: int = 0) -> str | None: """Return the name of the module for a calling frame. @@ -627,6 +642,9 @@ def exit(status: _ExitCode = None, /) -> NoReturn: exit status will be one (i.e., failure). """ +if sys.platform == "android": # noqa: Y008 + def getandroidapilevel() -> int: ... 
+ def getallocatedblocks() -> int: """Return the number of memory blocks currently allocated.""" @@ -949,3 +967,9 @@ if sys.version_info >= (3, 14): script (str|bytes): The path to a file containing the Python code to be executed. """ + + def _is_immortal(op: object, /) -> bool: + """Return True if the given object is "immortal" per PEP 683. + + This function should be used for specialized purposes only. + """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sysconfig.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sysconfig.pyi index 8cdd3b1b2f..1dfb9f3cfe 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sysconfig.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sysconfig.pyi @@ -2,7 +2,7 @@ import sys from typing import IO, Any, Literal, overload -from typing_extensions import deprecated +from typing_extensions import LiteralString, deprecated __all__ = [ "get_config_h_filename", @@ -47,8 +47,10 @@ def get_scheme_names() -> tuple[str, ...]: """Return a tuple containing the schemes names.""" if sys.version_info >= (3, 10): - def get_default_scheme() -> str: ... - def get_preferred_scheme(key: Literal["prefix", "home", "user"]) -> str: ... + def get_default_scheme() -> LiteralString: ... + def get_preferred_scheme(key: Literal["prefix", "home", "user"]) -> LiteralString: ... + # Documented -- see https://docs.python.org/3/library/sysconfig.html#sysconfig._get_preferred_schemes + def _get_preferred_schemes() -> dict[Literal["prefix", "home", "user"], LiteralString]: ... 
def get_path_names() -> tuple[str, ...]: """Return a tuple containing the paths names.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi index 1f31c1fbb4..1d8e299023 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi @@ -1721,17 +1721,22 @@ class Wm: if sys.platform == "darwin": @overload def wm_attributes(self, option: Literal["-modified"], /) -> bool: - """Return or sets platform specific attributes. + """This subcommand returns or sets platform specific attributes - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. + The first form returns a list of the platform specific flags and + their values. The second form returns the value for the specific + option. The third form sets one or more of the values. The values + are as follows: - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. + On Windows, -disabled gets or sets whether the window is in a + disabled state. -toolwindow gets or sets the style of the window + to toolwindow (as defined in the MSDN). -topmost gets or sets + whether this is a topmost window (displays above all other + windows). + + On Macintosh, XXXXX + + On Unix, there are currently no special attribute values. """ @overload @@ -1803,20 +1808,7 @@ class Wm: def wm_attributes(self, option: Literal["topmost"], /) -> bool: ... if sys.platform == "darwin": @overload - def wm_attributes(self, option: Literal["modified"], /) -> bool: - """Return or sets platform specific attributes. 
- - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. - - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. - """ - + def wm_attributes(self, option: Literal["modified"], /) -> bool: ... @overload def wm_attributes(self, option: Literal["notify"], /) -> bool: ... @overload @@ -1876,17 +1868,22 @@ class Wm: if sys.platform == "darwin": @overload def wm_attributes(self, option: Literal["-modified"], value: bool, /) -> Literal[""]: - """Return or sets platform specific attributes. + """This subcommand returns or sets platform specific attributes - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. + The first form returns a list of the platform specific flags and + their values. The second form returns the value for the specific + option. The third form sets one or more of the values. The values + are as follows: - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. + On Windows, -disabled gets or sets whether the window is in a + disabled state. -toolwindow gets or sets the style of the window + to toolwindow (as defined in the MSDN). -topmost gets or sets + whether this is a topmost window (displays above all other + windows). + + On Macintosh, XXXXX + + On Unix, there are currently no special attribute values. 
""" @overload @@ -1950,19 +1947,7 @@ class Wm: titlepath: str = ..., topmost: bool = ..., transparent: bool = ..., - ) -> None: - """Return or sets platform specific attributes. - - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. - - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. - """ + ) -> None: ... elif sys.platform == "win32": @overload def wm_attributes( diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/turtle.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/turtle.pyi index b0e7c1bf29..61cd2e44b5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/turtle.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/turtle.pyi @@ -669,7 +669,7 @@ class TurtleScreen(TurtleScreenBase): ['arrow', 'blank', 'circle', ... , 'turtle'] """ - def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: + def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: """Bind fun to mouse-click event on canvas. Arguments: @@ -2540,7 +2540,7 @@ def getshapes() -> list[str]: ['arrow', 'blank', 'circle', ... , 'turtle'] """ -def onclick(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: +def onclick(fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: """Bind fun to mouse-click event on this turtle on canvas. 
Arguments: @@ -3960,7 +3960,7 @@ def getturtle() -> Turtle: getpen = getturtle -def onrelease(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: +def onrelease(fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: """Bind fun to mouse-button-release event on this turtle on canvas. Arguments: @@ -3983,7 +3983,7 @@ def onrelease(fun: Callable[[float, float], object], btn: int = 1, add: Any | No transparent. """ -def ondrag(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: +def ondrag(fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: """Bind fun to mouse-move event on this turtle on canvas. Arguments: diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zlib.pyi index 97d70804a3..a8231f62ee 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zlib.pyi @@ -41,8 +41,8 @@ Z_RLE: Final = 3 Z_SYNC_FLUSH: Final = 2 Z_TREES: Final = 6 -if sys.version_info >= (3, 14) and sys.platform == "win32": - # Available when zlib was built with zlib-ng, usually only on Windows +if sys.version_info >= (3, 14): + # Available when zlib was built with zlib-ng ZLIBNG_VERSION: Final[str] class error(Exception): ... 
diff --git a/dist-workspace.toml b/dist-workspace.toml index 20f123b05a..ddc157cdf3 100644 --- a/dist-workspace.toml +++ b/dist-workspace.toml @@ -5,7 +5,7 @@ packages = ["ruff"] # Config for 'dist' [dist] # The preferred dist version to use in CI (Cargo.toml SemVer syntax) -cargo-dist-version = "0.30.0" +cargo-dist-version = "0.30.2" # Whether to consider the binaries in a package for distribution (defaults true) dist = false # CI backends to support diff --git a/docs/configuration.md b/docs/configuration.md index 8d3297fbca..7a5f62fc60 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -618,8 +618,9 @@ Options: notebooks, use `--extension ipy:ipynb` --statistics Show counts for every rule with at least one violation - --add-noqa - Enable automatic additions of `noqa` directives to failing lines + --add-noqa[=] + Enable automatic additions of `noqa` directives to failing lines. + Optionally provide a reason to append after the codes --show-files See the files Ruff will be run against with the current settings --show-settings diff --git a/docs/editors/setup.md b/docs/editors/setup.md index 3d81935465..17652539c3 100644 --- a/docs/editors/setup.md +++ b/docs/editors/setup.md @@ -422,29 +422,12 @@ bundle for TextMate. ## Zed -Ruff is available as an extension for the Zed editor. To install it: +Ruff support is now built into Zed (no separate extension required). -1. Open the command palette with `Cmd+Shift+P` -1. Search for "zed: extensions" -1. Search for "ruff" in the extensions list and click "Install" +By default, Zed uses Ruff for formatting and linting. 
-To configure Zed to use the Ruff language server for Python files, add the following -to your `settings.json` file: - -```json -{ - "languages": { - "Python": { - "language_servers": ["ruff"] - // Or, if there are other language servers you want to use with Python - // "language_servers": ["pyright", "ruff"] - } - } -} -``` - -To configure the language server, you can provide the [server settings](settings.md) -under the [`lsp.ruff.initialization_options.settings`](https://zed.dev/docs/configuring-zed#lsp) key: +To set up editor-wide Ruff options, provide the [server settings](settings.md) +under the [`lsp.ruff.initialization_options.settings`](https://zed.dev/docs/configuring-zed#lsp) key of your `settings.json` file: ```json { @@ -452,7 +435,7 @@ under the [`lsp.ruff.initialization_options.settings`](https://zed.dev/docs/conf "ruff": { "initialization_options": { "settings": { - // Ruff server settings goes here + // Ruff server settings go here "lineLength": 80, "lint": { "extendSelect": ["I"], @@ -464,22 +447,14 @@ under the [`lsp.ruff.initialization_options.settings`](https://zed.dev/docs/conf } ``` -You can configure Ruff to format Python code on-save by registering the Ruff formatter -and enabling the [`format_on_save`](https://zed.dev/docs/configuring-zed#format-on-save) setting: +[`format_on_save`](https://zed.dev/docs/configuring-zed#format-on-save) is enabled by default. 
+You can disable it for Python by changing `format_on_save` in your `settings.json` file: ```json { "languages": { "Python": { - "language_servers": ["ruff"], - "format_on_save": "on", - "formatter": [ - { - "language_server": { - "name": "ruff" - } - } - ] + "format_on_save": "off" } } } @@ -492,40 +467,12 @@ You can configure Ruff to fix lint violations and/or organize imports on-save by { "languages": { "Python": { - "language_servers": ["ruff"], - "format_on_save": "on", - "formatter": [ - // Fix all auto-fixable lint violations - { "code_action": "source.fixAll.ruff" }, + "code_actions_on_format": { // Organize imports - { "code_action": "source.organizeImports.ruff" } - ] - } - } -} -``` - -Taken together, you can configure Ruff to format, fix, and organize imports on-save via the -following `settings.json`: - -!!! note - - For this configuration, it is important to use the correct order of the code action and - formatter language server settings. The code actions should be defined before the formatter to - ensure that the formatter takes care of any remaining style issues after the code actions have - been applied. 
- -```json -{ - "languages": { - "Python": { - "language_servers": ["ruff"], - "format_on_save": "on", - "formatter": [ - { "code_action": "source.fixAll.ruff" }, - { "code_action": "source.organizeImports.ruff" }, - { "language_server": { "name": "ruff" } } - ] + "source.organizeImports.ruff": true, + // Fix all auto-fixable lint violations + "source.fixAll.ruff": true + } } } } diff --git a/docs/integrations.md b/docs/integrations.md index 441845a474..78850e7940 100644 --- a/docs/integrations.md +++ b/docs/integrations.md @@ -80,7 +80,7 @@ You can add the following configuration to `.gitlab-ci.yml` to run a `ruff forma stage: build interruptible: true image: - name: ghcr.io/astral-sh/ruff:0.14.2-alpine + name: ghcr.io/astral-sh/ruff:0.14.4-alpine before_script: - cd $CI_PROJECT_DIR - ruff --version @@ -106,7 +106,7 @@ Ruff can be used as a [pre-commit](https://pre-commit.com) hook via [`ruff-pre-c ```yaml - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.14.2 + rev: v0.14.4 hooks: # Run the linter. - id: ruff-check @@ -119,7 +119,7 @@ To enable lint fixes, add the `--fix` argument to the lint hook: ```yaml - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.14.2 + rev: v0.14.4 hooks: # Run the linter. - id: ruff-check @@ -133,7 +133,7 @@ To avoid running on Jupyter Notebooks, remove `jupyter` from the list of allowed ```yaml - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.14.2 + rev: v0.14.4 hooks: # Run the linter. 
- id: ruff-check diff --git a/docs/requirements-insiders.txt b/docs/requirements-insiders.txt index 127d4bfaa2..954e47c74c 100644 --- a/docs/requirements-insiders.txt +++ b/docs/requirements-insiders.txt @@ -1,5 +1,5 @@ PyYAML==6.0.3 -ruff==0.13.3 +ruff==0.14.4 mkdocs==1.6.1 mkdocs-material @ git+ssh://git@github.com/astral-sh/mkdocs-material-insiders.git@39da7a5e761410349e9a1b8abf593b0cdd5453ff mkdocs-redirects==1.2.2 diff --git a/docs/requirements.txt b/docs/requirements.txt index 9742b48785..01c34ac2f6 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,5 +1,5 @@ PyYAML==6.0.3 -ruff==0.13.3 +ruff==0.14.4 mkdocs==1.6.1 mkdocs-material==9.5.38 mkdocs-redirects==1.2.2 diff --git a/docs/tutorial.md b/docs/tutorial.md index f3f2a8b3dd..a35a58787a 100644 --- a/docs/tutorial.md +++ b/docs/tutorial.md @@ -369,7 +369,7 @@ This tutorial has focused on Ruff's command-line interface, but Ruff can also be ```yaml - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.14.2 + rev: v0.14.4 hooks: # Run the linter. 
- id: ruff diff --git a/fuzz/Cargo.toml b/fuzz/Cargo.toml index 359e59d1e1..c4e40423d3 100644 --- a/fuzz/Cargo.toml +++ b/fuzz/Cargo.toml @@ -30,7 +30,7 @@ ty_python_semantic = { path = "../crates/ty_python_semantic" } ty_vendored = { path = "../crates/ty_vendored" } libfuzzer-sys = { git = "https://github.com/rust-fuzz/libfuzzer", default-features = false } -salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "cdd0b85516a52c18b8a6d17a2279a96ed6c3e198", default-features = false, features = [ +salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "05a9af7f554b64b8aadc2eeb6f2caf73d0408d09", default-features = false, features = [ "compact_str", "macros", "salsa_unstable", diff --git a/playground/package-lock.json b/playground/package-lock.json index c8ddfc60a9..3de5b851bf 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -1747,6 +1747,12 @@ "vite": "^5.2.0 || ^6 || ^7" } }, + "node_modules/@types/emscripten": { + "version": "1.41.5", + "resolved": "https://registry.npmjs.org/@types/emscripten/-/emscripten-1.41.5.tgz", + "integrity": "sha512-cMQm7pxu6BxtHyqJ7mQZ2kXWV5SLmugybFdHCBbJ5eHzOo6VhBckEgAT3//rP5FwPHNPeEiq4SmQ5ucBwsOo4Q==", + "license": "MIT" + }, "node_modules/@types/estree": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", @@ -1789,12 +1795,6 @@ "@types/react": "^19.0.0" } }, - "node_modules/@types/trusted-types": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-1.0.6.tgz", - "integrity": "sha512-230RC8sFeHoT6sSUlRO6a8cAnclO06eeiq1QDfiv2FGCLWFvvERWgwIQD4FWqD9A69BN7Lzee4OXwoMVnnsWDw==", - "license": "MIT" - }, "node_modules/@typescript-eslint/eslint-plugin": { "version": "8.38.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.38.0.tgz", @@ -2732,6 +2732,12 @@ "node": ">=0.10.0" } }, + "node_modules/dompurify": { + "version": "3.1.7", + "resolved": 
"https://registry.npmjs.org/dompurify/-/dompurify-3.1.7.tgz", + "integrity": "sha512-VaTstWtsneJY8xzy7DekmYWEOZcmzIe3Qb3zPd4STve1OBTa+e+WmS1ITQec1fZYXI3HCsOZZiSMpG6oxoWMWQ==", + "license": "(MPL-2.0 OR Apache-2.0)" + }, "node_modules/dunder-proto": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", @@ -4733,6 +4739,18 @@ "@jridgewell/sourcemap-codec": "^1.5.0" } }, + "node_modules/marked": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/marked/-/marked-14.0.0.tgz", + "integrity": "sha512-uIj4+faQ+MgHgwUW1l2PsPglZLOLOT1uErt06dAPtx2kjteLAkbsd/0FiYg/MGS+i7ZKLb7w2WClxHkzOOuryQ==", + "license": "MIT", + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 18" + } + }, "node_modules/math-intrinsics": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", @@ -4841,13 +4859,14 @@ } }, "node_modules/monaco-editor": { - "version": "0.53.0", - "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.53.0.tgz", - "integrity": "sha512-0WNThgC6CMWNXXBxTbaYYcunj08iB5rnx4/G56UOPeL9UVIUGGHA1GR0EWIh9Ebabj7NpCRawQ5b0hfN1jQmYQ==", + "version": "0.54.0", + "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.54.0.tgz", + "integrity": "sha512-hx45SEUoLatgWxHKCmlLJH81xBo0uXP4sRkESUpmDQevfi+e7K1VuiSprK6UpQ8u4zOcKNiH0pMvHvlMWA/4cw==", "license": "MIT", "peer": true, "dependencies": { - "@types/trusted-types": "^1.0.6" + "dompurify": "3.1.7", + "marked": "14.0.0" } }, "node_modules/ms": { @@ -5265,11 +5284,12 @@ } }, "node_modules/pyodide": { - "version": "0.28.0", - "resolved": "https://registry.npmjs.org/pyodide/-/pyodide-0.28.0.tgz", - "integrity": "sha512-QML/Gh8eu50q5zZKLNpW6rgS0XUdK+94OSL54AUSKV8eJAxgwZrMebqj+CyM0EbF3EUX8JFJU3ryaxBViHammQ==", + "version": "0.29.0", + "resolved": "https://registry.npmjs.org/pyodide/-/pyodide-0.29.0.tgz", + "integrity": 
"sha512-ObIvsTmcrxAWKg+FT1GjfSdDmQc5CabnYe/nn5BCuhr9BVVITeQ24DBdZuG5B2tIiAZ9YonBpnDB7cmHZyd2Rw==", "license": "MPL-2.0", "dependencies": { + "@types/emscripten": "^1.41.4", "ws": "^8.5.0" }, "engines": { @@ -6546,7 +6566,7 @@ "@monaco-editor/react": "^4.4.6", "classnames": "^2.3.2", "lz-string": "^1.5.0", - "monaco-editor": "^0.53.0", + "monaco-editor": "^0.54.0", "react": "^19.0.0", "react-dom": "^19.0.0", "react-resizable-panels": "^3.0.0", @@ -6575,8 +6595,8 @@ "@monaco-editor/react": "^4.7.0", "classnames": "^2.5.1", "lz-string": "^1.5.0", - "monaco-editor": "^0.53.0", - "pyodide": "^0.28.0", + "monaco-editor": "^0.54.0", + "pyodide": "^0.29.0", "react": "^19.0.0", "react-dom": "^19.0.0", "react-resizable-panels": "^3.0.0", diff --git a/playground/ruff/package.json b/playground/ruff/package.json index 50f87a38d1..abb46f73e9 100644 --- a/playground/ruff/package.json +++ b/playground/ruff/package.json @@ -18,7 +18,7 @@ "@monaco-editor/react": "^4.4.6", "classnames": "^2.3.2", "lz-string": "^1.5.0", - "monaco-editor": "^0.53.0", + "monaco-editor": "^0.54.0", "react": "^19.0.0", "react-dom": "^19.0.0", "react-resizable-panels": "^3.0.0", diff --git a/playground/ty/package.json b/playground/ty/package.json index ad2b546980..cf36dceece 100644 --- a/playground/ty/package.json +++ b/playground/ty/package.json @@ -18,8 +18,8 @@ "@monaco-editor/react": "^4.7.0", "classnames": "^2.5.1", "lz-string": "^1.5.0", - "monaco-editor": "^0.53.0", - "pyodide": "^0.28.0", + "monaco-editor": "^0.54.0", + "pyodide": "^0.29.0", "react": "^19.0.0", "react-dom": "^19.0.0", "react-resizable-panels": "^3.0.0", diff --git a/playground/ty/src/Editor/Editor.tsx b/playground/ty/src/Editor/Editor.tsx index c82250b9e1..b7755d54de 100644 --- a/playground/ty/src/Editor/Editor.tsx +++ b/playground/ty/src/Editor/Editor.tsx @@ -70,13 +70,16 @@ export default function Editor({ const serverRef = useRef(null); if (serverRef.current != null) { - serverRef.current.update({ - files, - workspace, - 
onOpenFile, - onVendoredFileChange, - onBackToUserFile, - }); + serverRef.current.update( + { + files, + workspace, + onOpenFile, + onVendoredFileChange, + onBackToUserFile, + }, + isViewingVendoredFile, + ); } // Update the diagnostics in the editor. @@ -200,6 +203,7 @@ class PlaygroundServer private rangeSemanticTokensDisposable: IDisposable; private signatureHelpDisposable: IDisposable; private documentHighlightDisposable: IDisposable; + private inVendoredFileCondition: editor.IContextKey; // Cache for vendored file handles private vendoredFileHandles = new Map(); @@ -249,8 +253,16 @@ class PlaygroundServer this.documentHighlightDisposable = monaco.languages.registerDocumentHighlightProvider("python", this); + this.inVendoredFileCondition = editor.createContextKey( + "inVendoredFile", + false, + ); // Register Esc key command - editor.addCommand(monaco.KeyCode.Escape, this.props.onBackToUserFile); + editor.addCommand( + monaco.KeyCode.Escape, + () => this.props.onBackToUserFile(), + "inVendoredFile", + ); } triggerCharacters: string[] = ["."]; @@ -452,8 +464,9 @@ class PlaygroundServer return undefined; } - update(props: PlaygroundServerProps) { + update(props: PlaygroundServerProps, isViewingVendoredFile: boolean) { this.props = props; + this.inVendoredFileCondition.set(isViewingVendoredFile); } private getOrCreateVendoredFileHandle(vendoredPath: string): FileHandle { diff --git a/playground/ty/src/Editor/SecondaryPanel.tsx b/playground/ty/src/Editor/SecondaryPanel.tsx index 7f68bb7153..6c8cd0189a 100644 --- a/playground/ty/src/Editor/SecondaryPanel.tsx +++ b/playground/ty/src/Editor/SecondaryPanel.tsx @@ -2,7 +2,7 @@ import MonacoEditor from "@monaco-editor/react"; import { AstralButton, Theme } from "shared"; import { ReadonlyFiles } from "../Playground"; import { Suspense, use, useState } from "react"; -import { loadPyodide, PyodideInterface } from "pyodide"; +import { loadPyodide } from "pyodide"; import classNames from "classnames"; export enum 
SecondaryTool { @@ -103,41 +103,18 @@ function Content({ } } -let pyodidePromise: Promise | null = null; +const SANDBOX_BASE_DIRECTORY = "/playground/"; function Run({ files, theme }: { files: ReadonlyFiles; theme: Theme }) { - if (pyodidePromise == null) { - pyodidePromise = loadPyodide(); - } + const [runOutput, setRunOutput] = useState | null>(null); + const handleRun = () => { + const output = (async () => { + const pyodide = await loadPyodide({ + env: { + HOME: SANDBOX_BASE_DIRECTORY, + }, + }); - return ( - Loading} - > - - - ); -} - -function RunWithPyiodide({ - files, - pyodidePromise, - theme, -}: { - files: ReadonlyFiles; - theme: Theme; - pyodidePromise: Promise; -}) { - const pyodide = use(pyodidePromise); - - const [output, setOutput] = useState(null); - - if (output == null) { - const handleRun = () => { let combined_output = ""; const outputHandler = (output: string) => { @@ -151,7 +128,17 @@ function RunWithPyiodide({ let fileName = "main.py"; for (const file of files.index) { - pyodide.FS.writeFile(file.name, files.contents[file.id]); + const last_separator = file.name.lastIndexOf("/"); + + if (last_separator !== -1) { + const directory = + SANDBOX_BASE_DIRECTORY + file.name.slice(0, last_separator); + pyodide.FS.mkdirTree(directory); + } + pyodide.FS.writeFile( + SANDBOX_BASE_DIRECTORY + file.name, + files.contents[file.id], + ); if (file.id === files.selected) { fileName = file.name; @@ -162,7 +149,7 @@ function RunWithPyiodide({ const globals = dict(); try { - // Patch up reveal types + // Patch `reveal_type` to print runtime values pyodide.runPython(` import builtins @@ -179,14 +166,18 @@ function RunWithPyiodide({ filename: fileName, }); - setOutput(combined_output); + return combined_output; } catch (e) { - setOutput(`Failed to run Python script: ${e}`); + return `Failed to run Python script: ${e}`; } finally { globals.destroy(); dict.destroy(); } - }; + })(); + setRunOutput(output); + }; + + if (runOutput == null) { return (
); } + + return ( + Loading
} + > + + + ); +} + +function RunOutput({ + runOutput, + theme, +}: { + theme: Theme; + runOutput: Promise; +}) { + const output = use(runOutput); + return (
 ResolvedCliArgs:
 
     if not args.test_executable:
         print(
-            "Running `cargo build --release` since no test executable was specified...",
+            "Running `cargo build --profile=profiling` since no test executable was specified...",
             flush=True,
         )
         cmd: list[str] = [
             "cargo",
             "build",
-            "--release",
+            "--profile",
+            "profiling",
             "--locked",
             "--color",
             "always",
diff --git a/python/ruff-ecosystem/pyproject.toml b/python/ruff-ecosystem/pyproject.toml
index f688f10094..dd5af5cfeb 100644
--- a/python/ruff-ecosystem/pyproject.toml
+++ b/python/ruff-ecosystem/pyproject.toml
@@ -6,7 +6,7 @@ build-backend = "hatchling.build"
 name = "ruff-ecosystem"
 version = "0.0.0"
 requires-python = ">=3.11"
-dependencies = ["unidiff==0.7.5", "tomli_w==1.2.0", "tomli==2.2.1"]
+dependencies = ["unidiff==0.7.5", "tomli_w==1.2.0", "tomli==2.3.0"]
 
 [project.scripts]
 ruff-ecosystem = "ruff_ecosystem.cli:entrypoint"
diff --git a/python/ruff-ecosystem/ruff_ecosystem/projects.py b/python/ruff-ecosystem/ruff_ecosystem/projects.py
index cf7b732409..38d5623159 100644
--- a/python/ruff-ecosystem/ruff_ecosystem/projects.py
+++ b/python/ruff-ecosystem/ruff_ecosystem/projects.py
@@ -206,9 +206,6 @@ class CheckOptions(CommandOptions):
             "check",
             "--no-cache",
             "--exit-zero",
-            # Ignore internal test rules
-            "--ignore",
-            "RUF9",
             # Never apply fixes, as they pollute the comparison results
             "--no-fix",
             # Use the concise format for comparing violations
diff --git a/ruff.schema.json b/ruff.schema.json
index 04ef3fcc3d..a16e91fbd7 100644
--- a/ruff.schema.json
+++ b/ruff.schema.json
@@ -71,7 +71,7 @@
       "deprecated": true
     },
     "extend": {
-      "description": "A path to a local `pyproject.toml` file to merge into this\nconfiguration. User home directory and environment variables will be\nexpanded.\n\nTo resolve the current `pyproject.toml` file, Ruff will first resolve\nthis base configuration file, then merge in any properties defined\nin the current configuration file.",
+      "description": "A path to a local `pyproject.toml` or `ruff.toml` file to merge into this\nconfiguration. User home directory and environment variables will be\nexpanded.\n\nTo resolve the current configuration file, Ruff will first load\nthis base configuration file, then merge in properties defined\nin the current configuration file. Most settings follow simple override\nbehavior where the child value replaces the parent value. However,\nrule selection (`lint.select` and `lint.ignore`) has special merging\nbehavior: if the child configuration specifies `lint.select`, it\nestablishes a new baseline rule set and the parent's `lint.ignore`\nrules are discarded; if the child configuration omits `lint.select`,\nthe parent's rule selection is inherited and both parent and child\n`lint.ignore` rules are accumulated together.",
       "type": [
         "string",
         "null"
diff --git a/rust-toolchain.toml b/rust-toolchain.toml
index 73328e053b..1a35d66439 100644
--- a/rust-toolchain.toml
+++ b/rust-toolchain.toml
@@ -1,2 +1,2 @@
 [toolchain]
-channel = "1.90"
+channel = "1.91"
diff --git a/scripts/benchmarks/pyproject.toml b/scripts/benchmarks/pyproject.toml
index df9f38e5db..6867f98577 100644
--- a/scripts/benchmarks/pyproject.toml
+++ b/scripts/benchmarks/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "scripts"
-version = "0.14.2"
+version = "0.14.4"
 description = ""
 authors = ["Charles Marsh "]
 
diff --git a/scripts/release.sh b/scripts/release.sh
index ad97c20c8a..ae8d22f525 100755
--- a/scripts/release.sh
+++ b/scripts/release.sh
@@ -12,7 +12,7 @@ project_root="$(dirname "$script_root")"
 echo "Updating metadata with rooster..."
 cd "$project_root"
 uvx --python 3.12 --isolated -- \
-    rooster@0.1.0 release "$@"
+    rooster@0.1.1 release "$@"
 
 echo "Updating lockfile..."
 cargo update -p ruff
diff --git a/ty.schema.json b/ty.schema.json
index 270241fb28..3026102a5f 100644
--- a/ty.schema.json
+++ b/ty.schema.json
@@ -584,8 +584,8 @@
           ]
         },
         "invalid-key": {
-          "title": "detects invalid subscript accesses",
-          "description": "## What it does\nChecks for subscript accesses with invalid keys.\n\n## Why is this bad?\nUsing an invalid key will raise a `KeyError` at runtime.\n\n## Examples\n```python\nfrom typing import TypedDict\n\nclass Person(TypedDict):\n    name: str\n    age: int\n\nalice = Person(name=\"Alice\", age=30)\nalice[\"height\"]  # KeyError: 'height'\n```",
+          "title": "detects invalid subscript accesses or TypedDict literal keys",
+          "description": "## What it does\nChecks for subscript accesses with invalid keys and `TypedDict` construction with an\nunknown key.\n\n## Why is this bad?\nSubscripting with an invalid key will raise a `KeyError` at runtime.\n\nCreating a `TypedDict` with an unknown key is likely a mistake; if the `TypedDict` is\n`closed=true` it also violates the expectations of the type.\n\n## Examples\n```python\nfrom typing import TypedDict\n\nclass Person(TypedDict):\n    name: str\n    age: int\n\nalice = Person(name=\"Alice\", age=30)\nalice[\"height\"]  # KeyError: 'height'\n\nbob: Person = { \"name\": \"Bob\", \"age\": 30 }  # typo!\n\ncarol = Person(name=\"Carol\", age=25)  # typo!\n```",
           "default": "error",
           "oneOf": [
             {
@@ -623,6 +623,16 @@
             }
           ]
         },
+        "invalid-newtype": {
+          "title": "detects invalid NewType definitions",
+          "description": "## What it does\nChecks for the creation of invalid `NewType`s\n\n## Why is this bad?\nThere are several requirements that you must follow when creating a `NewType`.\n\n## Examples\n```python\nfrom typing import NewType\n\ndef get_name() -> str: ...\n\nFoo = NewType(\"Foo\", int)        # okay\nBar = NewType(get_name(), int)   # error: The first argument to `NewType` must be a string literal\nBaz = NewType(\"Baz\", int | str)  # error: invalid base for `typing.NewType`\n```",
+          "default": "error",
+          "oneOf": [
+            {
+              "$ref": "#/definitions/Level"
+            }
+          ]
+        },
         "invalid-overload": {
           "title": "detects invalid `@overload` usages",
           "description": "## What it does\nChecks for various invalid `@overload` usages.\n\n## Why is this bad?\nThe `@overload` decorator is used to define functions and methods that accepts different\ncombinations of arguments and return different types based on the arguments passed. This is\nmainly beneficial for type checkers. But, if the `@overload` usage is invalid, the type\nchecker may not be able to provide correct type information.\n\n## Example\n\nDefining only one overload:\n\n```py\nfrom typing import overload\n\n@overload\ndef foo(x: int) -> int: ...\ndef foo(x: int | None) -> int | None:\n    return x\n```\n\nOr, not providing an implementation for the overloaded definition:\n\n```py\nfrom typing import overload\n\n@overload\ndef foo() -> None: ...\n@overload\ndef foo(x: int) -> int: ...\n```\n\n## References\n- [Python documentation: `@overload`](https://docs.python.org/3/library/typing.html#typing.overload)",
@@ -643,6 +653,16 @@
             }
           ]
         },
+        "invalid-paramspec": {
+          "title": "detects invalid ParamSpec usage",
+          "description": "## What it does\nChecks for the creation of invalid `ParamSpec`s\n\n## Why is this bad?\nThere are several requirements that you must follow when creating a `ParamSpec`.\n\n## Examples\n```python\nfrom typing import ParamSpec\n\nP1 = ParamSpec(\"P1\")  # okay\nP2 = ParamSpec(\"S2\")  # error: ParamSpec name must match the variable it's assigned to\n```\n\n## References\n- [Typing spec: ParamSpec](https://typing.python.org/en/latest/spec/generics.html#paramspec)",
+          "default": "error",
+          "oneOf": [
+            {
+              "$ref": "#/definitions/Level"
+            }
+          ]
+        },
         "invalid-protocol": {
           "title": "detects invalid protocol class definitions",
           "description": "## What it does\nChecks for protocol classes that will raise `TypeError` at runtime.\n\n## Why is this bad?\nAn invalidly defined protocol class may lead to the type checker inferring\nunexpected things. It may also lead to `TypeError`s at runtime.\n\n## Examples\nA `Protocol` class cannot inherit from a non-`Protocol` class;\nthis raises a `TypeError` at runtime:\n\n```pycon\n>>> from typing import Protocol\n>>> class Foo(int, Protocol): ...\n...\nTraceback (most recent call last):\n  File \"\", line 1, in \n    class Foo(int, Protocol): ...\nTypeError: Protocols can only inherit from other protocols, got \n```",