mirror of https://github.com/astral-sh/ruff
Compare commits
No commits in common. "main" and "0.14.4" have entirely different histories.
@@ -2,11 +2,12 @@
   $schema: "https://docs.renovatebot.com/renovate-schema.json",
   dependencyDashboard: true,
   suppressNotifications: ["prEditedNotification"],
-  extends: ["github>astral-sh/renovate-config"],
+  extends: ["config:recommended"],
   labels: ["internal"],
   schedule: ["before 4am on Monday"],
   semanticCommits: "disabled",
   separateMajorMinor: false,
+  prHourlyLimit: 10,
   enabledManagers: ["github-actions", "pre-commit", "cargo", "pep621", "pip_requirements", "npm"],
   cargo: {
     // See https://docs.renovatebot.com/configuration-options/#rangestrategy
@@ -15,7 +16,7 @@
   pep621: {
     // The default for this package manager is to only search for `pyproject.toml` files
     // found at the repository root: https://docs.renovatebot.com/modules/manager/pep621/#file-matching
-    managerFilePatterns: ["^(python|scripts)/.*pyproject\\.toml$"],
+    fileMatch: ["^(python|scripts)/.*pyproject\\.toml$"],
   },
   pip_requirements: {
     // The default for this package manager is to run on all requirements.txt files:
@@ -33,7 +34,7 @@
   npm: {
     // The default for this package manager is to only search for `package.json` files
     // found at the repository root: https://docs.renovatebot.com/modules/manager/npm/#file-matching
-    managerFilePatterns: ["^playground/.*package\\.json$"],
+    fileMatch: ["^playground/.*package\\.json$"],
   },
   "pre-commit": {
     enabled: true,
@@ -75,6 +76,14 @@
       matchManagers: ["cargo"],
       enabled: false,
     },
+    {
+      // `mkdocs-material` requires a manual update to keep the version in sync
+      // with `mkdocs-material-insider`.
+      // See: https://squidfunk.github.io/mkdocs-material/insiders/upgrade/
+      matchManagers: ["pip_requirements"],
+      matchPackageNames: ["mkdocs-material"],
+      enabled: false,
+    },
     {
       groupName: "pre-commit dependencies",
       matchManagers: ["pre-commit"],
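
Note on the renovate.json5 hunks above: both sides describe the same file matching for the pep621 and npm managers; the keys differ because newer Renovate releases use `managerFilePatterns` in place of the older `fileMatch` for the same pattern list. A minimal sketch of the newer spelling, with the pattern copied from the hunk above and everything else illustrative:

    {
      pep621: {
        // Search pyproject.toml files under python/ and scripts/, not just the repository root.
        managerFilePatterns: ["^(python|scripts)/.*pyproject\\.toml$"],
      },
    }
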
@@ -43,7 +43,7 @@ jobs:
         with:
           submodules: recursive
           persist-credentials: false
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.PYTHON_VERSION }}
       - name: "Prep README.md"
@@ -72,7 +72,7 @@ jobs:
         with:
           submodules: recursive
           persist-credentials: false
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.PYTHON_VERSION }}
           architecture: x64
@@ -114,7 +114,7 @@ jobs:
         with:
           submodules: recursive
           persist-credentials: false
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.PYTHON_VERSION }}
           architecture: arm64
@@ -170,7 +170,7 @@ jobs:
         with:
           submodules: recursive
           persist-credentials: false
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.PYTHON_VERSION }}
           architecture: ${{ matrix.platform.arch }}
@@ -223,7 +223,7 @@ jobs:
         with:
           submodules: recursive
           persist-credentials: false
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.PYTHON_VERSION }}
           architecture: x64
@@ -300,7 +300,7 @@ jobs:
         with:
           submodules: recursive
           persist-credentials: false
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.PYTHON_VERSION }}
       - name: "Prep README.md"
@@ -365,7 +365,7 @@ jobs:
         with:
           submodules: recursive
           persist-credentials: false
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.PYTHON_VERSION }}
           architecture: x64
@@ -431,7 +431,7 @@ jobs:
         with:
           submodules: recursive
           persist-credentials: false
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.PYTHON_VERSION }}
       - name: "Prep README.md"
@@ -24,8 +24,6 @@ env:
   PACKAGE_NAME: ruff
   PYTHON_VERSION: "3.14"
   NEXTEST_PROFILE: ci
-  # Enable mdtests that require external dependencies
-  MDTEST_EXTERNAL: "1"

 jobs:
   determine_changes:
@@ -232,9 +230,7 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
       - name: "Install Rust toolchain"
         run: |
           rustup component add clippy
@@ -254,24 +250,21 @@ jobs:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-        with:
-          shared-key: ruff-linux-debug
-          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
         uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
+        uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45
         with:
           tool: cargo-nextest
       - name: "Install cargo insta"
-        uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
+        uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45
         with:
           tool: cargo-insta
       - name: "Install uv"
-        uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
+        uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
         with:
           enable-cache: "true"
       - name: ty mdtests (GitHub annotations)
@@ -286,10 +279,6 @@ jobs:
         run: cargo insta test --all-features --unreferenced reject --test-runner nextest
       - name: Dogfood ty on py-fuzzer
         run: uv run --project=./python/py-fuzzer cargo run -p ty check --project=./python/py-fuzzer
-      - name: Dogfood ty on the scripts directory
-        run: uv run --project=./scripts cargo run -p ty check --project=./scripts
-      - name: Dogfood ty on ty_benchmark
-        run: uv run --project=./scripts/ty_benchmark cargo run -p ty check --project=./scripts/ty_benchmark
       # Check for broken links in the documentation.
       - run: cargo doc --all --no-deps
         env:
@@ -298,10 +287,18 @@ jobs:
       # sync, not just public items. Eventually we should do this for all
       # crates; for now add crates here as they are warning-clean to prevent
       # regression.
-      - run: cargo doc --no-deps -p ty_python_semantic -p ty -p ty_test -p ruff_db -p ruff_python_formatter --document-private-items
+      - run: cargo doc --no-deps -p ty_python_semantic -p ty -p ty_test -p ruff_db --document-private-items
         env:
           # Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025).
           RUSTDOCFLAGS: "-D warnings"
+      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        with:
+          name: ruff
+          path: target/debug/ruff
+      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        with:
+          name: ty
+          path: target/debug/ty

   cargo-test-linux-release:
     name: "cargo test (linux, release)"
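
The two upload-artifact steps added on the 0.14.4 side publish the debug `ruff` and `ty` binaries built in this job so that later jobs can download them instead of rebuilding from source. A minimal sketch of that handoff, assuming the artifact keeps the name `ruff` used above (job and step names here are illustrative, and the loose action tags stand in for the pinned SHAs in the diff):

    jobs:
      build:
        steps:
          # ... cargo build producing target/debug/ruff ...
          - uses: actions/upload-artifact@v4
            with:
              name: ruff
              path: target/debug/ruff
      consumer:
        needs: build
        steps:
          - uses: actions/download-artifact@v5
            id: dl
            with:
              name: ruff
              path: ruff-to-test
          - run: |
              # Artifact downloads do not preserve the executable bit.
              chmod +x "${{ steps.dl.outputs.download-path }}/ruff"
              "${{ steps.dl.outputs.download-path }}/ruff" --version
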
@@ -317,25 +314,25 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
         uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
+        uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45
         with:
           tool: cargo-nextest
+      - name: "Install cargo insta"
+        uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45
+        with:
+          tool: cargo-insta
       - name: "Install uv"
-        uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
+        uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
         with:
           enable-cache: "true"
       - name: "Run tests"
-        run: cargo nextest run --cargo-profile profiling --all-features
-      - name: "Run doctests"
-        run: cargo test --doc --profile profiling --all-features
+        run: cargo insta test --release --all-features --unreferenced reject --test-runner nextest

   cargo-test-other:
     strategy:
@@ -352,17 +349,15 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
+        uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45
         with:
           tool: cargo-nextest
       - name: "Install uv"
-        uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
+        uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
         with:
           enable-cache: "true"
       - name: "Run tests"
@@ -380,9 +375,7 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
       - name: "Install Rust toolchain"
         run: rustup target add wasm32-unknown-unknown
       - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
@@ -417,9 +410,7 @@ jobs:
         with:
           file: "Cargo.toml"
           field: "workspace.package.rust-version"
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
       - name: "Install Rust toolchain"
         env:
           MSRV: ${{ steps.msrv.outputs.value }}
@@ -441,16 +432,15 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
         with:
           workspaces: "fuzz -> target"
-          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
         uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: "Install cargo-binstall"
-        uses: cargo-bins/cargo-binstall@3fc81674af4165a753833a94cae9f91d8849049f # v1.16.2
+        uses: cargo-bins/cargo-binstall@b3f755e95653da9a2d25b99154edfdbd5b356d0a # v1.15.10
       - name: "Install cargo-fuzz"
         # Download the latest version from quick install and not the github releases because github releases only has MUSL targets.
         run: cargo binstall cargo-fuzz --force --disable-strategies crate-meta-data --no-confirm
@@ -459,7 +449,9 @@ jobs:
   fuzz-parser:
     name: "fuzz parser"
     runs-on: ubuntu-latest
-    needs: determine_changes
+    needs:
+      - cargo-test-linux
+      - determine_changes
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.parser == 'true' || needs.determine_changes.outputs.py-fuzzer == 'true') }}
     timeout-minutes: 20
     env:
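
The `needs:` change above is what ties the artifact consumers to the producer: `needs` accepts either a single job id or a list, and the 0.14.4 side switches to the list form so that `fuzz-parser` only starts after the job that uploads the `ruff` artifact has finished, while still honouring the `determine_changes` gate. A sketch of the list form, with the job ids taken from the hunk and the rest illustrative:

    fuzz-parser:
      needs:
        - cargo-test-linux
        - determine_changes
      if: ${{ needs.determine_changes.outputs.parser == 'true' }}
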
@@ -468,24 +460,27 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
+      - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
+        name: Download Ruff binary to test
+        id: download-cached-binary
         with:
-          shared-key: ruff-linux-debug
-          save-if: false
-      - name: "Install Rust toolchain"
-        run: rustup show
-      - name: Build Ruff binary
-        run: cargo build --bin ruff
+          name: ruff
+          path: ruff-to-test
       - name: Fuzz
+        env:
+          DOWNLOAD_PATH: ${{ steps.download-cached-binary.outputs.download-path }}
         run: |
+          # Make executable, since artifact download doesn't preserve this
+          chmod +x "${DOWNLOAD_PATH}/ruff"
+
           (
             uv run \
             --python="${PYTHON_VERSION}" \
             --project=./python/py-fuzzer \
             --locked \
             fuzz \
-            --test-executable=target/debug/ruff \
+            --test-executable="${DOWNLOAD_PATH}/ruff" \
             --bin=ruff \
             0-500
           )
@@ -500,10 +495,8 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
-      - uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
       - name: "Install Rust toolchain"
         run: rustup component add rustfmt
       # Run all code generation scripts, and verify that the current output is
@@ -527,7 +520,9 @@ jobs:
   ecosystem:
     name: "ecosystem"
     runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-latest-8' || 'ubuntu-latest' }}
-    needs: determine_changes
+    needs:
+      - cargo-test-linux
+      - determine_changes
     # Only runs on pull requests, since that is the only we way we can find the base version for comparison.
     # Ecosystem check needs linter and/or formatter changes.
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }}
@@ -535,37 +530,26 @@ jobs:
     steps:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
-          ref: ${{ github.event.pull_request.base.ref }}
           persist-credentials: false
-      - uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
         with:
           python-version: ${{ env.PYTHON_VERSION }}
           activate-environment: true

-      - name: "Install Rust toolchain"
-        run: rustup show
-      - name: "Install mold"
-        uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
-
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
+        name: Download comparison Ruff binary
+        id: ruff-target
         with:
-          shared-key: ruff-linux-debug
-          save-if: false
+          name: ruff
+          path: target/debug

-      - name: Build baseline version
-        run: |
-          cargo build --bin ruff
-          mv target/debug/ruff target/debug/ruff-baseline
-
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
+        name: Download baseline Ruff binary
         with:
-          persist-credentials: false
-          clean: false
-
-      - name: Build comparison version
-        run: cargo build --bin ruff
+          name: ruff
+          branch: ${{ github.event.pull_request.base.ref }}
+          workflow: "ci.yaml"
+          check_artifacts: true

       - name: Install ruff-ecosystem
         run: |
@@ -573,11 +557,16 @@ jobs:

       - name: Run `ruff check` stable ecosystem check
         if: ${{ needs.determine_changes.outputs.linter == 'true' }}
+        env:
+          DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
         run: |
+          # Make executable, since artifact download doesn't preserve this
+          chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
+
           # Set pipefail to avoid hiding errors with tee
           set -eo pipefail

-          ruff-ecosystem check ./target/debug/ruff-baseline ./target/debug/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable
+          ruff-ecosystem check ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable

           cat ecosystem-result-check-stable > "$GITHUB_STEP_SUMMARY"
           echo "### Linter (stable)" > ecosystem-result
@@ -586,11 +575,16 @@ jobs:

       - name: Run `ruff check` preview ecosystem check
         if: ${{ needs.determine_changes.outputs.linter == 'true' }}
+        env:
+          DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
         run: |
+          # Make executable, since artifact download doesn't preserve this
+          chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
+
           # Set pipefail to avoid hiding errors with tee
           set -eo pipefail

-          ruff-ecosystem check ./target/debug/ruff-baseline ./target/debug/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview
+          ruff-ecosystem check ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview

           cat ecosystem-result-check-preview > "$GITHUB_STEP_SUMMARY"
           echo "### Linter (preview)" >> ecosystem-result
@@ -599,11 +593,16 @@ jobs:

       - name: Run `ruff format` stable ecosystem check
         if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
+        env:
+          DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
         run: |
+          # Make executable, since artifact download doesn't preserve this
+          chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
+
           # Set pipefail to avoid hiding errors with tee
           set -eo pipefail

-          ruff-ecosystem format ./target/debug/ruff-baseline ./target/debug/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable
+          ruff-ecosystem format ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable

           cat ecosystem-result-format-stable > "$GITHUB_STEP_SUMMARY"
           echo "### Formatter (stable)" >> ecosystem-result
@@ -612,19 +611,32 @@ jobs:

       - name: Run `ruff format` preview ecosystem check
         if: ${{ needs.determine_changes.outputs.formatter == 'true' }}
+        env:
+          DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
         run: |
+          # Make executable, since artifact download doesn't preserve this
+          chmod +x ./ruff "${DOWNLOAD_PATH}/ruff"
+
           # Set pipefail to avoid hiding errors with tee
           set -eo pipefail

-          ruff-ecosystem format ./target/debug/ruff-baseline ./target/debug/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview
+          ruff-ecosystem format ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview

           cat ecosystem-result-format-preview > "$GITHUB_STEP_SUMMARY"
           echo "### Formatter (preview)" >> ecosystem-result
           cat ecosystem-result-format-preview >> ecosystem-result
           echo "" >> ecosystem-result

-      # NOTE: astral-sh-bot uses this artifact to post comments on PRs.
-      # Make sure to update the bot if you rename the artifact.
+      - name: Export pull request number
+        run: |
+          echo ${{ github.event.number }} > pr-number
+
+      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        name: Upload PR Number
+        with:
+          name: pr-number
+          path: pr-number
+
       - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         name: Upload Results
         with:
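
The `Export pull request number` / `Upload PR Number` steps on the 0.14.4 side exist because, per the comment kept on main's side, astral-sh-bot reads these artifacts to post comments on PRs; the bot presumably runs in a separate workflow that does not have the pull request context and therefore recovers the PR number from an artifact. A minimal sketch of that pattern, assuming the artifact name `pr-number` used above (everything else illustrative):

    - name: Export pull request number
      if: ${{ github.event_name == 'pull_request' }}
      run: echo "${{ github.event.number }}" > pr-number
    - uses: actions/upload-artifact@v4
      with:
        name: pr-number
        path: pr-number
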
@@ -644,10 +656,8 @@ jobs:
         with:
           fetch-depth: 0
           persist-credentials: false
-      - uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
@@ -690,7 +700,7 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: cargo-bins/cargo-binstall@3fc81674af4165a753833a94cae9f91d8849049f # v1.16.2
+      - uses: cargo-bins/cargo-binstall@b3f755e95653da9a2d25b99154edfdbd5b356d0a # v1.15.10
       - run: cargo binstall --no-confirm cargo-shear
       - run: cargo shear

@@ -703,16 +713,14 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
         uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: "Run ty completion evaluation"
-        run: cargo run --profile profiling --package ty_completion_eval -- all --threshold 0.4 --tasks /tmp/completion-evaluation-tasks.csv
+        run: cargo run --release --package ty_completion_eval -- all --threshold 0.4 --tasks /tmp/completion-evaluation-tasks.csv
       - name: "Ensure there are no changes"
         run: diff ./crates/ty_completion_eval/completion-evaluation-tasks.csv /tmp/completion-evaluation-tasks.csv
@@ -725,13 +733,11 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.PYTHON_VERSION }}
           architecture: x64
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
       - name: "Prep README.md"
         run: python scripts/transform_readme.py --target pypi
       - name: "Build wheels"
@@ -754,10 +760,8 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
       - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
         with:
           node-version: 22
@@ -781,21 +785,30 @@ jobs:
     name: "mkdocs"
     runs-on: ubuntu-latest
     timeout-minutes: 10
+    env:
+      MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
     steps:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - name: "Add SSH key"
+        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
+        uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
         with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
+          ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
       - name: "Install Rust toolchain"
         run: rustup show
       - name: Install uv
-        uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
+        uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
         with:
           python-version: 3.13
           activate-environment: true
+      - name: "Install Insiders dependencies"
+        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
+        run: uv pip install -r docs/requirements-insiders.txt
       - name: "Install dependencies"
+        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
         run: uv pip install -r docs/requirements.txt
       - name: "Update README File"
         run: python scripts/transform_readme.py --target mkdocs
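
The mkdocs hunks in this job gate the Insiders-specific steps on whether the `MKDOCS_INSIDERS_SSH_KEY` secret is available: the secret's presence is exposed once as a job-level env var, and each optional step checks that variable in its `if:`, so forks without the secret still get a plain docs build. A condensed sketch of the gating, with the names taken from the hunks and the remaining structure illustrative:

    mkdocs:
      env:
        MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
      steps:
        - name: "Add SSH key"
          if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
          uses: webfactory/ssh-agent@v0.9.1
          with:
            ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
        - name: "Build docs"
          if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
          run: mkdocs build --strict -f mkdocs.public.yml
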
@@ -803,8 +816,12 @@ jobs:
         run: python scripts/generate_mkdocs.py
       - name: "Check docs formatting"
         run: python scripts/check_docs_formatted.py
+      - name: "Build Insiders docs"
+        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
+        run: mkdocs build --strict -f mkdocs.insiders.yml
       - name: "Build docs"
-        run: mkdocs build --strict -f mkdocs.yml
+        if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
+        run: mkdocs build --strict -f mkdocs.public.yml

   check-formatter-instability-and-black-similarity:
     name: "formatter instabilities and black similarity"
@@ -816,9 +833,7 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Run checks"
@@ -832,7 +847,9 @@ jobs:
     name: "test ruff-lsp"
     runs-on: ubuntu-latest
     timeout-minutes: 5
-    needs: determine_changes
+    needs:
+      - cargo-test-linux
+      - determine_changes
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
     steps:
       - uses: extractions/setup-just@e33e0265a09d6d736e2ee1e0eb685ef1de4669ff # v3.0.0
@@ -840,46 +857,37 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-        name: "Checkout ruff source"
-        with:
-          persist-credentials: false
-
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-        with:
-          shared-key: ruff-linux-debug
-          save-if: false
-
-      - name: "Install Rust toolchain"
-        run: rustup show
-
-      - name: Build Ruff binary
-        run: cargo build -p ruff --bin ruff
-
-      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-        name: "Checkout ruff-lsp source"
+        name: "Download ruff-lsp source"
         with:
           persist-credentials: false
           repository: "astral-sh/ruff-lsp"
-          path: ruff-lsp

-      - uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+      - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           # installation fails on 3.13 and newer
           python-version: "3.12"

+      - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
+        name: Download development ruff binary
+        id: ruff-target
+        with:
+          name: ruff
+          path: target/debug
+
       - name: Install ruff-lsp dependencies
         run: |
-          cd ruff-lsp
           just install

       - name: Run ruff-lsp tests
+        env:
+          DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }}
         run: |
           # Setup development binary
           pip uninstall --yes ruff
-          export PATH="${PWD}/target/debug:${PATH}"
+          chmod +x "${DOWNLOAD_PATH}/ruff"
+          export PATH="${DOWNLOAD_PATH}:${PATH}"
           ruff version

-          cd ruff-lsp
           just test
@@ -895,9 +903,7 @@ jobs:
           persist-credentials: false
       - name: "Install Rust toolchain"
         run: rustup target add wasm32-unknown-unknown
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
       - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
         with:
           node-version: 22
@@ -905,7 +911,7 @@ jobs:
           cache-dependency-path: playground/package-lock.json
       - uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
       - name: "Install Node dependencies"
-        run: npm ci --ignore-scripts
+        run: npm ci
         working-directory: playground
       - name: "Build playgrounds"
         run: npm run dev:wasm
@@ -929,25 +935,20 @@ jobs:
         needs.determine_changes.outputs.linter == 'true'
       )
     timeout-minutes: 20
-    permissions:
-      contents: read # required for actions/checkout
-      id-token: write # required for OIDC authentication with CodSpeed
     steps:
       - name: "Checkout Branch"
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false

-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
-      - uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2

       - name: "Install Rust toolchain"
         run: rustup show

       - name: "Install codspeed"
-        uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
+        uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45
         with:
           tool: cargo-codspeed
@@ -955,10 +956,11 @@ jobs:
         run: cargo codspeed build --features "codspeed,instrumented" --profile profiling --no-default-features -p ruff_benchmark --bench formatter --bench lexer --bench linter --bench parser

       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@346a2d8a8d9d38909abd0bc3d23f773110f076ad # v4.4.1
+        uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
         with:
-          mode: simulation
+          mode: instrumentation
           run: cargo codspeed run
+          token: ${{ secrets.CODSPEED_TOKEN }}

   benchmarks-instrumented-ty:
     name: "benchmarks instrumented (ty)"
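
The benchmark jobs differ between the two sides mainly in how the CodSpeed action authenticates: the 0.14.4 side passes `token: ${{ secrets.CODSPEED_TOKEN }}` to the action and runs in `instrumentation` mode, while main drops the token, grants the job `id-token: write` for OIDC (that is what the removed `permissions` comments refer to), and uses `simulation` mode. A sketch of the token-free variant as it appears on the main side, assuming the CodSpeedHQ/action major version pinned above (job name illustrative):

    benchmarks:
      permissions:
        contents: read   # required for actions/checkout
        id-token: write  # required for OIDC authentication with CodSpeed
      steps:
        - name: "Run benchmarks"
          uses: CodSpeedHQ/action@v4
          with:
            mode: simulation
            run: cargo codspeed run
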
@@ -971,25 +973,20 @@ jobs:
         needs.determine_changes.outputs.ty == 'true'
       )
     timeout-minutes: 20
-    permissions:
-      contents: read # required for actions/checkout
-      id-token: write # required for OIDC authentication with CodSpeed
     steps:
       - name: "Checkout Branch"
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false

-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
-      - uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2

       - name: "Install Rust toolchain"
         run: rustup show

       - name: "Install codspeed"
-        uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
+        uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45
         with:
           tool: cargo-codspeed
@@ -997,10 +994,11 @@ jobs:
         run: cargo codspeed build --features "codspeed,instrumented" --profile profiling --no-default-features -p ruff_benchmark --bench ty

       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@346a2d8a8d9d38909abd0bc3d23f773110f076ad # v4.4.1
+        uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
         with:
-          mode: simulation
+          mode: instrumentation
           run: cargo codspeed run
+          token: ${{ secrets.CODSPEED_TOKEN }}

   benchmarks-walltime:
     name: "benchmarks walltime (${{ matrix.benchmarks }})"
@@ -1008,9 +1006,6 @@ jobs:
     needs: determine_changes
     if: ${{ github.repository == 'astral-sh/ruff' && !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.ty == 'true' || github.ref == 'refs/heads/main') }}
     timeout-minutes: 20
-    permissions:
-      contents: read # required for actions/checkout
-      id-token: write # required for OIDC authentication with CodSpeed
     strategy:
       matrix:
         benchmarks:
@@ -1022,16 +1017,14 @@ jobs:
         with:
           persist-credentials: false

-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
-      - uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2

       - name: "Install Rust toolchain"
         run: rustup show

       - name: "Install codspeed"
-        uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
+        uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45
         with:
           tool: cargo-codspeed
@@ -1039,7 +1032,7 @@ jobs:
         run: cargo codspeed build --features "codspeed,walltime" --profile profiling --no-default-features -p ruff_benchmark

       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@346a2d8a8d9d38909abd0bc3d23f773110f076ad # v4.4.1
+        uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
         env:
           # enabling walltime flamegraphs adds ~6 minutes to the CI time, and they don't
           # appear to provide much useful insight for our walltime benchmarks right now
@@ -1048,3 +1041,4 @@ jobs:
         with:
           mode: walltime
           run: cargo codspeed run --bench ty_walltime "${{ matrix.benchmarks }}"
+          token: ${{ secrets.CODSPEED_TOKEN }}
@@ -34,12 +34,12 @@ jobs:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
+      - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
       - name: "Install Rust toolchain"
         run: rustup show
       - name: "Install mold"
         uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
       - name: Build ruff
         # A debug build means the script runs slower once it gets started,
         # but this is outweighed by the fact that a release build takes *much* longer to compile in CI
@ -43,11 +43,10 @@ jobs:
|
||||||
persist-credentials: false
|
persist-credentials: false
|
||||||
|
|
||||||
- name: Install the latest version of uv
|
- name: Install the latest version of uv
|
||||||
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
|
uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
|
||||||
|
|
||||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||||
with:
|
with:
|
||||||
shared-key: "mypy-primer"
|
|
||||||
workspaces: "ruff"
|
workspaces: "ruff"
|
||||||
|
|
||||||
- name: Install Rust toolchain
|
- name: Install Rust toolchain
|
||||||
@@ -56,20 +55,24 @@ jobs:
 - name: Run mypy_primer
 env:
 PRIMER_SELECTOR: crates/ty_python_semantic/resources/primer/good.txt
-CLICOLOR_FORCE: "1"
 DIFF_FILE: mypy_primer.diff
 run: |
 cd ruff
 scripts/mypy_primer.sh
+echo ${{ github.event.number }} > ../pr-number

-# NOTE: astral-sh-bot uses this artifact to post comments on PRs.
-# Make sure to update the bot if you rename the artifact.
 - name: Upload diff
 uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
 with:
 name: mypy_primer_diff
 path: mypy_primer.diff

+- name: Upload pr-number
+uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+with:
+name: pr-number
+path: pr-number

 memory_usage:
 name: Run memory statistics
 runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
@@ -82,12 +85,11 @@ jobs:
 persist-credentials: false

 - name: Install the latest version of uv
-uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
+uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2

-- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 with:
 workspaces: "ruff"
-shared-key: "mypy-primer"

 - name: Install Rust toolchain
 run: rustup show
@@ -107,54 +109,3 @@ jobs:
 with:
 name: mypy_primer_memory_diff
 path: mypy_primer_memory.diff

-# Runs mypy twice against the same ty version to catch any non-deterministic behavior (ideally).
-# The job is disabled for now because there are some non-deterministic diagnostics.
-mypy_primer_same_revision:
-name: Run mypy_primer on same revision
-runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
-timeout-minutes: 20
-# TODO: Enable once we fixed the non-deterministic diagnostics
-if: false
-steps:
-- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-with:
-path: ruff
-fetch-depth: 0
-persist-credentials: false
-
-- name: Install the latest version of uv
-uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
-
-- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-with:
-workspaces: "ruff"
-shared-key: "mypy-primer"
-
-- name: Install Rust toolchain
-run: rustup show
-
-- name: Run determinism check
-env:
-BASE_REVISION: ${{ github.event.pull_request.head.sha }}
-PRIMER_SELECTOR: crates/ty_python_semantic/resources/primer/good.txt
-CLICOLOR_FORCE: "1"
-DIFF_FILE: mypy_primer_determinism.diff
-run: |
-cd ruff
-scripts/mypy_primer.sh
-
-- name: Check for non-determinism
-run: |
-# Remove ANSI color codes for checking
-sed -e 's/\x1b\[[0-9;]*m//g' mypy_primer_determinism.diff > mypy_primer_determinism_clean.diff
-
-# Check if there are any differences (non-determinism)
-if [ -s mypy_primer_determinism_clean.diff ]; then
-echo "ERROR: Non-deterministic output detected!"
-echo "The following differences were found when running ty twice on the same commit:"
-cat mypy_primer_determinism_clean.diff
-exit 1
-else
-echo "✓ Output is deterministic"
-fi
@@ -0,0 +1,122 @@
+name: PR comment (mypy_primer)
+
+on: # zizmor: ignore[dangerous-triggers]
+workflow_run:
+workflows: [Run mypy_primer]
+types: [completed]
+workflow_dispatch:
+inputs:
+workflow_run_id:
+description: The mypy_primer workflow that triggers the workflow run
+required: true
+
+jobs:
+comment:
+runs-on: ubuntu-24.04
+permissions:
+pull-requests: write
+steps:
+- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
+name: Download PR number
+with:
+name: pr-number
+run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
+if_no_artifact_found: ignore
+allow_forks: true
+
+- name: Parse pull request number
+id: pr-number
+run: |
+if [[ -f pr-number ]]
+then
+echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
+fi
+
+- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
+name: "Download mypy_primer results"
+id: download-mypy_primer_diff
+if: steps.pr-number.outputs.pr-number
+with:
+name: mypy_primer_diff
+workflow: mypy_primer.yaml
+pr: ${{ steps.pr-number.outputs.pr-number }}
+path: pr/mypy_primer_diff
+workflow_conclusion: completed
+if_no_artifact_found: ignore
+allow_forks: true
+
+- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
+name: "Download mypy_primer memory results"
+id: download-mypy_primer_memory_diff
+if: steps.pr-number.outputs.pr-number
+with:
+name: mypy_primer_memory_diff
+workflow: mypy_primer.yaml
+pr: ${{ steps.pr-number.outputs.pr-number }}
+path: pr/mypy_primer_memory_diff
+workflow_conclusion: completed
+if_no_artifact_found: ignore
+allow_forks: true
+
+- name: Generate comment content
+id: generate-comment
+if: ${{ steps.download-mypy_primer_diff.outputs.found_artifact == 'true' && steps.download-mypy_primer_memory_diff.outputs.found_artifact == 'true' }}
+run: |
+# Guard against malicious mypy_primer results that symlink to a secret
+# file on this runner
+if [[ -L pr/mypy_primer_diff/mypy_primer.diff ]] || [[ -L pr/mypy_primer_memory_diff/mypy_primer_memory.diff ]]
+then
+echo "Error: mypy_primer.diff and mypy_primer_memory.diff cannot be a symlink"
+exit 1
+fi
+
+# Note this identifier is used to find the comment to update on
+# subsequent runs
+echo '<!-- generated-comment mypy_primer -->' >> comment.txt
+
+echo '## `mypy_primer` results' >> comment.txt
+if [ -s "pr/mypy_primer_diff/mypy_primer.diff" ]; then
+echo '<details>' >> comment.txt
+echo '<summary>Changes were detected when running on open source projects</summary>' >> comment.txt
+echo '' >> comment.txt
+echo '```diff' >> comment.txt
+cat pr/mypy_primer_diff/mypy_primer.diff >> comment.txt
+echo '```' >> comment.txt
+echo '</details>' >> comment.txt
+else
+echo 'No ecosystem changes detected ✅' >> comment.txt
+fi
+
+if [ -s "pr/mypy_primer_memory_diff/mypy_primer_memory.diff" ]; then
+echo '<details>' >> comment.txt
+echo '<summary>Memory usage changes were detected when running on open source projects</summary>' >> comment.txt
+echo '' >> comment.txt
+echo '```diff' >> comment.txt
+cat pr/mypy_primer_memory_diff/mypy_primer_memory.diff >> comment.txt
+echo '```' >> comment.txt
+echo '</details>' >> comment.txt
+else
+echo 'No memory usage changes detected ✅' >> comment.txt
+fi
+
+echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
+cat comment.txt >> "$GITHUB_OUTPUT"
+echo 'EOF' >> "$GITHUB_OUTPUT"
+
+- name: Find existing comment
+uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
+if: steps.generate-comment.outcome == 'success'
+id: find-comment
+with:
+issue-number: ${{ steps.pr-number.outputs.pr-number }}
+comment-author: "github-actions[bot]"
+body-includes: "<!-- generated-comment mypy_primer -->"
+
+- name: Create or update comment
+if: steps.find-comment.outcome == 'success'
+uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
+with:
+comment-id: ${{ steps.find-comment.outputs.comment-id }}
+issue-number: ${{ steps.pr-number.outputs.pr-number }}
+body-path: comment.txt
+edit-mode: replace
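The `comment<<EOF` / `EOF` lines in the workflow above use the runner's heredoc-style syntax for writing a multiline value to `$GITHUB_OUTPUT`. A minimal shell sketch of that pattern, with a temporary file standing in for the runner-provided path (the assignment and sample values are illustrative only, not part of the diff):

```shell
# Stand-in for the file GitHub Actions normally provides to each step.
GITHUB_OUTPUT=$(mktemp)

# Write a multiline output named "comment" using the <<EOF delimiter form.
{
  echo 'comment<<EOF'
  printf 'first line\nsecond line\n'
  echo 'EOF'
} >> "$GITHUB_OUTPUT"

# Later steps would read this back as steps.<id>.outputs.comment.
cat "$GITHUB_OUTPUT"
```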
@@ -0,0 +1,88 @@
+name: Ecosystem check comment
+
+on:
+workflow_run:
+workflows: [CI]
+types: [completed]
+workflow_dispatch:
+inputs:
+workflow_run_id:
+description: The ecosystem workflow that triggers the workflow run
+required: true
+
+jobs:
+comment:
+runs-on: ubuntu-latest
+permissions:
+pull-requests: write
+steps:
+- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
+name: Download pull request number
+with:
+name: pr-number
+run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
+if_no_artifact_found: ignore
+allow_forks: true
+
+- name: Parse pull request number
+id: pr-number
+run: |
+if [[ -f pr-number ]]
+then
+echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
+fi
+
+- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
+name: "Download ecosystem results"
+id: download-ecosystem-result
+if: steps.pr-number.outputs.pr-number
+with:
+name: ecosystem-result
+workflow: ci.yaml
+pr: ${{ steps.pr-number.outputs.pr-number }}
+path: pr/ecosystem
+workflow_conclusion: completed
+if_no_artifact_found: ignore
+allow_forks: true
+
+- name: Generate comment content
+id: generate-comment
+if: steps.download-ecosystem-result.outputs.found_artifact == 'true'
+run: |
+# Guard against malicious ecosystem results that symlink to a secret
+# file on this runner
+if [[ -L pr/ecosystem/ecosystem-result ]]
+then
+echo "Error: ecosystem-result cannot be a symlink"
+exit 1
+fi
+
+# Note this identifier is used to find the comment to update on
+# subsequent runs
+echo '<!-- generated-comment ecosystem -->' >> comment.txt
+
+echo '## `ruff-ecosystem` results' >> comment.txt
+cat pr/ecosystem/ecosystem-result >> comment.txt
+echo "" >> comment.txt
+
+echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
+cat comment.txt >> "$GITHUB_OUTPUT"
+echo 'EOF' >> "$GITHUB_OUTPUT"
+
+- name: Find existing comment
+uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
+if: steps.generate-comment.outcome == 'success'
+id: find-comment
+with:
+issue-number: ${{ steps.pr-number.outputs.pr-number }}
+comment-author: "github-actions[bot]"
+body-includes: "<!-- generated-comment ecosystem -->"
+
+- name: Create or update comment
+if: steps.find-comment.outcome == 'success'
+uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
+with:
+comment-id: ${{ steps.find-comment.outputs.comment-id }}
+issue-number: ${{ steps.pr-number.outputs.pr-number }}
+body-path: comment.txt
+edit-mode: replace
@@ -20,13 +20,15 @@ on:
 jobs:
 mkdocs:
 runs-on: ubuntu-latest
+env:
+MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
 steps:
 - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 with:
 ref: ${{ inputs.ref }}
 persist-credentials: true

-- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
+- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
 with:
 python-version: 3.12

@@ -57,12 +59,23 @@ jobs:
 echo "branch_name=update-docs-$branch_display_name-$timestamp" >> "$GITHUB_ENV"
 echo "timestamp=$timestamp" >> "$GITHUB_ENV"

+- name: "Add SSH key"
+if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
+uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
+with:
+ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}

 - name: "Install Rust toolchain"
 run: rustup show

-- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1

+- name: "Install Insiders dependencies"
+if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
+run: pip install -r docs/requirements-insiders.txt

 - name: "Install dependencies"
+if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
 run: pip install -r docs/requirements.txt

 - name: "Copy README File"

@@ -70,8 +83,13 @@ jobs:
 python scripts/transform_readme.py --target mkdocs
 python scripts/generate_mkdocs.py

+- name: "Build Insiders docs"
+if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
+run: mkdocs build --strict -f mkdocs.insiders.yml

 - name: "Build docs"
-run: mkdocs build --strict -f mkdocs.yml
+if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
+run: mkdocs build --strict -f mkdocs.public.yml

 - name: "Clone docs repo"
 run: git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs
@@ -37,7 +37,7 @@ jobs:
 package-manager-cache: false
 - uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
 - name: "Install Node dependencies"
-run: npm ci --ignore-scripts
+run: npm ci
 working-directory: playground
 - name: "Run TypeScript checks"
 run: npm run check
@@ -22,7 +22,7 @@ jobs:
 id-token: write
 steps:
 - name: "Install uv"
-uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
+uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
 - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
 with:
 pattern: wheels-*
@@ -41,7 +41,7 @@ jobs:
 package-manager-cache: false
 - uses: jetli/wasm-bindgen-action@20b33e20595891ab1a0ed73145d8a21fc96e7c29 # v0.2.0
 - name: "Install Node dependencies"
-run: npm ci --ignore-scripts
+run: npm ci
 working-directory: playground
 - name: "Run TypeScript checks"
 run: npm run check
@@ -60,7 +60,7 @@ jobs:
 env:
 GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 steps:
-- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
+- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
 with:
 persist-credentials: false
 submodules: recursive

@@ -68,7 +68,7 @@ jobs:
 # we specify bash to get pipefail; it guards against the `curl` command
 # failing. otherwise `sh` won't catch that `curl` returned non-0
 shell: bash
-run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.2/cargo-dist-installer.sh | sh"
+run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.0/cargo-dist-installer.sh | sh"
 - name: Cache dist
 uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
 with:
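The comment in the hunk above explains why the installer step forces `bash`: with `pipefail` enabled, a failing `curl` at the head of the `curl ... | sh` pipeline fails the whole step instead of being masked by the trailing `sh`. A minimal sketch of that behaviour, using placeholder commands rather than the real installer:

```shell
# Without pipefail, a pipeline's exit status is that of its *last* command,
# so the failure of `false` is hidden by the succeeding `cat`.
false | cat
echo "default behaviour, exit status: $?"   # prints 0

# With pipefail, the same pipeline reports the failure.
set -o pipefail
false | cat
echo "with pipefail, exit status: $?"       # prints 1
```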
@@ -123,7 +123,7 @@ jobs:
 GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
 steps:
-- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
+- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
 with:
 persist-credentials: false
 submodules: recursive
@@ -166,15 +166,15 @@ jobs:
 - custom-build-binaries
 - custom-build-docker
 - build-global-artifacts
-# Only run if we're "publishing", and only if plan, local and global didn't fail (skipped is fine)
+# Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
-if: ${{ always() && needs.plan.result == 'success' && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
+if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
 env:
 GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 runs-on: "depot-ubuntu-latest-4"
 outputs:
 val: ${{ steps.host.outputs.manifest }}
 steps:
-- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
+- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
 with:
 persist-credentials: false
 submodules: recursive
@@ -250,7 +250,7 @@ jobs:
 env:
 GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 steps:
-- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
+- uses: actions/checkout@ff7abcd0c3c05ccf6adc123a8cd1fd4fb30fb493
 with:
 persist-credentials: false
 submodules: recursive
@@ -77,7 +77,7 @@ jobs:
 run: |
 git config --global user.name typeshedbot
 git config --global user.email '<>'
-- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
+- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
 - name: Sync typeshed stubs
 run: |
 rm -rf "ruff/${VENDORED_TYPESHED}"

@@ -131,7 +131,7 @@ jobs:
 with:
 persist-credentials: true
 ref: ${{ env.UPSTREAM_BRANCH}}
-- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
+- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
 - name: Setup git
 run: |
 git config --global user.name typeshedbot
@@ -170,7 +170,7 @@ jobs:
 with:
 persist-credentials: true
 ref: ${{ env.UPSTREAM_BRANCH}}
-- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
+- uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
 - name: Setup git
 run: |
 git config --global user.name typeshedbot
@@ -198,7 +198,7 @@ jobs:
 run: |
 rm "${VENDORED_TYPESHED}/pyproject.toml"
 git commit -am "Remove pyproject.toml file"
-- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 - name: "Install Rust toolchain"
 if: ${{ success() }}
 run: rustup show

@@ -207,22 +207,17 @@ jobs:
 uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
 - name: "Install cargo nextest"
 if: ${{ success() }}
-uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
+uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45
 with:
 tool: cargo-nextest
 - name: "Install cargo insta"
 if: ${{ success() }}
-uses: taiki-e/install-action@3575e532701a5fc614b0c842e4119af4cc5fd16d # v2.62.60
+uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45
 with:
 tool: cargo-insta
 - name: Update snapshots
 if: ${{ success() }}
 run: |
-cargo r \
---profile=profiling \
--p ty_completion_eval \
--- all --tasks ./crates/ty_completion_eval/completion-evaluation-tasks.csv

 # The `cargo insta` docs indicate that `--unreferenced=delete` might be a good option,
 # but from local testing it appears to just revert all changes made by `cargo insta test --accept`.
 #
@@ -33,11 +33,11 @@ jobs:
 persist-credentials: false

 - name: Install the latest version of uv
-uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
+uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
 with:
 enable-cache: true # zizmor: ignore[cache-poisoning] acceptable risk for CloudFlare pages artifact

-- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 with:
 workspaces: "ruff"
 lookup-only: false # zizmor: ignore[cache-poisoning] acceptable risk for CloudFlare pages artifact
@@ -67,7 +67,7 @@ jobs:

 cd ..

-uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@2e1816eac09c90140b1ba51d19afc5f59da460f5"
+uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@908758da02a73ef3f3308e1dbb2248510029bbe4"

 ecosystem-analyzer \
 --repository ruff \
@@ -112,6 +112,8 @@ jobs:

 cat diff-statistics.md >> "$GITHUB_STEP_SUMMARY"

+echo ${{ github.event.number }} > pr-number

 - name: "Deploy to Cloudflare Pages"
 if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }}
 id: deploy

@@ -129,14 +131,18 @@ jobs:
 echo >> comment.md
 echo "**[Full report with detailed diff]($DEPLOYMENT_URL/diff)** ([timing results]($DEPLOYMENT_URL/timing))" >> comment.md

-# NOTE: astral-sh-bot uses this artifact to post comments on PRs.
-# Make sure to update the bot if you rename the artifact.
 - name: Upload comment
 uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
 with:
 name: comment.md
 path: comment.md

+- name: Upload pr-number
+uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+with:
+name: pr-number
+path: pr-number

 - name: Upload diagnostics diff
 uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
 with:
@@ -0,0 +1,85 @@
+name: PR comment (ty ecosystem-analyzer)
+
+on: # zizmor: ignore[dangerous-triggers]
+workflow_run:
+workflows: [ty ecosystem-analyzer]
+types: [completed]
+workflow_dispatch:
+inputs:
+workflow_run_id:
+description: The ty ecosystem-analyzer workflow that triggers the workflow run
+required: true
+
+jobs:
+comment:
+runs-on: ubuntu-24.04
+permissions:
+pull-requests: write
+steps:
+- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
+name: Download PR number
+with:
+name: pr-number
+run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
+if_no_artifact_found: ignore
+allow_forks: true
+
+- name: Parse pull request number
+id: pr-number
+run: |
+if [[ -f pr-number ]]
+then
+echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
+fi
+
+- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
+name: "Download comment.md"
+id: download-comment
+if: steps.pr-number.outputs.pr-number
+with:
+name: comment.md
+workflow: ty-ecosystem-analyzer.yaml
+pr: ${{ steps.pr-number.outputs.pr-number }}
+path: pr/comment
+workflow_conclusion: completed
+if_no_artifact_found: ignore
+allow_forks: true
+
+- name: Generate comment content
+id: generate-comment
+if: ${{ steps.download-comment.outputs.found_artifact == 'true' }}
+run: |
+# Guard against malicious ty ecosystem-analyzer results that symlink to a secret
+# file on this runner
+if [[ -L pr/comment/comment.md ]]
+then
+echo "Error: comment.md cannot be a symlink"
+exit 1
+fi
+
+# Note: this identifier is used to find the comment to update on subsequent runs
+echo '<!-- generated-comment ty ecosystem-analyzer -->' > comment.md
+echo >> comment.md
+cat pr/comment/comment.md >> comment.md
+
+echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
+cat comment.md >> "$GITHUB_OUTPUT"
+echo 'EOF' >> "$GITHUB_OUTPUT"
+
+- name: Find existing comment
+uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
+if: steps.generate-comment.outcome == 'success'
+id: find-comment
+with:
+issue-number: ${{ steps.pr-number.outputs.pr-number }}
+comment-author: "github-actions[bot]"
+body-includes: "<!-- generated-comment ty ecosystem-analyzer -->"
+
+- name: Create or update comment
+if: steps.find-comment.outcome == 'success'
+uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
+with:
+comment-id: ${{ steps.find-comment.outputs.comment-id }}
+issue-number: ${{ steps.pr-number.outputs.pr-number }}
+body-path: comment.md
+edit-mode: replace
@@ -29,11 +29,11 @@ jobs:
 persist-credentials: false

 - name: Install the latest version of uv
-uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4
+uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
 with:
 enable-cache: true # zizmor: ignore[cache-poisoning] acceptable risk for CloudFlare pages artifact

-- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 with:
 workspaces: "ruff"
 lookup-only: false # zizmor: ignore[cache-poisoning] acceptable risk for CloudFlare pages artifact
@@ -52,7 +52,7 @@ jobs:

 cd ..

-uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@2e1816eac09c90140b1ba51d19afc5f59da460f5"
+uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@908758da02a73ef3f3308e1dbb2248510029bbe4"

 ecosystem-analyzer \
 --verbose \
@@ -45,7 +45,7 @@ jobs:
 path: typing
 persist-credentials: false

-- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
 with:
 workspaces: "ruff"
@@ -94,18 +94,21 @@ jobs:
 touch typing_conformance_diagnostics.diff
 fi

+echo ${{ github.event.number }} > pr-number
 echo "${CONFORMANCE_SUITE_COMMIT}" > conformance-suite-commit

-# NOTE: astral-sh-bot uses this artifact to post comments on PRs.
-# Make sure to update the bot if you rename the artifact.
 - name: Upload diff
 uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
 with:
 name: typing_conformance_diagnostics_diff
 path: typing_conformance_diagnostics.diff

-# NOTE: astral-sh-bot uses this artifact to post comments on PRs.
-# Make sure to update the bot if you rename the artifact.
+- name: Upload pr-number
+uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+with:
+name: pr-number
+path: pr-number

 - name: Upload conformance suite commit
 uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
 with:
@@ -0,0 +1,112 @@
+name: PR comment (typing_conformance)
+
+on: # zizmor: ignore[dangerous-triggers]
+workflow_run:
+workflows: [Run typing conformance]
+types: [completed]
+workflow_dispatch:
+inputs:
+workflow_run_id:
+description: The typing_conformance workflow that triggers the workflow run
+required: true
+
+jobs:
+comment:
+runs-on: ubuntu-24.04
+permissions:
+pull-requests: write
+steps:
+- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
+name: Download PR number
+with:
+name: pr-number
+run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
+if_no_artifact_found: ignore
+allow_forks: true
+
+- name: Parse pull request number
+id: pr-number
+run: |
+if [[ -f pr-number ]]
+then
+echo "pr-number=$(<pr-number)" >> "$GITHUB_OUTPUT"
+fi
+
+- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
+name: Download typing conformance suite commit
+with:
+name: conformance-suite-commit
+run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }}
+if_no_artifact_found: ignore
+allow_forks: true
+
+- uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
+name: "Download typing_conformance results"
+id: download-typing_conformance_diff
+if: steps.pr-number.outputs.pr-number
+with:
+name: typing_conformance_diagnostics_diff
+workflow: typing_conformance.yaml
+pr: ${{ steps.pr-number.outputs.pr-number }}
+path: pr/typing_conformance_diagnostics_diff
+workflow_conclusion: completed
+if_no_artifact_found: ignore
+allow_forks: true
+
+- name: Generate comment content
+id: generate-comment
+if: ${{ steps.download-typing_conformance_diff.outputs.found_artifact == 'true' }}
+run: |
+# Guard against malicious typing_conformance results that symlink to a secret
+# file on this runner
+if [[ -L pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff ]]
+then
+echo "Error: typing_conformance_diagnostics.diff cannot be a symlink"
+exit 1
+fi
+
+# Note this identifier is used to find the comment to update on
+# subsequent runs
+echo '<!-- generated-comment typing_conformance_diagnostics_diff -->' >> comment.txt
+
+if [[ -f conformance-suite-commit ]]
+then
+echo "## Diagnostic diff on [typing conformance tests](https://github.com/python/typing/tree/$(<conformance-suite-commit)/conformance)" >> comment.txt
+else
+echo "conformance-suite-commit file not found"
+echo "## Diagnostic diff on typing conformance tests" >> comment.txt
+fi
+
+if [ -s "pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff" ]; then
+echo '<details>' >> comment.txt
+echo '<summary>Changes were detected when running ty on typing conformance tests</summary>' >> comment.txt
+echo '' >> comment.txt
+echo '```diff' >> comment.txt
+cat pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff >> comment.txt
+echo '```' >> comment.txt
+echo '</details>' >> comment.txt
+else
+echo 'No changes detected when running ty on typing conformance tests ✅' >> comment.txt
+fi
+
+echo 'comment<<EOF' >> "$GITHUB_OUTPUT"
+cat comment.txt >> "$GITHUB_OUTPUT"
+echo 'EOF' >> "$GITHUB_OUTPUT"
+
+- name: Find existing comment
+uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0
+if: steps.generate-comment.outcome == 'success'
+id: find-comment
+with:
+issue-number: ${{ steps.pr-number.outputs.pr-number }}
+comment-author: "github-actions[bot]"
+body-includes: "<!-- generated-comment typing_conformance_diagnostics_diff -->"
+
+- name: Create or update comment
+if: steps.find-comment.outcome == 'success'
+uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4
+with:
+comment-id: ${{ steps.find-comment.outputs.comment-id }}
+issue-number: ${{ steps.pr-number.outputs.pr-number }}
+body-path: comment.txt
+edit-mode: replace
@@ -3,6 +3,9 @@
 #
 # TODO: can we remove the ignores here so that our workflows are more secure?
 rules:
+dangerous-triggers:
+ignore:
+- pr-comment.yaml
 cache-poisoning:
 ignore:
 - build-docker.yml

@@ -5,6 +5,5 @@
 "rust-analyzer.check.command": "clippy",
 "search.exclude": {
 "**/*.snap": true
-},
-"ty.diagnosticMode": "openFilesOnly"
+}
 }
CHANGELOG.md — 202 changed lines

@@ -1,207 +1,5 @@
 # Changelog

-## 0.14.9
-
-Released on 2025-12-11.
-
-### Preview features
-
-- \[`ruff`\] New `RUF100` diagnostics for unused range suppressions ([#21783](https://github.com/astral-sh/ruff/pull/21783))
-- \[`pylint`\] Detect subclasses of builtin exceptions (`PLW0133`) ([#21382](https://github.com/astral-sh/ruff/pull/21382))
-
-### Bug fixes
-
-- Fix comment placement in lambda parameters ([#21868](https://github.com/astral-sh/ruff/pull/21868))
-- Skip over trivia tokens after re-lexing ([#21895](https://github.com/astral-sh/ruff/pull/21895))
-- \[`flake8-bandit`\] Fix false positive when using non-standard `CSafeLoader` path (S506). ([#21830](https://github.com/astral-sh/ruff/pull/21830))
-- \[`flake8-bugbear`\] Accept immutable slice default arguments (`B008`) ([#21823](https://github.com/astral-sh/ruff/pull/21823))
-
-### Rule changes
-
-- \[`pydocstyle`\] Suppress `D417` for parameters with `Unpack` annotations ([#21816](https://github.com/astral-sh/ruff/pull/21816))
-
-### Performance
-
-- Use `memchr` for computing line indexes ([#21838](https://github.com/astral-sh/ruff/pull/21838))
-
-### Documentation
-
-- Document `*.pyw` is included by default in preview ([#21885](https://github.com/astral-sh/ruff/pull/21885))
-- Document range suppressions, reorganize suppression docs ([#21884](https://github.com/astral-sh/ruff/pull/21884))
-- Update mkdocs-material to 9.7.0 (Insiders now free) ([#21797](https://github.com/astral-sh/ruff/pull/21797))
-
-### Contributors
-
-- [@Avasam](https://github.com/Avasam)
-- [@MichaReiser](https://github.com/MichaReiser)
-- [@charliermarsh](https://github.com/charliermarsh)
-- [@amyreese](https://github.com/amyreese)
-- [@phongddo](https://github.com/phongddo)
-- [@prakhar1144](https://github.com/prakhar1144)
-- [@mahiro72](https://github.com/mahiro72)
-- [@ntBre](https://github.com/ntBre)
-- [@LoicRiegel](https://github.com/LoicRiegel)
-
-## 0.14.8
-
-Released on 2025-12-04.
-
-### Preview features
-
-- \[`flake8-bugbear`\] Catch `yield` expressions within other statements (`B901`) ([#21200](https://github.com/astral-sh/ruff/pull/21200))
-- \[`flake8-use-pathlib`\] Mark fixes unsafe for return type changes (`PTH104`, `PTH105`, `PTH109`, `PTH115`) ([#21440](https://github.com/astral-sh/ruff/pull/21440))
-
-### Bug fixes
-
-- Fix syntax error false positives for `await` outside functions ([#21763](https://github.com/astral-sh/ruff/pull/21763))
-- \[`flake8-simplify`\] Fix truthiness assumption for non-iterable arguments in tuple/list/set calls (`SIM222`, `SIM223`) ([#21479](https://github.com/astral-sh/ruff/pull/21479))
-
-### Documentation
-
-- Suggest using `--output-file` option in GitLab integration ([#21706](https://github.com/astral-sh/ruff/pull/21706))
-
-### Other changes
-
-- [syntax-error] Default type parameter followed by non-default type parameter ([#21657](https://github.com/astral-sh/ruff/pull/21657))
-
-### Contributors
-
-- [@kieran-ryan](https://github.com/kieran-ryan)
-- [@11happy](https://github.com/11happy)
-- [@danparizher](https://github.com/danparizher)
-- [@ntBre](https://github.com/ntBre)
-
-## 0.14.7
-
-Released on 2025-11-28.
-
-### Preview features
-
-- \[`flake8-bandit`\] Handle string literal bindings in suspicious-url-open-usage (`S310`) ([#21469](https://github.com/astral-sh/ruff/pull/21469))
-- \[`pylint`\] Fix `PLR1708` false positives on nested functions ([#21177](https://github.com/astral-sh/ruff/pull/21177))
-- \[`pylint`\] Fix suppression for empty dict without tuple key annotation (`PLE1141`) ([#21290](https://github.com/astral-sh/ruff/pull/21290))
-- \[`ruff`\] Add rule `RUF066` to detect unnecessary class properties ([#21535](https://github.com/astral-sh/ruff/pull/21535))
-- \[`ruff`\] Catch more dummy variable uses (`RUF052`) ([#19799](https://github.com/astral-sh/ruff/pull/19799))
-
-### Bug fixes
-
-- [server] Set severity for non-rule diagnostics ([#21559](https://github.com/astral-sh/ruff/pull/21559))
-- \[`flake8-implicit-str-concat`\] Avoid invalid fix in (`ISC003`) ([#21517](https://github.com/astral-sh/ruff/pull/21517))
-- \[`parser`\] Fix panic when parsing IPython escape command expressions ([#21480](https://github.com/astral-sh/ruff/pull/21480))
-
-### CLI
-
-- Show partial fixability indicator in statistics output ([#21513](https://github.com/astral-sh/ruff/pull/21513))
-
-### Contributors
-
-- [@mikeleppane](https://github.com/mikeleppane)
-- [@senekor](https://github.com/senekor)
-- [@ShaharNaveh](https://github.com/ShaharNaveh)
-- [@JumboBear](https://github.com/JumboBear)
-- [@prakhar1144](https://github.com/prakhar1144)
-- [@tsvikas](https://github.com/tsvikas)
-- [@danparizher](https://github.com/danparizher)
-- [@chirizxc](https://github.com/chirizxc)
-- [@AlexWaygood](https://github.com/AlexWaygood)
-- [@MichaReiser](https://github.com/MichaReiser)
-
-## 0.14.6
-
-Released on 2025-11-21.
-
-### Preview features
-
-- \[`flake8-bandit`\] Support new PySNMP API paths (`S508`, `S509`) ([#21374](https://github.com/astral-sh/ruff/pull/21374))
-
-### Bug fixes
-
-- Adjust own-line comment placement between branches ([#21185](https://github.com/astral-sh/ruff/pull/21185))
-- Avoid syntax error when formatting attribute expressions with outer parentheses, parenthesized value, and trailing comment on value ([#20418](https://github.com/astral-sh/ruff/pull/20418))
-- Fix panic when formatting comments in unary expressions ([#21501](https://github.com/astral-sh/ruff/pull/21501))
-- Respect `fmt: skip` for compound statements on a single line ([#20633](https://github.com/astral-sh/ruff/pull/20633))
-- \[`refurb`\] Fix `FURB103` autofix ([#21454](https://github.com/astral-sh/ruff/pull/21454))
-- \[`ruff`\] Fix false positive for complex conversion specifiers in `logging-eager-conversion` (`RUF065`) ([#21464](https://github.com/astral-sh/ruff/pull/21464))
-
-### Rule changes
-
-- \[`ruff`\] Avoid false positive on `ClassVar` reassignment (`RUF012`) ([#21478](https://github.com/astral-sh/ruff/pull/21478))
-
-### CLI
-
-- Render hyperlinks for lint errors ([#21514](https://github.com/astral-sh/ruff/pull/21514))
-- Add a `ruff analyze` option to skip over imports in `TYPE_CHECKING` blocks ([#21472](https://github.com/astral-sh/ruff/pull/21472))
-
-### Documentation
-
-- Limit `eglot-format` hook to eglot-managed Python buffers ([#21459](https://github.com/astral-sh/ruff/pull/21459))
-- Mention `force-exclude` in "Configuration > Python file discovery" ([#21500](https://github.com/astral-sh/ruff/pull/21500))
-
-### Contributors
-
-- [@ntBre](https://github.com/ntBre)
-- [@dylwil3](https://github.com/dylwil3)
-- [@gauthsvenkat](https://github.com/gauthsvenkat)
-- [@MichaReiser](https://github.com/MichaReiser)
-- [@thamer](https://github.com/thamer)
-- [@Ruchir28](https://github.com/Ruchir28)
-- [@thejcannon](https://github.com/thejcannon)
-- [@danparizher](https://github.com/danparizher)
-- [@chirizxc](https://github.com/chirizxc)
-
-## 0.14.5
-
-Released on 2025-11-13.
-
-### Preview features
-
-- \[`flake8-simplify`\] Apply `SIM113` when index variable is of type `int` ([#21395](https://github.com/astral-sh/ruff/pull/21395))
-- \[`pydoclint`\] Fix false positive when Sphinx directives follow a "Raises" section (`DOC502`) ([#20535](https://github.com/astral-sh/ruff/pull/20535))
-- \[`pydoclint`\] Support NumPy-style comma-separated parameters (`DOC102`) ([#20972](https://github.com/astral-sh/ruff/pull/20972))
-- \[`refurb`\] Auto-fix annotated assignments (`FURB101`) ([#21278](https://github.com/astral-sh/ruff/pull/21278))
-- \[`ruff`\] Ignore `str()` when not used for simple conversion (`RUF065`) ([#21330](https://github.com/astral-sh/ruff/pull/21330))
-
-### Bug fixes
-
-- Fix syntax error false positive on alternative `match` patterns ([#21362](https://github.com/astral-sh/ruff/pull/21362))
-- \[`flake8-simplify`\] Fix false positive for iterable initializers with generator arguments (`SIM222`) ([#21187](https://github.com/astral-sh/ruff/pull/21187))
-- \[`pyupgrade`\] Fix false positive on relative imports from local `.builtins` module (`UP029`) ([#21309](https://github.com/astral-sh/ruff/pull/21309))
-- \[`pyupgrade`\] Consistently set the deprecated tag (`UP035`) ([#21396](https://github.com/astral-sh/ruff/pull/21396))
-
-### Rule changes
-
-- \[`refurb`\] Detect empty f-strings (`FURB105`) ([#21348](https://github.com/astral-sh/ruff/pull/21348))
-
-### CLI
-
-- Add option to provide a reason to `--add-noqa` ([#21294](https://github.com/astral-sh/ruff/pull/21294))
-- Add upstream linter URL to `ruff linter --output-format=json` ([#21316](https://github.com/astral-sh/ruff/pull/21316))
-- Add color to `--help` ([#21337](https://github.com/astral-sh/ruff/pull/21337))
-
-### Documentation
-
-- Add a new "Opening a PR" section to the contribution guide ([#21298](https://github.com/astral-sh/ruff/pull/21298))
-- Added the PyScripter IDE to the list of "Who is using Ruff?" ([#21402](https://github.com/astral-sh/ruff/pull/21402))
-- Update PyCharm setup instructions ([#21409](https://github.com/astral-sh/ruff/pull/21409))
-- \[`flake8-annotations`\] Add link to `allow-star-arg-any` option (`ANN401`) ([#21326](https://github.com/astral-sh/ruff/pull/21326))
-
-### Other changes
-
-- \[`configuration`\] Improve error message when `line-length` exceeds `u16::MAX` ([#21329](https://github.com/astral-sh/ruff/pull/21329))
-
-### Contributors
-
-- [@njhearp](https://github.com/njhearp)
-- [@11happy](https://github.com/11happy)
-- [@hugovk](https://github.com/hugovk)
-- [@Gankra](https://github.com/Gankra)
-- [@ntBre](https://github.com/ntBre)
-- [@pyscripter](https://github.com/pyscripter)
-- [@danparizher](https://github.com/danparizher)
-- [@MichaReiser](https://github.com/MichaReiser)
-- [@henryiii](https://github.com/henryiii)
-- [@charliecloudberry](https://github.com/charliecloudberry)
-
 ## 0.14.4

 Released on 2025-11-06.
|
|
@ -280,57 +280,15 @@ Note that plugin-specific configuration options are defined in their own modules
|
||||||
|
|
||||||
Finally, regenerate the documentation and generated code with `cargo dev generate-all`.
|
Finally, regenerate the documentation and generated code with `cargo dev generate-all`.
|
||||||
|
|
||||||
### Opening a PR

After you finish your changes, the next step is to open a PR. By default, two
sections will be filled into the PR body: the summary and the test plan.

#### The summary

The summary is intended to give us as maintainers information about your PR.
This should typically include a link to the relevant issue(s) you're addressing
in your PR, as well as a summary of the issue and your approach to fixing it. If
you have any questions about your approach or design, or if you considered
alternative approaches, that can also be helpful to include.

AI can be helpful in generating both the code and summary of your PR, but a
successful contribution should still be carefully reviewed by you and the
summary editorialized before submitting a PR. A great summary is thorough but
also succinct and gives us the context we need to review your PR.

You can find examples of excellent issues and PRs by searching for the
[`great writeup`](https://github.com/astral-sh/ruff/issues?q=label%3A%22great%20writeup%22)
label.

#### The test plan

The test plan is likely to be shorter than the summary and can be as simple as
"Added new snapshot tests for `RUF123`," at least for rule bugs. For LSP or some
types of CLI changes, in particular, it can also be helpful to include
screenshots or recordings of your change in action.

#### Ecosystem report

After opening the PR, an ecosystem report will be run as part of CI. This shows
a diff of linter and formatter behavior before and after the changes in your PR.
Going through these changes and reporting your findings in the PR summary or an
additional comment helps us to review your PR more efficiently. It's also a great
way to find new test cases to incorporate into your PR if you identify any
issues.

#### PR status

To help us know when your PR is ready for review again, please either move your
PR back to a draft while working on it (marking it ready for review afterwards
will ping the previous reviewers) or explicitly re-request a review. This helps
us to avoid re-reviewing a PR while you're still working on it and also to
prioritize PRs that are definitely ready for review.

You can also thumbs-up or mark as resolved any comments we leave to let us know
you addressed them.
## MkDocs

> [!NOTE]
>
> The documentation uses Material for MkDocs Insiders, which is closed-source software.
> This means only members of the Astral organization can preview the documentation exactly as it
> will appear in production.
> Outside contributors can still preview the documentation, but there will be some differences. Consult [the Material for MkDocs documentation](https://squidfunk.github.io/mkdocs-material/insiders/benefits/#features) for which features are exclusively available in the insiders version.

To preview any changes to the documentation locally:

1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).

1. Run the development server with:

    ```shell
    # main:
    uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.yml

    # 0.14.4: For contributors.
    uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.public.yml

    # 0.14.4: For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
    uvx --with-requirements docs/requirements-insiders.txt -- mkdocs serve -f mkdocs.insiders.yml
    ```

The documentation should then be available locally at
Cargo.lock differences between `main` and `0.14.4` (version on `main` vs version in `0.14.4`):

- `clap` and `clap_builder`: 4.5.53 vs 4.5.51
- `get-size-derive2` and `get-size2`: 0.7.3 vs 0.7.1 (on `main`, `get-size2` additionally depends on `indexmap` and `ordermap`)
- `hashbrown`: 0.16.1 vs 0.16.0
- `indexmap`: 2.12.1 vs 2.12.0
- `indicatif`: 0.18.3 vs 0.18.2
- `jiff` and `jiff-static`: 0.2.16 vs 0.2.15 (`jiff` depends on `serde_core` on `main`, `serde` in `0.14.4`)
- `libcst` and `libcst_derive`: 1.8.6 vs 1.8.5
- `ordermap`: 1.0.0 vs 0.5.12
- `quick-junit`: 0.5.2 vs 0.5.1
- `quick-xml`: 0.38.4 vs 0.37.5
- `quote`: 1.0.42 vs 1.0.41
- `syn`: 2.0.111 vs 2.0.108
- `tracing`: 0.1.43 vs 0.1.41; `tracing-attributes`: 0.1.31 vs 0.1.30; `tracing-core`: 0.1.35 vs 0.1.34; `tracing-subscriber`: 0.3.22 vs 0.3.20
- `salsa`, `salsa-macro-rules`, `salsa-macros`: both at 0.24.0, but pinned to git rev `55e5e7d32fa3fc189276f35bb04c9438f9aedbd1` on `main` vs `05a9af7f554b64b8aadc2eeb6f2caf73d0408d09` in `0.14.4`; `main`'s `salsa` entry additionally lists `ordermap`
- `ruff`, `ruff_linter`, `ruff_wasm`: 0.14.9 on `main` vs 0.14.4
- several entries reference `windows-sys` 0.61.0 on `main` where `0.14.4` references 0.52.0 or 0.59.0 (for example `rustix` and `tempfile`)

Packages present only in `main`'s lock file: `bit-set` 0.8.0, `bit-vec` 0.8.0, `datatest-stable` 0.3.3, `fancy-regex` 0.14.0, `libtest-mimic` 0.8.1 (in addition to 0.7.3, which appears in both branches), and `supports-hyperlinks` 3.1.0.

Workspace crate entries also differ: on `main`, `ruff` additionally depends on `supports-hyperlinks`, `ruff_linter` on `compact_str`, and several other workspace crates on `datatest-stable`, `itertools` 0.14.0, `tikv-jemallocator`, `dunce`, `smallvec`, and `ruff_diagnostics`; in `0.14.4`, the `insta` entry additionally lists `globset` and `walkdir`.
Cargo.toml (16 changes)

@@ -5,7 +5,7 @@ resolver = "2"
 [workspace.package]
 # Please update rustfmt.toml when bumping the Rust edition
 edition = "2024"
-rust-version = "1.90"
+rust-version = "1.89"
 homepage = "https://docs.astral.sh/ruff"
 documentation = "https://docs.astral.sh/ruff"
 repository = "https://github.com/astral-sh/ruff"

@@ -81,7 +81,6 @@ compact_str = "0.9.0"
 criterion = { version = "0.7.0", default-features = false }
 crossbeam = { version = "0.8.4" }
 dashmap = { version = "6.0.1" }
-datatest-stable = { version = "0.3.3" }
 dir-test = { version = "0.4.0" }
 dunce = { version = "1.0.5" }
 drop_bomb = { version = "0.1.5" }

@@ -89,7 +88,7 @@ etcetera = { version = "0.11.0" }
 fern = { version = "0.7.0" }
 filetime = { version = "0.2.23" }
 getrandom = { version = "0.3.1" }
-get-size2 = { version = "0.7.3", features = [
+get-size2 = { version = "0.7.0", features = [
     "derive",
     "smallvec",
     "hashbrown",

@@ -130,7 +129,7 @@ memchr = { version = "2.7.1" }
 mimalloc = { version = "0.1.39" }
 natord = { version = "1.0.9" }
 notify = { version = "8.0.0" }
-ordermap = { version = "1.0.0" }
+ordermap = { version = "0.5.0" }
 path-absolutize = { version = "3.1.1" }
 path-slash = { version = "0.2.1" }
 pathdiff = { version = "0.2.1" }

@@ -147,7 +146,7 @@ regex-automata = { version = "0.4.9" }
 rustc-hash = { version = "2.0.0" }
 rustc-stable-hash = { version = "0.1.2" }
 # When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
-salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "55e5e7d32fa3fc189276f35bb04c9438f9aedbd1", default-features = false, features = [
+salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "05a9af7f554b64b8aadc2eeb6f2caf73d0408d09", default-features = false, features = [
     "compact_str",
     "macros",
     "salsa_unstable",

@@ -174,7 +173,6 @@ snapbox = { version = "0.6.0", features = [
 static_assertions = "1.1.0"
 strum = { version = "0.27.0", features = ["strum_macros"] }
 strum_macros = { version = "0.27.0" }
-supports-hyperlinks = { version = "3.1.0" }
 syn = { version = "2.0.55" }
 tempfile = { version = "3.9.0" }
 test-case = { version = "3.3.1" }

@@ -273,12 +271,6 @@ large_stack_arrays = "allow"
 lto = "fat"
 codegen-units = 16

-# Profile to build a minimally sized binary for ruff/ty
-[profile.minimal-size]
-inherits = "release"
-opt-level = "z"
-codegen-units = 1
-
 # Some crates don't change as much but benefit more from
 # more expensive optimization passes, so we selectively
 # decrease codegen-units in some cases.
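As a usage sketch for the `minimal-size` profile that exists only on `main` (the profile name comes from the hunk above; the invocation is standard Cargo profile selection, not something documented here):

```shell
# Build a size-optimized binary using the custom profile defined in the workspace Cargo.toml on `main`.
cargo build --profile minimal-size
```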
README.md (14 changes)

@@ -57,11 +57,8 @@ Ruff is extremely actively developed and used in major open-source projects like
 ...and [many more](#whos-using-ruff).

-Ruff is backed by [Astral](https://astral.sh), the creators of
-[uv](https://github.com/astral-sh/uv) and [ty](https://github.com/astral-sh/ty).
-
-Read the [launch post](https://astral.sh/blog/announcing-astral-the-company-behind-ruff), or the
-original [project announcement](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster).
+Ruff is backed by [Astral](https://astral.sh). Read the [launch post](https://astral.sh/blog/announcing-astral-the-company-behind-ruff),
+or the original [project announcement](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster).

 ## Testimonials

@@ -150,8 +147,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
 powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

 # For a specific version.
-curl -LsSf https://astral.sh/ruff/0.14.9/install.sh | sh
-powershell -c "irm https://astral.sh/ruff/0.14.9/install.ps1 | iex"
+curl -LsSf https://astral.sh/ruff/0.14.4/install.sh | sh
+powershell -c "irm https://astral.sh/ruff/0.14.4/install.ps1 | iex"
 ```

 You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),

@@ -184,7 +181,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
 ```yaml
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.14.9
+  rev: v0.14.4
   hooks:
     # Run the linter.
     - id: ruff-check

@@ -494,7 +491,6 @@ Ruff is used by a number of major open-source projects and companies, including:
 - [PyTorch](https://github.com/pytorch/pytorch)
 - [Pydantic](https://github.com/pydantic/pydantic)
 - [Pylint](https://github.com/PyCQA/pylint)
-- [PyScripter](https://github.com/pyscripter/pyscripter)
 - [PyVista](https://github.com/pyvista/pyvista)
 - [Reflex](https://github.com/reflex-dev/reflex)
 - [River](https://github.com/online-ml/river)
@@ -1,6 +1,6 @@
 [package]
 name = "ruff"
-version = "0.14.9"
+version = "0.14.4"
 publish = true
 authors = { workspace = true }
 edition = { workspace = true }
@@ -7,10 +7,8 @@ use std::sync::Arc;
 use crate::commands::completions::config::{OptionString, OptionStringParser};
 use anyhow::bail;
-use clap::builder::Styles;
-use clap::builder::styling::{AnsiColor, Effects};
 use clap::builder::{TypedValueParser, ValueParserFactory};
-use clap::{Parser, Subcommand};
+use clap::{Parser, Subcommand, command};
 use colored::Colorize;
 use itertools::Itertools;
 use path_absolutize::path_dedot;

@@ -80,13 +78,6 @@ impl GlobalConfigArgs {
     }
 }

-// Configures Clap v3-style help menu colors
-const STYLES: Styles = Styles::styled()
-    .header(AnsiColor::Green.on_default().effects(Effects::BOLD))
-    .usage(AnsiColor::Green.on_default().effects(Effects::BOLD))
-    .literal(AnsiColor::Cyan.on_default().effects(Effects::BOLD))
-    .placeholder(AnsiColor::Cyan.on_default());
-
 #[derive(Debug, Parser)]
 #[command(
     author,

@@ -95,7 +86,6 @@ const STYLES: Styles = Styles::styled()
     after_help = "For help with a specific command, see: `ruff help <command>`."
 )]
 #[command(version)]
-#[command(styles = STYLES)]
 pub struct Args {
     #[command(subcommand)]
     pub(crate) command: Command,

@@ -167,7 +157,6 @@ pub enum AnalyzeCommand {
 }

 #[derive(Clone, Debug, clap::Parser)]
-#[expect(clippy::struct_excessive_bools)]
 pub struct AnalyzeGraphCommand {
     /// List of files or directories to include.
     #[clap(help = "List of files or directories to include [default: .]")]

@@ -194,12 +183,6 @@ pub struct AnalyzeGraphCommand {
     /// Path to a virtual environment to use for resolving additional dependencies
     #[arg(long)]
     python: Option<PathBuf>,
-    /// Include imports that are only used for type checking (i.e., imports within `if TYPE_CHECKING:` blocks).
-    /// Use `--no-type-checking-imports` to exclude imports that are only used for type checking.
-    #[arg(long, overrides_with("no_type_checking_imports"))]
-    type_checking_imports: bool,
-    #[arg(long, overrides_with("type_checking_imports"), hide = true)]
-    no_type_checking_imports: bool,
 }

 // The `Parser` derive is for ruff_dev, for ruff `Args` would be sufficient

@@ -422,13 +405,8 @@ pub struct CheckCommand {
     )]
     pub statistics: bool,
     /// Enable automatic additions of `noqa` directives to failing lines.
-    /// Optionally provide a reason to append after the codes.
     #[arg(
         long,
-        value_name = "REASON",
-        default_missing_value = "",
-        num_args = 0..=1,
-        require_equals = true,
         // conflicts_with = "add_noqa",
         conflicts_with = "show_files",
         conflicts_with = "show_settings",

@@ -440,7 +418,7 @@
         conflicts_with = "fix",
         conflicts_with = "diff",
     )]
-    pub add_noqa: Option<String>,
+    pub add_noqa: bool,
     /// See the files Ruff will be run against with the current settings.
     #[arg(
         long,

@@ -846,10 +824,6 @@ impl AnalyzeGraphCommand {
             string_imports_min_dots: self.min_dots,
             preview: resolve_bool_arg(self.preview, self.no_preview).map(PreviewMode::from),
             target_version: self.target_version.map(ast::PythonVersion::from),
-            type_checking_imports: resolve_bool_arg(
-                self.type_checking_imports,
-                self.no_type_checking_imports,
-            ),
             ..ExplicitConfigOverrides::default()
         };

@@ -1073,7 +1047,7 @@ Possible choices:
 /// etc.).
 #[expect(clippy::struct_excessive_bools)]
 pub struct CheckArguments {
-    pub add_noqa: Option<String>,
+    pub add_noqa: bool,
     pub diff: bool,
     pub exit_non_zero_on_fix: bool,
     pub exit_zero: bool,

@@ -1346,7 +1320,6 @@ struct ExplicitConfigOverrides {
     extension: Option<Vec<ExtensionPair>>,
     detect_string_imports: Option<bool>,
     string_imports_min_dots: Option<usize>,
-    type_checking_imports: Option<bool>,
 }

 impl ConfigurationTransformer for ExplicitConfigOverrides {

@@ -1437,9 +1410,6 @@ impl ConfigurationTransformer for ExplicitConfigOverrides {
         if let Some(string_imports_min_dots) = &self.string_imports_min_dots {
             config.analyze.string_imports_min_dots = Some(*string_imports_min_dots);
         }
-        if let Some(type_checking_imports) = &self.type_checking_imports {
-            config.analyze.type_checking_imports = Some(*type_checking_imports);
-        }

         config
     }
@@ -21,7 +21,6 @@ pub(crate) fn add_noqa(
     files: &[PathBuf],
     pyproject_config: &PyprojectConfig,
     config_arguments: &ConfigArguments,
-    reason: Option<&str>,
 ) -> Result<usize> {
     // Collect all the files to check.
     let start = Instant::now();

@@ -77,14 +76,7 @@ pub(crate) fn add_noqa(
                 return None;
             }
         };
-        match add_noqa_to_path(
-            path,
-            package,
-            &source_kind,
-            source_type,
-            &settings.linter,
-            reason,
-        ) {
+        match add_noqa_to_path(path, package, &source_kind, source_type, &settings.linter) {
             Ok(count) => Some(count),
             Err(e) => {
                 error!("Failed to add noqa to {}: {e}", path.display());
@@ -105,7 +105,6 @@ pub(crate) fn analyze_graph(
     let settings = resolver.resolve(path);
     let string_imports = settings.analyze.string_imports;
     let include_dependencies = settings.analyze.include_dependencies.get(path).cloned();
-    let type_checking_imports = settings.analyze.type_checking_imports;

     // Skip excluded files.
     if (settings.file_resolver.force_exclude || !resolved_file.is_root())

@@ -168,7 +167,6 @@ pub(crate) fn analyze_graph(
                 &path,
                 package.as_deref(),
                 string_imports,
-                type_checking_imports,
             )
             .unwrap_or_else(|err| {
                 warn!("Failed to generate import map for {path}: {err}");
@@ -16,8 +16,6 @@ struct LinterInfo {
     prefix: &'static str,
     name: &'static str,
     #[serde(skip_serializing_if = "Option::is_none")]
-    url: Option<&'static str>,
-    #[serde(skip_serializing_if = "Option::is_none")]
     categories: Option<Vec<LinterCategoryInfo>>,
 }

@@ -52,7 +50,6 @@ pub(crate) fn linter(format: HelpFormat) -> Result<()> {
         .map(|linter_info| LinterInfo {
             prefix: linter_info.common_prefix(),
             name: linter_info.name(),
-            url: linter_info.url(),
             categories: linter_info.upstream_categories().map(|cats| {
                 cats.iter()
                     .map(|c| LinterCategoryInfo {
@@ -9,7 +9,7 @@ use std::sync::mpsc::channel;
 use anyhow::Result;
 use clap::CommandFactory;
 use colored::Colorize;
-use log::error;
+use log::{error, warn};
 use notify::{RecursiveMode, Watcher, recommended_watcher};

 use args::{GlobalConfigArgs, ServerCommand};

@@ -319,20 +319,12 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result<Exi
         warn_user!("Detected debug build without --no-cache.");
     }

-    if let Some(reason) = &cli.add_noqa {
+    if cli.add_noqa {
         if !fix_mode.is_generate() {
             warn_user!("--fix is incompatible with --add-noqa.");
         }
-        if reason.contains(['\n', '\r']) {
-            return Err(anyhow::anyhow!(
-                "--add-noqa <reason> cannot contain newline characters"
-            ));
-        }
-
-        let reason_opt = (!reason.is_empty()).then_some(reason.as_str());
-
         let modifications =
-            commands::add_noqa::add_noqa(&files, &pyproject_config, &config_arguments, reason_opt)?;
+            commands::add_noqa::add_noqa(&files, &pyproject_config, &config_arguments)?;
         if modifications > 0 && config_arguments.log_level >= LogLevel::Default {
             let s = if modifications == 1 { "" } else { "s" };
             #[expect(clippy::print_stderr)]
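As a usage sketch of the flag whose shape changes above (on `0.14.4` the flag is a plain boolean; on `main` it accepts an optional `=`-attached reason, per the `value_name = "REASON"` / `require_equals = true` argument definition):

```shell
# Both branches: insert `# noqa: <codes>` comments on every failing line.
ruff check --add-noqa

# `main` only: optionally append a reason after the codes (exact rendering of the
# reason in the comment is not shown in this excerpt).
ruff check --add-noqa="migration cleanup pending"
```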
@@ -34,21 +34,9 @@ struct ExpandedStatistics<'a> {
     code: Option<&'a SecondaryCode>,
     name: &'static str,
     count: usize,
-    #[serde(rename = "fixable")]
-    all_fixable: bool,
-    fixable_count: usize,
+    fixable: bool,
 }
-
-impl ExpandedStatistics<'_> {
-    fn any_fixable(&self) -> bool {
-        self.fixable_count > 0
-    }
-}
-
-/// Accumulator type for grouping diagnostics by code.
-/// Format: (`code`, `representative_diagnostic`, `total_count`, `fixable_count`)
-type DiagnosticGroup<'a> = (Option<&'a SecondaryCode>, &'a Diagnostic, usize, usize);

 pub(crate) struct Printer {
     format: OutputFormat,
     log_level: LogLevel,

@@ -145,7 +133,7 @@ impl Printer {
         if fixables.applicable > 0 {
             writeln!(
                 writer,
-                "{fix_prefix} {} fixable with the `--fix` option.",
+                "{fix_prefix} {} fixable with the --fix option.",
                 fixables.applicable
             )?;
         }

@@ -268,41 +256,35 @@
         diagnostics: &Diagnostics,
         writer: &mut dyn Write,
     ) -> Result<()> {
-        let required_applicability = self.unsafe_fixes.required_applicability();
         let statistics: Vec<ExpandedStatistics> = diagnostics
             .inner
             .iter()
-            .sorted_by_key(|diagnostic| diagnostic.secondary_code())
-            .fold(vec![], |mut acc: Vec<DiagnosticGroup>, diagnostic| {
-                let is_fixable = diagnostic
-                    .fix()
-                    .is_some_and(|fix| fix.applies(required_applicability));
-                let code = diagnostic.secondary_code();
-
-                if let Some((prev_code, _prev_message, count, fixable_count)) = acc.last_mut() {
-                    if *prev_code == code {
-                        *count += 1;
-                        if is_fixable {
-                            *fixable_count += 1;
-                        }
-                        return acc;
-                    }
-                }
-                acc.push((code, diagnostic, 1, usize::from(is_fixable)));
-                acc
-            })
+            .map(|message| (message.secondary_code(), message))
+            .sorted_by_key(|(code, message)| (*code, message.fixable()))
+            .fold(
+                vec![],
+                |mut acc: Vec<((Option<&SecondaryCode>, &Diagnostic), usize)>, (code, message)| {
+                    if let Some(((prev_code, _prev_message), count)) = acc.last_mut() {
+                        if *prev_code == code {
+                            *count += 1;
+                            return acc;
+                        }
+                    }
+                    acc.push(((code, message), 1));
+                    acc
+                },
+            )
             .iter()
-            .map(
-                |&(code, message, count, fixable_count)| ExpandedStatistics {
-                    code,
-                    name: message.name(),
-                    count,
-                    // Backward compatibility: `fixable` is true only when all violations are fixable.
-                    // See: https://github.com/astral-sh/ruff/pull/21513
-                    all_fixable: fixable_count == count,
-                    fixable_count,
-                },
-            )
+            .map(|&((code, message), count)| ExpandedStatistics {
+                code,
+                name: message.name(),
+                count,
+                fixable: if let Some(fix) = message.fix() {
+                    fix.applies(self.unsafe_fixes.required_applicability())
+                } else {
+                    false
+                },
+            })
             .sorted_by_key(|statistic| Reverse(statistic.count))
             .collect();

@@ -326,14 +308,13 @@ impl Printer {
             .map(|statistic| statistic.code.map_or(0, |s| s.len()))
             .max()
             .unwrap();
-        let any_fixable = statistics.iter().any(ExpandedStatistics::any_fixable);
+        let any_fixable = statistics.iter().any(|statistic| statistic.fixable);

-        let all_fixable = format!("[{}] ", "*".cyan());
-        let partially_fixable = format!("[{}] ", "-".cyan());
+        let fixable = format!("[{}] ", "*".cyan());
         let unfixable = "[ ] ";

         // By default, we mimic Flake8's `--statistics` format.
-        for statistic in &statistics {
+        for statistic in statistics {
             writeln!(
                 writer,
                 "{:>count_width$}\t{:<code_width$}\t{}{}",

@@ -345,10 +326,8 @@ impl Printer {
                     .red()
                     .bold(),
                 if any_fixable {
-                    if statistic.all_fixable {
-                        &all_fixable
-                    } else if statistic.any_fixable() {
-                        &partially_fixable
+                    if statistic.fixable {
+                        &fixable
                     } else {
                         unfixable
                     }
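For intuition, a `--statistics` run under the `main`-side markers above might print something roughly like the following (the counts and rule codes are made up; `[*]` means every occurrence is fixable, `[-]` some, `[ ]` none):

```shell
ruff check --statistics
# 12    F401    [*] unused-import
#  3    E501    [ ] line-too-long
#  2    RUF100  [-] unused-noqa
```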
@ -1,193 +0,0 @@
|
||||||
use std::process::Command;
|
|
||||||
|
|
||||||
use insta_cmd::assert_cmd_snapshot;
|
|
||||||
|
|
||||||
use crate::CliTest;

#[test]
fn type_checking_imports() -> anyhow::Result<()> {
    let test = AnalyzeTest::with_files([
        ("ruff/__init__.py", ""),
        (
            "ruff/a.py",
            r#"
            from typing import TYPE_CHECKING

            import ruff.b

            if TYPE_CHECKING:
                import ruff.c
            "#,
        ),
        (
            "ruff/b.py",
            r#"
            if TYPE_CHECKING:
                from ruff import c
            "#,
        ),
        ("ruff/c.py", ""),
    ])?;

    assert_cmd_snapshot!(test.command(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    {
      "ruff/__init__.py": [],
      "ruff/a.py": [
        "ruff/b.py",
        "ruff/c.py"
      ],
      "ruff/b.py": [
        "ruff/c.py"
      ],
      "ruff/c.py": []
    }

    ----- stderr -----
    "###);

    assert_cmd_snapshot!(
        test.command()
            .arg("--no-type-checking-imports"),
        @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    {
      "ruff/__init__.py": [],
      "ruff/a.py": [
        "ruff/b.py"
      ],
      "ruff/b.py": [],
      "ruff/c.py": []
    }

    ----- stderr -----
    "###
    );

    Ok(())
}

#[test]
fn type_checking_imports_from_config() -> anyhow::Result<()> {
    let test = AnalyzeTest::with_files([
        ("ruff/__init__.py", ""),
        (
            "ruff/a.py",
            r#"
            from typing import TYPE_CHECKING

            import ruff.b

            if TYPE_CHECKING:
                import ruff.c
            "#,
        ),
        (
            "ruff/b.py",
            r#"
            if TYPE_CHECKING:
                from ruff import c
            "#,
        ),
        ("ruff/c.py", ""),
        (
            "ruff.toml",
            r#"
            [analyze]
            type-checking-imports = false
            "#,
        ),
    ])?;

    assert_cmd_snapshot!(test.command(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    {
      "ruff/__init__.py": [],
      "ruff/a.py": [
        "ruff/b.py"
      ],
      "ruff/b.py": [],
      "ruff/c.py": []
    }

    ----- stderr -----
    "###);

    test.write_file(
        "ruff.toml",
        r#"
        [analyze]
        type-checking-imports = true
        "#,
    )?;

    assert_cmd_snapshot!(test.command(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    {
      "ruff/__init__.py": [],
      "ruff/a.py": [
        "ruff/b.py",
        "ruff/c.py"
      ],
      "ruff/b.py": [
        "ruff/c.py"
      ],
      "ruff/c.py": []
    }

    ----- stderr -----
    "###
    );

    Ok(())
}

struct AnalyzeTest {
    cli_test: CliTest,
}

impl AnalyzeTest {
    pub(crate) fn new() -> anyhow::Result<Self> {
        Ok(Self {
            cli_test: CliTest::with_settings(|_, mut settings| {
                settings.add_filter(r#"\\\\"#, "/");
                settings
            })?,
        })
    }

    fn with_files<'a>(files: impl IntoIterator<Item = (&'a str, &'a str)>) -> anyhow::Result<Self> {
        let case = Self::new()?;
        case.write_files(files)?;
        Ok(case)
    }

    #[expect(unused)]
    fn with_file(path: impl AsRef<std::path::Path>, content: &str) -> anyhow::Result<Self> {
        let fixture = Self::new()?;
        fixture.write_file(path, content)?;
        Ok(fixture)
    }

    fn command(&self) -> Command {
        let mut command = self.cli_test.command();
        command.arg("analyze").arg("graph").arg("--preview");
        command
    }
}

impl std::ops::Deref for AnalyzeTest {
    type Target = CliTest;

    fn deref(&self) -> &Self::Target {
        &self.cli_test
    }
}

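A note on the fixture above: `AnalyzeTest` delegates to `CliTest` through `Deref`, which is why the analyze tests can call helpers such as `write_file` directly on the wrapper. A minimal, self-contained sketch of that delegation pattern, with hypothetical names that are not part of this diff:

struct Inner;

impl Inner {
    fn helper(&self) -> &'static str {
        "shared helper"
    }
}

struct Wrapper {
    inner: Inner,
}

impl std::ops::Deref for Wrapper {
    type Target = Inner;

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}

fn main() {
    let wrapper = Wrapper { inner: Inner };
    // Deref coercion lets the wrapper expose `Inner`'s methods directly.
    assert_eq!(wrapper.helper(), "shared helper");
}

The same pattern keeps test fixtures thin: the wrapper only adds what is specific to the `analyze graph` command, while file-writing and command construction stay in the shared base type.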
@ -1440,78 +1440,6 @@ def function():
    Ok(())
}

#[test]
fn ignore_noqa() -> Result<()> {
    let fixture = CliTest::new()?;
    fixture.write_file(
        "ruff.toml",
        r#"
        [lint]
        select = ["F401"]
        "#,
    )?;

    fixture.write_file(
        "noqa.py",
        r#"
        import os # noqa: F401

        # ruff: disable[F401]
        import sys
        "#,
    )?;

    // without --ignore-noqa
    assert_cmd_snapshot!(fixture
        .check_command()
        .args(["--config", "ruff.toml"])
        .arg("noqa.py"),
        @r"
    success: false
    exit_code: 1
    ----- stdout -----
    noqa.py:5:8: F401 [*] `sys` imported but unused
    Found 1 error.
    [*] 1 fixable with the `--fix` option.

    ----- stderr -----
    ");

    assert_cmd_snapshot!(fixture
        .check_command()
        .args(["--config", "ruff.toml"])
        .arg("noqa.py")
        .args(["--preview"]),
        @r"
    success: true
    exit_code: 0
    ----- stdout -----
    All checks passed!

    ----- stderr -----
    ");

    // with --ignore-noqa --preview
    assert_cmd_snapshot!(fixture
        .check_command()
        .args(["--config", "ruff.toml"])
        .arg("noqa.py")
        .args(["--ignore-noqa", "--preview"]),
        @r"
    success: false
    exit_code: 1
    ----- stdout -----
    noqa.py:2:8: F401 [*] `os` imported but unused
    noqa.py:5:8: F401 [*] `sys` imported but unused
    Found 2 errors.
    [*] 2 fixable with the `--fix` option.

    ----- stderr -----
    ");

    Ok(())
}

#[test]
fn add_noqa() -> Result<()> {
    let fixture = CliTest::new()?;

@ -1704,100 +1632,6 @@ def unused(x): # noqa: ANN001, ARG001, D103
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn add_noqa_existing_file_level_noqa() -> Result<()> {
|
|
||||||
let fixture = CliTest::new()?;
|
|
||||||
fixture.write_file(
|
|
||||||
"ruff.toml",
|
|
||||||
r#"
|
|
||||||
[lint]
|
|
||||||
select = ["F401"]
|
|
||||||
"#,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
fixture.write_file(
|
|
||||||
"noqa.py",
|
|
||||||
r#"
|
|
||||||
# ruff: noqa F401
|
|
||||||
import os
|
|
||||||
"#,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
assert_cmd_snapshot!(fixture
|
|
||||||
.check_command()
|
|
||||||
.args(["--config", "ruff.toml"])
|
|
||||||
.arg("noqa.py")
|
|
||||||
.arg("--preview")
|
|
||||||
.args(["--add-noqa"])
|
|
||||||
.arg("-")
|
|
||||||
.pass_stdin(r#"
|
|
||||||
|
|
||||||
"#), @r"
|
|
||||||
success: true
|
|
||||||
exit_code: 0
|
|
||||||
----- stdout -----
|
|
||||||
|
|
||||||
----- stderr -----
|
|
||||||
");
|
|
||||||
|
|
||||||
let test_code =
|
|
||||||
fs::read_to_string(fixture.root().join("noqa.py")).expect("should read test file");
|
|
||||||
|
|
||||||
insta::assert_snapshot!(test_code, @r"
|
|
||||||
# ruff: noqa F401
|
|
||||||
import os
|
|
||||||
");
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn add_noqa_existing_range_suppression() -> Result<()> {
|
|
||||||
let fixture = CliTest::new()?;
|
|
||||||
fixture.write_file(
|
|
||||||
"ruff.toml",
|
|
||||||
r#"
|
|
||||||
[lint]
|
|
||||||
select = ["F401"]
|
|
||||||
"#,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
fixture.write_file(
|
|
||||||
"noqa.py",
|
|
||||||
r#"
|
|
||||||
# ruff: disable[F401]
|
|
||||||
import os
|
|
||||||
"#,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
assert_cmd_snapshot!(fixture
|
|
||||||
.check_command()
|
|
||||||
.args(["--config", "ruff.toml"])
|
|
||||||
.arg("noqa.py")
|
|
||||||
.arg("--preview")
|
|
||||||
.args(["--add-noqa"])
|
|
||||||
.arg("-")
|
|
||||||
.pass_stdin(r#"
|
|
||||||
|
|
||||||
"#), @r"
|
|
||||||
success: true
|
|
||||||
exit_code: 0
|
|
||||||
----- stdout -----
|
|
||||||
|
|
||||||
----- stderr -----
|
|
||||||
");
|
|
||||||
|
|
||||||
let test_code =
|
|
||||||
fs::read_to_string(fixture.root().join("noqa.py")).expect("should read test file");
|
|
||||||
|
|
||||||
insta::assert_snapshot!(test_code, @r"
|
|
||||||
# ruff: disable[F401]
|
|
||||||
import os
|
|
||||||
");
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn add_noqa_multiline_comment() -> Result<()> {
|
fn add_noqa_multiline_comment() -> Result<()> {
|
||||||
let fixture = CliTest::new()?;
|
let fixture = CliTest::new()?;
|
||||||
|
|
@ -1926,64 +1760,6 @@ from foo import ( # noqa: F401
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn add_noqa_with_reason() -> Result<()> {
|
|
||||||
let fixture = CliTest::new()?;
|
|
||||||
fixture.write_file(
|
|
||||||
"test.py",
|
|
||||||
r#"import os
|
|
||||||
|
|
||||||
def foo():
|
|
||||||
x = 1
|
|
||||||
"#,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
assert_cmd_snapshot!(fixture
|
|
||||||
.check_command()
|
|
||||||
.arg("--add-noqa=TODO: fix")
|
|
||||||
.arg("--select=F401,F841")
|
|
||||||
.arg("test.py"), @r"
|
|
||||||
success: true
|
|
||||||
exit_code: 0
|
|
||||||
----- stdout -----
|
|
||||||
|
|
||||||
----- stderr -----
|
|
||||||
Added 2 noqa directives.
|
|
||||||
");
|
|
||||||
|
|
||||||
let content = fs::read_to_string(fixture.root().join("test.py"))?;
|
|
||||||
insta::assert_snapshot!(content, @r"
|
|
||||||
import os # noqa: F401 TODO: fix
|
|
||||||
|
|
||||||
def foo():
|
|
||||||
x = 1 # noqa: F841 TODO: fix
|
|
||||||
");
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn add_noqa_with_newline_in_reason() -> Result<()> {
|
|
||||||
let fixture = CliTest::new()?;
|
|
||||||
fixture.write_file("test.py", "import os\n")?;
|
|
||||||
|
|
||||||
assert_cmd_snapshot!(fixture
|
|
||||||
.check_command()
|
|
||||||
.arg("--add-noqa=line1\nline2")
|
|
||||||
.arg("--select=F401")
|
|
||||||
.arg("test.py"), @r###"
|
|
||||||
success: false
|
|
||||||
exit_code: 2
|
|
||||||
----- stdout -----
|
|
||||||
|
|
||||||
----- stderr -----
|
|
||||||
ruff failed
|
|
||||||
Cause: --add-noqa <reason> cannot contain newline characters
|
|
||||||
"###);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Infer `3.11` from `requires-python` in `pyproject.toml`.
|
/// Infer `3.11` from `requires-python` in `pyproject.toml`.
|
||||||
#[test]
|
#[test]
|
||||||
fn requires_python() -> Result<()> {
|
fn requires_python() -> Result<()> {
|
||||||
|
|
|
||||||
|
|
@ -15,7 +15,6 @@ use std::{
};
use tempfile::TempDir;

mod analyze_graph;
mod format;
mod lint;

@ -63,7 +62,9 @@ impl CliTest {
        files: impl IntoIterator<Item = (&'a str, &'a str)>,
    ) -> anyhow::Result<Self> {
        let case = Self::new()?;
        case.write_files(files)?;
        for file in files {
            case.write_file(file.0, file.1)?;
        }
        Ok(case)
    }

@ -152,16 +153,6 @@ impl CliTest {
        Ok(())
    }

    pub(crate) fn write_files<'a>(
        &self,
        files: impl IntoIterator<Item = (&'a str, &'a str)>,
    ) -> Result<()> {
        for file in files {
            self.write_file(file.0, file.1)?;
        }
        Ok(())
    }

    /// Returns the path to the test directory root.
    pub(crate) fn root(&self) -> &Path {
        &self.project_dir

@ -9,6 +9,7 @@ info:
|
||||||
- concise
|
- concise
|
||||||
- "--show-settings"
|
- "--show-settings"
|
||||||
- test.py
|
- test.py
|
||||||
|
snapshot_kind: text
|
||||||
---
|
---
|
||||||
success: true
|
success: true
|
||||||
exit_code: 0
|
exit_code: 0
|
||||||
|
|
@ -283,6 +284,5 @@ analyze.target_version = 3.10
|
||||||
analyze.string_imports = disabled
|
analyze.string_imports = disabled
|
||||||
analyze.extension = ExtensionMapping({})
|
analyze.extension = ExtensionMapping({})
|
||||||
analyze.include_dependencies = {}
|
analyze.include_dependencies = {}
|
||||||
analyze.type_checking_imports = true
|
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
|
|
|
||||||
|
|
@ -12,6 +12,7 @@ info:
|
||||||
- UP007
|
- UP007
|
||||||
- test.py
|
- test.py
|
||||||
- "-"
|
- "-"
|
||||||
|
snapshot_kind: text
|
||||||
---
|
---
|
||||||
success: true
|
success: true
|
||||||
exit_code: 0
|
exit_code: 0
|
||||||
|
|
@ -285,6 +286,5 @@ analyze.target_version = 3.11
|
||||||
analyze.string_imports = disabled
|
analyze.string_imports = disabled
|
||||||
analyze.extension = ExtensionMapping({})
|
analyze.extension = ExtensionMapping({})
|
||||||
analyze.include_dependencies = {}
|
analyze.include_dependencies = {}
|
||||||
analyze.type_checking_imports = true
|
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
|
|
|
||||||
|
|
@ -13,6 +13,7 @@ info:
|
||||||
- UP007
|
- UP007
|
||||||
- test.py
|
- test.py
|
||||||
- "-"
|
- "-"
|
||||||
|
snapshot_kind: text
|
||||||
---
|
---
|
||||||
success: true
|
success: true
|
||||||
exit_code: 0
|
exit_code: 0
|
||||||
|
|
@ -287,6 +288,5 @@ analyze.target_version = 3.11
|
||||||
analyze.string_imports = disabled
|
analyze.string_imports = disabled
|
||||||
analyze.extension = ExtensionMapping({})
|
analyze.extension = ExtensionMapping({})
|
||||||
analyze.include_dependencies = {}
|
analyze.include_dependencies = {}
|
||||||
analyze.type_checking_imports = true
|
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
|
|
|
||||||
|
|
@ -14,6 +14,7 @@ info:
|
||||||
- py310
|
- py310
|
||||||
- test.py
|
- test.py
|
||||||
- "-"
|
- "-"
|
||||||
|
snapshot_kind: text
|
||||||
---
|
---
|
||||||
success: true
|
success: true
|
||||||
exit_code: 0
|
exit_code: 0
|
||||||
|
|
@ -287,6 +288,5 @@ analyze.target_version = 3.10
|
||||||
analyze.string_imports = disabled
|
analyze.string_imports = disabled
|
||||||
analyze.extension = ExtensionMapping({})
|
analyze.extension = ExtensionMapping({})
|
||||||
analyze.include_dependencies = {}
|
analyze.include_dependencies = {}
|
||||||
analyze.type_checking_imports = true
|
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
|
|
|
||||||
|
|
@ -11,6 +11,7 @@ info:
|
||||||
- "--select"
|
- "--select"
|
||||||
- UP007
|
- UP007
|
||||||
- foo/test.py
|
- foo/test.py
|
||||||
|
snapshot_kind: text
|
||||||
---
|
---
|
||||||
success: true
|
success: true
|
||||||
exit_code: 0
|
exit_code: 0
|
||||||
|
|
@ -284,6 +285,5 @@ analyze.target_version = 3.11
|
||||||
analyze.string_imports = disabled
|
analyze.string_imports = disabled
|
||||||
analyze.extension = ExtensionMapping({})
|
analyze.extension = ExtensionMapping({})
|
||||||
analyze.include_dependencies = {}
|
analyze.include_dependencies = {}
|
||||||
analyze.type_checking_imports = true
|
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
|
|
|
||||||
|
|
@ -11,6 +11,7 @@ info:
|
||||||
- "--select"
|
- "--select"
|
||||||
- UP007
|
- UP007
|
||||||
- foo/test.py
|
- foo/test.py
|
||||||
|
snapshot_kind: text
|
||||||
---
|
---
|
||||||
success: true
|
success: true
|
||||||
exit_code: 0
|
exit_code: 0
|
||||||
|
|
@ -284,6 +285,5 @@ analyze.target_version = 3.10
|
||||||
analyze.string_imports = disabled
|
analyze.string_imports = disabled
|
||||||
analyze.extension = ExtensionMapping({})
|
analyze.extension = ExtensionMapping({})
|
||||||
analyze.include_dependencies = {}
|
analyze.include_dependencies = {}
|
||||||
analyze.type_checking_imports = true
|
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
|
|
|
||||||
|
|
@ -283,6 +283,5 @@ analyze.target_version = 3.10
|
||||||
analyze.string_imports = disabled
|
analyze.string_imports = disabled
|
||||||
analyze.extension = ExtensionMapping({})
|
analyze.extension = ExtensionMapping({})
|
||||||
analyze.include_dependencies = {}
|
analyze.include_dependencies = {}
|
||||||
analyze.type_checking_imports = true
|
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
|
|
|
||||||
|
|
@ -283,6 +283,5 @@ analyze.target_version = 3.10
|
||||||
analyze.string_imports = disabled
|
analyze.string_imports = disabled
|
||||||
analyze.extension = ExtensionMapping({})
|
analyze.extension = ExtensionMapping({})
|
||||||
analyze.include_dependencies = {}
|
analyze.include_dependencies = {}
|
||||||
analyze.type_checking_imports = true
|
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
|
|
|
||||||
|
|
@ -9,6 +9,7 @@ info:
|
||||||
- concise
|
- concise
|
||||||
- test.py
|
- test.py
|
||||||
- "--show-settings"
|
- "--show-settings"
|
||||||
|
snapshot_kind: text
|
||||||
---
|
---
|
||||||
success: true
|
success: true
|
||||||
exit_code: 0
|
exit_code: 0
|
||||||
|
|
@ -283,6 +284,5 @@ analyze.target_version = 3.11
|
||||||
analyze.string_imports = disabled
|
analyze.string_imports = disabled
|
||||||
analyze.extension = ExtensionMapping({})
|
analyze.extension = ExtensionMapping({})
|
||||||
analyze.include_dependencies = {}
|
analyze.include_dependencies = {}
|
||||||
analyze.type_checking_imports = true
|
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
|
|
|
||||||
|
|
@ -1043,7 +1043,7 @@ def mvce(keys, values):
    ----- stdout -----
    1 C416 [*] unnecessary-comprehension
    Found 1 error.
    [*] 1 fixable with the `--fix` option.
    [*] 1 fixable with the --fix option.

    ----- stderr -----
    ");

@ -1073,8 +1073,7 @@ def mvce(keys, values):
        "code": "C416",
        "name": "unnecessary-comprehension",
        "count": 1,
        "fixable": false,
        "fixable": false
        "fixable_count": 0
      }
    ]

@ -1107,8 +1106,7 @@ def mvce(keys, values):
        "code": "C416",
        "name": "unnecessary-comprehension",
        "count": 1,
        "fixable": true,
        "fixable": true
        "fixable_count": 1
      }
    ]

@ -1116,54 +1114,6 @@ def mvce(keys, values):
|
||||||
"#);
|
"#);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn show_statistics_json_partial_fix() {
|
|
||||||
let mut cmd = RuffCheck::default()
|
|
||||||
.args([
|
|
||||||
"--select",
|
|
||||||
"UP035",
|
|
||||||
"--statistics",
|
|
||||||
"--output-format",
|
|
||||||
"json",
|
|
||||||
])
|
|
||||||
.build();
|
|
||||||
assert_cmd_snapshot!(cmd
|
|
||||||
.pass_stdin("from typing import List, AsyncGenerator"), @r#"
|
|
||||||
success: false
|
|
||||||
exit_code: 1
|
|
||||||
----- stdout -----
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"code": "UP035",
|
|
||||||
"name": "deprecated-import",
|
|
||||||
"count": 2,
|
|
||||||
"fixable": false,
|
|
||||||
"fixable_count": 1
|
|
||||||
}
|
|
||||||
]
|
|
||||||
|
|
||||||
----- stderr -----
|
|
||||||
"#);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn show_statistics_partial_fix() {
|
|
||||||
let mut cmd = RuffCheck::default()
|
|
||||||
.args(["--select", "UP035", "--statistics"])
|
|
||||||
.build();
|
|
||||||
assert_cmd_snapshot!(cmd
|
|
||||||
.pass_stdin("from typing import List, AsyncGenerator"), @r"
|
|
||||||
success: false
|
|
||||||
exit_code: 1
|
|
||||||
----- stdout -----
|
|
||||||
2 UP035 [-] deprecated-import
|
|
||||||
Found 2 errors.
|
|
||||||
[*] 1 fixable with the `--fix` option.
|
|
||||||
|
|
||||||
----- stderr -----
|
|
||||||
");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn show_statistics_syntax_errors() {
|
fn show_statistics_syntax_errors() {
|
||||||
let mut cmd = RuffCheck::default()
|
let mut cmd = RuffCheck::default()
|
||||||
|
|
@ -1860,7 +1810,7 @@ fn check_no_hint_for_hidden_unsafe_fixes_when_disabled() {
|
||||||
--> -:1:1
|
--> -:1:1
|
||||||
|
|
||||||
Found 2 errors.
|
Found 2 errors.
|
||||||
[*] 1 fixable with the `--fix` option.
|
[*] 1 fixable with the --fix option.
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
");
|
");
|
||||||
|
|
@ -1903,7 +1853,7 @@ fn check_shows_unsafe_fixes_with_opt_in() {
|
||||||
--> -:1:1
|
--> -:1:1
|
||||||
|
|
||||||
Found 2 errors.
|
Found 2 errors.
|
||||||
[*] 2 fixable with the `--fix` option.
|
[*] 2 fixable with the --fix option.
|
||||||
|
|
||||||
----- stderr -----
|
----- stderr -----
|
||||||
");
|
");
|
||||||
|
|
|
||||||
|
|
@ -396,6 +396,5 @@ analyze.target_version = 3.7
analyze.string_imports = disabled
analyze.extension = ExtensionMapping({})
analyze.include_dependencies = {}
analyze.type_checking_imports = true

----- stderr -----

|
|
@ -31,7 +31,7 @@
|
||||||
//! styling.
|
//! styling.
|
||||||
//!
|
//!
|
||||||
//! The above snippet has been built out of the following structure:
|
//! The above snippet has been built out of the following structure:
|
||||||
use crate::{Id, snippet};
|
use crate::snippet;
|
||||||
use std::cmp::{Reverse, max, min};
|
use std::cmp::{Reverse, max, min};
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
use std::fmt::Display;
|
use std::fmt::Display;
|
||||||
|
|
@ -189,7 +189,6 @@ impl DisplaySet<'_> {
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn format_annotation(
|
fn format_annotation(
|
||||||
&self,
|
&self,
|
||||||
line_offset: usize,
|
line_offset: usize,
|
||||||
|
|
@ -200,13 +199,11 @@ impl DisplaySet<'_> {
|
||||||
) -> fmt::Result {
|
) -> fmt::Result {
|
||||||
let hide_severity = annotation.annotation_type.is_none();
|
let hide_severity = annotation.annotation_type.is_none();
|
||||||
let color = get_annotation_style(&annotation.annotation_type, stylesheet);
|
let color = get_annotation_style(&annotation.annotation_type, stylesheet);
|
||||||
|
|
||||||
let formatted_len = if let Some(id) = &annotation.id {
|
let formatted_len = if let Some(id) = &annotation.id {
|
||||||
let id_len = id.id.len();
|
|
||||||
if hide_severity {
|
if hide_severity {
|
||||||
id_len
|
id.len()
|
||||||
} else {
|
} else {
|
||||||
2 + id_len + annotation_type_len(&annotation.annotation_type)
|
2 + id.len() + annotation_type_len(&annotation.annotation_type)
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
annotation_type_len(&annotation.annotation_type)
|
annotation_type_len(&annotation.annotation_type)
|
||||||
|
|
@ -259,20 +256,9 @@ impl DisplaySet<'_> {
|
||||||
let annotation_type = annotation_type_str(&annotation.annotation_type);
|
let annotation_type = annotation_type_str(&annotation.annotation_type);
|
||||||
if let Some(id) = annotation.id {
|
if let Some(id) = annotation.id {
|
||||||
if hide_severity {
|
if hide_severity {
|
||||||
buffer.append(
|
buffer.append(line_offset, &format!("{id} "), *stylesheet.error());
|
||||||
line_offset,
|
|
||||||
&format!("{id} ", id = fmt_with_hyperlink(id.id, id.url, stylesheet)),
|
|
||||||
*stylesheet.error(),
|
|
||||||
);
|
|
||||||
} else {
|
} else {
|
||||||
buffer.append(
|
buffer.append(line_offset, &format!("{annotation_type}[{id}]"), *color);
|
||||||
line_offset,
|
|
||||||
&format!(
|
|
||||||
"{annotation_type}[{id}]",
|
|
||||||
id = fmt_with_hyperlink(id.id, id.url, stylesheet)
|
|
||||||
),
|
|
||||||
*color,
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
buffer.append(line_offset, annotation_type, *color);
|
buffer.append(line_offset, annotation_type, *color);
|
||||||
|
|
@ -721,7 +707,7 @@ impl DisplaySet<'_> {
|
||||||
let style =
|
let style =
|
||||||
get_annotation_style(&annotation.annotation_type, stylesheet);
|
get_annotation_style(&annotation.annotation_type, stylesheet);
|
||||||
let mut formatted_len = if let Some(id) = &annotation.annotation.id {
|
let mut formatted_len = if let Some(id) = &annotation.annotation.id {
|
||||||
2 + id.id.len()
|
2 + id.len()
|
||||||
+ annotation_type_len(&annotation.annotation.annotation_type)
|
+ annotation_type_len(&annotation.annotation.annotation_type)
|
||||||
} else {
|
} else {
|
||||||
annotation_type_len(&annotation.annotation.annotation_type)
|
annotation_type_len(&annotation.annotation.annotation_type)
|
||||||
|
|
@ -738,10 +724,7 @@ impl DisplaySet<'_> {
|
||||||
} else if formatted_len != 0 {
|
} else if formatted_len != 0 {
|
||||||
formatted_len += 2;
|
formatted_len += 2;
|
||||||
let id = match &annotation.annotation.id {
|
let id = match &annotation.annotation.id {
|
||||||
Some(id) => format!(
|
Some(id) => format!("[{id}]"),
|
||||||
"[{id}]",
|
|
||||||
id = fmt_with_hyperlink(&id.id, id.url, stylesheet)
|
|
||||||
),
|
|
||||||
None => String::new(),
|
None => String::new(),
|
||||||
};
|
};
|
||||||
buffer.puts(
|
buffer.puts(
|
||||||
|
|
@ -844,7 +827,7 @@ impl DisplaySet<'_> {
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
#[derive(Clone, Debug, PartialEq)]
|
||||||
pub(crate) struct Annotation<'a> {
|
pub(crate) struct Annotation<'a> {
|
||||||
pub(crate) annotation_type: DisplayAnnotationType,
|
pub(crate) annotation_type: DisplayAnnotationType,
|
||||||
pub(crate) id: Option<Id<'a>>,
|
pub(crate) id: Option<&'a str>,
|
||||||
pub(crate) label: Vec<DisplayTextFragment<'a>>,
|
pub(crate) label: Vec<DisplayTextFragment<'a>>,
|
||||||
pub(crate) is_fixable: bool,
|
pub(crate) is_fixable: bool,
|
||||||
}
|
}
|
||||||
|
|
@ -1157,7 +1140,7 @@ fn format_message<'m>(
|
||||||
|
|
||||||
fn format_title<'a>(
|
fn format_title<'a>(
|
||||||
level: crate::Level,
|
level: crate::Level,
|
||||||
id: Option<Id<'a>>,
|
id: Option<&'a str>,
|
||||||
label: &'a str,
|
label: &'a str,
|
||||||
is_fixable: bool,
|
is_fixable: bool,
|
||||||
) -> DisplayLine<'a> {
|
) -> DisplayLine<'a> {
|
||||||
|
|
@ -1175,7 +1158,7 @@ fn format_title<'a>(
|
||||||
|
|
||||||
fn format_footer<'a>(
|
fn format_footer<'a>(
|
||||||
level: crate::Level,
|
level: crate::Level,
|
||||||
id: Option<Id<'a>>,
|
id: Option<&'a str>,
|
||||||
label: &'a str,
|
label: &'a str,
|
||||||
) -> Vec<DisplayLine<'a>> {
|
) -> Vec<DisplayLine<'a>> {
|
||||||
let mut result = vec![];
|
let mut result = vec![];
|
||||||
|
|
@ -1723,7 +1706,6 @@ fn format_body<'m>(
|
||||||
annotation: Annotation {
|
annotation: Annotation {
|
||||||
annotation_type,
|
annotation_type,
|
||||||
id: None,
|
id: None,
|
||||||
|
|
||||||
label: format_label(annotation.label, None),
|
label: format_label(annotation.label, None),
|
||||||
is_fixable: false,
|
is_fixable: false,
|
||||||
},
|
},
|
||||||
|
|
@ -1905,40 +1887,3 @@ fn char_width(c: char) -> Option<usize> {
|
||||||
unicode_width::UnicodeWidthChar::width(c)
|
unicode_width::UnicodeWidthChar::width(c)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(super) fn fmt_with_hyperlink<'a, T>(
|
|
||||||
content: T,
|
|
||||||
url: Option<&'a str>,
|
|
||||||
stylesheet: &Stylesheet,
|
|
||||||
) -> impl std::fmt::Display + 'a
|
|
||||||
where
|
|
||||||
T: std::fmt::Display + 'a,
|
|
||||||
{
|
|
||||||
struct FmtHyperlink<'a, T> {
|
|
||||||
content: T,
|
|
||||||
url: Option<&'a str>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> std::fmt::Display for FmtHyperlink<'_, T>
|
|
||||||
where
|
|
||||||
T: std::fmt::Display,
|
|
||||||
{
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
||||||
if let Some(url) = self.url {
|
|
||||||
write!(f, "\x1B]8;;{url}\x1B\\")?;
|
|
||||||
}
|
|
||||||
|
|
||||||
self.content.fmt(f)?;
|
|
||||||
|
|
||||||
if self.url.is_some() {
|
|
||||||
f.write_str("\x1B]8;;\x1B\\")?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let url = if stylesheet.hyperlink { url } else { None };
|
|
||||||
|
|
||||||
FmtHyperlink { content, url }
|
|
||||||
}
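A brief aside on the escape sequences used by `fmt_with_hyperlink` above: the `\x1B]8;;URL\x1B\\` ... `\x1B]8;;\x1B\\` pair is the OSC 8 convention for terminal hyperlinks. A minimal, self-contained sketch of the same idea, with hypothetical names and assuming a terminal that understands OSC 8:

fn print_hyperlink(text: &str, url: &str) {
    // Open the hyperlink: OSC 8, empty params, the URI, terminated by ST (ESC \).
    print!("\x1B]8;;{url}\x1B\\");
    // The visible text the terminal renders as a clickable link.
    print!("{text}");
    // Close the hyperlink with an empty URI.
    println!("\x1B]8;;\x1B\\");
}

fn main() {
    print_hyperlink("F401", "https://docs.astral.sh/ruff/rules/unused-import");
}

Terminals without OSC 8 support simply print the escape bytes' payload text, which is why the renderer above gates the URL on a `hyperlink` capability flag.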
|
|
||||||
|
|
|
||||||
|
|
@ -76,7 +76,6 @@ impl Renderer {
|
||||||
}
|
}
|
||||||
.effects(Effects::BOLD),
|
.effects(Effects::BOLD),
|
||||||
none: Style::new(),
|
none: Style::new(),
|
||||||
hyperlink: true,
|
|
||||||
},
|
},
|
||||||
..Self::plain()
|
..Self::plain()
|
||||||
}
|
}
|
||||||
|
|
@ -155,11 +154,6 @@ impl Renderer {
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
pub const fn hyperlink(mut self, hyperlink: bool) -> Self {
|
|
||||||
self.stylesheet.hyperlink = hyperlink;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Set the string used for when a long line is cut.
|
/// Set the string used for when a long line is cut.
|
||||||
///
|
///
|
||||||
/// The default is `...` (three `U+002E` characters).
|
/// The default is `...` (three `U+002E` characters).
|
||||||
|
|
|
||||||
|
|
@ -10,7 +10,6 @@ pub(crate) struct Stylesheet {
|
||||||
pub(crate) line_no: Style,
|
pub(crate) line_no: Style,
|
||||||
pub(crate) emphasis: Style,
|
pub(crate) emphasis: Style,
|
||||||
pub(crate) none: Style,
|
pub(crate) none: Style,
|
||||||
pub(crate) hyperlink: bool,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Default for Stylesheet {
|
impl Default for Stylesheet {
|
||||||
|
|
@ -30,7 +29,6 @@ impl Stylesheet {
|
||||||
line_no: Style::new(),
|
line_no: Style::new(),
|
||||||
emphasis: Style::new(),
|
emphasis: Style::new(),
|
||||||
none: Style::new(),
|
none: Style::new(),
|
||||||
hyperlink: false,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -12,19 +12,13 @@

use std::ops::Range;

#[derive(Copy, Clone, Debug, Default, PartialEq)]
pub(crate) struct Id<'a> {
    pub(crate) id: &'a str,
    pub(crate) url: Option<&'a str>,
}

/// Primary structure provided for formatting
///
/// See [`Level::title`] to create a [`Message`]
#[derive(Debug)]
pub struct Message<'a> {
    pub(crate) level: Level,
    pub(crate) id: Option<Id<'a>>,
    pub(crate) id: Option<&'a str>,
    pub(crate) title: &'a str,
    pub(crate) snippets: Vec<Snippet<'a>>,
    pub(crate) footer: Vec<Message<'a>>,

@ -34,12 +28,7 @@ pub struct Message<'a> {

impl<'a> Message<'a> {
    pub fn id(mut self, id: &'a str) -> Self {
        self.id = Some(Id { id, url: None });
        self.id = Some(id);
        self
    }

    pub fn id_with_url(mut self, id: &'a str, url: Option<&'a str>) -> Self {
        self.id = Some(Id { id, url });
        self
    }

|
|
@ -59,6 +59,8 @@ divan = { workspace = true, optional = true }
anyhow = { workspace = true }
codspeed-criterion-compat = { workspace = true, default-features = false, optional = true }
criterion = { workspace = true, default-features = false, optional = true }
rayon = { workspace = true }
rustc-hash = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tracing = { workspace = true }

@ -86,7 +88,3 @@ mimalloc = { workspace = true }

[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64", target_arch = "riscv64")))'.dev-dependencies]
tikv-jemallocator = { workspace = true }

[dev-dependencies]
rustc-hash = { workspace = true }
rayon = { workspace = true }

|
|
@ -6,8 +6,7 @@ use criterion::{
use ruff_benchmark::{
    LARGE_DATASET, NUMPY_CTYPESLIB, NUMPY_GLOBALS, PYDANTIC_TYPES, TestCase, UNICODE_PYPINYIN,
};
use ruff_python_ast::token::TokenKind;
use ruff_python_parser::{Mode, TokenKind, lexer};
use ruff_python_parser::{Mode, lexer};

#[cfg(target_os = "windows")]
#[global_allocator]

|
|
@ -667,7 +667,7 @@ fn attrs(criterion: &mut Criterion) {
|
||||||
max_dep_date: "2025-06-17",
|
max_dep_date: "2025-06-17",
|
||||||
python_version: PythonVersion::PY313,
|
python_version: PythonVersion::PY313,
|
||||||
},
|
},
|
||||||
120,
|
110,
|
||||||
);
|
);
|
||||||
|
|
||||||
bench_project(&benchmark, criterion);
|
bench_project(&benchmark, criterion);
|
||||||
|
|
|
||||||
|
|
@ -71,13 +71,16 @@ impl Display for Benchmark<'_> {
    }
}

fn check_project(db: &ProjectDatabase, project_name: &str, max_diagnostics: usize) {
fn check_project(db: &ProjectDatabase, max_diagnostics: usize) {
    let result = db.check();
    let diagnostics = result.len();

    assert!(
        diagnostics > 1 && diagnostics <= max_diagnostics,
        "Expected between 1 and {max_diagnostics} diagnostics on project '{project_name}' but got {diagnostics}",
        "Expected between {} and {} diagnostics but got {}",
        1,
        max_diagnostics,
        diagnostics
    );
}

|
@ -120,7 +123,7 @@ static COLOUR_SCIENCE: Benchmark = Benchmark::new(
|
||||||
max_dep_date: "2025-06-17",
|
max_dep_date: "2025-06-17",
|
||||||
python_version: PythonVersion::PY310,
|
python_version: PythonVersion::PY310,
|
||||||
},
|
},
|
||||||
1070,
|
600,
|
||||||
);
|
);
|
||||||
|
|
||||||
static FREQTRADE: Benchmark = Benchmark::new(
|
static FREQTRADE: Benchmark = Benchmark::new(
|
||||||
|
|
@ -143,7 +146,7 @@ static FREQTRADE: Benchmark = Benchmark::new(
|
||||||
max_dep_date: "2025-06-17",
|
max_dep_date: "2025-06-17",
|
||||||
python_version: PythonVersion::PY312,
|
python_version: PythonVersion::PY312,
|
||||||
},
|
},
|
||||||
600,
|
525,
|
||||||
);
|
);
|
||||||
|
|
||||||
static PANDAS: Benchmark = Benchmark::new(
|
static PANDAS: Benchmark = Benchmark::new(
|
||||||
|
|
@ -163,7 +166,7 @@ static PANDAS: Benchmark = Benchmark::new(
|
||||||
max_dep_date: "2025-06-17",
|
max_dep_date: "2025-06-17",
|
||||||
python_version: PythonVersion::PY312,
|
python_version: PythonVersion::PY312,
|
||||||
},
|
},
|
||||||
4000,
|
3000,
|
||||||
);
|
);
|
||||||
|
|
||||||
static PYDANTIC: Benchmark = Benchmark::new(
|
static PYDANTIC: Benchmark = Benchmark::new(
|
||||||
|
|
@ -181,7 +184,7 @@ static PYDANTIC: Benchmark = Benchmark::new(
|
||||||
max_dep_date: "2025-06-17",
|
max_dep_date: "2025-06-17",
|
||||||
python_version: PythonVersion::PY39,
|
python_version: PythonVersion::PY39,
|
||||||
},
|
},
|
||||||
7000,
|
1000,
|
||||||
);
|
);
|
||||||
|
|
||||||
static SYMPY: Benchmark = Benchmark::new(
|
static SYMPY: Benchmark = Benchmark::new(
|
||||||
|
|
@ -194,7 +197,7 @@ static SYMPY: Benchmark = Benchmark::new(
|
||||||
max_dep_date: "2025-06-17",
|
max_dep_date: "2025-06-17",
|
||||||
python_version: PythonVersion::PY312,
|
python_version: PythonVersion::PY312,
|
||||||
},
|
},
|
||||||
13100,
|
13000,
|
||||||
);
|
);
|
||||||
|
|
||||||
static TANJUN: Benchmark = Benchmark::new(
|
static TANJUN: Benchmark = Benchmark::new(
|
||||||
|
|
@ -223,7 +226,7 @@ static STATIC_FRAME: Benchmark = Benchmark::new(
|
||||||
max_dep_date: "2025-08-09",
|
max_dep_date: "2025-08-09",
|
||||||
python_version: PythonVersion::PY311,
|
python_version: PythonVersion::PY311,
|
||||||
},
|
},
|
||||||
1100,
|
800,
|
||||||
);
|
);
|
||||||
|
|
||||||
#[track_caller]
|
#[track_caller]
|
||||||
|
|
@ -231,11 +234,11 @@ fn run_single_threaded(bencher: Bencher, benchmark: &Benchmark) {
|
||||||
bencher
|
bencher
|
||||||
.with_inputs(|| benchmark.setup_iteration())
|
.with_inputs(|| benchmark.setup_iteration())
|
||||||
.bench_local_refs(|db| {
|
.bench_local_refs(|db| {
|
||||||
check_project(db, benchmark.project.name, benchmark.max_diagnostics);
|
check_project(db, benchmark.max_diagnostics);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
#[bench(args=[&ALTAIR, &FREQTRADE, &TANJUN], sample_size=2, sample_count=3)]
|
#[bench(args=[&ALTAIR, &FREQTRADE, &PYDANTIC, &TANJUN], sample_size=2, sample_count=3)]
|
||||||
fn small(bencher: Bencher, benchmark: &Benchmark) {
|
fn small(bencher: Bencher, benchmark: &Benchmark) {
|
||||||
run_single_threaded(bencher, benchmark);
|
run_single_threaded(bencher, benchmark);
|
||||||
}
|
}
|
||||||
|
|
@ -245,12 +248,12 @@ fn medium(bencher: Bencher, benchmark: &Benchmark) {
|
||||||
run_single_threaded(bencher, benchmark);
|
run_single_threaded(bencher, benchmark);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[bench(args=[&SYMPY, &PYDANTIC], sample_size=1, sample_count=2)]
|
#[bench(args=[&SYMPY], sample_size=1, sample_count=2)]
|
||||||
fn large(bencher: Bencher, benchmark: &Benchmark) {
|
fn large(bencher: Bencher, benchmark: &Benchmark) {
|
||||||
run_single_threaded(bencher, benchmark);
|
run_single_threaded(bencher, benchmark);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[bench(args=[&ALTAIR], sample_size=3, sample_count=8)]
|
#[bench(args=[&PYDANTIC], sample_size=3, sample_count=8)]
|
||||||
fn multithreaded(bencher: Bencher, benchmark: &Benchmark) {
|
fn multithreaded(bencher: Bencher, benchmark: &Benchmark) {
|
||||||
let thread_pool = ThreadPoolBuilder::new().build().unwrap();
|
let thread_pool = ThreadPoolBuilder::new().build().unwrap();
|
||||||
|
|
||||||
|
|
@ -258,7 +261,7 @@ fn multithreaded(bencher: Bencher, benchmark: &Benchmark) {
|
||||||
.with_inputs(|| benchmark.setup_iteration())
|
.with_inputs(|| benchmark.setup_iteration())
|
||||||
.bench_local_values(|db| {
|
.bench_local_values(|db| {
|
||||||
thread_pool.install(|| {
|
thread_pool.install(|| {
|
||||||
check_project(&db, benchmark.project.name, benchmark.max_diagnostics);
|
check_project(&db, benchmark.max_diagnostics);
|
||||||
db
|
db
|
||||||
})
|
})
|
||||||
});
|
});
|
||||||
|
|
@ -282,7 +285,7 @@ fn main() {
|
||||||
// branch when looking up the ingredient index.
|
// branch when looking up the ingredient index.
|
||||||
{
|
{
|
||||||
let db = TANJUN.setup_iteration();
|
let db = TANJUN.setup_iteration();
|
||||||
check_project(&db, TANJUN.project.name, TANJUN.max_diagnostics);
|
check_project(&db, TANJUN.max_diagnostics);
|
||||||
}
|
}
|
||||||
|
|
||||||
divan::main();
|
divan::main();
|
||||||
|
|
|
||||||
|
|
@ -42,7 +42,6 @@ schemars = { workspace = true, optional = true }
serde = { workspace = true, optional = true }
serde_json = { workspace = true, optional = true }
similar = { workspace = true }
supports-hyperlinks = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true, optional = true }

|
|
@ -64,8 +64,6 @@ impl Diagnostic {
|
||||||
id,
|
id,
|
||||||
severity,
|
severity,
|
||||||
message: message.into_diagnostic_message(),
|
message: message.into_diagnostic_message(),
|
||||||
custom_concise_message: None,
|
|
||||||
documentation_url: None,
|
|
||||||
annotations: vec![],
|
annotations: vec![],
|
||||||
subs: vec![],
|
subs: vec![],
|
||||||
fix: None,
|
fix: None,
|
||||||
|
|
@ -166,8 +164,28 @@ impl Diagnostic {
|
||||||
/// Returns the primary message for this diagnostic.
|
/// Returns the primary message for this diagnostic.
|
||||||
///
|
///
|
||||||
/// A diagnostic always has a message, but it may be empty.
|
/// A diagnostic always has a message, but it may be empty.
|
||||||
|
///
|
||||||
|
/// NOTE: At present, this routine will return the first primary
|
||||||
|
/// annotation's message as the primary message when the main diagnostic
|
||||||
|
/// message is empty. This is meant to facilitate an incremental migration
|
||||||
|
/// in ty over to the new diagnostic data model. (The old data model
|
||||||
|
/// didn't distinguish between messages on the entire diagnostic and
|
||||||
|
/// messages attached to a particular span.)
|
||||||
pub fn primary_message(&self) -> &str {
|
pub fn primary_message(&self) -> &str {
|
||||||
self.inner.message.as_str()
|
if !self.inner.message.as_str().is_empty() {
|
||||||
|
return self.inner.message.as_str();
|
||||||
|
}
|
||||||
|
// FIXME: As a special case, while we're migrating ty
|
||||||
|
// to the new diagnostic data model, we'll look for a primary
|
||||||
|
// message from the primary annotation. This is because most
|
||||||
|
// ty diagnostics are created with an empty diagnostic
|
||||||
|
// message and instead attach the message to the annotation.
|
||||||
|
// Fixing this will require touching basically every diagnostic
|
||||||
|
// in ty, so we do it this way for now to match the old
|
||||||
|
// semantics. ---AG
|
||||||
|
self.primary_annotation()
|
||||||
|
.and_then(|ann| ann.get_message())
|
||||||
|
.unwrap_or_default()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Introspects this diagnostic and returns what kind of "primary" message
|
/// Introspects this diagnostic and returns what kind of "primary" message
|
||||||
|
|
@ -179,35 +197,35 @@ impl Diagnostic {
|
||||||
/// contains *essential* information or context for understanding the
|
/// contains *essential* information or context for understanding the
|
||||||
/// diagnostic.
|
/// diagnostic.
|
||||||
///
|
///
|
||||||
|
/// The reason why we don't just always return both the main diagnostic
|
||||||
|
/// message and the primary annotation message is because this was written
|
||||||
|
/// in the midst of an incremental migration of ty over to the new
|
||||||
|
/// diagnostic data model. At time of writing, diagnostics were still
|
||||||
|
/// constructed in the old model where the main diagnostic message and the
|
||||||
|
/// primary annotation message were not distinguished from each other. So
|
||||||
|
/// for now, we carefully return what kind of messages this diagnostic
|
||||||
|
/// contains. In effect, if this diagnostic has a non-empty main message
|
||||||
|
/// *and* a non-empty primary annotation message, then the diagnostic is
|
||||||
|
/// 100% using the new diagnostic data model and we can format things
|
||||||
|
/// appropriately.
|
||||||
|
///
|
||||||
/// The type returned implements the `std::fmt::Display` trait. In most
|
/// The type returned implements the `std::fmt::Display` trait. In most
|
||||||
/// cases, just converting it to a string (or printing it) will do what
|
/// cases, just converting it to a string (or printing it) will do what
|
||||||
/// you want.
|
/// you want.
|
||||||
pub fn concise_message(&self) -> ConciseMessage<'_> {
|
pub fn concise_message(&self) -> ConciseMessage<'_> {
|
||||||
if let Some(custom_message) = &self.inner.custom_concise_message {
|
|
||||||
return ConciseMessage::Custom(custom_message.as_str());
|
|
||||||
}
|
|
||||||
|
|
||||||
let main = self.inner.message.as_str();
|
let main = self.inner.message.as_str();
|
||||||
let annotation = self
|
let annotation = self
|
||||||
.primary_annotation()
|
.primary_annotation()
|
||||||
.and_then(|ann| ann.get_message())
|
.and_then(|ann| ann.get_message())
|
||||||
.unwrap_or_default();
|
.unwrap_or_default();
|
||||||
if annotation.is_empty() {
|
match (main.is_empty(), annotation.is_empty()) {
|
||||||
ConciseMessage::MainDiagnostic(main)
|
(false, true) => ConciseMessage::MainDiagnostic(main),
|
||||||
} else {
|
(true, false) => ConciseMessage::PrimaryAnnotation(annotation),
|
||||||
ConciseMessage::Both { main, annotation }
|
(false, false) => ConciseMessage::Both { main, annotation },
|
||||||
|
(true, true) => ConciseMessage::Empty,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
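For readers following the data-model migration described in the doc comment above, the mapping implemented by `concise_message` boils down to a match on which of the two messages is empty. A simplified, self-contained sketch of that logic, using plain strings instead of the crate's `ConciseMessage` type:

fn concise(main: &str, annotation: &str) -> String {
    match (main.is_empty(), annotation.is_empty()) {
        // New model: the diagnostic itself carries the message.
        (false, true) => main.to_string(),
        // Old model: the message lives on the primary annotation.
        (true, false) => annotation.to_string(),
        // Fully migrated diagnostics can carry both.
        (false, false) => format!("{main}: {annotation}"),
        // Nothing to show.
        (true, true) => String::new(),
    }
}

fn main() {
    // An old-model diagnostic with an empty main message falls back to the annotation.
    assert_eq!(concise("", "`os` imported but unused"), "`os` imported but unused");
}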
|
|
||||||
/// Set a custom message for the concise formatting of this diagnostic.
|
|
||||||
///
|
|
||||||
/// This overrides the default behavior of generating a concise message
|
|
||||||
/// from the main diagnostic message and the primary annotation.
|
|
||||||
pub fn set_concise_message(&mut self, message: impl IntoDiagnosticMessage) {
|
|
||||||
Arc::make_mut(&mut self.inner).custom_concise_message =
|
|
||||||
Some(message.into_diagnostic_message());
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the severity of this diagnostic.
|
/// Returns the severity of this diagnostic.
|
||||||
///
|
///
|
||||||
/// Note that this may be different than the severity of sub-diagnostics.
|
/// Note that this may be different than the severity of sub-diagnostics.
|
||||||
|
|
@ -321,13 +339,6 @@ impl Diagnostic {
|
||||||
Arc::make_mut(&mut self.inner).fix = Some(fix);
|
Arc::make_mut(&mut self.inner).fix = Some(fix);
|
||||||
}
|
}
|
||||||
|
|
||||||
/// If `fix` is `Some`, set the fix for this diagnostic.
|
|
||||||
pub fn set_optional_fix(&mut self, fix: Option<Fix>) {
|
|
||||||
if let Some(fix) = fix {
|
|
||||||
self.set_fix(fix);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Remove the fix for this diagnostic.
|
/// Remove the fix for this diagnostic.
|
||||||
pub fn remove_fix(&mut self) {
|
pub fn remove_fix(&mut self) {
|
||||||
Arc::make_mut(&mut self.inner).fix = None;
|
Arc::make_mut(&mut self.inner).fix = None;
|
||||||
|
|
@ -345,14 +356,6 @@ impl Diagnostic {
|
||||||
.is_some_and(|fix| fix.applies(config.fix_applicability))
|
.is_some_and(|fix| fix.applies(config.fix_applicability))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn documentation_url(&self) -> Option<&str> {
|
|
||||||
self.inner.documentation_url.as_deref()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn set_documentation_url(&mut self, url: Option<String>) {
|
|
||||||
Arc::make_mut(&mut self.inner).documentation_url = url;
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns the offset of the parent statement for this diagnostic if it exists.
|
/// Returns the offset of the parent statement for this diagnostic if it exists.
|
||||||
///
|
///
|
||||||
/// This is primarily used for checking noqa/secondary code suppressions.
|
/// This is primarily used for checking noqa/secondary code suppressions.
|
||||||
|
|
@ -426,6 +429,28 @@ impl Diagnostic {
|
||||||
.map(|sub| sub.inner.message.as_str())
|
.map(|sub| sub.inner.message.as_str())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Returns the URL for the rule documentation, if it exists.
|
||||||
|
pub fn to_ruff_url(&self) -> Option<String> {
|
||||||
|
match self.id() {
|
||||||
|
DiagnosticId::Panic
|
||||||
|
| DiagnosticId::Io
|
||||||
|
| DiagnosticId::InvalidSyntax
|
||||||
|
| DiagnosticId::RevealedType
|
||||||
|
| DiagnosticId::UnknownRule
|
||||||
|
| DiagnosticId::InvalidGlob
|
||||||
|
| DiagnosticId::EmptyInclude
|
||||||
|
| DiagnosticId::UnnecessaryOverridesSection
|
||||||
|
| DiagnosticId::UselessOverridesSection
|
||||||
|
| DiagnosticId::DeprecatedSetting
|
||||||
|
| DiagnosticId::Unformatted
|
||||||
|
| DiagnosticId::InvalidCliOption
|
||||||
|
| DiagnosticId::InternalError => None,
|
||||||
|
DiagnosticId::Lint(lint_name) => {
|
||||||
|
Some(format!("{}/rules/{lint_name}", env!("CARGO_PKG_HOMEPAGE")))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Returns the filename for the message.
|
/// Returns the filename for the message.
|
||||||
///
|
///
|
||||||
/// Panics if the diagnostic has no primary span, or if its file is not a `SourceFile`.
|
/// Panics if the diagnostic has no primary span, or if its file is not a `SourceFile`.
|
||||||
|
|
@ -505,10 +530,8 @@ impl Diagnostic {
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
|
#[derive(Debug, Clone, Eq, PartialEq, Hash, get_size2::GetSize)]
|
||||||
struct DiagnosticInner {
|
struct DiagnosticInner {
|
||||||
id: DiagnosticId,
|
id: DiagnosticId,
|
||||||
documentation_url: Option<String>,
|
|
||||||
severity: Severity,
|
severity: Severity,
|
||||||
message: DiagnosticMessage,
|
message: DiagnosticMessage,
|
||||||
custom_concise_message: Option<DiagnosticMessage>,
|
|
||||||
annotations: Vec<Annotation>,
|
annotations: Vec<Annotation>,
|
||||||
subs: Vec<SubDiagnostic>,
|
subs: Vec<SubDiagnostic>,
|
||||||
fix: Option<Fix>,
|
fix: Option<Fix>,
|
||||||
|
|
@ -660,6 +683,18 @@ impl SubDiagnostic {
|
||||||
/// contains *essential* information or context for understanding the
|
/// contains *essential* information or context for understanding the
|
||||||
/// diagnostic.
|
/// diagnostic.
|
||||||
///
|
///
|
||||||
|
/// The reason why we don't just always return both the main diagnostic
|
||||||
|
/// message and the primary annotation message is because this was written
|
||||||
|
/// in the midst of an incremental migration of ty over to the new
|
||||||
|
/// diagnostic data model. At time of writing, diagnostics were still
|
||||||
|
/// constructed in the old model where the main diagnostic message and the
|
||||||
|
/// primary annotation message were not distinguished from each other. So
|
||||||
|
/// for now, we carefully return what kind of messages this diagnostic
|
||||||
|
/// contains. In effect, if this diagnostic has a non-empty main message
|
||||||
|
/// *and* a non-empty primary annotation message, then the diagnostic is
|
||||||
|
/// 100% using the new diagnostic data model and we can format things
|
||||||
|
/// appropriately.
|
||||||
|
///
|
||||||
/// The type returned implements the `std::fmt::Display` trait. In most
|
/// The type returned implements the `std::fmt::Display` trait. In most
|
||||||
/// cases, just converting it to a string (or printing it) will do what
|
/// cases, just converting it to a string (or printing it) will do what
|
||||||
/// you want.
|
/// you want.
|
||||||
|
|
@ -669,10 +704,11 @@ impl SubDiagnostic {
|
||||||
.primary_annotation()
|
.primary_annotation()
|
||||||
.and_then(|ann| ann.get_message())
|
.and_then(|ann| ann.get_message())
|
||||||
.unwrap_or_default();
|
.unwrap_or_default();
|
||||||
if annotation.is_empty() {
|
match (main.is_empty(), annotation.is_empty()) {
|
||||||
ConciseMessage::MainDiagnostic(main)
|
(false, true) => ConciseMessage::MainDiagnostic(main),
|
||||||
} else {
|
(true, false) => ConciseMessage::PrimaryAnnotation(annotation),
|
||||||
ConciseMessage::Both { main, annotation }
|
(false, false) => ConciseMessage::Both { main, annotation },
|
||||||
|
(true, true) => ConciseMessage::Empty,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -842,10 +878,6 @@ impl Annotation {
|
||||||
pub fn hide_snippet(&mut self, yes: bool) {
|
pub fn hide_snippet(&mut self, yes: bool) {
|
||||||
self.hide_snippet = yes;
|
self.hide_snippet = yes;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_primary(&self) -> bool {
|
|
||||||
self.is_primary
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Tags that can be associated with an annotation.
|
/// Tags that can be associated with an annotation.
|
||||||
|
|
@ -1466,12 +1498,28 @@ pub enum DiagnosticFormat {
|
||||||
pub enum ConciseMessage<'a> {
|
pub enum ConciseMessage<'a> {
|
||||||
/// A diagnostic contains a non-empty main message and an empty
|
/// A diagnostic contains a non-empty main message and an empty
|
||||||
/// primary annotation message.
|
/// primary annotation message.
|
||||||
|
///
|
||||||
|
/// This strongly suggests that the diagnostic is using the
|
||||||
|
/// "new" data model.
|
||||||
MainDiagnostic(&'a str),
|
MainDiagnostic(&'a str),
|
||||||
|
/// A diagnostic contains an empty main message and a non-empty
|
||||||
|
/// primary annotation message.
|
||||||
|
///
|
||||||
|
/// This strongly suggests that the diagnostic is using the
|
||||||
|
/// "old" data model.
|
||||||
|
PrimaryAnnotation(&'a str),
|
||||||
/// A diagnostic contains a non-empty main message and a non-empty
|
/// A diagnostic contains a non-empty main message and a non-empty
|
||||||
/// primary annotation message.
|
/// primary annotation message.
|
||||||
|
///
|
||||||
|
/// This strongly suggests that the diagnostic is using the
|
||||||
|
/// "new" data model.
|
||||||
Both { main: &'a str, annotation: &'a str },
|
Both { main: &'a str, annotation: &'a str },
|
||||||
/// A custom concise message has been provided.
|
/// A diagnostic contains an empty main message and an empty
|
||||||
Custom(&'a str),
|
/// primary annotation message.
|
||||||
|
///
|
||||||
|
/// This indicates that the diagnostic is probably using the old
|
||||||
|
/// model.
|
||||||
|
Empty,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl std::fmt::Display for ConciseMessage<'_> {
|
impl std::fmt::Display for ConciseMessage<'_> {
|
||||||
|
|
@ -1480,12 +1528,13 @@ impl std::fmt::Display for ConciseMessage<'_> {
|
||||||
ConciseMessage::MainDiagnostic(main) => {
|
ConciseMessage::MainDiagnostic(main) => {
|
||||||
write!(f, "{main}")
|
write!(f, "{main}")
|
||||||
}
|
}
|
||||||
|
ConciseMessage::PrimaryAnnotation(annotation) => {
|
||||||
|
write!(f, "{annotation}")
|
||||||
|
}
|
||||||
ConciseMessage::Both { main, annotation } => {
|
ConciseMessage::Both { main, annotation } => {
|
||||||
write!(f, "{main}: {annotation}")
|
write!(f, "{main}: {annotation}")
|
||||||
}
|
}
|
||||||
ConciseMessage::Custom(message) => {
|
ConciseMessage::Empty => Ok(()),
|
||||||
write!(f, "{message}")
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -205,7 +205,6 @@ impl<'a> Resolved<'a> {
|
||||||
struct ResolvedDiagnostic<'a> {
|
struct ResolvedDiagnostic<'a> {
|
||||||
level: AnnotateLevel,
|
level: AnnotateLevel,
|
||||||
id: Option<String>,
|
id: Option<String>,
|
||||||
documentation_url: Option<String>,
|
|
||||||
message: String,
|
message: String,
|
||||||
annotations: Vec<ResolvedAnnotation<'a>>,
|
annotations: Vec<ResolvedAnnotation<'a>>,
|
||||||
is_fixable: bool,
|
is_fixable: bool,
|
||||||
|
|
@ -241,12 +240,12 @@ impl<'a> ResolvedDiagnostic<'a> {
|
||||||
// `DisplaySet::format_annotation` for both cases, but this is a small hack to improve
|
// `DisplaySet::format_annotation` for both cases, but this is a small hack to improve
|
||||||
// the formatting of syntax errors for now. This should also be kept consistent with the
|
// the formatting of syntax errors for now. This should also be kept consistent with the
|
||||||
// concise formatting.
|
// concise formatting.
|
||||||
-diag.secondary_code().map_or_else(
+Some(diag.secondary_code().map_or_else(
 || format!("{id}:", id = diag.inner.id),
 |code| code.to_string(),
-)
+))
 } else {
-diag.inner.id.to_string()
+Some(diag.inner.id.to_string())
 };

 let level = if config.hide_severity {
@@ -257,8 +256,7 @@ impl<'a> ResolvedDiagnostic<'a> {

 ResolvedDiagnostic {
 level,
-id: Some(id),
-documentation_url: diag.documentation_url().map(ToString::to_string),
+id,
 message: diag.inner.message.as_str().to_string(),
 annotations,
 is_fixable: config.show_fix_status && diag.has_applicable_fix(config),
@@ -289,7 +287,6 @@ impl<'a> ResolvedDiagnostic<'a> {
 ResolvedDiagnostic {
 level: diag.inner.severity.to_annotate(),
 id: None,
-documentation_url: None,
 message: diag.inner.message.as_str().to_string(),
 annotations,
 is_fixable: false,
@@ -388,7 +385,6 @@ impl<'a> ResolvedDiagnostic<'a> {
 RenderableDiagnostic {
 level: self.level,
 id: self.id.as_deref(),
-documentation_url: self.documentation_url.as_deref(),
 message: &self.message,
 snippets_by_input,
 is_fixable: self.is_fixable,
@@ -489,7 +485,6 @@ struct RenderableDiagnostic<'r> {
 /// An ID is always present for top-level diagnostics and always absent for
 /// sub-diagnostics.
 id: Option<&'r str>,
-documentation_url: Option<&'r str>,
 /// The message emitted with the diagnostic, before any snippets are
 /// rendered.
 message: &'r str,
@@ -524,7 +519,7 @@ impl RenderableDiagnostic<'_> {
 .is_fixable(self.is_fixable)
 .lineno_offset(self.header_offset);
 if let Some(id) = self.id {
-message = message.id_with_url(id, self.documentation_url);
+message = message.id(id);
 }
 message.snippets(snippets)
 }
@@ -2881,12 +2876,6 @@ watermelon
 self.diag.help(message);
 self
 }
-
-/// Set the documentation URL for the diagnostic.
-pub(super) fn documentation_url(mut self, url: impl Into<String>) -> DiagnosticBuilder<'e> {
-self.diag.set_documentation_url(Some(url.into()));
-self
-}
 }

 /// A helper builder for tersely populating a `SubDiagnostic`.
@@ -3001,7 +2990,6 @@ def fibonacci(n):
 TextSize::from(10),
 ))))
 .noqa_offset(TextSize::from(7))
-.documentation_url("https://docs.astral.sh/ruff/rules/unused-import")
 .build(),
 env.builder(
 "unused-variable",
@@ -3016,13 +3004,11 @@ def fibonacci(n):
 TextSize::from(99),
 )))
 .noqa_offset(TextSize::from(94))
-.documentation_url("https://docs.astral.sh/ruff/rules/unused-variable")
 .build(),
 env.builder("undefined-name", Severity::Error, "Undefined name `a`")
 .primary("undef.py", "1:3", "1:4", "")
 .secondary_code("F821")
 .noqa_offset(TextSize::from(3))
-.documentation_url("https://docs.astral.sh/ruff/rules/undefined-name")
 .build(),
 ];

@@ -3137,7 +3123,6 @@ if call(foo
 TextSize::from(19),
 ))))
 .noqa_offset(TextSize::from(16))
-.documentation_url("https://docs.astral.sh/ruff/rules/unused-import")
 .build(),
 env.builder(
 "unused-import",
@@ -3152,7 +3137,6 @@ if call(foo
 TextSize::from(40),
 ))))
 .noqa_offset(TextSize::from(35))
-.documentation_url("https://docs.astral.sh/ruff/rules/unused-import")
 .build(),
 env.builder(
 "unused-variable",
@@ -3167,7 +3151,6 @@ if call(foo
 TextSize::from(104),
 ))))
 .noqa_offset(TextSize::from(98))
-.documentation_url("https://docs.astral.sh/ruff/rules/unused-variable")
 .build(),
 ];

@@ -1,6 +1,6 @@
 use crate::diagnostic::{
 Diagnostic, DisplayDiagnosticConfig, Severity,
-stylesheet::{DiagnosticStylesheet, fmt_styled, fmt_with_hyperlink},
+stylesheet::{DiagnosticStylesheet, fmt_styled},
 };

 use super::FileResolver;
@@ -62,29 +62,18 @@ impl<'a> ConciseRenderer<'a> {
 }
 write!(f, "{sep} ")?;
 }
-
 if self.config.hide_severity {
 if let Some(code) = diag.secondary_code() {
 write!(
 f,
 "{code} ",
-code = fmt_styled(
-fmt_with_hyperlink(&code, diag.documentation_url(), &stylesheet),
-stylesheet.secondary_code
-)
+code = fmt_styled(code, stylesheet.secondary_code)
 )?;
 } else {
 write!(
 f,
 "{id}: ",
-id = fmt_styled(
-fmt_with_hyperlink(
-&diag.inner.id,
-diag.documentation_url(),
-&stylesheet
-),
-stylesheet.secondary_code
-)
+id = fmt_styled(diag.inner.id.as_str(), stylesheet.secondary_code)
 )?;
 }
 if self.config.show_fix_status {
@@ -104,10 +93,7 @@ impl<'a> ConciseRenderer<'a> {
 f,
 "{severity}[{id}] ",
 severity = fmt_styled(severity, severity_style),
-id = fmt_styled(
-fmt_with_hyperlink(&diag.id(), diag.documentation_url(), &stylesheet),
-stylesheet.emphasis
-)
+id = fmt_styled(diag.id(), stylesheet.emphasis)
 )?;
 }

@@ -49,8 +49,7 @@ impl<'a> FullRenderer<'a> {
 .help(stylesheet.help)
 .line_no(stylesheet.line_no)
 .emphasis(stylesheet.emphasis)
-.none(stylesheet.none)
-.hyperlink(stylesheet.hyperlink);
+.none(stylesheet.none);

 for diag in diagnostics {
 let resolved = Resolved::new(self.resolver, diag, self.config);
@@ -113,16 +112,16 @@ impl std::fmt::Display for Diff<'_> {
 // `None`, indicating a regular script file, all the lines will be in one "cell" under the
 // `None` key.
 let cells = if let Some(notebook_index) = &self.notebook_index {
-let mut last_cell_index = OneIndexed::MIN;
+let mut last_cell = OneIndexed::MIN;
 let mut cells: Vec<(Option<OneIndexed>, TextSize)> = Vec::new();
-for cell in notebook_index.iter() {
-if cell.cell_index() != last_cell_index {
-let offset = source_code.line_start(cell.start_row());
-cells.push((Some(last_cell_index), offset));
-last_cell_index = cell.cell_index();
+for (row, cell) in notebook_index.iter() {
+if cell != last_cell {
+let offset = source_code.line_start(row);
+cells.push((Some(last_cell), offset));
+last_cell = cell;
 }
 }
-cells.push((Some(last_cell_index), source_text.text_len()));
+cells.push((Some(last_cell), source_text.text_len()));
 cells
 } else {
 vec![(None, source_text.text_len())]
@@ -704,7 +703,52 @@ print()
 env.show_fix_status(true);
 env.fix_applicability(Applicability::DisplayOnly);

-insta::assert_snapshot!(env.render_diagnostics(&diagnostics));
+insta::assert_snapshot!(env.render_diagnostics(&diagnostics), @r"
+error[unused-import][*]: `os` imported but unused
+--> notebook.ipynb:cell 1:2:8
+|
+1 | # cell 1
+2 | import os
+| ^^
+|
+help: Remove unused import: `os`
+::: cell 1
+1 | # cell 1
+- import os
+
+error[unused-import][*]: `math` imported but unused
+--> notebook.ipynb:cell 2:2:8
+|
+1 | # cell 2
+2 | import math
+| ^^^^
+3 |
+4 | print('hello world')
+|
+help: Remove unused import: `math`
+::: cell 2
+1 | # cell 2
+- import math
+2 |
+3 | print('hello world')
+
+error[unused-variable][*]: Local variable `x` is assigned to but never used
+--> notebook.ipynb:cell 3:4:5
+|
+2 | def foo():
+3 | print()
+4 | x = 1
+| ^
+|
+help: Remove assignment to unused variable `x`
+::: cell 3
+1 | # cell 3
+2 | def foo():
+3 | print()
+- x = 1
+4 |
+note: This is an unsafe fix and may change runtime behavior
+");
 }

 #[test]
@@ -724,7 +768,31 @@ print()
 }
 *fix = Fix::unsafe_edits(edits.remove(0), edits);

-insta::assert_snapshot!(env.render(&diagnostic));
+insta::assert_snapshot!(env.render(&diagnostic), @r"
+error[unused-import][*]: `os` imported but unused
+--> notebook.ipynb:cell 1:2:8
+|
+1 | # cell 1
+2 | import os
+| ^^
+|
+help: Remove unused import: `os`
+::: cell 1
+1 | # cell 1
+- import os
+::: cell 2
+1 | # cell 2
+- import math
+2 |
+3 | print('hello world')
+::: cell 3
+1 | # cell 3
+2 | def foo():
+3 | print()
+- x = 1
+4 |
+note: This is an unsafe fix and may change runtime behavior
+");
 }

 /// Carriage return (`\r`) is a valid line-ending in Python, so we should normalize this to a

@@ -100,7 +100,7 @@ pub(super) fn diagnostic_to_json<'a>(
 if config.preview {
 JsonDiagnostic {
 code: diagnostic.secondary_code_or_id(),
-url: diagnostic.documentation_url(),
+url: diagnostic.to_ruff_url(),
 message: diagnostic.body(),
 fix,
 cell: notebook_cell_index,
@@ -112,7 +112,7 @@ pub(super) fn diagnostic_to_json<'a>(
 } else {
 JsonDiagnostic {
 code: diagnostic.secondary_code_or_id(),
-url: diagnostic.documentation_url(),
+url: diagnostic.to_ruff_url(),
 message: diagnostic.body(),
 fix,
 cell: notebook_cell_index,
@@ -228,7 +228,7 @@ pub(crate) struct JsonDiagnostic<'a> {
 location: Option<JsonLocation>,
 message: &'a str,
 noqa_row: Option<OneIndexed>,
-url: Option<&'a str>,
+url: Option<String>,
 }

 #[derive(Serialize)]
@@ -294,10 +294,7 @@ mod tests {
 env.format(DiagnosticFormat::Json);
 env.preview(false);

-let diag = env
-.err()
-.documentation_url("https://docs.astral.sh/ruff/rules/test-diagnostic")
-.build();
+let diag = env.err().build();

 insta::assert_snapshot!(
 env.render(&diag),
@@ -331,10 +328,7 @@ mod tests {
 env.format(DiagnosticFormat::Json);
 env.preview(true);

-let diag = env
-.err()
-.documentation_url("https://docs.astral.sh/ruff/rules/test-diagnostic")
-.build();
+let diag = env.err().build();

 insta::assert_snapshot!(
 env.render(&diag),

@@ -82,7 +82,7 @@ fn diagnostic_to_rdjson<'a>(
 value: diagnostic
 .secondary_code()
 .map_or_else(|| diagnostic.name(), |code| code.as_str()),
-url: diagnostic.documentation_url(),
+url: diagnostic.to_ruff_url(),
 },
 suggestions: rdjson_suggestions(
 edits,
@@ -182,7 +182,7 @@ impl RdjsonRange {
 #[derive(Serialize)]
 struct RdjsonCode<'a> {
 #[serde(skip_serializing_if = "Option::is_none")]
-url: Option<&'a str>,
+url: Option<String>,
 value: &'a str,
 }

@@ -217,10 +217,7 @@ mod tests {
 env.format(DiagnosticFormat::Rdjson);
 env.preview(false);

-let diag = env
-.err()
-.documentation_url("https://docs.astral.sh/ruff/rules/test-diagnostic")
-.build();
+let diag = env.err().build();

 insta::assert_snapshot!(env.render(&diag));
 }
@@ -231,10 +228,7 @@ mod tests {
 env.format(DiagnosticFormat::Rdjson);
 env.preview(true);

-let diag = env
-.err()
-.documentation_url("https://docs.astral.sh/ruff/rules/test-diagnostic")
-.build();
+let diag = env.err().build();

 insta::assert_snapshot!(env.render(&diag));
 }

@@ -1,48 +0,0 @@
----
-source: crates/ruff_db/src/diagnostic/render/full.rs
-expression: env.render_diagnostics(&diagnostics)
----
-error[unused-import][*]: `os` imported but unused
---> notebook.ipynb:cell 1:2:8
-|
-1 | # cell 1
-2 | import os
-| ^^
-|
-help: Remove unused import: `os`
-::: cell 1
-1 | # cell 1
-- import os
-
-error[unused-import][*]: `math` imported but unused
---> notebook.ipynb:cell 2:2:8
-|
-1 | # cell 2
-2 | import math
-| ^^^^
-3 |
-4 | print('hello world')
-|
-help: Remove unused import: `math`
-::: cell 2
-1 | # cell 2
-- import math
-2 |
-3 | print('hello world')
-
-error[unused-variable][*]: Local variable `x` is assigned to but never used
---> notebook.ipynb:cell 3:4:5
-|
-2 | def foo():
-3 | print()
-4 | x = 1
-| ^
-|
-help: Remove assignment to unused variable `x`
-::: cell 3
-1 | # cell 3
-2 | def foo():
-3 | print()
-- x = 1
-4 |
-note: This is an unsafe fix and may change runtime behavior

@@ -1,27 +0,0 @@
----
-source: crates/ruff_db/src/diagnostic/render/full.rs
-expression: env.render(&diagnostic)
----
-error[unused-import][*]: `os` imported but unused
---> notebook.ipynb:cell 1:2:8
-|
-1 | # cell 1
-2 | import os
-| ^^
-|
-help: Remove unused import: `os`
-::: cell 1
-1 | # cell 1
-- import os
-::: cell 2
-1 | # cell 2
-- import math
-2 |
-3 | print('hello world')
-::: cell 3
-1 | # cell 3
-2 | def foo():
-3 | print()
-- x = 1
-4 |
-note: This is an unsafe fix and may change runtime behavior

@@ -31,43 +31,6 @@ where
 FmtStyled { content, style }
 }

-pub(super) fn fmt_with_hyperlink<'a, T>(
-content: T,
-url: Option<&'a str>,
-stylesheet: &DiagnosticStylesheet,
-) -> impl std::fmt::Display + 'a
-where
-T: std::fmt::Display + 'a,
-{
-struct FmtHyperlink<'a, T> {
-content: T,
-url: Option<&'a str>,
-}
-
-impl<T> std::fmt::Display for FmtHyperlink<'_, T>
-where
-T: std::fmt::Display,
-{
-fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-if let Some(url) = self.url {
-write!(f, "\x1B]8;;{url}\x1B\\")?;
-}
-
-self.content.fmt(f)?;
-
-if self.url.is_some() {
-f.write_str("\x1B]8;;\x1B\\")?;
-}
-
-Ok(())
-}
-}
-
-let url = if stylesheet.hyperlink { url } else { None };
-
-FmtHyperlink { content, url }
-}
-
 #[derive(Clone, Debug)]
 pub struct DiagnosticStylesheet {
 pub(crate) error: Style,
@@ -84,7 +47,6 @@ pub struct DiagnosticStylesheet {
 pub(crate) deletion: Style,
 pub(crate) insertion_line_no: Style,
 pub(crate) deletion_line_no: Style,
-pub(crate) hyperlink: bool,
 }

 impl Default for DiagnosticStylesheet {
@@ -97,8 +59,6 @@ impl DiagnosticStylesheet {
 /// Default terminal styling
 pub fn styled() -> Self {
 let bright_blue = AnsiColor::BrightBlue.on_default();
-
-let hyperlink = supports_hyperlinks::supports_hyperlinks();
 Self {
 error: AnsiColor::BrightRed.on_default().effects(Effects::BOLD),
 warning: AnsiColor::Yellow.on_default().effects(Effects::BOLD),
@@ -114,7 +74,6 @@ impl DiagnosticStylesheet {
 deletion: AnsiColor::Red.on_default(),
 insertion_line_no: AnsiColor::Green.on_default().effects(Effects::BOLD),
 deletion_line_no: AnsiColor::Red.on_default().effects(Effects::BOLD),
-hyperlink,
 }
 }

@@ -134,7 +93,6 @@ impl DiagnosticStylesheet {
 deletion: Style::new(),
 insertion_line_no: Style::new(),
 deletion_line_no: Style::new(),
-hyperlink: false,
 }
 }
 }

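Note on the escape sequences used by the `fmt_with_hyperlink` helper that the left-hand side above carries: they are OSC 8 terminal hyperlinks. The following is a minimal, self-contained sketch of that technique only; `print_link` is a hypothetical standalone function and not part of ruff's API.

/// Sketch of an OSC 8 terminal hyperlink, mirroring the escape sequences
/// seen in the removed `fmt_with_hyperlink` helper. Hypothetical example code.
fn print_link(text: &str, url: &str) {
    // Opening sequence: ESC ] 8 ; ; <url> ESC \
    print!("\x1B]8;;{url}\x1B\\");
    // The visible text of the link.
    print!("{text}");
    // Closing sequence ends the hyperlink.
    println!("\x1B]8;;\x1B\\");
}

fn main() {
    // Terminals without OSC 8 support simply display the plain text.
    print_link("unused-import", "https://docs.astral.sh/ruff/rules/unused-import");
}
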
@@ -475,12 +475,6 @@ impl File {
 self.path(db).as_str().ends_with("__init__.pyi")
 }

-/// Returns `true` if the file is an `__init__.pyi`
-pub fn is_package(self, db: &dyn Db) -> bool {
-let path = self.path(db).as_str();
-path.ends_with("__init__.pyi") || path.ends_with("__init__.py")
-}
-
 pub fn source_type(self, db: &dyn Db) -> PySourceType {
 match self.path(db) {
 FilePath::System(path) => path

@@ -21,11 +21,7 @@ use crate::source::source_text;
 /// reflected in the changed AST offsets.
 /// The other reason is that Ruff's AST doesn't implement `Eq` which Salsa requires
 /// for determining if a query result is unchanged.
-///
-/// The LRU capacity of 200 was picked without any empirical evidence that it's optimal,
-/// instead it's a wild guess that it should be unlikely that incremental changes involve
-/// more than 200 modules. Parsed ASTs within the same revision are never evicted by Salsa.
-#[salsa::tracked(returns(ref), no_eq, heap_size=ruff_memory_usage::heap_size, lru=200)]
+#[salsa::tracked(returns(ref), no_eq, heap_size=ruff_memory_usage::heap_size)]
 pub fn parsed_module(db: &dyn Db, file: File) -> ParsedModule {
 let _span = tracing::trace_span!("parsed_module", ?file).entered();

@@ -96,9 +92,14 @@ impl ParsedModule {
 self.inner.store(None);
 }

-/// Returns the file to which this module belongs.
-pub fn file(&self) -> File {
-self.file
+/// Returns the pointer address of this [`ParsedModule`].
+///
+/// The pointer uniquely identifies the module within the current Salsa revision,
+/// regardless of whether particular [`ParsedModuleRef`] instances are garbage collected.
+pub fn addr(&self) -> usize {
+// Note that the outer `Arc` in `inner` is stable across garbage collection, while the inner
+// `Arc` within the `ArcSwap` may change.
+Arc::as_ptr(&self.inner).addr()
 }
 }

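The new `addr` method above relies on pointer identity: the address of the outer `Arc` allocation is stable for as long as the value lives, so it can stand in for an identifier. A small standalone sketch of that idea follows, using plain `std` types and an `as usize` cast in place of the strict-provenance `.addr()` call used in the diff; the names here are illustrative only.

use std::sync::Arc;

fn main() {
    let module = Arc::new(vec![1, 2, 3]);
    // A clone shares the same allocation, so its pointer address matches.
    let same = Arc::clone(&module);
    // A separately constructed value lives at a different address.
    let other = Arc::new(vec![1, 2, 3]);

    let addr = |a: &Arc<Vec<i32>>| Arc::as_ptr(a) as usize;

    assert_eq!(addr(&module), addr(&same));
    assert_ne!(addr(&module), addr(&other));
    println!("module is at {:#x}", addr(&module));
}
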
@@ -7,7 +7,6 @@ use ruff_source_file::LineIndex;

 use crate::Db;
 use crate::files::{File, FilePath};
-use crate::system::System;

 /// Reads the source text of a python text file (must be valid UTF8) or notebook.
 #[salsa::tracked(heap_size=ruff_memory_usage::heap_size)]
@@ -16,7 +15,7 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
 let _span = tracing::trace_span!("source_text", file = %path).entered();
 let mut read_error = None;

-let kind = if is_notebook(db.system(), path) {
+let kind = if is_notebook(file.path(db)) {
 file.read_to_notebook(db)
 .unwrap_or_else(|error| {
 tracing::debug!("Failed to read notebook '{path}': {error}");
@@ -41,17 +40,18 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
 }
 }

-fn is_notebook(system: &dyn System, path: &FilePath) -> bool {
-let source_type = match path {
-FilePath::System(path) => system.source_type(path),
-FilePath::SystemVirtual(system_virtual) => system.virtual_path_source_type(system_virtual),
-FilePath::Vendored(_) => return false,
-};
-let with_extension_fallback =
-source_type.or_else(|| PySourceType::try_from_extension(path.extension()?));
-with_extension_fallback == Some(PySourceType::Ipynb)
+fn is_notebook(path: &FilePath) -> bool {
+match path {
+FilePath::System(system) => system.extension().is_some_and(|extension| {
+PySourceType::try_from_extension(extension) == Some(PySourceType::Ipynb)
+}),
+FilePath::SystemVirtual(system_virtual) => {
+system_virtual.extension().is_some_and(|extension| {
+PySourceType::try_from_extension(extension) == Some(PySourceType::Ipynb)
+})
+}
+FilePath::Vendored(_) => false,
+}
 }

 /// The source text of a file containing python code.

@@ -9,7 +9,6 @@ pub use os::OsSystem;

 use filetime::FileTime;
 use ruff_notebook::{Notebook, NotebookError};
-use ruff_python_ast::PySourceType;
 use std::error::Error;
 use std::fmt::{Debug, Formatter};
 use std::path::{Path, PathBuf};
@@ -17,11 +16,12 @@ use std::{fmt, io};
 pub use test::{DbWithTestSystem, DbWithWritableSystem, InMemorySystem, TestSystem};
 use walk_directory::WalkDirectoryBuilder;

+use crate::file_revision::FileRevision;
+
 pub use self::path::{
 DeduplicatedNestedPathsIter, SystemPath, SystemPathBuf, SystemVirtualPath,
 SystemVirtualPathBuf, deduplicate_nested_paths,
 };
-use crate::file_revision::FileRevision;

 mod memory_fs;
 #[cfg(feature = "os")]
@@ -66,35 +66,6 @@ pub trait System: Debug + Sync + Send {
 /// See [dunce::canonicalize] for more information.
 fn canonicalize_path(&self, path: &SystemPath) -> Result<SystemPathBuf>;

-/// Returns the source type for `path` if known or `None`.
-///
-/// The default is to always return `None`, assuming the system
-/// has no additional information and that the caller should
-/// rely on the file extension instead.
-///
-/// This is primarily used for the LSP integration to respect
-/// the chosen language (or the fact that it is a notebook) in
-/// the editor.
-fn source_type(&self, path: &SystemPath) -> Option<PySourceType> {
-let _ = path;
-None
-}
-
-/// Returns the source type for `path` if known or `None`.
-///
-/// The default is to always return `None`, assuming the system
-/// has no additional information and that the caller should
-/// rely on the file extension instead.
-///
-/// This is primarily used for the LSP integration to respect
-/// the chosen language (or the fact that it is a notebook) in
-/// the editor.
-fn virtual_path_source_type(&self, path: &SystemVirtualPath) -> Option<PySourceType> {
-let _ = path;
-
-None
-}
-
 /// Reads the content of the file at `path` into a [`String`].
 fn read_to_string(&self, path: &SystemPath) -> Result<String>;

@@ -667,13 +667,6 @@ impl Deref for SystemPathBuf {
 }
 }

-impl AsRef<Path> for SystemPathBuf {
-#[inline]
-fn as_ref(&self) -> &Path {
-self.0.as_std_path()
-}
-}
-
 impl<P: AsRef<SystemPath>> FromIterator<P> for SystemPathBuf {
 fn from_iter<I: IntoIterator<Item = P>>(iter: I) -> Self {
 let mut buf = SystemPathBuf::new();

@@ -144,8 +144,8 @@ fn emit_field(output: &mut String, name: &str, field: &OptionField, parents: &[S
 output.push('\n');

 if let Some(deprecated) = &field.deprecated {
-output.push_str("!!! warning \"Deprecated\"\n");
-output.push_str(" This option has been deprecated");
+output.push_str("> [!WARN] \"Deprecated\"\n");
+output.push_str("> This option has been deprecated");

 if let Some(since) = deprecated.since {
 write!(output, " in {since}").unwrap();
@@ -166,9 +166,8 @@ fn emit_field(output: &mut String, name: &str, field: &OptionField, parents: &[S
 output.push('\n');
 let _ = writeln!(output, "**Type**: `{}`", field.value_type);
 output.push('\n');
-output.push_str("**Example usage**:\n\n");
+output.push_str("**Example usage** (`pyproject.toml`):\n\n");
 output.push_str(&format_example(
-"pyproject.toml",
 &format_header(
 field.scope,
 field.example,
@@ -180,11 +179,11 @@ fn emit_field(output: &mut String, name: &str, field: &OptionField, parents: &[S
 output.push('\n');
 }

-fn format_example(title: &str, header: &str, content: &str) -> String {
+fn format_example(header: &str, content: &str) -> String {
 if header.is_empty() {
-format!("```toml title=\"{title}\"\n{content}\n```\n",)
+format!("```toml\n{content}\n```\n",)
 } else {
-format!("```toml title=\"{title}\"\n{header}\n{content}\n```\n",)
+format!("```toml\n{header}\n{content}\n```\n",)
 }
 }

@@ -39,7 +39,7 @@ impl Edit {

 /// Creates an edit that replaces the content in `range` with `content`.
 pub fn range_replacement(content: String, range: TextRange) -> Self {
-debug_assert!(!content.is_empty(), "Prefer `Edit::deletion`");
+debug_assert!(!content.is_empty(), "Prefer `Fix::deletion`");

 Self {
 content: Some(Box::from(content)),
@@ -149,10 +149,6 @@ impl Fix {
 &self.edits
 }

-pub fn into_edits(self) -> Vec<Edit> {
-self.edits
-}
-
 /// Return the [`Applicability`] of the [`Fix`].
 pub fn applicability(&self) -> Applicability {
 self.applicability

@@ -337,7 +337,7 @@ macro_rules! best_fitting {
 #[cfg(test)]
 mod tests {
 use crate::prelude::*;
-use crate::{FormatState, SimpleFormatOptions, VecBuffer};
+use crate::{FormatState, SimpleFormatOptions, VecBuffer, write};

 struct TestFormat;

@@ -385,8 +385,8 @@ mod tests {

 #[test]
 fn best_fitting_variants_print_as_lists() {
-use crate::Formatted;
 use crate::prelude::*;
+use crate::{Formatted, format, format_args};

 // The second variant below should be selected when printing at a width of 30
 let formatted_best_fitting = format!(

@@ -14,21 +14,14 @@ pub(crate) struct Collector<'a> {
 string_imports: StringImports,
 /// The collected imports from the Python AST.
 imports: Vec<CollectedImport>,
-/// Whether to detect type checking imports
-type_checking_imports: bool,
 }

 impl<'a> Collector<'a> {
-pub(crate) fn new(
-module_path: Option<&'a [String]>,
-string_imports: StringImports,
-type_checking_imports: bool,
-) -> Self {
+pub(crate) fn new(module_path: Option<&'a [String]>, string_imports: StringImports) -> Self {
 Self {
 module_path,
 string_imports,
 imports: Vec::new(),
-type_checking_imports,
 }
 }

@@ -98,25 +91,10 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
 }
 }
 }
-Stmt::If(ast::StmtIf {
-test,
-body,
-elif_else_clauses,
-range: _,
-node_index: _,
-}) => {
-// Skip TYPE_CHECKING blocks if not requested
-if self.type_checking_imports || !is_type_checking_condition(test) {
-self.visit_body(body);
-}
-
-for clause in elif_else_clauses {
-self.visit_elif_else_clause(clause);
-}
-}
 Stmt::FunctionDef(_)
 | Stmt::ClassDef(_)
 | Stmt::While(_)
+| Stmt::If(_)
 | Stmt::With(_)
 | Stmt::Match(_)
 | Stmt::Try(_)
@@ -174,30 +152,6 @@ impl<'ast> SourceOrderVisitor<'ast> for Collector<'_> {
 }
 }

-/// Check if an expression is a `TYPE_CHECKING` condition.
-///
-/// Returns `true` for:
-/// - `TYPE_CHECKING`
-/// - `typing.TYPE_CHECKING`
-///
-/// NOTE: Aliased `TYPE_CHECKING`, i.e. `import typing.TYPE_CHECKING as TC; if TC: ...`
-/// will not be detected!
-fn is_type_checking_condition(expr: &Expr) -> bool {
-match expr {
-// `if TYPE_CHECKING:`
-Expr::Name(ast::ExprName { id, .. }) => id.as_str() == "TYPE_CHECKING",
-// `if typing.TYPE_CHECKING:`
-Expr::Attribute(ast::ExprAttribute { value, attr, .. }) => {
-attr.as_str() == "TYPE_CHECKING"
-&& matches!(
-value.as_ref(),
-Expr::Name(ast::ExprName { id, .. }) if id.as_str() == "typing"
-)
-}
-_ => false,
-}
-}
-
 #[derive(Debug)]
 pub(crate) enum CollectedImport {
 /// The import was part of an `import` statement.

@@ -30,7 +30,6 @@ impl ModuleImports {
 path: &SystemPath,
 package: Option<&SystemPath>,
 string_imports: StringImports,
-type_checking_imports: bool,
 ) -> Result<Self> {
 // Parse the source code.
 let parsed = parse(source, ParseOptions::from(source_type))?;
@@ -39,17 +38,13 @@ impl ModuleImports {
 package.and_then(|package| to_module_path(package.as_std_path(), path.as_std_path()));

 // Collect the imports.
-let imports = Collector::new(
-module_path.as_deref(),
-string_imports,
-type_checking_imports,
-)
-.collect(parsed.syntax());
+let imports =
+Collector::new(module_path.as_deref(), string_imports).collect(parsed.syntax());

 // Resolve the imports.
 let mut resolved_imports = ModuleImports::default();
 for import in imports {
-for resolved in Resolver::new(db, path).resolve(import) {
+for resolved in Resolver::new(db).resolve(import) {
 if let Some(path) = resolved.as_system_path() {
 resolved_imports.insert(path.to_path_buf());
 }

@@ -1,9 +1,5 @@
-use ruff_db::files::{File, FilePath, system_path_to_file};
-use ruff_db::system::SystemPath;
-use ty_python_semantic::{
-ModuleName, resolve_module, resolve_module_confident, resolve_real_module,
-resolve_real_module_confident,
-};
+use ruff_db::files::FilePath;
+use ty_python_semantic::{ModuleName, resolve_module, resolve_real_module};

 use crate::ModuleDb;
 use crate::collector::CollectedImport;
@@ -11,15 +7,12 @@ use crate::collector::CollectedImport;
 /// Collect all imports for a given Python file.
 pub(crate) struct Resolver<'a> {
 db: &'a ModuleDb,
-file: Option<File>,
 }

 impl<'a> Resolver<'a> {
 /// Initialize a [`Resolver`] with a given [`ModuleDb`].
-pub(crate) fn new(db: &'a ModuleDb, path: &SystemPath) -> Self {
-// If we know the importing file we can potentially resolve more imports
-let file = system_path_to_file(db, path).ok();
-Self { db, file }
+pub(crate) fn new(db: &'a ModuleDb) -> Self {
+Self { db }
 }

 /// Resolve the [`CollectedImport`] into a [`FilePath`].
@@ -77,21 +70,13 @@ impl<'a> Resolver<'a> {

 /// Resolves a module name to a module.
 pub(crate) fn resolve_module(&self, module_name: &ModuleName) -> Option<&'a FilePath> {
-let module = if let Some(file) = self.file {
-resolve_module(self.db, file, module_name)?
-} else {
-resolve_module_confident(self.db, module_name)?
-};
+let module = resolve_module(self.db, module_name)?;
 Some(module.file(self.db)?.path(self.db))
 }

 /// Resolves a module name to a module (stubs not allowed).
 fn resolve_real_module(&self, module_name: &ModuleName) -> Option<&'a FilePath> {
-let module = if let Some(file) = self.file {
-resolve_real_module(self.db, file, module_name)?
-} else {
-resolve_real_module_confident(self.db, module_name)?
-};
+let module = resolve_real_module(self.db, module_name)?;
 Some(module.file(self.db)?.path(self.db))
 }
 }

@@ -6,7 +6,7 @@ use std::collections::BTreeMap;
 use std::fmt;
 use std::path::PathBuf;

-#[derive(Debug, Clone, CacheKey)]
+#[derive(Debug, Default, Clone, CacheKey)]
 pub struct AnalyzeSettings {
 pub exclude: FilePatternSet,
 pub preview: PreviewMode,
@@ -14,21 +14,6 @@ pub struct AnalyzeSettings {
 pub string_imports: StringImports,
 pub include_dependencies: BTreeMap<PathBuf, (PathBuf, Vec<String>)>,
 pub extension: ExtensionMapping,
-pub type_checking_imports: bool,
-}
-
-impl Default for AnalyzeSettings {
-fn default() -> Self {
-Self {
-exclude: FilePatternSet::default(),
-preview: PreviewMode::default(),
-target_version: PythonVersion::default(),
-string_imports: StringImports::default(),
-include_dependencies: BTreeMap::default(),
-extension: ExtensionMapping::default(),
-type_checking_imports: true,
-}
-}
 }

 impl fmt::Display for AnalyzeSettings {
@@ -44,7 +29,6 @@ impl fmt::Display for AnalyzeSettings {
 self.string_imports,
 self.extension | debug,
 self.include_dependencies | debug,
-self.type_checking_imports,
 ]
 }
 Ok(())

@@ -1,6 +1,6 @@
 [package]
 name = "ruff_linter"
-version = "0.14.9"
+version = "0.14.4"
 publish = false
 authors = { workspace = true }
 edition = { workspace = true }
@@ -35,7 +35,6 @@ anyhow = { workspace = true }
 bitflags = { workspace = true }
 clap = { workspace = true, features = ["derive", "string"], optional = true }
 colored = { workspace = true }
-compact_str = { workspace = true }
 fern = { workspace = true }
 glob = { workspace = true }
 globset = { workspace = true }

@@ -45,22 +45,3 @@ urllib.request.urlopen(urllib.request.Request(url))
 # https://github.com/astral-sh/ruff/issues/15522
 map(urllib.request.urlopen, [])
 foo = urllib.request.urlopen
-
-# https://github.com/astral-sh/ruff/issues/21462
-path = "https://example.com/data.csv"
-urllib.request.urlretrieve(path, "data.csv")
-url = "https://example.com/api"
-urllib.request.Request(url)
-
-# Test resolved f-strings and concatenated string literals
-fstring_url = f"https://example.com/data.csv"
-urllib.request.urlopen(fstring_url)
-urllib.request.Request(fstring_url)
-
-concatenated_url = "https://" + "example.com/data.csv"
-urllib.request.urlopen(concatenated_url)
-urllib.request.Request(concatenated_url)
-
-nested_concatenated = "http://" + "example.com" + "/data.csv"
-urllib.request.urlopen(nested_concatenated)
-urllib.request.Request(nested_concatenated)

@@ -28,11 +28,9 @@ yaml.load("{}", SafeLoader)
 yaml.load("{}", yaml.SafeLoader)
 yaml.load("{}", CSafeLoader)
 yaml.load("{}", yaml.CSafeLoader)
-yaml.load("{}", yaml.cyaml.CSafeLoader)
 yaml.load("{}", NewSafeLoader)
 yaml.load("{}", Loader=SafeLoader)
 yaml.load("{}", Loader=yaml.SafeLoader)
 yaml.load("{}", Loader=CSafeLoader)
 yaml.load("{}", Loader=yaml.CSafeLoader)
-yaml.load("{}", Loader=yaml.cyaml.CSafeLoader)
 yaml.load("{}", Loader=NewSafeLoader)

@@ -4,31 +4,3 @@ CommunityData("public", mpModel=0) # S508
 CommunityData("public", mpModel=1) # S508

 CommunityData("public", mpModel=2) # OK
-
-# New API paths
-import pysnmp.hlapi.asyncio
-import pysnmp.hlapi.v1arch
-import pysnmp.hlapi.v1arch.asyncio
-import pysnmp.hlapi.v1arch.asyncio.auth
-import pysnmp.hlapi.v3arch
-import pysnmp.hlapi.v3arch.asyncio
-import pysnmp.hlapi.v3arch.asyncio.auth
-import pysnmp.hlapi.auth
-
-pysnmp.hlapi.asyncio.CommunityData("public", mpModel=0) # S508
-pysnmp.hlapi.v1arch.asyncio.auth.CommunityData("public", mpModel=0) # S508
-pysnmp.hlapi.v1arch.asyncio.CommunityData("public", mpModel=0) # S508
-pysnmp.hlapi.v1arch.CommunityData("public", mpModel=0) # S508
-pysnmp.hlapi.v3arch.asyncio.auth.CommunityData("public", mpModel=0) # S508
-pysnmp.hlapi.v3arch.asyncio.CommunityData("public", mpModel=0) # S508
-pysnmp.hlapi.v3arch.CommunityData("public", mpModel=0) # S508
-pysnmp.hlapi.auth.CommunityData("public", mpModel=0) # S508
-
-pysnmp.hlapi.asyncio.CommunityData("public", mpModel=2) # OK
-pysnmp.hlapi.v1arch.asyncio.auth.CommunityData("public", mpModel=2) # OK
-pysnmp.hlapi.v1arch.asyncio.CommunityData("public", mpModel=2) # OK
-pysnmp.hlapi.v1arch.CommunityData("public", mpModel=2) # OK
-pysnmp.hlapi.v3arch.asyncio.auth.CommunityData("public", mpModel=2) # OK
-pysnmp.hlapi.v3arch.asyncio.CommunityData("public", mpModel=2) # OK
-pysnmp.hlapi.v3arch.CommunityData("public", mpModel=2) # OK
-pysnmp.hlapi.auth.CommunityData("public", mpModel=2) # OK

@@ -5,19 +5,3 @@ insecure = UsmUserData("securityName") # S509
 auth_no_priv = UsmUserData("securityName", "authName") # S509

 less_insecure = UsmUserData("securityName", "authName", "privName") # OK
-
-# New API paths
-import pysnmp.hlapi.asyncio
-import pysnmp.hlapi.v3arch.asyncio
-import pysnmp.hlapi.v3arch.asyncio.auth
-import pysnmp.hlapi.auth
-
-pysnmp.hlapi.asyncio.UsmUserData("user") # S509
-pysnmp.hlapi.v3arch.asyncio.UsmUserData("user") # S509
-pysnmp.hlapi.v3arch.asyncio.auth.UsmUserData("user") # S509
-pysnmp.hlapi.auth.UsmUserData("user") # S509
-
-pysnmp.hlapi.asyncio.UsmUserData("user", "authkey", "privkey") # OK
-pysnmp.hlapi.v3arch.asyncio.UsmUserData("user", "authkey", "privkey") # OK
-pysnmp.hlapi.v3arch.asyncio.auth.UsmUserData("user", "authkey", "privkey") # OK
-pysnmp.hlapi.auth.UsmUserData("user", "authkey", "privkey") # OK

@@ -199,9 +199,6 @@ def bytes_okay(value=bytes(1)):
 def int_okay(value=int("12")):
 pass

-# Allow immutable slice()
-def slice_okay(value=slice(1,2)):
-pass

 # Allow immutable complex() value
 def complex_okay(value=complex(1,2)):

@@ -52,16 +52,16 @@ def not_broken5():
 yield inner()


-def broken3():
+def not_broken6():
 return (yield from [])


-def broken4():
+def not_broken7():
 x = yield from []
 return x


-def broken5():
+def not_broken8():
 x = None

 def inner(ex):
@@ -76,13 +76,3 @@ class NotBroken9(object):
 def __await__(self):
 yield from function()
 return 42
-
-
-async def broken6():
-yield 1
-return foo()
-
-
-async def broken7():
-yield 1
-return [1, 2, 3]

@@ -208,17 +208,3 @@ _ = t"b {f"c" f"d {t"e" t"f"} g"} h"
 _ = f"b {t"abc" \
 t"def"} g"

-
-# Explicit concatenation with either operand being
-# a string literal that wraps across multiple lines (in parentheses)
-# reports diagnostic - no autofix.
-# See https://github.com/astral-sh/ruff/issues/19757
-_ = "abc" + (
-"def"
-"ghi"
-)
-
-_ = (
-"abc"
-"def"
-) + "ghi"

@@ -46,8 +46,7 @@ def func():


 def func():
-# SIM113
-# https://github.com/astral-sh/ruff/pull/21395
+# OK (index doesn't start at 0
 idx = 10
 for x in range(5):
 g(x, idx)

@@ -204,27 +204,3 @@ x = 1
 print(f"{x=}" or "bar") # SIM222
 (lambda: 1) or True # SIM222
 (i for i in range(1)) or "bar" # SIM222
-
-# https://github.com/astral-sh/ruff/issues/21136
-def get_items():
-return tuple(item for item in Item.objects.all()) or None # OK
-
-
-def get_items_list():
-return tuple([item for item in items]) or None # OK
-
-
-def get_items_set():
-return tuple({item for item in items}) or None # OK
-
-
-# https://github.com/astral-sh/ruff/issues/21473
-tuple("") or True # SIM222
-tuple(t"") or True # OK
-tuple(0) or True # OK
-tuple(1) or True # OK
-tuple(False) or True # OK
-tuple(None) or True # OK
-tuple(...) or True # OK
-tuple(lambda x: x) or True # OK
-tuple(x for x in range(0)) or True # OK

@@ -157,15 +157,3 @@ print(f"{1}{''}" and "bar")

 # https://github.com/astral-sh/ruff/issues/7127
 def f(a: "'' and 'b'"): ...
-
-
-# https://github.com/astral-sh/ruff/issues/21473
-tuple("") and False # SIM223
-tuple(t"") and False # OK
-tuple(0) and False # OK
-tuple(1) and False # OK
-tuple(False) and False # OK
-tuple(None) and False # OK
-tuple(...) and False # OK
-tuple(lambda x: x) and False # OK
-tuple(x for x in range(0)) and False # OK

@@ -371,61 +371,6 @@ class Foo:
 """
 return

-# DOC102 - Test case from issue #20959: comma-separated parameters
-def leq(x: object, y: object) -> bool:
-"""Compare two objects for loose equality.
-
-Parameters
-----------
-x1, x2 : object
-Objects.
-
-Returns
--------
-bool
-Whether the objects are identical or equal.
-"""
-return x is y or x == y
-
-
-# OK - comma-separated parameters that match function signature
-def compare_values(x1: int, x2: int) -> bool:
-"""Compare two integer values.
-
-Parameters
-----------
-x1, x2 : int
-Values to compare.
-
-Returns
--------
-bool
-True if values are equal.
-"""
-return x1 == x2
-
-
-# DOC102 - mixed comma-separated and regular parameters
-def process_data(data, x1: str, x2: str) -> str:
-"""Process data with multiple string parameters.
-
-Parameters
-----------
-data : list
-Input data to process.
-x1, x2 : str
-String parameters for processing.
-extra_param : str
-Extra parameter not in signature.
-
-Returns
--------
-str
-Processed result.
-"""
-return f"{x1}{x2}{len(data)}"
-
-
 # OK
 def baz(x: int) -> int:
 """
@@ -444,21 +389,3 @@ def baz(x: int) -> int:
 int
 """
 return x
-
-
-# OK - comma-separated parameters without type annotations
-def add_numbers(a, b):
-"""
-Adds two numbers and returns the result.
-
-Parameters
-----------
-a, b
-The numbers to add.
-
-Returns
--------
-int
-The sum of the two numbers.
-"""
-return a + b

@@ -83,37 +83,6 @@ def calculate_speed(distance: float, time: float) -> float:
        raise


# DOC502 regression for Sphinx directive after Raises (issue #18959)
def foo():
    """First line.

    Raises:
        ValueError:
            some text

    .. versionadded:: 0.7.0
        The ``init_kwargs`` argument.
    """
    raise ValueError


# DOC502 regression for following section with colons
def example_with_following_section():
    """Summary.

    Returns:
        str: The resulting expression.

    Raises:
        ValueError: If the unit is not valid.

    Relation to `time_range_lookup`:
    - Handles the "start of" modifier.
    - Example: "start of month" → `DATETRUNC()`.
    """
    raise ValueError


# This should NOT trigger DOC502 because OSError is explicitly re-raised
def f():
    """Do nothing.

@@ -117,33 +117,3 @@ def calculate_speed(distance: float, time: float) -> float:
        except TypeError:
            print("Not a number? Shame on you!")
            raise


# DOC502 regression for Sphinx directive after Raises (issue #18959)
def foo():
    """First line.

    Raises
    ------
    ValueError
        some text

    .. versionadded:: 0.7.0
        The ``init_kwargs`` argument.
    """
    raise ValueError

# Make sure we don't bail out on a Sphinx directive in the description of one
# of the exceptions
def foo():
    """First line.

    Raises
    ------
    ValueError
        some text
        .. math:: e^{xception}
    ZeroDivisionError
        Will not be raised, DOC502
    """
    raise ValueError
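The DOC502 regressions above check two things: an exception listed under Raises is still counted as documented when a Sphinx directive follows it, and an exception that is never raised is still reported. A sketch with hypothetical function names, not taken from the fixture: the first function documents only what it raises, the second documents one exception too many.

def convert(unit: str) -> str:
    """Convert a unit name.

    Raises
    ------
    ValueError
        If the unit is not valid.
    """
    if unit not in ("s", "ms"):
        raise ValueError(f"invalid unit: {unit}")  # documented and raised: OK
    return unit


def convert_overdocumented(unit: str) -> str:
    """Convert a unit name.

    Raises
    ------
    ValueError
        If the unit is not valid.
    ZeroDivisionError
        Never actually raised here, so a DOC502-style finding is expected.
    """
    if unit not in ("s", "ms"):
        raise ValueError(f"invalid unit: {unit}")
    return unit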
@@ -218,26 +218,3 @@ def should_not_fail(payload, Args):
    Args:
        The other arguments.
    """


# Test cases for Unpack[TypedDict] kwargs
from typing import TypedDict
from typing_extensions import Unpack

class User(TypedDict):
    id: int
    name: str

def function_with_unpack_args_should_not_fail(query: str, **kwargs: Unpack[User]):
    """Function with Unpack kwargs.

    Args:
        query: some arg
    """

def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]):
    """Function with Unpack kwargs but missing query arg documentation.

    Args:
        **kwargs: keyword arguments
    """
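The two Unpack[User] cases above differ only in whether `query` is documented. As a sketch (hypothetical names, mirroring the fixture's reliance on the third-party `typing_extensions` package), the shape that documents both the named parameter and the `**kwargs` entry would presumably satisfy the check:

from typing import TypedDict

from typing_extensions import Unpack


class UserSketch(TypedDict):
    id: int
    name: str


def unpack_kwargs_fully_documented(query: str, **kwargs: Unpack[UserSketch]):
    """Sketch with both the positional argument and **kwargs documented.

    Args:
        query: some arg
        **kwargs: keyword arguments matching the ``UserSketch`` TypedDict
    """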
@@ -17,24 +17,3 @@ def _():

    # Valid yield scope
    yield 3


# await is valid in any generator, sync or async
(await cor async for cor in f()) # ok
(await cor for cor in f()) # ok

# but not in comprehensions
[await cor async for cor in f()] # F704
{await cor async for cor in f()} # F704
{await cor: 1 async for cor in f()} # F704
[await cor for cor in f()] # F704
{await cor for cor in f()} # F704
{await cor: 1 for cor in f()} # F704

# or in the iterator of an async generator, which is evaluated in the parent
# scope
(cor async for cor in await f()) # F704
(await cor async for cor in [await c for c in f()]) # F704

# this is also okay because the comprehension is within the generator scope
([await c for c in cor] async for cor in f()) # ok
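All of the F704 hits above sit at module level, where `await` has no enclosing coroutine. One way to make such comprehensions legal, assuming the surrounding code can be made asynchronous (an illustration only, not a fix suggested by the rule), is to move them into an `async def`; the names `f` and `cor` below are stand-ins chosen to mirror the fixture:

import asyncio


async def f():
    # An async iterator of awaitables, standing in for the fixture's `f()`.
    for i in range(3):
        yield asyncio.sleep(0, result=i)


async def main():
    # Inside an async function, `await` is allowed in list/set/dict comprehensions.
    return [await cor async for cor in f()]


print(asyncio.run(main()))  # [0, 1, 2]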
@@ -30,23 +30,3 @@ for a, b in d_tuple:
    pass
for a, b in d_tuple_annotated:
    pass

# Empty dict cases
empty_dict = {}
empty_dict["x"] = 1
for k, v in empty_dict:
    pass

empty_dict_annotated_tuple_keys: dict[tuple[int, str], bool] = {}
for k, v in empty_dict_annotated_tuple_keys:
    pass

empty_dict_unannotated = {}
empty_dict_unannotated[("x", "y")] = True
for k, v in empty_dict_unannotated:
    pass

empty_dict_annotated_str_keys: dict[str, int] = {}
empty_dict_annotated_str_keys["x"] = 1
for k, v in empty_dict_annotated_str_keys:
    pass
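The empty-dict cases above come down to the fact that iterating a dict yields its keys, so `for k, v in some_dict:` unpacks each key rather than a key/value pair; that only works when the keys happen to be 2-tuples. A short illustration, independent of the fixture:

prices: dict[str, int] = {"apple": 3, "pear": 2}

# Iterating the dict directly yields keys; unpacking the 5-character key fails.
try:
    for k, v in prices:
        pass
except ValueError as exc:
    print(exc)  # too many values to unpack (expected 2)

# .items() yields (key, value) pairs, which is almost always what was intended.
for k, v in prices.items():
    print(k, v)

# Unpacking the keys themselves is only well-defined when they are 2-tuples,
# as in the fixture's `d_tuple` / `empty_dict_unannotated[("x", "y")]` cases.
coords: dict[tuple[int, str], bool] = {(1, "a"): True}
for k, v in coords:
    print(k, v)  # 1 a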
@@ -129,26 +129,3 @@ def generator_with_lambda():
    yield 1
    func = lambda x: x # Just a regular lambda
    yield 2

# See: https://github.com/astral-sh/ruff/issues/21162
def foo():
    def g():
        yield 1
    raise StopIteration # Should not trigger


def foo():
    def g():
        raise StopIteration # Should not trigger
    yield 1

# https://github.com/astral-sh/ruff/pull/21177#pullrequestreview-3430209718
def foo():
    yield 1
    class C:
        raise StopIteration # Should trigger
    yield C

# https://github.com/astral-sh/ruff/pull/21177#discussion_r2539702728
def foo():
    raise StopIteration((yield 1)) # Should trigger
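The Should trigger comments above line up with PEP 479: a `StopIteration` that escapes a generator body is converted into a `RuntimeError`, so the conventional way to end a generator early is a bare `return`. A small sketch, independent of the fixture:

def take_until_negative(values):
    for value in values:
        if value < 0:
            # `return` ends the generator cleanly; `raise StopIteration` here
            # would surface to the consumer as a RuntimeError under PEP 479.
            return
        yield value


print(list(take_until_negative([1, 2, -1, 3])))  # [1, 2]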
@@ -2,40 +2,15 @@ from abc import ABC, abstractmethod
from contextlib import suppress


class MyError(Exception):
    ...


class MySubError(MyError):
    ...


class MyValueError(ValueError):
    ...


class MyUserWarning(UserWarning):
    ...


# Violation test cases with builtin errors: PLW0133


# Test case 1: Useless exception statement
def func():
    AssertionError("This is an assertion error") # PLW0133
    MyError("This is a custom error") # PLW0133
    MySubError("This is a custom error") # PLW0133
    MyValueError("This is a custom value error") # PLW0133


# Test case 2: Useless exception statement in try-except block
def func():
    try:
        Exception("This is an exception") # PLW0133
        MyError("This is an exception") # PLW0133
        MySubError("This is an exception") # PLW0133
        MyValueError("This is an exception") # PLW0133
    except Exception as err:
        pass

@@ -44,9 +19,6 @@ def func():
def func():
    if True:
        RuntimeError("This is an exception") # PLW0133
        MyError("This is an exception") # PLW0133
        MySubError("This is an exception") # PLW0133
        MyValueError("This is an exception") # PLW0133


# Test case 4: Useless exception statement in class

@@ -54,18 +26,12 @@ def func():
class Class:
    def __init__(self):
        TypeError("This is an exception") # PLW0133
        MyError("This is an exception") # PLW0133
        MySubError("This is an exception") # PLW0133
        MyValueError("This is an exception") # PLW0133


# Test case 5: Useless exception statement in function
def func():
    def inner():
        IndexError("This is an exception") # PLW0133
        MyError("This is an exception") # PLW0133
        MySubError("This is an exception") # PLW0133
        MyValueError("This is an exception") # PLW0133

    inner()

@@ -74,9 +40,6 @@ def func():
def func():
    while True:
        KeyError("This is an exception") # PLW0133
        MyError("This is an exception") # PLW0133
        MySubError("This is an exception") # PLW0133
        MyValueError("This is an exception") # PLW0133


# Test case 7: Useless exception statement in abstract class

@@ -85,58 +48,27 @@ def func():
    @abstractmethod
    def method(self):
        NotImplementedError("This is an exception") # PLW0133
        MyError("This is an exception") # PLW0133
        MySubError("This is an exception") # PLW0133
        MyValueError("This is an exception") # PLW0133


# Test case 8: Useless exception statement inside context manager
def func():
    with suppress(Exception):
        AttributeError("This is an exception") # PLW0133
        MyError("This is an exception") # PLW0133
        MySubError("This is an exception") # PLW0133
        MyValueError("This is an exception") # PLW0133


# Test case 9: Useless exception statement in parentheses
def func():
    (RuntimeError("This is an exception")) # PLW0133
    (MyError("This is an exception")) # PLW0133
    (MySubError("This is an exception")) # PLW0133
    (MyValueError("This is an exception")) # PLW0133


# Test case 10: Useless exception statement in continuation
def func():
    x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133
    x = 1; (MyError("This is an exception")); y = 2 # PLW0133
    x = 1; (MySubError("This is an exception")); y = 2 # PLW0133
    x = 1; (MyValueError("This is an exception")); y = 2 # PLW0133


# Test case 11: Useless warning statement
def func():
    UserWarning("This is a user warning") # PLW0133
    MyUserWarning("This is a custom user warning") # PLW0133


# Test case 12: Useless exception statement at module level
import builtins

builtins.TypeError("still an exception even though it's an Attribute") # PLW0133

PythonFinalizationError("Added in Python 3.13") # PLW0133

MyError("This is an exception") # PLW0133

MySubError("This is an exception") # PLW0133

MyValueError("This is an exception") # PLW0133

UserWarning("This is a user warning") # PLW0133

MyUserWarning("This is a custom user warning") # PLW0133


# Non-violation test cases: PLW0133

@@ -187,3 +119,10 @@ def func():
def func():
    with suppress(AttributeError):
        raise AttributeError("This is an exception") # OK
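Every PLW0133 hit above builds an exception or warning object and immediately discards it, which has no effect at runtime. A sketch of the two presumably intended forms, with hypothetical messages that are not part of the fixture: raise the exception, or pass the warning to `warnings.warn`:

import warnings


def validate(value: int) -> int:
    if value < 0:
        # Raising (rather than merely constructing) the exception has an effect.
        raise ValueError("value must be non-negative")
    if value == 0:
        # A warning object only does something when handed to warnings.warn().
        warnings.warn(UserWarning("value of zero is suspicious"), stacklevel=2)
    return value


print(validate(3))  # 3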
@@ -1,5 +0,0 @@
from .builtins import next
from ..builtins import str
from ...builtins import int
from .builtins import next as _next
@@ -132,6 +132,7 @@ async def c():
# Non-errors
###

# False-negative: RustPython doesn't parse the `\N{snowman}`.
"\N{snowman} {}".format(a)

"{".format(a)

@@ -275,6 +276,3 @@ if __name__ == "__main__":
    number = 0
    string = "{}".format(number := number + 1)
    print(string)

# Unicode escape
"\N{angle}AOB = {angle}°".format(angle=180)
@@ -125,19 +125,3 @@ with open(*filename, mode="r") as f:
# `buffering`.
with open(*filename, file="file.txt", mode="r") as f:
    x = f.read()

# FURB101
with open("file.txt", encoding="utf-8") as f:
    contents: str = f.read()

# FURB101 but no fix because it would remove the assignment to `x`
with open("file.txt", encoding="utf-8") as f:
    contents, x = f.read(), 2

# FURB101 but no fix because it would remove the `process_contents` call
with open("file.txt", encoding="utf-8") as f:
    contents = process_contents(f.read())

with open("file1.txt", encoding="utf-8") as f:
    contents: str = process_contents(f.read())
@@ -1,8 +0,0 @@
from pathlib import Path

with Path("file.txt").open() as f:
    contents = f.read()

with Path("file.txt").open("r") as f:
    contents = f.read()
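FURB101 (read-whole-file) targets the open-then-read pattern in the two fixtures above, and the replacement it points toward is pathlib's `read_text`/`read_bytes`. A before/after sketch with a placeholder file name:

from pathlib import Path

Path("file.txt").write_text("hello\n", encoding="utf-8")

# Flagged shape: open a file only to read it in full.
with open("file.txt", encoding="utf-8") as f:
    contents = f.read()

# Replacement shape the rule points toward: a single pathlib call.
contents = Path("file.txt").read_text(encoding="utf-8")
print(contents)  # hello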
Some files were not shown because too many files have changed in this diff.