Mirror of https://github.com/astral-sh/ruff

Commit 3f36780904: Merge branch 'main' into SIM300-CONSTANT-CASE-false-positives
@@ -48,8 +48,8 @@ jobs:
- "!crates/ruff_dev/**"
- "!crates/ruff_shrinking/**"
- scripts/*
- .github/workflows/ci.yaml
- python/**
- .github/workflows/ci.yaml

formatter:
- Cargo.toml

@@ -68,7 +68,7 @@ jobs:
- .github/workflows/ci.yaml

code:
- "*/**"
- "**/*"
- "!**/*.md"
- "!docs/**"
- "!assets/**"

@@ -86,7 +86,7 @@ jobs:
name: "cargo clippy"
runs-on: ubuntu-latest
needs: determine_changes
if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main'
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"

@@ -102,7 +102,7 @@ jobs:
cargo-test-linux:
runs-on: ubuntu-latest
needs: determine_changes
if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main'
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
name: "cargo test (linux)"
steps:
- uses: actions/checkout@v4

@@ -128,7 +128,7 @@ jobs:
cargo-test-windows:
runs-on: windows-latest
needs: determine_changes
if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main'
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
name: "cargo test (windows)"
steps:
- uses: actions/checkout@v4

@@ -147,7 +147,7 @@ jobs:
cargo-test-wasm:
runs-on: ubuntu-latest
needs: determine_changes
if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main'
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
name: "cargo test (wasm)"
steps:
- uses: actions/checkout@v4

@@ -168,7 +168,7 @@ jobs:
cargo-fuzz:
runs-on: ubuntu-latest
needs: determine_changes
if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main'
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
name: "cargo fuzz"
steps:
- uses: actions/checkout@v4

@@ -187,7 +187,7 @@ jobs:
name: "test scripts"
runs-on: ubuntu-latest
needs: determine_changes
if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main'
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: actions/checkout@v4
- name: "Install Rust toolchain"

@@ -215,7 +215,7 @@ jobs:
}}
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}

@@ -226,7 +226,7 @@ jobs:
name: ruff
path: target/debug

- uses: dawidd6/action-download-artifact@v2
- uses: dawidd6/action-download-artifact@v3
name: Download baseline Ruff binary
with:
name: ruff

@@ -321,7 +321,7 @@ jobs:
name: "cargo udeps"
runs-on: ubuntu-latest
needs: determine_changes
if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main'
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: actions/checkout@v4
- name: "Install nightly Rust toolchain"

@@ -338,7 +338,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64

@@ -362,7 +362,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Install Rust toolchain"

@@ -392,7 +392,7 @@ jobs:
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@v0.8.0

@@ -444,7 +444,7 @@ jobs:
needs:
- cargo-test-linux
- determine_changes
if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main'
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- uses: extractions/setup-just@v1
env:

@@ -455,7 +455,7 @@ jobs:
with:
repository: "astral-sh/ruff-lsp"

- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}

@@ -483,7 +483,7 @@ jobs:
benchmarks:
runs-on: ubuntu-latest
needs: determine_changes
if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main'
if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }}
steps:
- name: "Checkout Branch"
uses: actions/checkout@v4

@@ -502,7 +502,7 @@ jobs:
run: cargo codspeed build --features codspeed -p ruff_benchmark

- name: "Run benchmarks"
uses: CodSpeedHQ/action@v1
uses: CodSpeedHQ/action@v2
with:
run: cargo codspeed run
token: ${{ secrets.CODSPEED_TOKEN }}

@@ -20,7 +20,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.ref }}
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@v0.8.0

@@ -20,7 +20,7 @@ jobs:
runs-on: macos-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64

@@ -43,7 +43,7 @@ jobs:
runs-on: macos-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64

@@ -69,7 +69,7 @@ jobs:
target: [x64, x86]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: ${{ matrix.target }}

@@ -97,7 +97,7 @@ jobs:
target: [x86_64, i686]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64

@@ -124,7 +124,7 @@ jobs:
target: [aarch64, armv7, s390x, ppc64le, ppc64]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Build wheels"

@@ -161,7 +161,7 @@ jobs:
- i686-unknown-linux-musl
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64

@@ -197,7 +197,7 @@ jobs:
arch: armv7
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Build wheels"

@@ -237,7 +237,7 @@ jobs:
- uses: actions/download-artifact@v3
with:
name: wheels
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
- name: "Publish to PyPi"
env:
TWINE_USERNAME: __token__

@@ -17,7 +17,7 @@ jobs:
comment:
runs-on: ubuntu-latest
steps:
- uses: dawidd6/action-download-artifact@v2
- uses: dawidd6/action-download-artifact@v3
name: Download pull request number
with:
name: pr-number

@@ -32,7 +32,7 @@ jobs:
echo "pr-number=$(<pr-number)" >> $GITHUB_OUTPUT
fi

- uses: dawidd6/action-download-artifact@v2
- uses: dawidd6/action-download-artifact@v3
name: "Download ecosystem results"
id: download-ecosystem-result
if: steps.pr-number.outputs.pr-number

@@ -36,7 +36,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"

@@ -63,7 +63,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64

@@ -86,7 +86,7 @@ jobs:
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-x86_64-apple-darwin.tar.gz
ARCHIVE_FILE=ruff-${{ inputs.tag }}-x86_64-apple-darwin.tar.gz
tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"

@@ -103,7 +103,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64

@@ -125,7 +125,7 @@ jobs:
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-aarch64-apple-darwin.tar.gz
ARCHIVE_FILE=ruff-${{ inputs.tag }}-aarch64-apple-darwin.tar.gz
tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"

@@ -151,7 +151,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: ${{ matrix.platform.arch }}

@@ -177,7 +177,7 @@ jobs:
- name: "Archive binary"
shell: bash
run: |
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.zip
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.zip
7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe
sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"

@@ -199,7 +199,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64

@@ -224,7 +224,7 @@ jobs:
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"

@@ -258,7 +258,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"

@@ -291,7 +291,7 @@ jobs:
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"

@@ -313,7 +313,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
architecture: x64

@@ -332,10 +332,10 @@ jobs:
image: alpine:latest
options: -v ${{ github.workspace }}:/io -w /io
run: |
apk add py3-pip
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall
ruff --help
python -m ruff --help
apk add python3
python -m venv .venv
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/ruff check --help
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:

@@ -343,7 +343,7 @@ jobs:
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"

@@ -369,7 +369,7 @@ jobs:
- uses: actions/checkout@v4
with:
ref: ${{ inputs.sha }}
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: ${{ env.PYTHON_VERSION }}
- name: "Prep README.md"

@@ -388,10 +388,11 @@ jobs:
distro: alpine_latest
githubToken: ${{ github.token }}
install: |
apk add py3-pip
apk add python3
run: |
pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
ruff check --help
python -m venv .venv
.venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
.venv/bin/ruff check --help
- name: "Upload wheels"
uses: actions/upload-artifact@v3
with:

@@ -399,7 +400,7 @@ jobs:
path: dist
- name: "Archive binary"
run: |
ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz
ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz
tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff
shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
- name: "Upload binary"

@@ -1,5 +1,42 @@
# Breaking Changes

## 0.1.9

### `site-packages` is now excluded by default ([#5513](https://github.com/astral-sh/ruff/pull/5513))

Ruff maintains a list of default exclusions, which now consists of the following patterns:

- `.bzr`
- `.direnv`
- `.eggs`
- `.git-rewrite`
- `.git`
- `.hg`
- `.ipynb_checkpoints`
- `.mypy_cache`
- `.nox`
- `.pants.d`
- `.pyenv`
- `.pytest_cache`
- `.pytype`
- `.ruff_cache`
- `.svn`
- `.tox`
- `.venv`
- `.vscode`
- `__pypackages__`
- `_build`
- `buck-out`
- `build`
- `dist`
- `node_modules`
- `site-packages`
- `venv`

Previously, the `site-packages` directory was not excluded by default. While `site-packages` tends
to be excluded anyway by virtue of the `.venv` exclusion, this may not be the case when using Ruff
from VS Code outside a virtual environment.
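
If a project needs a different set of ignored directories (or wants `site-packages` back), the exclusion list can be adjusted in Ruff's configuration. A minimal sketch, assuming a `pyproject.toml`-based setup; the directory names are illustrative:

```toml
[tool.ruff]
# Add project-specific directories on top of Ruff's default exclusions.
extend-exclude = ["generated", "vendor"]

# Or set `exclude` yourself to replace the default list entirely,
# e.g. if `site-packages` should be linted after all.
# exclude = [".git", ".venv", "build", "dist"]
```
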
## 0.1.0

### The deprecated `format` setting has been removed

CHANGELOG.md (151 changed lines)

@@ -1,5 +1,156 @@
# Changelog

## 0.1.8

This release includes opt-in support for formatting Python snippets within
docstrings via the `docstring-code-format` setting.
[Check out the blog post](https://astral.sh/blog/ruff-v0.1.8) for more details!
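
A minimal sketch of opting in, assuming the configuration lives in `pyproject.toml`:

```toml
[tool.ruff.format]
# Format code snippets inside docstrings.
docstring-code-format = true
# Optionally let the snippet line width adapt to the docstring's indentation.
docstring-code-line-length = "dynamic"
```
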
### Preview features

- Add `"preserve"` quote-style to mimic Black's skip-string-normalization ([#8822](https://github.com/astral-sh/ruff/pull/8822))
- Implement `prefer_splitting_right_hand_side_of_assignments` preview style ([#8943](https://github.com/astral-sh/ruff/pull/8943))
- \[`pycodestyle`\] Add fix for `unexpected-spaces-around-keyword-parameter-equals` ([#9072](https://github.com/astral-sh/ruff/pull/9072))
- \[`pycodestyle`\] Add fix for comment-related whitespace rules ([#9075](https://github.com/astral-sh/ruff/pull/9075))
- \[`pycodestyle`\] Allow `sys.path` modifications between imports ([#9047](https://github.com/astral-sh/ruff/pull/9047))
- \[`refurb`\] Implement `hashlib-digest-hex` (`FURB181`) ([#9077](https://github.com/astral-sh/ruff/pull/9077))
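
For example, the `"preserve"` quote style from the first bullet above could be tried with something along these lines (a sketch; the style is a preview feature at this point):

```toml
[tool.ruff.format]
# Keep string quotes exactly as written, akin to Black's
# --skip-string-normalization.
quote-style = "preserve"
preview = true
```
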
### Rule changes

- Allow `flake8-type-checking` rules to automatically quote runtime-evaluated references ([#6001](https://github.com/astral-sh/ruff/pull/6001))
- Allow transparent cell magics in Jupyter Notebooks ([#8911](https://github.com/astral-sh/ruff/pull/8911))
- \[`flake8-annotations`\] Avoid `ANN2xx` fixes for abstract methods with empty bodies ([#9034](https://github.com/astral-sh/ruff/pull/9034))
- \[`flake8-self`\] Ignore underscore references in type annotations ([#9036](https://github.com/astral-sh/ruff/pull/9036))
- \[`pep8-naming`\] Allow class names when `apps.get_model` is a non-string ([#9065](https://github.com/astral-sh/ruff/pull/9065))
- \[`pycodestyle`\] Allow `matplotlib.use` calls to intersperse imports ([#9094](https://github.com/astral-sh/ruff/pull/9094))
- \[`pyflakes`\] Support fixing unused assignments in tuples by renaming variables (`F841`) ([#9107](https://github.com/astral-sh/ruff/pull/9107))
- \[`pylint`\] Add fix for `subprocess-run-without-check` (`PLW1510`) ([#6708](https://github.com/astral-sh/ruff/pull/6708))

### Formatter

- Add `docstring-code-format` knob to enable docstring snippet formatting ([#8854](https://github.com/astral-sh/ruff/pull/8854))
- Use double quotes for all docstrings, including single-quoted docstrings ([#9020](https://github.com/astral-sh/ruff/pull/9020))
- Implement "dynamic" line width mode for docstring code formatting ([#9098](https://github.com/astral-sh/ruff/pull/9098))
- Support reformatting Markdown code blocks ([#9030](https://github.com/astral-sh/ruff/pull/9030))
- add support for formatting reStructuredText code snippets ([#9003](https://github.com/astral-sh/ruff/pull/9003))
- Avoid trailing comma for single-argument with positional separator ([#9076](https://github.com/astral-sh/ruff/pull/9076))
- Fix handling of trailing target comment ([#9051](https://github.com/astral-sh/ruff/pull/9051))

### CLI

- Hide unsafe fix suggestions when explicitly disabled ([#9095](https://github.com/astral-sh/ruff/pull/9095))
- Add SARIF support to `--output-format` ([#9078](https://github.com/astral-sh/ruff/pull/9078))

### Bug fixes

- Apply unnecessary index rule prior to enumerate rewrite ([#9012](https://github.com/astral-sh/ruff/pull/9012))
- \[`flake8-err-msg`\] Allow `EM` fixes even if `msg` variable is defined ([#9059](https://github.com/astral-sh/ruff/pull/9059))
- \[`flake8-pie`\] Prevent keyword arguments duplication ([#8450](https://github.com/astral-sh/ruff/pull/8450))
- \[`flake8-pie`\] Respect trailing comma in `unnecessary-dict-kwargs` (`PIE804`) ([#9015](https://github.com/astral-sh/ruff/pull/9015))
- \[`flake8-raise`\] Avoid removing parentheses on ctypes.WinError ([#9027](https://github.com/astral-sh/ruff/pull/9027))
- \[`isort`\] Avoid invalid combination of `force-sort-within-types` and `lines-between-types` ([#9041](https://github.com/astral-sh/ruff/pull/9041))
- \[`isort`\] Ensure that from-style imports are always ordered first in `__future__` ([#9039](https://github.com/astral-sh/ruff/pull/9039))
- \[`pycodestyle`\] Allow tab indentation before keyword ([#9099](https://github.com/astral-sh/ruff/pull/9099))
- \[`pylint`\] Ignore `@overrides` and `@overloads` for `too-many-positional` ([#9000](https://github.com/astral-sh/ruff/pull/9000))
- \[`pyupgrade`\] Enable `printf-string-formatting` fix with comments on right-hand side ([#9037](https://github.com/astral-sh/ruff/pull/9037))
- \[`refurb`\] Make `math-constant` (`FURB152`) rule more targeted ([#9054](https://github.com/astral-sh/ruff/pull/9054))
- \[`refurb`\] Support floating-point base in `redundant-log-base` (`FURB163`) ([#9100](https://github.com/astral-sh/ruff/pull/9100))
- \[`ruff`\] Detect `unused-asyncio-dangling-task` (`RUF006`) on unused assignments ([#9060](https://github.com/astral-sh/ruff/pull/9060))

## 0.1.7

### Preview features

- Implement multiline dictionary and list hugging for preview style ([#8293](https://github.com/astral-sh/ruff/pull/8293))
- Implement the `fix_power_op_line_length` preview style ([#8947](https://github.com/astral-sh/ruff/pull/8947))
- Use Python version to determine typing rewrite safety ([#8919](https://github.com/astral-sh/ruff/pull/8919))
- \[`flake8-annotations`\] Enable auto-return-type involving `Optional` and `Union` annotations ([#8885](https://github.com/astral-sh/ruff/pull/8885))
- \[`flake8-bandit`\] Implement `django-raw-sql` (`S611`) ([#8651](https://github.com/astral-sh/ruff/pull/8651))
- \[`flake8-bandit`\] Implement `tarfile-unsafe-members` (`S202`) ([#8829](https://github.com/astral-sh/ruff/pull/8829))
- \[`flake8-pyi`\] Implement fix for `unnecessary-literal-union` (`PYI030`) ([#7934](https://github.com/astral-sh/ruff/pull/7934))
- \[`flake8-simplify`\] Extend `dict-get-with-none-default` (`SIM910`) to non-literals ([#8762](https://github.com/astral-sh/ruff/pull/8762))
- \[`pylint`\] - add `unnecessary-list-index-lookup` (`PLR1736`) + autofix ([#7999](https://github.com/astral-sh/ruff/pull/7999))
- \[`pylint`\] - implement R0202 and R0203 with autofixes ([#8335](https://github.com/astral-sh/ruff/pull/8335))
- \[`pylint`\] Implement `repeated-keyword` (`PLe1132`) ([#8706](https://github.com/astral-sh/ruff/pull/8706))
- \[`pylint`\] Implement `too-many-positional` (`PLR0917`) ([#8995](https://github.com/astral-sh/ruff/pull/8995))
- \[`pylint`\] Implement `unnecessary-dict-index-lookup` (`PLR1733`) ([#8036](https://github.com/astral-sh/ruff/pull/8036))
- \[`refurb`\] Implement `redundant-log-base` (`FURB163`) ([#8842](https://github.com/astral-sh/ruff/pull/8842))

### Rule changes

- \[`flake8-boolean-trap`\] Allow booleans in `@override` methods ([#8882](https://github.com/astral-sh/ruff/pull/8882))
- \[`flake8-bugbear`\] Avoid `B015`,`B018` for last expression in a cell ([#8815](https://github.com/astral-sh/ruff/pull/8815))
- \[`flake8-pie`\] Allow ellipses for enum values in stub files ([#8825](https://github.com/astral-sh/ruff/pull/8825))
- \[`flake8-pyi`\] Check PEP 695 type aliases for `snake-case-type-alias` and `t-suffixed-type-alias` ([#8966](https://github.com/astral-sh/ruff/pull/8966))
- \[`flake8-pyi`\] Check for kwarg and vararg `NoReturn` type annotations ([#8948](https://github.com/astral-sh/ruff/pull/8948))
- \[`flake8-simplify`\] Omit select context managers from `SIM117` ([#8801](https://github.com/astral-sh/ruff/pull/8801))
- \[`pep8-naming`\] Allow Django model loads in `non-lowercase-variable-in-function` (`N806`) ([#8917](https://github.com/astral-sh/ruff/pull/8917))
- \[`pycodestyle`\] Avoid `E703` for last expression in a cell ([#8821](https://github.com/astral-sh/ruff/pull/8821))
- \[`pycodestyle`\] Update `E402` to work at cell level for notebooks ([#8872](https://github.com/astral-sh/ruff/pull/8872))
- \[`pydocstyle`\] Avoid `D100` for Jupyter Notebooks ([#8816](https://github.com/astral-sh/ruff/pull/8816))
- \[`pylint`\] Implement fix for `unspecified-encoding` (`PLW1514`) ([#8928](https://github.com/astral-sh/ruff/pull/8928))

### Formatter

- Avoid unstable formatting in ellipsis-only body with trailing comment ([#8984](https://github.com/astral-sh/ruff/pull/8984))
- Inline trailing comments for type alias similar to assignments ([#8941](https://github.com/astral-sh/ruff/pull/8941))
- Insert trailing comma when function breaks with single argument ([#8921](https://github.com/astral-sh/ruff/pull/8921))

### CLI

- Update `ruff check` and `ruff format` to default to the current directory ([#8791](https://github.com/astral-sh/ruff/pull/8791))
- Stop at the first resolved parent configuration ([#8864](https://github.com/astral-sh/ruff/pull/8864))

### Configuration

- \[`pylint`\] Default `max-positional-args` to `max-args` ([#8998](https://github.com/astral-sh/ruff/pull/8998))
- \[`pylint`\] Add `allow-dunder-method-names` setting for `bad-dunder-method-name` (`PLW3201`) ([#8812](https://github.com/astral-sh/ruff/pull/8812))
- \[`isort`\] Add support for `from-first` setting ([#8663](https://github.com/astral-sh/ruff/pull/8663))
- \[`isort`\] Add support for `length-sort` settings ([#8841](https://github.com/astral-sh/ruff/pull/8841))
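
A sketch of how these settings might be combined in `pyproject.toml` (section names follow the 0.1.x layout; the values are only illustrative):

```toml
[tool.ruff.pylint]
# PLR0917: cap positional arguments independently of max-args.
max-positional-args = 5
# PLW3201: accept additional dunder method names.
allow-dunder-method-names = ["__tablename__", "__table_args__"]

[tool.ruff.isort]
# Place `from ... import ...` blocks before plain `import` statements.
from-first = true
# Sort imports by length within each section.
length-sort = true
```
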
### Bug fixes

- Add support for `@functools.singledispatch` ([#8934](https://github.com/astral-sh/ruff/pull/8934))
- Avoid off-by-one error in stripping noqa following multi-byte char ([#8979](https://github.com/astral-sh/ruff/pull/8979))
- Avoid off-by-one error in with-item named expressions ([#8915](https://github.com/astral-sh/ruff/pull/8915))
- Avoid syntax error via invalid ur string prefix ([#8971](https://github.com/astral-sh/ruff/pull/8971))
- Avoid underflow in `get_model` matching ([#8965](https://github.com/astral-sh/ruff/pull/8965))
- Avoid unnecessary index diagnostics when value is modified ([#8970](https://github.com/astral-sh/ruff/pull/8970))
- Convert over-indentation rule to use number of characters ([#8983](https://github.com/astral-sh/ruff/pull/8983))
- Detect implicit returns in auto-return-types ([#8952](https://github.com/astral-sh/ruff/pull/8952))
- Fix start >= end error in over-indentation ([#8982](https://github.com/astral-sh/ruff/pull/8982))
- Ignore `@overload` and `@override` methods for too-many-arguments checks ([#8954](https://github.com/astral-sh/ruff/pull/8954))
- Lexer start of line is false only for `Mode::Expression` ([#8880](https://github.com/astral-sh/ruff/pull/8880))
- Mark `pydantic_settings.BaseSettings` as having default copy semantics ([#8793](https://github.com/astral-sh/ruff/pull/8793))
- Respect dictionary unpacking in `NamedTuple` assignments ([#8810](https://github.com/astral-sh/ruff/pull/8810))
- Respect local subclasses in `flake8-type-checking` ([#8768](https://github.com/astral-sh/ruff/pull/8768))
- Support type alias statements in simple statement positions ([#8916](https://github.com/astral-sh/ruff/pull/8916))
- \[`flake8-annotations`\] Avoid filtering out un-representable types in return annotation ([#8881](https://github.com/astral-sh/ruff/pull/8881))
- \[`flake8-pie`\] Retain extra ellipses in protocols and abstract methods ([#8769](https://github.com/astral-sh/ruff/pull/8769))
- \[`flake8-pyi`\] Respect local enum subclasses in `simple-defaults` (`PYI052`) ([#8767](https://github.com/astral-sh/ruff/pull/8767))
- \[`flake8-trio`\] Use correct range for `TRIO115` fix ([#8933](https://github.com/astral-sh/ruff/pull/8933))
- \[`flake8-trio`\] Use full arguments range for zero-sleep-call ([#8936](https://github.com/astral-sh/ruff/pull/8936))
- \[`isort`\] fix: mark `__main__` as first-party import ([#8805](https://github.com/astral-sh/ruff/pull/8805))
- \[`pep8-naming`\] Avoid `N806` errors for type alias statements ([#8785](https://github.com/astral-sh/ruff/pull/8785))
- \[`perflint`\] Avoid `PERF101` if there's an append in loop body ([#8809](https://github.com/astral-sh/ruff/pull/8809))
- \[`pycodestyle`\] Allow space-before-colon after end-of-slice ([#8838](https://github.com/astral-sh/ruff/pull/8838))
- \[`pydocstyle`\] Avoid non-character breaks in `over-indentation` (`D208`) ([#8866](https://github.com/astral-sh/ruff/pull/8866))
- \[`pydocstyle`\] Ignore underlines when determining docstring logical lines ([#8929](https://github.com/astral-sh/ruff/pull/8929))
- \[`pylint`\] Extend `self-assigning-variable` to multi-target assignments ([#8839](https://github.com/astral-sh/ruff/pull/8839))
- \[`tryceratops`\] Avoid repeated triggers in nested `tryceratops` diagnostics ([#8772](https://github.com/astral-sh/ruff/pull/8772))

### Documentation

- Add advice for fixing RUF008 when mutability is not desired ([#8853](https://github.com/astral-sh/ruff/pull/8853))
- Added the command to run ruff using pkgx to the installation.md ([#8955](https://github.com/astral-sh/ruff/pull/8955))
- Document fix safety for flake8-comprehensions and some pyupgrade rules ([#8918](https://github.com/astral-sh/ruff/pull/8918))
- Fix doc formatting for zero-sleep-call ([#8937](https://github.com/astral-sh/ruff/pull/8937))
- Remove duplicate imports from os-stat documentation ([#8930](https://github.com/astral-sh/ruff/pull/8930))
- Replace generated reference to MkDocs ([#8806](https://github.com/astral-sh/ruff/pull/8806))
- Update Arch Linux package URL in installation.md ([#8802](https://github.com/astral-sh/ruff/pull/8802))
- \[`flake8-pyi`\] Fix error in `t-suffixed-type-alias` (`PYI043`) example ([#8963](https://github.com/astral-sh/ruff/pull/8963))
- \[`flake8-pyi`\] Improve motivation for `custom-type-var-return-type` (`PYI019`) ([#8766](https://github.com/astral-sh/ruff/pull/8766))

## 0.1.6

### Preview features
@@ -556,10 +556,10 @@ examples.

#### Linux

Install `perf` and build `ruff_benchmark` with the `release-debug` profile and then run it with perf
Install `perf` and build `ruff_benchmark` with the `profiling` profile and then run it with perf

```shell
cargo bench -p ruff_benchmark --no-run --profile=release-debug && perf record --call-graph dwarf -F 9999 cargo bench -p ruff_benchmark --profile=release-debug -- --profile-time=1
cargo bench -p ruff_benchmark --no-run --profile=profiling && perf record --call-graph dwarf -F 9999 cargo bench -p ruff_benchmark --profile=profiling -- --profile-time=1
```

You can also use the `ruff_dev` launcher to run `ruff check` multiple times on a repository to
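
The `profiling` profile referenced here is a Cargo profile defined in the workspace `Cargo.toml`; a typical definition looks roughly like the following sketch (release optimizations plus debug info so `perf` can resolve symbols), though the exact upstream settings may differ:

```toml
[profile.profiling]
inherits = "release"
debug = true
```
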
@@ -567,8 +567,8 @@ gather enough samples for a good flamegraph (change the 999, the sample rate, an
of checks, to your liking)

```shell
cargo build --bin ruff_dev --profile=release-debug
perf record -g -F 999 target/release-debug/ruff_dev repeat --repeat 30 --exit-zero --no-cache path/to/cpython > /dev/null
cargo build --bin ruff_dev --profile=profiling
perf record -g -F 999 target/profiling/ruff_dev repeat --repeat 30 --exit-zero --no-cache path/to/cpython > /dev/null
```

Then convert the recorded profile

@@ -598,7 +598,7 @@ cargo install cargo-instruments
Then run the profiler with

```shell
cargo instruments -t time --bench linter --profile release-debug -p ruff_benchmark -- --profile-time=1
cargo instruments -t time --bench linter --profile profiling -p ruff_benchmark -- --profile-time=1
```

- `-t`: Specifies what to profile. Useful options are `time` to profile the wall time and `alloc`
@ -16,14 +16,15 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
|
|||
|
||||
[[package]]
|
||||
name = "ahash"
|
||||
version = "0.8.3"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f"
|
||||
checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"getrandom",
|
||||
"once_cell",
|
||||
"version_check",
|
||||
"zerocopy",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -381,7 +382,7 @@ dependencies = [
|
|||
"heck",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -606,7 +607,7 @@ dependencies = [
|
|||
"proc-macro2",
|
||||
"quote",
|
||||
"strsim",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -617,7 +618,7 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5"
|
|||
dependencies = [
|
||||
"darling_core",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -790,14 +791,14 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "filetime"
|
||||
version = "0.2.22"
|
||||
version = "0.2.23"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0"
|
||||
checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"redox_syscall 0.3.5",
|
||||
"windows-sys 0.48.0",
|
||||
"redox_syscall 0.4.1",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -808,7 +809,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
|
|||
|
||||
[[package]]
|
||||
name = "flake8-to-ruff"
|
||||
version = "0.1.6"
|
||||
version = "0.1.8"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
|
|
@ -827,7 +828,7 @@ dependencies = [
|
|||
"serde_json",
|
||||
"strum",
|
||||
"strum_macros",
|
||||
"toml 0.7.8",
|
||||
"toml",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -848,18 +849,18 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
|
|||
|
||||
[[package]]
|
||||
name = "form_urlencoded"
|
||||
version = "1.2.0"
|
||||
version = "1.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652"
|
||||
checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456"
|
||||
dependencies = [
|
||||
"percent-encoding",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fs-err"
|
||||
version = "2.10.0"
|
||||
version = "2.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fb5fd9bcbe8b1087cbd395b51498c01bc997cef73e778a80b77a811af5e2d29f"
|
||||
checksum = "88a41f105fe1d5b6b34b2055e3dc59bb79b46b48b2040b9e6c7b4b5de097aa41"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
]
|
||||
|
|
@ -987,9 +988,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
|
|||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "0.4.0"
|
||||
version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c"
|
||||
checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6"
|
||||
dependencies = [
|
||||
"unicode-bidi",
|
||||
"unicode-normalization",
|
||||
|
|
@ -1122,15 +1123,15 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "is-macro"
|
||||
version = "0.3.0"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f4467ed1321b310c2625c5aa6c1b1ffc5de4d9e42668cf697a08fb033ee8265e"
|
||||
checksum = "bc74b7abae208af9314a406bd7dcc65091230b6e749c09e07a645885fecf34f9"
|
||||
dependencies = [
|
||||
"Inflector",
|
||||
"pmutil 0.6.1",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -1170,9 +1171,9 @@ checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38"
|
|||
|
||||
[[package]]
|
||||
name = "js-sys"
|
||||
version = "0.3.65"
|
||||
version = "0.3.66"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "54c0c35952f67de54bb584e9fd912b3023117cbafc0a77d8f3dee1fb5f572fe8"
|
||||
checksum = "cee9c64da59eae3b50095c18d3e74f8b73c0b86d2792824ff01bbce68ba229ca"
|
||||
dependencies = [
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
|
@ -1481,9 +1482,9 @@ checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
|
|||
|
||||
[[package]]
|
||||
name = "once_cell"
|
||||
version = "1.18.0"
|
||||
version = "1.19.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
|
||||
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
|
||||
|
||||
[[package]]
|
||||
name = "oorandom"
|
||||
|
|
@ -1622,9 +1623,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "percent-encoding"
|
||||
version = "2.3.0"
|
||||
version = "2.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94"
|
||||
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
|
||||
|
||||
[[package]]
|
||||
name = "petgraph"
|
||||
|
|
@ -1708,7 +1709,7 @@ checksum = "52a40bc70c2c58040d2d8b167ba9a5ff59fc9dab7ad44771cfde3dcfde7a09c6"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -1810,7 +1811,7 @@ dependencies = [
|
|||
"pep440_rs",
|
||||
"pep508_rs",
|
||||
"serde",
|
||||
"toml 0.8.2",
|
||||
"toml",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -2062,7 +2063,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "ruff_cli"
|
||||
version = "0.1.6"
|
||||
version = "0.1.8"
|
||||
dependencies = [
|
||||
"annotate-snippets 0.9.2",
|
||||
"anyhow",
|
||||
|
|
@ -2154,7 +2155,7 @@ dependencies = [
|
|||
"strum",
|
||||
"strum_macros",
|
||||
"tempfile",
|
||||
"toml 0.7.8",
|
||||
"toml",
|
||||
"tracing",
|
||||
"tracing-indicatif",
|
||||
"tracing-subscriber",
|
||||
|
|
@ -2198,7 +2199,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "ruff_linter"
|
||||
version = "0.1.6"
|
||||
version = "0.1.8"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"annotate-snippets 0.9.2",
|
||||
|
|
@ -2254,10 +2255,11 @@ dependencies = [
|
|||
"tempfile",
|
||||
"test-case",
|
||||
"thiserror",
|
||||
"toml 0.7.8",
|
||||
"toml",
|
||||
"typed-arena",
|
||||
"unicode-width",
|
||||
"unicode_names2",
|
||||
"url",
|
||||
"wsl",
|
||||
]
|
||||
|
||||
|
|
@ -2269,7 +2271,7 @@ dependencies = [
|
|||
"proc-macro2",
|
||||
"quote",
|
||||
"ruff_python_trivia",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -2450,7 +2452,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "ruff_shrinking"
|
||||
version = "0.1.6"
|
||||
version = "0.1.8"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
|
|
@ -2541,7 +2543,7 @@ dependencies = [
|
|||
"shellexpand",
|
||||
"strum",
|
||||
"tempfile",
|
||||
"toml 0.7.8",
|
||||
"toml",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -2618,9 +2620,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "schemars"
|
||||
version = "0.8.15"
|
||||
version = "0.8.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1f7b0ce13155372a76ee2e1c5ffba1fe61ede73fbea5630d61eee6fac4929c0c"
|
||||
checksum = "45a28f4c49489add4ce10783f7911893516f15afe45d015608d41faca6bc4d29"
|
||||
dependencies = [
|
||||
"dyn-clone",
|
||||
"schemars_derive",
|
||||
|
|
@ -2630,9 +2632,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "schemars_derive"
|
||||
version = "0.8.15"
|
||||
version = "0.8.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e85e2a16b12bdb763244c69ab79363d71db2b4b918a2def53f80b02e0574b13c"
|
||||
checksum = "c767fd6fa65d9ccf9cf026122c1b555f2ef9a4f0cea69da4d7dbc3e258d30967"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
|
@ -2676,18 +2678,18 @@ checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090"
|
|||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.190"
|
||||
version = "1.0.193"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "91d3c334ca1ee894a2c6f6ad698fe8c435b76d504b13d436f0685d648d6d96f7"
|
||||
checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde-wasm-bindgen"
|
||||
version = "0.6.1"
|
||||
version = "0.6.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "17ba92964781421b6cef36bf0d7da26d201e96d84e1b10e7ae6ed416e516906d"
|
||||
checksum = "b9b713f70513ae1f8d92665bbbbda5c295c2cf1da5542881ae5eefe20c9af132"
|
||||
dependencies = [
|
||||
"js-sys",
|
||||
"serde",
|
||||
|
|
@ -2696,13 +2698,13 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.190"
|
||||
version = "1.0.193"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "67c5609f394e5c2bd7fc51efda478004ea80ef42fee983d5c67a65e34f32c0e3"
|
||||
checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -2764,7 +2766,7 @@ dependencies = [
|
|||
"darling",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -2868,7 +2870,7 @@ dependencies = [
|
|||
"proc-macro2",
|
||||
"quote",
|
||||
"rustversion",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -2884,9 +2886,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.39"
|
||||
version = "2.0.40"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a"
|
||||
checksum = "13fa70a4ee923979ffb522cacce59d34421ebdea5625e1073c4326ef9d2dd42e"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
|
@ -2973,7 +2975,7 @@ dependencies = [
|
|||
"proc-macro-error",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -2985,7 +2987,7 @@ dependencies = [
|
|||
"proc-macro-error",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
"test-case-core",
|
||||
]
|
||||
|
||||
|
|
@ -3006,7 +3008,7 @@ checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -3089,18 +3091,6 @@ version = "0.1.1"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
|
||||
|
||||
[[package]]
|
||||
name = "toml"
|
||||
version = "0.7.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257"
|
||||
dependencies = [
|
||||
"serde",
|
||||
"serde_spanned",
|
||||
"toml_datetime",
|
||||
"toml_edit 0.19.15",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "toml"
|
||||
version = "0.8.2"
|
||||
|
|
@ -3110,7 +3100,7 @@ dependencies = [
|
|||
"serde",
|
||||
"serde_spanned",
|
||||
"toml_datetime",
|
||||
"toml_edit 0.20.2",
|
||||
"toml_edit",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -3122,19 +3112,6 @@ dependencies = [
|
|||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "toml_edit"
|
||||
version = "0.19.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421"
|
||||
dependencies = [
|
||||
"indexmap",
|
||||
"serde",
|
||||
"serde_spanned",
|
||||
"toml_datetime",
|
||||
"winnow",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "toml_edit"
|
||||
version = "0.20.2"
|
||||
|
|
@ -3168,7 +3145,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -3183,9 +3160,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "tracing-indicatif"
|
||||
version = "0.3.5"
|
||||
version = "0.3.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "57e05fe4a1c906d94b275d8aeb8ff8b9deaca502aeb59ae8ab500a92b8032ac8"
|
||||
checksum = "069580424efe11d97c3fef4197fa98c004fa26672cc71ad8770d224e23b1951d"
|
||||
dependencies = [
|
||||
"indicatif",
|
||||
"tracing",
|
||||
|
|
@ -3305,9 +3282,9 @@ checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
|
|||
|
||||
[[package]]
|
||||
name = "unicode_names2"
|
||||
version = "1.2.0"
|
||||
version = "1.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5d5506ae2c3c1ccbdf468e52fc5ef536c2ccd981f01273a4cb81aa61021f3a5f"
|
||||
checksum = "ac64ef2f016dc69dfa8283394a70b057066eb054d5fcb6b9eb17bd2ec5097211"
|
||||
dependencies = [
|
||||
"phf",
|
||||
"unicode_names2_generator",
|
||||
|
|
@ -3315,9 +3292,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "unicode_names2_generator"
|
||||
version = "1.2.0"
|
||||
version = "1.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b6dfc680313e95bc6637fa278cd7a22390c3c2cd7b8b2bd28755bc6c0fc811e7"
|
||||
checksum = "013f6a731e80f3930de580e55ba41dfa846de4e0fdee4a701f97989cb1597d6a"
|
||||
dependencies = [
|
||||
"getopts",
|
||||
"log",
|
||||
|
|
@ -3334,9 +3311,9 @@ checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
|
|||
|
||||
[[package]]
|
||||
name = "ureq"
|
||||
version = "2.8.0"
|
||||
version = "2.9.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f5ccd538d4a604753ebc2f17cd9946e89b77bf87f6a8e2309667c6f2e87855e3"
|
||||
checksum = "f8cdd25c339e200129fe4de81451814e5228c9b771d57378817d6117cc2b3f97"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"flate2",
|
||||
|
|
@ -3350,9 +3327,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "url"
|
||||
version = "2.4.1"
|
||||
version = "2.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5"
|
||||
checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633"
|
||||
dependencies = [
|
||||
"form_urlencoded",
|
||||
"idna",
|
||||
|
|
@ -3386,7 +3363,7 @@ checksum = "f49e7f3f3db8040a100710a11932239fd30697115e2ba4107080d8252939845e"
|
|||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -3461,9 +3438,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
|
|||
|
||||
[[package]]
|
||||
name = "wasm-bindgen"
|
||||
version = "0.2.88"
|
||||
version = "0.2.89"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7daec296f25a1bae309c0cd5c29c4b260e510e6d813c286b19eaadf409d40fce"
|
||||
checksum = "0ed0d4f68a3015cc185aff4db9506a015f4b96f95303897bfa23f846db54064e"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"wasm-bindgen-macro",
|
||||
|
|
@ -3471,24 +3448,24 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-backend"
|
||||
version = "0.2.88"
|
||||
version = "0.2.89"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e397f4664c0e4e428e8313a469aaa58310d302159845980fd23b0f22a847f217"
|
||||
checksum = "1b56f625e64f3a1084ded111c4d5f477df9f8c92df113852fa5a374dbda78826"
|
||||
dependencies = [
|
||||
"bumpalo",
|
||||
"log",
|
||||
"once_cell",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
"wasm-bindgen-shared",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-futures"
|
||||
version = "0.4.38"
|
||||
version = "0.4.39"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9afec9963e3d0994cac82455b2b3502b81a7f40f9a0d32181f7528d9f4b43e02"
|
||||
checksum = "ac36a15a220124ac510204aec1c3e5db8a22ab06fd6706d881dc6149f8ed9a12"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"js-sys",
|
||||
|
|
@ -3498,9 +3475,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-macro"
|
||||
version = "0.2.88"
|
||||
version = "0.2.89"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5961017b3b08ad5f3fe39f1e79877f8ee7c23c5e5fd5eb80de95abc41f1f16b2"
|
||||
checksum = "0162dbf37223cd2afce98f3d0785506dcb8d266223983e4b5b525859e6e182b2"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"wasm-bindgen-macro-support",
|
||||
|
|
@ -3508,28 +3485,28 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-macro-support"
|
||||
version = "0.2.88"
|
||||
version = "0.2.89"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c5353b8dab669f5e10f5bd76df26a9360c748f054f862ff5f3f8aae0c7fb3907"
|
||||
checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
"wasm-bindgen-backend",
|
||||
"wasm-bindgen-shared",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-shared"
|
||||
version = "0.2.88"
|
||||
version = "0.2.89"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0d046c5d029ba91a1ed14da14dca44b68bf2f124cfbaf741c54151fdb3e0750b"
|
||||
checksum = "7ab9b36309365056cd639da3134bf87fa8f3d86008abf99e612384a6eecd459f"
|
||||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-test"
|
||||
version = "0.3.38"
|
||||
version = "0.3.39"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c6433b7c56db97397842c46b67e11873eda263170afeb3a2dc74a7cb370fee0d"
|
||||
checksum = "2cf9242c0d27999b831eae4767b2a146feb0b27d332d553e605864acd2afd403"
|
||||
dependencies = [
|
||||
"console_error_panic_hook",
|
||||
"js-sys",
|
||||
|
|
@ -3541,13 +3518,13 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "wasm-bindgen-test-macro"
|
||||
version = "0.3.38"
|
||||
version = "0.3.39"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "493fcbab756bb764fa37e6bee8cec2dd709eb4273d06d0c282a5e74275ded735"
|
||||
checksum = "794645f5408c9a039fd09f4d113cdfb2e7eba5ff1956b07bcf701cf4b394fe89"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.39",
|
||||
"syn 2.0.40",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -3644,6 +3621,15 @@ dependencies = [
|
|||
"windows-targets 0.48.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
|
||||
dependencies = [
|
||||
"windows-targets 0.52.0",
|
||||
]
|
[[package]]
name = "windows-targets"
version = "0.42.2"

@@ -3674,6 +3660,21 @@ dependencies = [
"windows_x86_64_msvc 0.48.5",
]

[[package]]
name = "windows-targets"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd"
dependencies = [
"windows_aarch64_gnullvm 0.52.0",
"windows_aarch64_msvc 0.52.0",
"windows_i686_gnu 0.52.0",
"windows_i686_msvc 0.52.0",
"windows_x86_64_gnu 0.52.0",
"windows_x86_64_gnullvm 0.52.0",
"windows_x86_64_msvc 0.52.0",
]

[[package]]
name = "windows_aarch64_gnullvm"
version = "0.42.2"

@@ -3686,6 +3687,12 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"

[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea"

[[package]]
name = "windows_aarch64_msvc"
version = "0.42.2"

@@ -3698,6 +3705,12 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"

[[package]]
name = "windows_aarch64_msvc"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef"

[[package]]
name = "windows_i686_gnu"
version = "0.42.2"

@@ -3710,6 +3723,12 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"

[[package]]
name = "windows_i686_gnu"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313"

[[package]]
name = "windows_i686_msvc"
version = "0.42.2"

@@ -3722,6 +3741,12 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"

[[package]]
name = "windows_i686_msvc"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a"

[[package]]
name = "windows_x86_64_gnu"
version = "0.42.2"

@@ -3734,6 +3759,12 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"

[[package]]
name = "windows_x86_64_gnu"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd"

[[package]]
name = "windows_x86_64_gnullvm"
version = "0.42.2"

@@ -3746,6 +3777,12 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"

[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e"

[[package]]
name = "windows_x86_64_msvc"
version = "0.42.2"

@@ -3758,6 +3795,12 @@ version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"

[[package]]
name = "windows_x86_64_msvc"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04"

[[package]]
name = "winnow"
version = "0.5.15"

@@ -3796,3 +3839,23 @@ checksum = "fe5c30ade05e61656247b2e334a031dfd0cc466fadef865bdcdea8d537951bf1"
dependencies = [
"winapi",
]

[[package]]
name = "zerocopy"
version = "0.7.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "306dca4455518f1f31635ec308b6b3e4eb1b11758cefafc782827d0aa7acb5c7"
dependencies = [
"zerocopy-derive",
]

[[package]]
name = "zerocopy-derive"
version = "0.7.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be912bf68235a88fbefd1b73415cb218405958d1655b2ece9035a19920bdf6ba"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.40",
]
Cargo.toml (39 changes)
@@ -17,24 +17,24 @@ bitflags = { version = "2.4.1" }
chrono = { version = "0.4.31", default-features = false, features = ["clock"] }
clap = { version = "4.4.7", features = ["derive"] }
colored = { version = "2.0.0" }
filetime = { version = "0.2.20" }
filetime = { version = "0.2.23" }
glob = { version = "0.3.1" }
globset = { version = "0.4.14" }
ignore = { version = "0.4.20" }
insta = { version = "1.34.0", feature = ["filters", "glob"] }
is-macro = { version = "0.3.0" }
is-macro = { version = "0.3.1" }
itertools = { version = "0.11.0" }
libcst = { version = "1.1.0", default-features = false }
log = { version = "0.4.17" }
memchr = { version = "2.6.4" }
once_cell = { version = "1.17.1" }
once_cell = { version = "1.19.0" }
path-absolutize = { version = "3.1.1" }
proc-macro2 = { version = "1.0.70" }
quote = { version = "1.0.23" }
regex = { version = "1.10.2" }
rustc-hash = { version = "1.1.0" }
schemars = { version = "0.8.15" }
serde = { version = "1.0.190", features = ["derive"] }
schemars = { version = "0.8.16" }
serde = { version = "1.0.193", features = ["derive"] }
serde_json = { version = "1.0.108" }
shellexpand = { version = "3.0.0" }
similar = { version = "2.3.0", features = ["inline"] }

@@ -42,15 +42,15 @@ smallvec = { version = "1.11.2" }
static_assertions = "1.1.0"
strum = { version = "0.25.0", features = ["strum_macros"] }
strum_macros = { version = "0.25.3" }
syn = { version = "2.0.39" }
syn = { version = "2.0.40" }
test-case = { version = "3.2.1" }
thiserror = { version = "1.0.50" }
toml = { version = "0.7.8" }
toml = { version = "0.8.2" }
tracing = { version = "0.1.40" }
tracing-indicatif = { version = "0.3.4" }
tracing-indicatif = { version = "0.3.6" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
unicode-ident = { version = "1.0.12" }
unicode_names2 = { version = "1.2.0" }
unicode_names2 = { version = "1.2.1" }
unicode-width = { version = "0.1.11" }
uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] }
wsl = { version = "0.1.0" }

@@ -88,7 +88,20 @@ rc_mutex = "warn"
rest_pat_in_fully_bound_structs = "warn"

[profile.release]
lto = "fat"
# Note that we set these explicitly, and these values
# were chosen based on a trade-off between compile times
# and runtime performance[1].
#
# [1]: https://github.com/astral-sh/ruff/pull/9031
lto = "thin"
codegen-units = 16

# Some crates don't change as much but benefit more from
# more expensive optimization passes, so we selectively
# decrease codegen-units in some cases.
[profile.release.package.ruff_python_parser]
codegen-units = 1
[profile.release.package.ruff_python_ast]
codegen-units = 1

[profile.dev.package.insta]

@@ -102,8 +115,8 @@ opt-level = 3
[profile.dev.package.ruff_python_parser]
opt-level = 1

# Use the `--profile release-debug` flag to show symbols in release mode.
# e.g. `cargo build --profile release-debug`
[profile.release-debug]
# Use the `--profile profiling` flag to show symbols in release mode.
# e.g. `cargo build --profile profiling`
[profile.profiling]
inherits = "release"
debug = 1
@@ -150,7 +150,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.1.6
rev: v0.1.8
hooks:
# Run the linter.
- id: ruff

@@ -194,20 +194,25 @@ exclude = [
".git",
".git-rewrite",
".hg",
".ipynb_checkpoints",
".mypy_cache",
".nox",
".pants.d",
".pyenv",
".pytest_cache",
".pytype",
".ruff_cache",
".svn",
".tox",
".venv",
".vscode",
"__pypackages__",
"_build",
"buck-out",
"build",
"dist",
"node_modules",
"site-packages",
"venv",
]
@@ -1,6 +1,6 @@
[package]
name = "flake8-to-ruff"
version = "0.1.6"
version = "0.1.8"
description = """
Convert Flake8 configuration files to Ruff configuration files.
"""

@@ -34,8 +34,8 @@ harness = false
once_cell.workspace = true
serde.workspace = true
serde_json.workspace = true
url = "2.3.1"
ureq = "2.8.0"
url = "2.5.0"
ureq = "2.9.1"
criterion = { version = "0.5.1", default-features = false }
codspeed-criterion-compat = { version="2.3.3", default-features = false, optional = true}
@@ -1,6 +1,6 @@
[package]
name = "ruff_cli"
version = "0.1.6"
version = "0.1.8"
publish = false
authors = { workspace = true }
edition = { workspace = true }

@@ -69,7 +69,7 @@ insta = { workspace = true, features = ["filters", "json"] }
insta-cmd = { version = "0.4.0" }
tempfile = "3.8.1"
test-case = { workspace = true }
ureq = { version = "2.8.0", features = [] }
ureq = { version = "2.9.1", features = [] }

[target.'cfg(target_os = "windows")'.dependencies]
mimalloc = "0.1.39"
@@ -515,7 +515,7 @@ impl<'a> FormatResults<'a> {
if changed > 0 && unchanged > 0 {
writeln!(
f,
"{} file{} {}, {} file{} left unchanged",
"{} file{} {}, {} file{} {}",
changed,
if changed == 1 { "" } else { "s" },
match self.mode {

@@ -524,6 +524,10 @@ impl<'a> FormatResults<'a> {
},
unchanged,
if unchanged == 1 { "" } else { "s" },
match self.mode {
FormatMode::Write => "left unchanged",
FormatMode::Check | FormatMode::Diff => "already formatted",
},
)
} else if changed > 0 {
writeln!(

@@ -539,9 +543,13 @@ impl<'a> FormatResults<'a> {
} else if unchanged > 0 {
writeln!(
f,
"{} file{} left unchanged",
"{} file{} {}",
unchanged,
if unchanged == 1 { "" } else { "s" },
match self.mode {
FormatMode::Write => "left unchanged",
FormatMode::Check | FormatMode::Diff => "already formatted",
},
)
} else {
Ok(())
@@ -13,7 +13,7 @@ use ruff_linter::fs::relativize_path;
use ruff_linter::logging::LogLevel;
use ruff_linter::message::{
AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter,
JsonEmitter, JsonLinesEmitter, JunitEmitter, PylintEmitter, TextEmitter,
JsonEmitter, JsonLinesEmitter, JunitEmitter, PylintEmitter, SarifEmitter, TextEmitter,
};
use ruff_linter::notify_user;
use ruff_linter::registry::{AsRule, Rule};

@@ -125,15 +125,7 @@ impl Printer {
if let Some(fixables) = fixables {
let fix_prefix = format!("[{}]", "*".cyan());

if self.unsafe_fixes.is_enabled() {
if fixables.applicable > 0 {
writeln!(
writer,
"{fix_prefix} {} fixable with the --fix option.",
fixables.applicable
)?;
}
} else {
if self.unsafe_fixes.is_hint() {
if fixables.applicable > 0 && fixables.unapplicable_unsafe > 0 {
let es = if fixables.unapplicable_unsafe == 1 {
""

@@ -163,6 +155,14 @@ impl Printer {
fixables.unapplicable_unsafe
)?;
}
} else {
if fixables.applicable > 0 {
writeln!(
writer,
"{fix_prefix} {} fixable with the --fix option.",
fixables.applicable
)?;
}
}
}
} else {

@@ -291,6 +291,9 @@ impl Printer {
SerializationFormat::Azure => {
AzureEmitter.emit(writer, &diagnostics.messages, &context)?;
}
SerializationFormat::Sarif => {
SarifEmitter.emit(writer, &diagnostics.messages, &context)?;
}
}

writer.flush()?;
@ -139,6 +139,99 @@ if condition:
|
|||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn docstring_options() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
let ruff_toml = tempdir.path().join("ruff.toml");
|
||||
fs::write(
|
||||
&ruff_toml,
|
||||
r#"
|
||||
[format]
|
||||
docstring-code-format = true
|
||||
docstring-code-line-length = 20
|
||||
"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.args(["format", "--config"])
|
||||
.arg(&ruff_toml)
|
||||
.arg("-")
|
||||
.pass_stdin(r#"
|
||||
def f(x):
|
||||
'''
|
||||
Something about `f`. And an example:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear)
|
||||
|
||||
Another example:
|
||||
|
||||
```py
|
||||
foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear)
|
||||
```
|
||||
|
||||
And another:
|
||||
|
||||
>>> foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear)
|
||||
'''
|
||||
pass
|
||||
"#), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
def f(x):
|
||||
"""
|
||||
Something about `f`. And an example:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
(
|
||||
foo,
|
||||
bar,
|
||||
quux,
|
||||
) = this_is_a_long_line(
|
||||
lion,
|
||||
hippo,
|
||||
lemur,
|
||||
bear,
|
||||
)
|
||||
|
||||
Another example:
|
||||
|
||||
```py
|
||||
(
|
||||
foo,
|
||||
bar,
|
||||
quux,
|
||||
) = this_is_a_long_line(
|
||||
lion,
|
||||
hippo,
|
||||
lemur,
|
||||
bear,
|
||||
)
|
||||
```
|
||||
|
||||
And another:
|
||||
|
||||
>>> (
|
||||
... foo,
|
||||
... bar,
|
||||
... quux,
|
||||
... ) = this_is_a_long_line(
|
||||
... lion,
|
||||
... hippo,
|
||||
... lemur,
|
||||
... bear,
|
||||
... )
|
||||
"""
|
||||
pass
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn mixed_line_endings() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
|
|
@ -162,7 +255,7 @@ fn mixed_line_endings() -> Result<()> {
|
|||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
2 files left unchanged
|
||||
2 files already formatted
|
||||
"###);
|
||||
Ok(())
|
||||
}
|
||||
|
|
@ -235,6 +328,60 @@ OTHER = "OTHER"
|
|||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn messages() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
|
||||
fs::write(
|
||||
tempdir.path().join("main.py"),
|
||||
r#"
|
||||
from test import say_hy
|
||||
|
||||
if __name__ == "__main__":
|
||||
say_hy("dear Ruff contributor")
|
||||
"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.args(["format", "--no-cache", "--isolated", "--check"])
|
||||
.arg("main.py"), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
Would reformat: main.py
|
||||
1 file would be reformatted
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.args(["format", "--no-cache", "--isolated"])
|
||||
.arg("main.py"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
1 file reformatted
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
|
||||
assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME))
|
||||
.current_dir(tempdir.path())
|
||||
.args(["format", "--no-cache", "--isolated"])
|
||||
.arg("main.py"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
1 file left unchanged
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn force_exclude() -> Result<()> {
|
||||
let tempdir = TempDir::new()?;
|
||||
|
|
@ -783,7 +930,7 @@ fn test_diff() {
|
|||
|
||||
|
||||
----- stderr -----
|
||||
2 files would be reformatted, 1 file left unchanged
|
||||
2 files would be reformatted, 1 file already formatted
|
||||
"###);
|
||||
});
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1158,6 +1158,44 @@ fn check_hints_hidden_unsafe_fixes_with_no_safe_fixes() {
|
|||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_no_hint_for_hidden_unsafe_fixes_when_disabled() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
.args(["--select", "F601,UP034", "--no-unsafe-fixes"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("x = {'a': 1, 'a': 1}\nprint(('foo'))\n"),
|
||||
@r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
-:1:14: F601 Dictionary key literal `'a'` repeated
|
||||
-:2:7: UP034 [*] Avoid extraneous parentheses
|
||||
Found 2 errors.
|
||||
[*] 1 fixable with the --fix option.
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_no_hint_for_hidden_unsafe_fixes_with_no_safe_fixes_when_disabled() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
.args(["--select", "F601", "--no-unsafe-fixes"])
|
||||
.build();
|
||||
assert_cmd_snapshot!(cmd
|
||||
.pass_stdin("x = {'a': 1, 'a': 1}\n"),
|
||||
@r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
-:1:14: F601 Dictionary key literal `'a'` repeated
|
||||
Found 1 error.
|
||||
|
||||
----- stderr -----
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn check_shows_unsafe_fixes_with_opt_in() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
|
|
|
|||
|
|
@@ -7,7 +7,7 @@ use ruff_text_size::{Ranged, TextRange, TextSize};

/// A text edit to be applied to a source file. Inserts, deletes, or replaces
/// content at a given location.
#[derive(Clone, Debug, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Edit {
/// The start location of the edit.
@@ -1,6 +1,6 @@
[package]
name = "ruff_linter"
version = "0.1.6"
version = "0.1.8"
publish = false
authors = { workspace = true }
edition = { workspace = true }

@@ -71,6 +71,7 @@ toml = { workspace = true }
typed-arena = { version = "2.0.2" }
unicode-width = { workspace = true }
unicode_names2 = { workspace = true }
url = { version = "2.2.2" }
wsl = { version = "0.1.0" }

[dev-dependencies]
|
|||
|
|
@ -147,3 +147,68 @@ def func(x: int):
|
|||
while x > 0:
|
||||
break
|
||||
return 1
|
||||
|
||||
|
||||
import abc
|
||||
from abc import abstractmethod
|
||||
|
||||
|
||||
class Foo(abc.ABC):
|
||||
@abstractmethod
|
||||
def method(self):
|
||||
pass
|
||||
|
||||
@abc.abstractmethod
|
||||
def method(self):
|
||||
"""Docstring."""
|
||||
|
||||
@abc.abstractmethod
|
||||
def method(self):
|
||||
...
|
||||
|
||||
@staticmethod
|
||||
@abstractmethod
|
||||
def method():
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
@abstractmethod
|
||||
def method(cls):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def method(self):
|
||||
if self.x > 0:
|
||||
return 1
|
||||
else:
|
||||
return 1.5
|
||||
|
||||
|
||||
def func(x: int):
|
||||
try:
|
||||
pass
|
||||
except:
|
||||
return 2
|
||||
|
||||
|
||||
def func(x: int):
|
||||
try:
|
||||
pass
|
||||
except:
|
||||
return 2
|
||||
else:
|
||||
return 3
|
||||
|
||||
|
||||
def func(x: int):
|
||||
if not x:
|
||||
raise ValueError
|
||||
else:
|
||||
raise TypeError
|
||||
|
||||
|
||||
def func(x: int):
|
||||
if not x:
|
||||
raise ValueError
|
||||
else:
|
||||
return 1
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ def func(address):
|
|||
# Error
|
||||
"0.0.0.0"
|
||||
'0.0.0.0'
|
||||
f"0.0.0.0"
|
||||
|
||||
|
||||
# Error
|
||||
|
|
|
|||
|
|
@ -5,6 +5,9 @@ with open("/abc/tmp", "w") as f:
|
|||
with open("/tmp/abc", "w") as f:
|
||||
f.write("def")
|
||||
|
||||
with open(f"/tmp/abc", "w") as f:
|
||||
f.write("def")
|
||||
|
||||
with open("/var/tmp/123", "w") as f:
|
||||
f.write("def")
|
||||
|
||||
|
|
|
|||
|
|
@ -127,3 +127,21 @@ class MultipleConsecutiveFields(models.Model):
|
|||
pass
|
||||
|
||||
middle_name = models.CharField(max_length=32)
|
||||
|
||||
|
||||
class BaseModel(models.Model):
|
||||
pass
|
||||
|
||||
|
||||
class StrBeforeFieldInheritedModel(BaseModel):
|
||||
"""Model with `__str__` before fields."""
|
||||
|
||||
class Meta:
|
||||
verbose_name = "test"
|
||||
verbose_name_plural = "tests"
|
||||
|
||||
def __str__(self):
|
||||
return "foobar"
|
||||
|
||||
first_name = models.CharField(max_length=32)
|
||||
|
||||
|
|
|
|||
|
|
@ -27,7 +27,7 @@ def f_ok():
|
|||
raise RuntimeError(msg)
|
||||
|
||||
|
||||
def f_unfixable():
|
||||
def f_msg_defined():
|
||||
msg = "hello"
|
||||
raise RuntimeError("This is an example exception")
|
||||
|
||||
|
|
|
|||
|
|
@ -10,7 +10,6 @@ Foo.objects.create(**{**bar}) # PIE804
|
|||
|
||||
foo(**{})
|
||||
|
||||
|
||||
foo(**{**data, "foo": "buzz"})
|
||||
foo(**buzz)
|
||||
foo(**{"bar-foo": True})
|
||||
|
|
@ -20,3 +19,8 @@ foo(**{buzz: True})
|
|||
foo(**{"": True})
|
||||
foo(**{f"buzz__{bar}": True})
|
||||
abc(**{"for": 3})
|
||||
foo(**{},)
|
||||
|
||||
# Duplicated key names won't be fixed, to avoid syntax errors.
|
||||
abc(**{'a': b}, **{'a': c}) # PIE804
|
||||
abc(a=1, **{'a': c}, **{'b': c}) # PIE804
|
||||
|
|
|
|||
|
|
@ -1,8 +1,13 @@
|
|||
import typing
|
||||
import typing_extensions
|
||||
from typing import TypeVar
|
||||
from typing_extensions import ParamSpec, TypeVarTuple
|
||||
|
||||
_T = typing.TypeVar("_T")
|
||||
_P = TypeVar("_P")
|
||||
_Ts = typing_extensions.TypeVarTuple("_Ts")
|
||||
_P = ParamSpec("_P")
|
||||
_P2 = typing.ParamSpec("_P2")
|
||||
_Ts2 = TypeVarTuple("_Ts2")
|
||||
|
||||
# OK
|
||||
_UsedTypeVar = TypeVar("_UsedTypeVar")
|
||||
|
|
|
|||
|
|
@ -1,8 +1,13 @@
|
|||
import typing
|
||||
import typing_extensions
|
||||
from typing import TypeVar
|
||||
from typing_extensions import ParamSpec, TypeVarTuple
|
||||
|
||||
_T = typing.TypeVar("_T")
|
||||
_P = TypeVar("_P")
|
||||
_Ts = typing_extensions.TypeVarTuple("_Ts")
|
||||
_P = ParamSpec("_P")
|
||||
_P2 = typing.ParamSpec("_P2")
|
||||
_Ts2 = TypeVarTuple("_Ts2")
|
||||
|
||||
# OK
|
||||
_UsedTypeVar = TypeVar("_UsedTypeVar")
|
||||
|
|
|
|||
|
|
@ -22,3 +22,7 @@ Snake_case_alias: TypeAlias = int | float # PYI042, since not camel case
|
|||
|
||||
# check that this edge case doesn't crash
|
||||
_: TypeAlias = str | int
|
||||
|
||||
# PEP 695
|
||||
type foo_bar = int | str
|
||||
type FooBar = int | str
|
||||
|
|
|
|||
|
|
@ -22,3 +22,7 @@ Snake_case_alias: TypeAlias = int | float # PYI042, since not camel case
|
|||
|
||||
# check that this edge case doesn't crash
|
||||
_: TypeAlias = str | int
|
||||
|
||||
# PEP 695
|
||||
type foo_bar = int | str
|
||||
type FooBar = int | str
|
||||
|
|
|
|||
|
|
@ -21,3 +21,7 @@ _PrivateAliasS2: TypeAlias = Annotated[str, "also okay"]
|
|||
|
||||
# check that this edge case doesn't crash
|
||||
_: TypeAlias = str | int
|
||||
|
||||
# PEP 695
|
||||
type _FooT = str | int
|
||||
type Foo = str | int
|
||||
|
|
|
|||
|
|
@ -21,3 +21,7 @@ _PrivateAliasS2: TypeAlias = Annotated[str, "also okay"]
|
|||
|
||||
# check that this edge case doesn't crash
|
||||
_: TypeAlias = str | int
|
||||
|
||||
# PEP 695
|
||||
type _FooT = str | int
|
||||
type Foo = str | int
|
||||
|
|
|
|||
|
|
@ -32,6 +32,7 @@ def f8(x: bytes = b"50 character byte stringgggggggggggggggggggggggggg\xff") ->
|
|||
|
||||
foo: str = "50 character stringggggggggggggggggggggggggggggggg"
|
||||
bar: str = "51 character stringgggggggggggggggggggggggggggggggg"
|
||||
baz: str = f"51 character stringgggggggggggggggggggggggggggggggg"
|
||||
|
||||
baz: bytes = b"50 character byte stringgggggggggggggggggggggggggg"
|
||||
|
||||
|
|
|
|||
|
|
@ -29,6 +29,10 @@ baz: bytes = b"50 character byte stringgggggggggggggggggggggggggg" # OK
|
|||
|
||||
qux: bytes = b"51 character byte stringggggggggggggggggggggggggggg\xff" # Error: PYI053
|
||||
|
||||
ffoo: str = f"50 character stringggggggggggggggggggggggggggggggg" # OK
|
||||
|
||||
fbar: str = f"51 character stringgggggggggggggggggggggggggggggggg" # Error: PYI053
|
||||
|
||||
class Demo:
|
||||
"""Docstrings are excluded from this rule. Some padding.""" # OK
|
||||
|
||||
|
|
|
|||
|
|
@ -37,3 +37,28 @@ def func():
|
|||
|
||||
# PYI055
|
||||
x: Union[type[requests_mock.Mocker], type[httpretty], type[str]] = requests_mock.Mocker
|
||||
|
||||
|
||||
def convert_union(union: UnionType) -> _T | None:
|
||||
converters: tuple[
|
||||
type[_T] | type[Converter[_T]] | Converter[_T] | Callable[[str], _T], ... # PYI055
|
||||
] = union.__args__
|
||||
...
|
||||
|
||||
def convert_union(union: UnionType) -> _T | None:
|
||||
converters: tuple[
|
||||
Union[type[_T] | type[Converter[_T]] | Converter[_T] | Callable[[str], _T]], ... # PYI055
|
||||
] = union.__args__
|
||||
...
|
||||
|
||||
def convert_union(union: UnionType) -> _T | None:
|
||||
converters: tuple[
|
||||
Union[type[_T] | type[Converter[_T]]] | Converter[_T] | Callable[[str], _T], ... # PYI055
|
||||
] = union.__args__
|
||||
...
|
||||
|
||||
def convert_union(union: UnionType) -> _T | None:
|
||||
converters: tuple[
|
||||
Union[type[_T] | type[Converter[_T]] | str] | Converter[_T] | Callable[[str], _T], ... # PYI055
|
||||
] = union.__args__
|
||||
...
|
||||
|
|
|
|||
|
|
@ -82,3 +82,14 @@ raise IndexError();
|
|||
|
||||
# RSE102
|
||||
raise Foo()
|
||||
|
||||
# OK
|
||||
raise ctypes.WinError()
|
||||
|
||||
|
||||
def func():
|
||||
pass
|
||||
|
||||
|
||||
# OK
|
||||
raise func()
|
||||
|
|
|
|||
|
|
@ -19,8 +19,32 @@ async def func():
|
|||
bar = "bar"
|
||||
trio.sleep(bar)
|
||||
|
||||
x, y = 0, 2000
|
||||
trio.sleep(x) # TRIO115
|
||||
trio.sleep(y) # OK
|
||||
|
||||
(a, b, [c, (d, e)]) = (1, 2, (0, [4, 0]))
|
||||
trio.sleep(c) # TRIO115
|
||||
trio.sleep(d) # OK
|
||||
trio.sleep(e) # TRIO115
|
||||
|
||||
m_x, m_y = 0
|
||||
trio.sleep(m_y) # OK
|
||||
trio.sleep(m_x) # OK
|
||||
|
||||
m_a = m_b = 0
|
||||
trio.sleep(m_a) # TRIO115
|
||||
trio.sleep(m_b) # TRIO115
|
||||
|
||||
m_c = (m_d, m_e) = (0, 0)
|
||||
trio.sleep(m_c) # OK
|
||||
trio.sleep(m_d) # TRIO115
|
||||
trio.sleep(m_e) # TRIO115
|
||||
|
||||
|
||||
def func():
|
||||
import trio
|
||||
|
||||
trio.run(trio.sleep(0)) # TRIO115
|
||||
|
||||
|
||||
|
|
@ -33,3 +57,10 @@ def func():
|
|||
|
||||
async def func():
|
||||
await sleep(seconds=0) # TRIO115
|
||||
|
||||
|
||||
def func():
|
||||
import trio
|
||||
|
||||
if (walrus := 0) == 0:
|
||||
trio.sleep(walrus) # TRIO115
|
||||
|
|
|
|||
|
|
@ -0,0 +1,18 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from typing import TypeVar
|
||||
|
||||
|
||||
x: "int" | str # TCH006
|
||||
x: ("int" | str) | "bool" # TCH006
|
||||
|
||||
|
||||
def func():
|
||||
x: "int" | str # OK
|
||||
|
||||
|
||||
z: list[str, str | "int"] = [] # TCH006
|
||||
|
||||
type A = Value["int" | str] # OK
|
||||
|
||||
OldS = TypeVar('OldS', int | 'str', str) # TCH006
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
from typing import TypeVar
|
||||
|
||||
|
||||
x: "int" | str # TCH006
|
||||
x: ("int" | str) | "bool" # TCH006
|
||||
|
||||
|
||||
def func():
|
||||
x: "int" | str # OK
|
||||
|
||||
|
||||
z: list[str, str | "int"] = [] # TCH006
|
||||
|
||||
type A = Value["int" | str] # OK
|
||||
|
||||
OldS = TypeVar('OldS', int | 'str', str) # TCH006
|
||||
crates/ruff_linter/resources/test/fixtures/flake8_type_checking/exempt_type_checking_1.py (vendored, 7 lines)
|
|
@ -0,0 +1,7 @@
|
|||
"""Add `TYPE_CHECKING` to an existing `typing` import. Another member is moved."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Final
|
||||
|
||||
Const: Final[dict] = {}
|
||||
crates/ruff_linter/resources/test/fixtures/flake8_type_checking/exempt_type_checking_2.py (vendored, 7 lines)
|
|
@ -0,0 +1,7 @@
|
|||
"""Using `TYPE_CHECKING` from an existing `typing` import. Another member is moved."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Final, TYPE_CHECKING
|
||||
|
||||
Const: Final[dict] = {}
|
||||
crates/ruff_linter/resources/test/fixtures/flake8_type_checking/exempt_type_checking_3.py (vendored, 7 lines)
|
|
@ -0,0 +1,7 @@
|
|||
"""Using `TYPE_CHECKING` from an existing `typing` import. Another member is moved."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Final, Mapping
|
||||
|
||||
Const: Final[dict] = {}
|
||||
|
|
@ -0,0 +1,92 @@
|
|||
def f():
|
||||
from pandas import DataFrame
|
||||
|
||||
def baz() -> DataFrame:
|
||||
...
|
||||
|
||||
|
||||
def f():
|
||||
from pandas import DataFrame
|
||||
|
||||
def baz() -> DataFrame[int]:
|
||||
...
|
||||
|
||||
|
||||
def f():
|
||||
from pandas import DataFrame
|
||||
|
||||
def baz() -> DataFrame["int"]:
|
||||
...
|
||||
|
||||
|
||||
def f():
|
||||
import pandas as pd
|
||||
|
||||
def baz() -> pd.DataFrame:
|
||||
...
|
||||
|
||||
|
||||
def f():
|
||||
import pandas as pd
|
||||
|
||||
def baz() -> pd.DataFrame.Extra:
|
||||
...
|
||||
|
||||
|
||||
def f():
|
||||
import pandas as pd
|
||||
|
||||
def baz() -> pd.DataFrame | int:
|
||||
...
|
||||
|
||||
|
||||
|
||||
def f():
|
||||
from pandas import DataFrame
|
||||
|
||||
def baz() -> DataFrame():
|
||||
...
|
||||
|
||||
|
||||
def f():
|
||||
from typing import Literal
|
||||
|
||||
from pandas import DataFrame
|
||||
|
||||
def baz() -> DataFrame[Literal["int"]]:
|
||||
...
|
||||
|
||||
|
||||
def f():
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pandas import DataFrame
|
||||
|
||||
def func(value: DataFrame):
|
||||
...
|
||||
|
||||
|
||||
def f():
|
||||
from pandas import DataFrame, Series
|
||||
|
||||
def baz() -> DataFrame | Series:
|
||||
...
|
||||
|
||||
|
||||
def f():
|
||||
from pandas import DataFrame, Series
|
||||
|
||||
def baz() -> (
|
||||
DataFrame |
|
||||
Series
|
||||
):
|
||||
...
|
||||
|
||||
class C:
|
||||
x: DataFrame[
|
||||
int
|
||||
] = 1
|
||||
|
||||
def func() -> DataFrame[[DataFrame[_P, _R]], DataFrame[_P, _R]]:
|
||||
...
|
||||
crates/ruff_linter/resources/test/fixtures/isort/force_sort_within_sections_future.py (vendored, 2 lines)
|
|
@ -0,0 +1,2 @@
|
|||
import __future__
|
||||
from __future__ import annotations
|
||||
crates/ruff_linter/resources/test/fixtures/isort/force_sort_within_sections_lines_between.py (vendored, 4 lines)
|
|
@ -0,0 +1,4 @@
|
|||
from a import x
|
||||
import b
|
||||
from c import y
|
||||
import d
|
||||
|
|
@ -0,0 +1,2 @@
|
|||
import __future__
|
||||
from __future__ import annotations
|
||||
|
|
@ -52,3 +52,9 @@ def model_assign() -> None:
|
|||
|
||||
Bad = import_string("django.core.exceptions.ValidationError") # N806
|
||||
ValidationError = import_string("django.core.exceptions.ValidationError") # OK
|
||||
|
||||
Bad = apps.get_model() # N806
|
||||
Bad = apps.get_model(model_name="Stream") # N806
|
||||
|
||||
Address: Type = apps.get_model("zerver", variable) # OK
|
||||
ValidationError = import_string(variable) # N806
|
||||
|
|
|
|||
|
|
@ -63,3 +63,8 @@ for i in list(foo_tuple): # Ok
|
|||
|
||||
for i in list(foo_set): # Ok
|
||||
foo_set.append(i + 1)
|
||||
|
||||
x, y, nested_tuple = (1, 2, (3, 4, 5))
|
||||
|
||||
for i in list(nested_tuple): # PERF101
|
||||
pass
|
||||
|
|
|
|||
|
|
@ -72,3 +72,15 @@ a = 42 # (Two spaces)
|
|||
# EF Means test is giving error and Failing
|
||||
#! Means test is segfaulting
|
||||
# 8 Means test runs forever
|
||||
|
||||
#: Colon prefix is okay
|
||||
|
||||
###This is a variable ###
|
||||
|
||||
# We should strip the space, but preserve the hashes.
|
||||
#: E266:1:3
|
||||
## Foo
|
||||
|
||||
a = 1 ## Foo
|
||||
|
||||
a = 1 #:Foo
|
||||
|
|
|
|||
|
|
@ -60,3 +60,6 @@ def f():
|
|||
if (a and
|
||||
b):
|
||||
pass
|
||||
#: Okay
|
||||
def f():
|
||||
return 1
|
||||
|
|
|
|||
|
|
@ -19,21 +19,32 @@ if x > 0:
|
|||
else:
|
||||
import e
|
||||
|
||||
__some__magic = 1
|
||||
import sys
|
||||
sys.path.insert(0, "some/path")
|
||||
|
||||
import f
|
||||
|
||||
import matplotlib
|
||||
|
||||
matplotlib.use("Agg")
|
||||
|
||||
import g
|
||||
|
||||
__some__magic = 1
|
||||
|
||||
import h
|
||||
|
||||
|
||||
def foo() -> None:
|
||||
import e
|
||||
import i
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import g
|
||||
import j
|
||||
|
||||
import h; import i
|
||||
import k; import l
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import j; \
|
||||
import k
|
||||
import m; \
|
||||
import n
|
||||
|
|
|
|||
|
|
@ -43,3 +43,6 @@ regex = '''
|
|||
''' # noqa
|
||||
|
||||
regex = '\\\_'
|
||||
|
||||
#: W605:1:7
|
||||
u'foo\ bar'
|
||||
|
|
|
|||
|
|
@ -1,40 +1,54 @@
|
|||
# Same as `W605_0.py` but using f-strings instead.
|
||||
|
||||
#: W605:1:10
|
||||
regex = '\.png$'
|
||||
regex = f'\.png$'
|
||||
|
||||
#: W605:2:1
|
||||
regex = '''
|
||||
regex = f'''
|
||||
\.png$
|
||||
'''
|
||||
|
||||
#: W605:2:6
|
||||
f(
|
||||
'\_'
|
||||
f'\_'
|
||||
)
|
||||
|
||||
#: W605:4:6
|
||||
"""
|
||||
f"""
|
||||
multi-line
|
||||
literal
|
||||
with \_ somewhere
|
||||
in the middle
|
||||
"""
|
||||
|
||||
#: W605:1:38
|
||||
value = f'new line\nand invalid escape \_ here'
|
||||
|
||||
def f():
|
||||
#: W605:1:11
|
||||
return'\.png$'
|
||||
|
||||
#: Okay
|
||||
regex = r'\.png$'
|
||||
regex = '\\.png$'
|
||||
regex = r'''
|
||||
regex = fr'\.png$'
|
||||
regex = f'\\.png$'
|
||||
regex = fr'''
|
||||
\.png$
|
||||
'''
|
||||
regex = r'''
|
||||
regex = fr'''
|
||||
\\.png$
|
||||
'''
|
||||
s = '\\'
|
||||
regex = '\w' # noqa
|
||||
regex = '''
|
||||
s = f'\\'
|
||||
regex = f'\w' # noqa
|
||||
regex = f'''
|
||||
\w
|
||||
''' # noqa
|
||||
|
||||
regex = f'\\\_'
|
||||
value = f'\{{1}}'
|
||||
value = f'\{1}'
|
||||
value = f'{1:\}'
|
||||
value = f"{f"\{1}"}"
|
||||
value = rf"{f"\{1}"}"
|
||||
|
||||
# Okay
|
||||
value = rf'\{{1}}'
|
||||
value = rf'\{1}'
|
||||
value = rf'{1:\}'
|
||||
value = f"{rf"\{1}"}"
|
||||
|
|
|
|||
|
|
@ -1,54 +0,0 @@
|
|||
# Same as `W605_0.py` but using f-strings instead.
|
||||
|
||||
#: W605:1:10
|
||||
regex = f'\.png$'
|
||||
|
||||
#: W605:2:1
|
||||
regex = f'''
|
||||
\.png$
|
||||
'''
|
||||
|
||||
#: W605:2:6
|
||||
f(
|
||||
f'\_'
|
||||
)
|
||||
|
||||
#: W605:4:6
|
||||
f"""
|
||||
multi-line
|
||||
literal
|
||||
with \_ somewhere
|
||||
in the middle
|
||||
"""
|
||||
|
||||
#: W605:1:38
|
||||
value = f'new line\nand invalid escape \_ here'
|
||||
|
||||
|
||||
#: Okay
|
||||
regex = fr'\.png$'
|
||||
regex = f'\\.png$'
|
||||
regex = fr'''
|
||||
\.png$
|
||||
'''
|
||||
regex = fr'''
|
||||
\\.png$
|
||||
'''
|
||||
s = f'\\'
|
||||
regex = f'\w' # noqa
|
||||
regex = f'''
|
||||
\w
|
||||
''' # noqa
|
||||
|
||||
regex = f'\\\_'
|
||||
value = f'\{{1}}'
|
||||
value = f'\{1}'
|
||||
value = f'{1:\}'
|
||||
value = f"{f"\{1}"}"
|
||||
value = rf"{f"\{1}"}"
|
||||
|
||||
# Okay
|
||||
value = rf'\{{1}}'
|
||||
value = rf'\{1}'
|
||||
value = rf'{1:\}'
|
||||
value = f"{rf"\{1}"}"
|
||||
|
|
@ -713,5 +713,12 @@ def retain_extra_whitespace_not_overindented():
|
|||
|
||||
This is not overindented
|
||||
This is overindented, but since one line is not overindented this should not raise
|
||||
And so is this, but it we should preserve the extra space on this line relative
|
||||
And so is this, but it we should preserve the extra space on this line relative
|
||||
"""
|
||||
|
||||
|
||||
def inconsistent_indent_byte_size():
|
||||
"""There's a non-breaking space (2-bytes) after 3 spaces (https://github.com/astral-sh/ruff/issues/9080).
|
||||
|
||||
Returns:
|
||||
"""
|
||||
|
|
|
|||
|
|
@ -1,5 +1,16 @@
|
|||
"""
|
||||
Author
|
||||
"""
|
||||
|
||||
|
||||
class Platform:
|
||||
""" Remove sampler
|
||||
Args:
|
||||
Returns:
|
||||
"""
|
||||
|
||||
|
||||
def memory_test():
|
||||
"""
|
||||
参数含义:precision:精确到小数点后几位
|
||||
"""
|
||||
|
|
|
|||
|
|
@ -0,0 +1,4 @@
|
|||
import re
|
||||
from typing import Annotated
|
||||
|
||||
type X = Annotated[int, lambda: re.compile("x")]
|
||||
|
|
@ -3,6 +3,11 @@ import subprocess
|
|||
# Errors.
|
||||
subprocess.run("ls")
|
||||
subprocess.run("ls", shell=True)
|
||||
subprocess.run(
|
||||
["ls"],
|
||||
shell=False,
|
||||
)
|
||||
subprocess.run(["ls"], **kwargs)
|
||||
|
||||
# Non-errors.
|
||||
subprocess.run("ls", check=True)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,36 @@
|
|||
def func() -> None: # OK
|
||||
# 15 is max default
|
||||
first = 1
|
||||
second = 2
|
||||
third = 3
|
||||
fourth = 4
|
||||
fifth = 5
|
||||
sixth = 6
|
||||
seventh = 7
|
||||
eighth = 8
|
||||
ninth = 9
|
||||
tenth = 10
|
||||
eleventh = 11
|
||||
twelveth = 12
|
||||
thirteenth = 13
|
||||
fourteenth = 14
|
||||
fifteenth = 15
|
||||
|
||||
|
||||
def func() -> None: # PLR0914
|
||||
first = 1
|
||||
second = 2
|
||||
third = 3
|
||||
fourth = 4
|
||||
fifth = 5
|
||||
sixth = 6
|
||||
seventh = 7
|
||||
eighth = 8
|
||||
ninth = 9
|
||||
tenth = 10
|
||||
eleventh = 11
|
||||
twelfth = 12
|
||||
thirteenth = 13
|
||||
fourteenth = 14
|
||||
fifteenth = 15
|
||||
sixteenth = 16
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
def f(x, y, z, t, u, v, w, r): # Too many positional arguments (8/3)
|
||||
pass
|
||||
|
||||
|
||||
def f(x): # OK
|
||||
pass
|
||||
|
||||
|
||||
def f(x, y, z, _t, _u, _v, _w, r): # OK (underscore-prefixed names are ignored
|
||||
pass
|
||||
|
||||
|
||||
def f(x, y, z, *, u=1, v=1, r=1): # OK
|
||||
pass
|
||||
|
||||
|
||||
def f(x=1, y=1, z=1): # OK
|
||||
pass
|
||||
|
||||
|
||||
def f(x, y, z, /, u, v, w): # Too many positional arguments (6/3)
|
||||
pass
|
||||
|
||||
|
||||
def f(x, y, z, *, u, v, w): # OK
|
||||
pass
|
||||
|
||||
|
||||
def f(x, y, z, a, b, c, *, u, v, w): # Too many positional arguments (6/3)
|
||||
pass
|
||||
crates/ruff_linter/resources/test/fixtures/pylint/too_many_positional_params.py (vendored, 10 lines)
|
|
@ -0,0 +1,10 @@
|
|||
# Too many positional arguments (7/4) for max_positional=4
|
||||
# OK for dummy_variable_rgx ~ "skip_.*"
|
||||
def f(w, x, y, z, skip_t, skip_u, skip_v):
|
||||
pass
|
||||
|
||||
|
||||
# Too many positional arguments (7/4) for max_args=4
|
||||
# Too many positional arguments (7/3) for dummy_variable_rgx ~ "skip_.*"
|
||||
def f(w, x, y, z, t, u, v):
|
||||
pass
|
||||
|
|
@ -14,16 +14,27 @@ def fix_these():
|
|||
def dont_fix_these():
|
||||
# once there is an assignment to the dict[index], we stop emitting diagnostics
|
||||
for fruit_name, fruit_count in FRUITS.items():
|
||||
FRUITS[fruit_name] = 0 # Ok
|
||||
assert FRUITS[fruit_name] == 0 # Ok
|
||||
FRUITS[fruit_name] = 0 # OK
|
||||
assert FRUITS[fruit_name] == 0 # OK
|
||||
|
||||
# once there is an assignment to the key, we stop emitting diagnostics
|
||||
for fruit_name, fruit_count in FRUITS.items():
|
||||
fruit_name = 0 # OK
|
||||
assert FRUITS[fruit_name] == 0 # OK
|
||||
|
||||
# once there is an assignment to the value, we stop emitting diagnostics
|
||||
for fruit_name, fruit_count in FRUITS.items():
|
||||
if fruit_count < 5:
|
||||
fruit_count = -fruit_count
|
||||
assert FRUITS[fruit_name] == 0 # OK
|
||||
|
||||
|
||||
def value_intentionally_unused():
|
||||
[FRUITS[fruit_name] for fruit_name, _ in FRUITS.items()] # Ok
|
||||
{FRUITS[fruit_name] for fruit_name, _ in FRUITS.items()} # Ok
|
||||
{fruit_name: FRUITS[fruit_name] for fruit_name, _ in FRUITS.items()} # Ok
|
||||
[FRUITS[fruit_name] for fruit_name, _ in FRUITS.items()] # OK
|
||||
{FRUITS[fruit_name] for fruit_name, _ in FRUITS.items()} # OK
|
||||
{fruit_name: FRUITS[fruit_name] for fruit_name, _ in FRUITS.items()} # OK
|
||||
|
||||
for fruit_name, _ in FRUITS.items():
|
||||
print(FRUITS[fruit_name]) # Ok
|
||||
blah = FRUITS[fruit_name] # Ok
|
||||
assert FRUITS[fruit_name] == "pear" # Ok
|
||||
print(FRUITS[fruit_name]) # OK
|
||||
blah = FRUITS[fruit_name] # OK
|
||||
assert FRUITS[fruit_name] == "pear" # OK
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ def fix_these():
|
|||
print(letters[index]) # PLR1736
|
||||
blah = letters[index] # PLR1736
|
||||
assert letters[index] == "d" # PLR1736
|
||||
|
||||
|
||||
for index, letter in builtins.enumerate(letters):
|
||||
print(letters[index]) # PLR1736
|
||||
blah = letters[index] # PLR1736
|
||||
|
|
@ -22,38 +22,43 @@ def fix_these():
|
|||
def dont_fix_these():
|
||||
# once there is an assignment to the sequence[index], we stop emitting diagnostics
|
||||
for index, letter in enumerate(letters):
|
||||
letters[index] = "d" # Ok
|
||||
letters[index] += "e" # Ok
|
||||
assert letters[index] == "de" # Ok
|
||||
|
||||
letters[index] = "d" # OK
|
||||
letters[index] += "e" # OK
|
||||
assert letters[index] == "de" # OK
|
||||
|
||||
# once there is an assignment to the index, we stop emitting diagnostics
|
||||
for index, letter in enumerate(letters):
|
||||
index += 1 # Ok
|
||||
print(letters[index]) # Ok
|
||||
|
||||
index += 1 # OK
|
||||
print(letters[index]) # OK
|
||||
|
||||
# once there is an assignment to the sequence, we stop emitting diagnostics
|
||||
for index, letter in enumerate(letters):
|
||||
letters = ["d", "e", "f"] # Ok
|
||||
print(letters[index]) # Ok
|
||||
letters = ["d", "e", "f"] # OK
|
||||
print(letters[index]) # OK
|
||||
|
||||
# once there is an assignment to the value, we stop emitting diagnostics
|
||||
for index, letter in enumerate(letters):
|
||||
letter = "d"
|
||||
print(letters[index]) # OK
|
||||
|
||||
# once there is an deletion from or of the sequence or index, we stop emitting diagnostics
|
||||
for index, letter in enumerate(letters):
|
||||
del letters[index] # Ok
|
||||
print(letters[index]) # Ok
|
||||
del letters[index] # OK
|
||||
print(letters[index]) # OK
|
||||
for index, letter in enumerate(letters):
|
||||
del letters # Ok
|
||||
print(letters[index]) # Ok
|
||||
del letters # OK
|
||||
print(letters[index]) # OK
|
||||
for index, letter in enumerate(letters):
|
||||
del index # Ok
|
||||
print(letters[index]) # Ok
|
||||
del index # OK
|
||||
print(letters[index]) # OK
|
||||
|
||||
|
||||
def value_intentionally_unused():
|
||||
[letters[index] for index, _ in enumerate(letters)] # Ok
|
||||
{letters[index] for index, _ in enumerate(letters)} # Ok
|
||||
{index: letters[index] for index, _ in enumerate(letters)} # Ok
|
||||
[letters[index] for index, _ in enumerate(letters)] # OK
|
||||
{letters[index] for index, _ in enumerate(letters)} # OK
|
||||
{index: letters[index] for index, _ in enumerate(letters)} # OK
|
||||
|
||||
for index, _ in enumerate(letters):
|
||||
print(letters[index]) # Ok
|
||||
blah = letters[index] # Ok
|
||||
letters[index] = "d" # Ok
|
||||
print(letters[index]) # OK
|
||||
blah = letters[index] # OK
|
||||
letters[index] = "d" # OK
|
||||
|
|
|
|||
|
|
@ -110,3 +110,10 @@ print('Hello %(arg)s' % bar['bop'])
|
|||
"%s" % (
|
||||
x, # comment
|
||||
)
|
||||
|
||||
|
||||
path = "%s-%s-%s.pem" % (
|
||||
safe_domain_name(cn), # common name, which should be filename safe because it is IDNA-encoded, but in case of a malformed cert make sure it's ok to use as a filename
|
||||
cert.not_valid_after.date().isoformat().replace("-", ""), # expiration date
|
||||
hexlify(cert.fingerprint(hashes.SHA256())).decode("ascii")[0:8], # fingerprint prefix
|
||||
)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,61 @@
|
|||
# Errors.
|
||||
op_bitnot = lambda x: ~x
|
||||
op_not = lambda x: not x
|
||||
op_pos = lambda x: +x
|
||||
op_neg = lambda x: -x
|
||||
|
||||
op_add = lambda x, y: x + y
|
||||
op_sub = lambda x, y: x - y
|
||||
op_mult = lambda x, y: x * y
|
||||
op_matmutl = lambda x, y: x @ y
|
||||
op_truediv = lambda x, y: x / y
|
||||
op_mod = lambda x, y: x % y
|
||||
op_pow = lambda x, y: x ** y
|
||||
op_lshift = lambda x, y: x << y
|
||||
op_rshift = lambda x, y: x >> y
|
||||
op_bitor = lambda x, y: x | y
|
||||
op_xor = lambda x, y: x ^ y
|
||||
op_bitand = lambda x, y: x & y
|
||||
op_floordiv = lambda x, y: x // y
|
||||
|
||||
op_eq = lambda x, y: x == y
|
||||
op_ne = lambda x, y: x != y
|
||||
op_lt = lambda x, y: x < y
|
||||
op_lte = lambda x, y: x <= y
|
||||
op_gt = lambda x, y: x > y
|
||||
op_gte = lambda x, y: x >= y
|
||||
op_is = lambda x, y: x is y
|
||||
op_isnot = lambda x, y: x is not y
|
||||
op_in = lambda x, y: y in x
|
||||
|
||||
|
||||
def op_not2(x):
|
||||
return not x
|
||||
|
||||
|
||||
def op_add2(x, y):
|
||||
return x + y
|
||||
|
||||
|
||||
class Adder:
|
||||
def add(x, y):
|
||||
return x + y
|
||||
|
||||
# OK.
|
||||
op_add3 = lambda x, y = 1: x + y
|
||||
op_neg2 = lambda x, y: y - x
|
||||
op_notin = lambda x, y: y not in x
|
||||
op_and = lambda x, y: y and x
|
||||
op_or = lambda x, y: y or x
|
||||
op_in = lambda x, y: x in y
|
||||
|
||||
|
||||
def op_neg3(x, y):
|
||||
return y - x
|
||||
|
||||
def op_add4(x, y = 1):
|
||||
return x + y
|
||||
|
||||
def op_add5(x, y):
|
||||
print("op_add5")
|
||||
return x + y
|
||||
|
|
@ -5,3 +5,11 @@ A = 3.14 * r ** 2 # FURB152
|
|||
C = 6.28 * r # FURB152
|
||||
|
||||
e = 2.71 # FURB152
|
||||
|
||||
r = 3.15 # OK
|
||||
|
||||
r = 3.141 # FURB152
|
||||
|
||||
r = 3.1415 # FURB152
|
||||
|
||||
e = 2.7 # OK
|
||||
|
|
|
|||
|
|
@ -16,6 +16,8 @@ special_log(1, 2)
|
|||
special_log(1, 10)
|
||||
special_log(1, math.e)
|
||||
special_log(1, special_e)
|
||||
math.log(1, 2.0)
|
||||
math.log(1, 10.0)
|
||||
|
||||
# Ok.
|
||||
math.log2(1)
|
||||
|
|
@ -45,3 +47,6 @@ def log(*args):
|
|||
log(1, 2)
|
||||
log(1, 10)
|
||||
log(1, math.e)
|
||||
|
||||
math.log(1, 2.0001)
|
||||
math.log(1, 10.0001)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,57 @@
|
|||
import hashlib
|
||||
from hashlib import (
|
||||
blake2b,
|
||||
blake2s,
|
||||
md5,
|
||||
sha1,
|
||||
sha3_224,
|
||||
sha3_256,
|
||||
sha3_384,
|
||||
sha3_512,
|
||||
sha224,
|
||||
)
|
||||
from hashlib import sha256
|
||||
from hashlib import sha256 as hash_algo
|
||||
from hashlib import sha384, sha512, shake_128, shake_256
|
||||
|
||||
# these will match
|
||||
|
||||
blake2b().digest().hex()
|
||||
blake2s().digest().hex()
|
||||
md5().digest().hex()
|
||||
sha1().digest().hex()
|
||||
sha224().digest().hex()
|
||||
sha256().digest().hex()
|
||||
sha384().digest().hex()
|
||||
sha3_224().digest().hex()
|
||||
sha3_256().digest().hex()
|
||||
sha3_384().digest().hex()
|
||||
sha3_512().digest().hex()
|
||||
sha512().digest().hex()
|
||||
shake_128().digest(10).hex()
|
||||
shake_256().digest(10).hex()
|
||||
|
||||
hashlib.sha256().digest().hex()
|
||||
|
||||
sha256(b"text").digest().hex()
|
||||
|
||||
hash_algo().digest().hex()
|
||||
|
||||
# not yet supported
|
||||
h = sha256()
|
||||
h.digest().hex()
|
||||
|
||||
|
||||
# these will not
|
||||
|
||||
sha256().digest()
|
||||
sha256().digest().hex("_")
|
||||
sha256().digest().hex(bytes_per_sep=4)
|
||||
sha256().hexdigest()
|
||||
|
||||
class Hash:
|
||||
def digest(self) -> bytes:
|
||||
return b""
|
||||
|
||||
|
||||
Hash().digest().hex()
|
||||
|
|
@ -63,11 +63,29 @@ def f():
|
|||
tasks = [asyncio.create_task(task) for task in tasks]
|
||||
|
||||
|
||||
# OK (false negative)
|
||||
# Error
|
||||
def f():
|
||||
task = asyncio.create_task(coordinator.ws_connect())
|
||||
|
||||
|
||||
# Error
|
||||
def f():
|
||||
loop = asyncio.get_running_loop()
|
||||
task: asyncio.Task = loop.create_task(coordinator.ws_connect())
|
||||
|
||||
|
||||
# OK (potential false negative)
|
||||
def f():
|
||||
task = asyncio.create_task(coordinator.ws_connect())
|
||||
background_tasks.add(task)
|
||||
|
||||
|
||||
# OK
|
||||
async def f():
|
||||
task = asyncio.create_task(coordinator.ws_connect())
|
||||
await task
|
||||
|
||||
|
||||
# OK (potential false negative)
|
||||
def f():
|
||||
do_nothing_with_the_task(asyncio.create_task(coordinator.ws_connect()))
|
||||
|
|
@ -88,3 +106,49 @@ def f():
|
|||
def f():
|
||||
loop = asyncio.get_running_loop()
|
||||
loop.do_thing(coordinator.ws_connect())
|
||||
|
||||
|
||||
# OK
|
||||
async def f():
|
||||
task = unused = asyncio.create_task(coordinator.ws_connect())
|
||||
await task
|
||||
|
||||
|
||||
# OK (false negative)
|
||||
async def f():
|
||||
task = unused = asyncio.create_task(coordinator.ws_connect())
|
||||
|
||||
|
||||
# OK
|
||||
async def f():
|
||||
task[i] = asyncio.create_task(coordinator.ws_connect())
|
||||
|
||||
|
||||
# OK
|
||||
async def f(x: int):
|
||||
if x > 0:
|
||||
task = asyncio.create_task(make_request())
|
||||
else:
|
||||
task = asyncio.create_task(make_request())
|
||||
await task
|
||||
|
||||
|
||||
# OK
|
||||
async def f(x: bool):
|
||||
if x:
|
||||
t = asyncio.create_task(asyncio.sleep(1))
|
||||
else:
|
||||
t = None
|
||||
try:
|
||||
await asyncio.sleep(1)
|
||||
finally:
|
||||
if t:
|
||||
await t
|
||||
|
||||
|
||||
# Error
|
||||
async def f(x: bool):
|
||||
if x:
|
||||
t = asyncio.create_task(asyncio.sleep(1))
|
||||
else:
|
||||
t = None
|
||||
|
|
|
|||
|
|
@ -59,3 +59,11 @@ class F(BaseSettings):
|
|||
without_annotation = []
|
||||
class_variable: ClassVar[list[int]] = []
|
||||
final_variable: Final[list[int]] = []
|
||||
|
||||
|
||||
class G(F):
|
||||
mutable_default: list[int] = []
|
||||
immutable_annotation: Sequence[int] = []
|
||||
without_annotation = []
|
||||
class_variable: ClassVar[list[int]] = []
|
||||
final_variable: Final[list[int]] = []
|
||||
|
|
|
|||
|
|
@ -23,3 +23,6 @@ print(a) # noqa: E501, F821 # comment
|
|||
print(a) # noqa: E501, F821 # comment
|
||||
print(a) # noqa: E501, F821 comment
|
||||
print(a) # noqa: E501, F821 comment
|
||||
|
||||
print(a) # comment with unicode µ # noqa: E501
|
||||
print(a) # comment with unicode µ # noqa: E501, F821
|
||||
|
|
|
|||
|
|
@@ -2,7 +2,7 @@ use ruff_python_ast::Expr;

use crate::checkers::ast::Checker;
use crate::codes::Rule;
use crate::rules::{flake8_pie, pylint};
use crate::rules::{flake8_pie, pylint, refurb};

/// Run lint rules over all deferred lambdas in the [`SemanticModel`].
pub(crate) fn deferred_lambdas(checker: &mut Checker) {

@@ -21,6 +21,9 @@ pub(crate) fn deferred_lambdas(checker: &mut Checker) {
if checker.enabled(Rule::ReimplementedContainerBuiltin) {
flake8_pie::rules::reimplemented_container_builtin(checker, lambda);
}
if checker.enabled(Rule::ReimplementedOperator) {
refurb::rules::reimplemented_operator(checker, &lambda.into());
}
}
}
}
|
|||
|
|
@ -5,16 +5,21 @@ use ruff_text_size::Ranged;
|
|||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::codes::Rule;
|
||||
use crate::rules::{flake8_pyi, flake8_type_checking, flake8_unused_arguments, pyflakes, pylint};
|
||||
use crate::rules::{
|
||||
flake8_pyi, flake8_type_checking, flake8_unused_arguments, pyflakes, pylint, ruff,
|
||||
};
|
||||
|
||||
/// Run lint rules over all deferred scopes in the [`SemanticModel`].
|
||||
pub(crate) fn deferred_scopes(checker: &mut Checker) {
|
||||
if !checker.any_enabled(&[
|
||||
Rule::AsyncioDanglingTask,
|
||||
Rule::GlobalVariableNotAssigned,
|
||||
Rule::ImportShadowedByLoopVar,
|
||||
Rule::NoSelfUse,
|
||||
Rule::RedefinedArgumentFromLocal,
|
||||
Rule::RedefinedWhileUnused,
|
||||
Rule::RuntimeImportInTypeCheckingBlock,
|
||||
Rule::TooManyLocals,
|
||||
Rule::TypingOnlyFirstPartyImport,
|
||||
Rule::TypingOnlyStandardLibraryImport,
|
||||
Rule::TypingOnlyThirdPartyImport,
|
||||
|
|
@ -31,7 +36,6 @@ pub(crate) fn deferred_scopes(checker: &mut Checker) {
|
|||
Rule::UnusedPrivateTypedDict,
|
||||
Rule::UnusedStaticMethodArgument,
|
||||
Rule::UnusedVariable,
|
||||
Rule::NoSelfUse,
|
||||
]) {
|
||||
return;
|
||||
}
|
||||
|
|
@ -59,6 +63,7 @@ pub(crate) fn deferred_scopes(checker: &mut Checker) {
|
|||
flake8_type_checking::helpers::is_valid_runtime_import(
|
||||
binding,
|
||||
&checker.semantic,
|
||||
&checker.settings.flake8_type_checking,
|
||||
)
|
||||
})
|
||||
.collect()
|
||||
|
|
@ -268,6 +273,10 @@ pub(crate) fn deferred_scopes(checker: &mut Checker) {
|
|||
flake8_pyi::rules::unused_private_typed_dict(checker, scope, &mut diagnostics);
|
||||
}
|
||||
|
||||
if checker.enabled(Rule::AsyncioDanglingTask) {
|
||||
ruff::rules::asyncio_dangling_binding(scope, &checker.semantic, &mut diagnostics);
|
||||
}
|
||||
|
||||
if matches!(scope.kind, ScopeKind::Function(_) | ScopeKind::Lambda(_)) {
|
||||
if checker.enabled(Rule::UnusedVariable) {
|
||||
pyflakes::rules::unused_variable(checker, scope, &mut diagnostics);
|
||||
|
|
@ -335,6 +344,10 @@ pub(crate) fn deferred_scopes(checker: &mut Checker) {
|
|||
if checker.enabled(Rule::NoSelfUse) {
|
||||
pylint::rules::no_self_use(checker, scope_id, scope, &mut diagnostics);
|
||||
}
|
||||
|
||||
if checker.enabled(Rule::TooManyLocals) {
|
||||
pylint::rules::too_many_locals(checker, scope, &mut diagnostics);
|
||||
}
|
||||
}
|
||||
}
|
||||
checker.diagnostics.extend(diagnostics);
|
||||
|
|
|
|||
|
|
@ -15,8 +15,9 @@ use crate::rules::{
|
|||
flake8_comprehensions, flake8_datetimez, flake8_debugger, flake8_django,
|
||||
flake8_future_annotations, flake8_gettext, flake8_implicit_str_concat, flake8_logging,
|
||||
flake8_logging_format, flake8_pie, flake8_print, flake8_pyi, flake8_pytest_style, flake8_self,
|
||||
flake8_simplify, flake8_tidy_imports, flake8_trio, flake8_use_pathlib, flynt, numpy,
|
||||
pandas_vet, pep8_naming, pycodestyle, pyflakes, pygrep_hooks, pylint, pyupgrade, refurb, ruff,
|
||||
flake8_simplify, flake8_tidy_imports, flake8_trio, flake8_type_checking, flake8_use_pathlib,
|
||||
flynt, numpy, pandas_vet, pep8_naming, pycodestyle, pyflakes, pygrep_hooks, pylint, pyupgrade,
|
||||
refurb, ruff,
|
||||
};
|
||||
use crate::settings::types::PythonVersion;
|
||||
|
||||
|
|
@ -356,6 +357,8 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
|||
Rule::FString,
|
||||
// flynt
|
||||
Rule::StaticJoinToFString,
|
||||
// refurb
|
||||
Rule::HashlibDigestHex,
|
||||
]) {
|
||||
if let Expr::Attribute(ast::ExprAttribute { value, attr, .. }) = func.as_ref() {
|
||||
let attr = attr.as_str();
|
||||
|
|
@ -543,7 +546,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
|||
flake8_bugbear::rules::no_explicit_stacklevel(checker, call);
|
||||
}
|
||||
if checker.enabled(Rule::UnnecessaryDictKwargs) {
|
||||
flake8_pie::rules::unnecessary_dict_kwargs(checker, expr, keywords);
|
||||
flake8_pie::rules::unnecessary_dict_kwargs(checker, call);
|
||||
}
|
||||
if checker.enabled(Rule::UnnecessaryRangeStart) {
|
||||
flake8_pie::rules::unnecessary_range_start(checker, call);
|
||||
|
|
@ -581,6 +584,9 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
|||
if checker.enabled(Rule::HashlibInsecureHashFunction) {
|
||||
flake8_bandit::rules::hashlib_insecure_hash_functions(checker, call);
|
||||
}
|
||||
if checker.enabled(Rule::HashlibDigestHex) {
|
||||
refurb::rules::hashlib_digest_hex(checker, call);
|
||||
}
|
||||
if checker.enabled(Rule::RequestWithoutTimeout) {
|
||||
flake8_bandit::rules::request_without_timeout(checker, call);
|
||||
}
|
||||
|
|
@ -1165,6 +1171,9 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
|||
if checker.enabled(Rule::UnnecessaryTypeUnion) {
|
||||
flake8_pyi::rules::unnecessary_type_union(checker, expr);
|
||||
}
|
||||
if checker.enabled(Rule::RuntimeStringUnion) {
|
||||
flake8_type_checking::rules::runtime_string_union(checker, expr);
|
||||
}
|
||||
}
|
||||
}
|
||||
Expr::UnaryOp(
|
||||
|
|
@ -1270,32 +1279,12 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
|
|||
refurb::rules::math_constant(checker, number_literal);
|
||||
}
|
||||
}
|
||||
Expr::BytesLiteral(_) => {
|
||||
if checker.source_type.is_stub() && checker.enabled(Rule::StringOrBytesTooLong) {
|
||||
flake8_pyi::rules::string_or_bytes_too_long(checker, expr);
|
||||
}
|
||||
}
|
||||
Expr::StringLiteral(string) => {
|
||||
if checker.enabled(Rule::HardcodedBindAllInterfaces) {
|
||||
if let Some(diagnostic) =
|
||||
flake8_bandit::rules::hardcoded_bind_all_interfaces(string)
|
||||
{
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if checker.enabled(Rule::HardcodedTempFile) {
|
||||
flake8_bandit::rules::hardcoded_tmp_directory(checker, string);
|
||||
}
|
||||
Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => {
|
||||
if checker.enabled(Rule::UnicodeKindPrefix) {
|
||||
for string_part in string.value.parts() {
|
||||
for string_part in value {
|
||||
pyupgrade::rules::unicode_kind_prefix(checker, string_part);
|
||||
}
|
||||
}
|
||||
if checker.source_type.is_stub() {
|
||||
if checker.enabled(Rule::StringOrBytesTooLong) {
|
||||
flake8_pyi::rules::string_or_bytes_too_long(checker, expr);
|
||||
}
|
||||
}
|
||||
}
|
||||
Expr::IfExp(
|
||||
if_exp @ ast::ExprIfExp {
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ pub(super) use module::module;
|
|||
pub(super) use parameter::parameter;
|
||||
pub(super) use parameters::parameters;
|
||||
pub(super) use statement::statement;
|
||||
pub(super) use string_like::string_like;
|
||||
pub(super) use suite::suite;
|
||||
pub(super) use unresolved_references::unresolved_references;
|
||||
|
||||
|
|
@ -25,5 +26,6 @@ mod module;
|
|||
mod parameter;
|
||||
mod parameters;
|
||||
mod statement;
|
||||
mod string_like;
|
||||
mod suite;
|
||||
mod unresolved_references;
|
||||
|
|
|
|||
|
|
@ -250,6 +250,9 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
|||
if checker.enabled(Rule::TooManyArguments) {
|
||||
pylint::rules::too_many_arguments(checker, function_def);
|
||||
}
|
||||
if checker.enabled(Rule::TooManyPositional) {
|
||||
pylint::rules::too_many_positional(checker, function_def);
|
||||
}
|
||||
if checker.enabled(Rule::TooManyReturnStatements) {
|
||||
if let Some(diagnostic) = pylint::rules::too_many_return_statements(
|
||||
stmt,
|
||||
|
|
@ -365,6 +368,9 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
|||
.diagnostics
|
||||
.extend(ruff::rules::unreachable::in_function(name, body));
|
||||
}
|
||||
if checker.enabled(Rule::ReimplementedOperator) {
|
||||
refurb::rules::reimplemented_operator(checker, &function_def.into());
|
||||
}
|
||||
}
|
||||
Stmt::Return(_) => {
|
||||
if checker.enabled(Rule::ReturnOutsideFunction) {
|
||||
|
|
@ -394,27 +400,13 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
|||
flake8_django::rules::nullable_model_string_field(checker, body);
|
||||
}
|
||||
if checker.enabled(Rule::DjangoExcludeWithModelForm) {
|
||||
if let Some(diagnostic) = flake8_django::rules::exclude_with_model_form(
|
||||
checker,
|
||||
arguments.as_deref(),
|
||||
body,
|
||||
) {
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
flake8_django::rules::exclude_with_model_form(checker, class_def);
|
||||
}
|
||||
if checker.enabled(Rule::DjangoAllWithModelForm) {
|
||||
if let Some(diagnostic) =
|
||||
flake8_django::rules::all_with_model_form(checker, arguments.as_deref(), body)
|
||||
{
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
flake8_django::rules::all_with_model_form(checker, class_def);
|
||||
}
|
||||
if checker.enabled(Rule::DjangoUnorderedBodyContentInModel) {
|
||||
flake8_django::rules::unordered_body_content_in_model(
|
||||
checker,
|
||||
arguments.as_deref(),
|
||||
body,
|
||||
);
|
||||
flake8_django::rules::unordered_body_content_in_model(checker, class_def);
|
||||
}
|
||||
if !checker.source_type.is_stub() {
|
||||
if checker.enabled(Rule::DjangoModelWithoutDunderStr) {
|
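The Django form rules above now receive the whole `class_def` rather than its arguments and body. As a reminder of what `DjangoExcludeWithModelForm` and `DjangoAllWithModelForm` flag (DJ006 and DJ007 per the Ruff docs), here is a hedged Python sketch; the model is made up for illustration:

```python
from django import forms
from django.db import models


class Article(models.Model):
    title = models.CharField(max_length=100)
    body = models.TextField()
    author = models.CharField(max_length=100)


class BadArticleForm(forms.ModelForm):
    class Meta:
        model = Article
        exclude = ["author"]      # DJ006: prefer an explicit `fields` list
        # fields = "__all__"      # DJ007: likewise, avoid `__all__`


class GoodArticleForm(forms.ModelForm):
    class Meta:
        model = Article
        fields = ["title", "body"]
```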
||||
|
|
@ -1534,6 +1526,14 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
|||
}
|
||||
}
|
||||
}
|
||||
Stmt::TypeAlias(ast::StmtTypeAlias { name, .. }) => {
|
||||
if checker.enabled(Rule::SnakeCaseTypeAlias) {
|
||||
flake8_pyi::rules::snake_case_type_alias(checker, name);
|
||||
}
|
||||
if checker.enabled(Rule::TSuffixedTypeAlias) {
|
||||
flake8_pyi::rules::t_suffixed_type_alias(checker, name);
|
||||
}
|
||||
}
|
||||
Stmt::Delete(delete @ ast::StmtDelete { targets, range: _ }) => {
|
||||
if checker.enabled(Rule::GlobalStatement) {
|
||||
for target in targets {
|
||||
|
|
@ -1560,7 +1560,11 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
|||
pylint::rules::named_expr_without_context(checker, value);
|
||||
}
|
||||
if checker.enabled(Rule::AsyncioDanglingTask) {
|
||||
ruff::rules::asyncio_dangling_task(checker, value);
|
||||
if let Some(diagnostic) =
|
||||
ruff::rules::asyncio_dangling_task(value, checker.semantic())
|
||||
{
|
||||
checker.diagnostics.push(diagnostic);
|
||||
}
|
||||
}
|
||||
if checker.enabled(Rule::RepeatedAppend) {
|
||||
refurb::rules::repeated_append(checker, stmt);
|
||||
|
|
|
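`asyncio_dangling_task` now returns an optional diagnostic built from the expression and the semantic model instead of pushing onto the checker directly. The pattern the rule targets (RUF006, per the Ruff docs) is roughly the following; the snippet is illustrative:

```python
import asyncio


async def do_work() -> None:
    await asyncio.sleep(1)


async def main() -> None:
    # Likely flagged: the task handle is dropped, so the task may be
    # garbage-collected before it finishes.
    asyncio.create_task(do_work())

    # Preferred: keep a reference for the task's lifetime.
    task = asyncio.create_task(do_work())
    await task
```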
|||
|
|
@@ -0,0 +1,20 @@
use ruff_python_ast::StringLike;

use crate::checkers::ast::Checker;
use crate::codes::Rule;
use crate::rules::{flake8_bandit, flake8_pyi};

/// Run lint rules over a [`StringLike`] syntax node.
pub(crate) fn string_like(string_like: StringLike, checker: &mut Checker) {
    if checker.enabled(Rule::HardcodedBindAllInterfaces) {
        flake8_bandit::rules::hardcoded_bind_all_interfaces(checker, string_like);
    }
    if checker.enabled(Rule::HardcodedTempFile) {
        flake8_bandit::rules::hardcoded_tmp_directory(checker, string_like);
    }
    if checker.source_type.is_stub() {
        if checker.enabled(Rule::StringOrBytesTooLong) {
            flake8_pyi::rules::string_or_bytes_too_long(checker, string_like);
        }
    }
}
|
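With this new entry point, string-based rules run over every `StringLike` node, i.e. plain string literals, the literal parts of f-strings, and bytes literals, rather than only over `Expr::StringLiteral`. A small, hypothetical Python file showing the inputs this path now covers (behavior inferred from the rule implementations and the snapshots later in this diff):

```python
host = "0.0.0.0"        # S104: plain string literal
host_fmt = f"0.0.0.0"   # S104: literal part of an f-string (newly covered)
tmp = f"/tmp/abc"       # S108: literal part of an f-string (newly covered)
raw = b"0.0.0.0"        # bytes literals are skipped by S104/S108
```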
||||
|
|
@@ -0,0 +1,66 @@
use ruff_python_semantic::{ScopeKind, SemanticModel};

use crate::rules::flake8_type_checking;
use crate::settings::LinterSettings;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(super) enum AnnotationContext {
    /// Python will evaluate the annotation at runtime, but it's not _required_ and, as such, could
    /// be quoted to convert it into a typing-only annotation.
    ///
    /// For example:
    /// ```python
    /// from pandas import DataFrame
    ///
    /// def foo() -> DataFrame:
    ///     ...
    /// ```
    ///
    /// Above, Python will evaluate `DataFrame` at runtime in order to add it to `__annotations__`.
    RuntimeEvaluated,
    /// Python will evaluate the annotation at runtime, and it's required to be available at
    /// runtime, as a library (like Pydantic) needs access to it.
    RuntimeRequired,
    /// The annotation is only evaluated at type-checking time.
    TypingOnly,
}

impl AnnotationContext {
    pub(super) fn from_model(semantic: &SemanticModel, settings: &LinterSettings) -> Self {
        // If the annotation is in a class scope (e.g., an annotated assignment for a
        // class field) and that class is marked as runtime-required, treat it as such.
        if semantic
            .current_scope()
            .kind
            .as_class()
            .is_some_and(|class_def| {
                flake8_type_checking::helpers::runtime_required_class(
                    class_def,
                    &settings.flake8_type_checking.runtime_required_base_classes,
                    &settings.flake8_type_checking.runtime_required_decorators,
                    semantic,
                )
            })
        {
            return Self::RuntimeRequired;
        }

        // If `__future__` annotations are enabled, then annotations are never evaluated
        // at runtime, so we can treat them as typing-only.
        if semantic.future_annotations() {
            return Self::TypingOnly;
        }

        // Otherwise, if we're in a class or module scope, then the annotation needs to
        // be available at runtime.
        // See: https://docs.python.org/3/reference/simple_stmts.html#annotated-assignment-statements
        if matches!(
            semantic.current_scope().kind,
            ScopeKind::Class(_) | ScopeKind::Module
        ) {
            return Self::RuntimeEvaluated;
        }

        Self::TypingOnly
    }
}
|
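A short Python sketch of the three contexts the enum distinguishes. It assumes `pydantic.BaseModel` is listed in the runtime-required base classes setting; the `pydantic` and `pandas` usage is illustrative only:

```python
from __future__ import annotations  # annotations are not evaluated at runtime

import pydantic
from pandas import DataFrame


def foo() -> DataFrame:
    # TypingOnly: under `from __future__ import annotations`, this annotation is
    # never evaluated, so the import could be quoted away or made typing-only.
    ...


class Config(pydantic.BaseModel):
    # RuntimeRequired: pydantic inspects this annotation at runtime, so it must
    # stay importable even with future annotations enabled.
    frame: DataFrame


# Without the future import, a module- or class-level annotation like this
# would be RuntimeEvaluated (Python adds it to `__annotations__`).
threshold: int = 10
```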
||||
|
|
@ -44,12 +44,12 @@ use ruff_python_ast::helpers::{
|
|||
};
|
||||
use ruff_python_ast::identifier::Identifier;
|
||||
use ruff_python_ast::str::trailing_quote;
|
||||
use ruff_python_ast::visitor::{walk_except_handler, walk_pattern, Visitor};
|
||||
use ruff_python_ast::visitor::{walk_except_handler, walk_f_string_element, walk_pattern, Visitor};
|
||||
use ruff_python_ast::{helpers, str, visitor, PySourceType};
|
||||
use ruff_python_codegen::{Generator, Quote, Stylist};
|
||||
use ruff_python_index::Indexer;
|
||||
use ruff_python_parser::typing::{parse_type_annotation, AnnotationKind};
|
||||
use ruff_python_semantic::analyze::{typing, visibility};
|
||||
use ruff_python_semantic::analyze::{imports, typing, visibility};
|
||||
use ruff_python_semantic::{
|
||||
BindingFlags, BindingId, BindingKind, Exceptions, Export, FromImport, Globals, Import, Module,
|
||||
ModuleKind, NodeId, ScopeId, ScopeKind, SemanticModel, SemanticModelFlags, Snapshot,
|
||||
|
|
@ -58,6 +58,7 @@ use ruff_python_semantic::{
|
|||
use ruff_python_stdlib::builtins::{IPYTHON_BUILTINS, MAGIC_GLOBALS, PYTHON_BUILTINS};
|
||||
use ruff_source_file::Locator;
|
||||
|
||||
use crate::checkers::ast::annotation::AnnotationContext;
|
||||
use crate::checkers::ast::deferred::Deferred;
|
||||
use crate::docstrings::extraction::ExtractionTarget;
|
||||
use crate::importer::Importer;
|
||||
|
|
@ -68,6 +69,7 @@ use crate::settings::{flags, LinterSettings};
|
|||
use crate::{docstrings, noqa};
|
||||
|
||||
mod analyze;
|
||||
mod annotation;
|
||||
mod deferred;
|
||||
|
||||
pub(crate) struct Checker<'a> {
|
||||
|
|
@ -303,9 +305,12 @@ where
|
|||
}
|
||||
_ => {
|
||||
self.semantic.flags |= SemanticModelFlags::FUTURES_BOUNDARY;
|
||||
if !self.semantic.seen_import_boundary()
|
||||
&& !helpers::is_assignment_to_a_dunder(stmt)
|
||||
&& !helpers::in_nested_block(self.semantic.current_statements())
|
||||
if !(self.semantic.seen_import_boundary()
|
||||
|| helpers::is_assignment_to_a_dunder(stmt)
|
||||
|| helpers::in_nested_block(self.semantic.current_statements())
|
||||
|| imports::is_matplotlib_activation(stmt, self.semantic())
|
||||
|| self.settings.preview.is_enabled()
|
||||
&& imports::is_sys_path_modification(stmt, self.semantic()))
|
||||
{
|
||||
self.semantic.flags |= SemanticModelFlags::IMPORT_BOUNDARY;
|
||||
}
|
||||
|
|
@ -512,8 +517,10 @@ where
|
|||
.chain(¶meters.kwonlyargs)
|
||||
{
|
||||
if let Some(expr) = ¶meter_with_default.parameter.annotation {
|
||||
if runtime_annotation || singledispatch {
|
||||
self.visit_runtime_annotation(expr);
|
||||
if singledispatch {
|
||||
self.visit_runtime_required_annotation(expr);
|
||||
} else if runtime_annotation {
|
||||
self.visit_runtime_evaluated_annotation(expr);
|
||||
} else {
|
||||
self.visit_annotation(expr);
|
||||
};
|
||||
|
|
@ -526,7 +533,7 @@ where
|
|||
if let Some(arg) = ¶meters.vararg {
|
||||
if let Some(expr) = &arg.annotation {
|
||||
if runtime_annotation {
|
||||
self.visit_runtime_annotation(expr);
|
||||
self.visit_runtime_evaluated_annotation(expr);
|
||||
} else {
|
||||
self.visit_annotation(expr);
|
||||
};
|
||||
|
|
@ -535,7 +542,7 @@ where
|
|||
if let Some(arg) = ¶meters.kwarg {
|
||||
if let Some(expr) = &arg.annotation {
|
||||
if runtime_annotation {
|
||||
self.visit_runtime_annotation(expr);
|
||||
self.visit_runtime_evaluated_annotation(expr);
|
||||
} else {
|
||||
self.visit_annotation(expr);
|
||||
};
|
||||
|
|
@ -543,7 +550,7 @@ where
|
|||
}
|
||||
for expr in returns {
|
||||
if runtime_annotation {
|
||||
self.visit_runtime_annotation(expr);
|
||||
self.visit_runtime_evaluated_annotation(expr);
|
||||
} else {
|
||||
self.visit_annotation(expr);
|
||||
};
|
||||
|
|
@ -674,40 +681,16 @@ where
|
|||
value,
|
||||
..
|
||||
}) => {
|
||||
// If we're in a class or module scope, then the annotation needs to be
|
||||
// available at runtime.
|
||||
// See: https://docs.python.org/3/reference/simple_stmts.html#annotated-assignment-statements
|
||||
let runtime_annotation = if self.semantic.future_annotations() {
|
||||
self.semantic
|
||||
.current_scope()
|
||||
.kind
|
||||
.as_class()
|
||||
.is_some_and(|class_def| {
|
||||
flake8_type_checking::helpers::runtime_evaluated_class(
|
||||
class_def,
|
||||
&self
|
||||
.settings
|
||||
.flake8_type_checking
|
||||
.runtime_evaluated_base_classes,
|
||||
&self
|
||||
.settings
|
||||
.flake8_type_checking
|
||||
.runtime_evaluated_decorators,
|
||||
&self.semantic,
|
||||
)
|
||||
})
|
||||
} else {
|
||||
matches!(
|
||||
self.semantic.current_scope().kind,
|
||||
ScopeKind::Class(_) | ScopeKind::Module
|
||||
)
|
||||
};
|
||||
|
||||
if runtime_annotation {
|
||||
self.visit_runtime_annotation(annotation);
|
||||
} else {
|
||||
self.visit_annotation(annotation);
|
||||
match AnnotationContext::from_model(&self.semantic, self.settings) {
|
||||
AnnotationContext::RuntimeRequired => {
|
||||
self.visit_runtime_required_annotation(annotation);
|
||||
}
|
||||
AnnotationContext::RuntimeEvaluated => {
|
||||
self.visit_runtime_evaluated_annotation(annotation);
|
||||
}
|
||||
AnnotationContext::TypingOnly => self.visit_annotation(annotation),
|
||||
}
|
||||
|
||||
if let Some(expr) = value {
|
||||
if self.semantic.match_typing_expr(annotation, "TypeAlias") {
|
||||
self.visit_type_definition(expr);
|
||||
|
|
@ -815,8 +798,7 @@ where
|
|||
|
||||
fn visit_expr(&mut self, expr: &'b Expr) {
|
||||
// Step 0: Pre-processing
|
||||
if !self.semantic.in_f_string()
|
||||
&& !self.semantic.in_literal()
|
||||
if !self.semantic.in_typing_literal()
|
||||
&& !self.semantic.in_deferred_type_definition()
|
||||
&& self.semantic.in_type_definition()
|
||||
&& self.semantic.future_annotations()
|
||||
|
|
@ -1198,7 +1180,7 @@ where
|
|||
) {
|
||||
// Ex) Literal["Class"]
|
||||
Some(typing::SubscriptKind::Literal) => {
|
||||
self.semantic.flags |= SemanticModelFlags::LITERAL;
|
||||
self.semantic.flags |= SemanticModelFlags::TYPING_LITERAL;
|
||||
|
||||
self.visit_expr(slice);
|
||||
self.visit_expr_context(ctx);
|
||||
|
|
@ -1238,10 +1220,7 @@ where
|
|||
}
|
||||
}
|
||||
Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => {
|
||||
if self.semantic.in_type_definition()
|
||||
&& !self.semantic.in_literal()
|
||||
&& !self.semantic.in_f_string()
|
||||
{
|
||||
if self.semantic.in_type_definition() && !self.semantic.in_typing_literal() {
|
||||
self.deferred.string_type_definitions.push((
|
||||
expr.range(),
|
||||
value.to_str(),
|
||||
|
|
@ -1271,6 +1250,13 @@ where
|
|||
|
||||
// Step 4: Analysis
|
||||
analyze::expression(expr, self);
|
||||
match expr {
|
||||
Expr::StringLiteral(string_literal) => {
|
||||
analyze::string_like(string_literal.into(), self);
|
||||
}
|
||||
Expr::BytesLiteral(bytes_literal) => analyze::string_like(bytes_literal.into(), self),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
self.semantic.flags = flags_snapshot;
|
||||
self.semantic.pop_node();
|
||||
|
|
@ -1326,17 +1312,6 @@ where
|
|||
self.semantic.flags = flags_snapshot;
|
||||
}
|
||||
|
||||
fn visit_format_spec(&mut self, format_spec: &'b Expr) {
|
||||
match format_spec {
|
||||
Expr::FString(ast::ExprFString { value, .. }) => {
|
||||
for expr in value.elements() {
|
||||
self.visit_expr(expr);
|
||||
}
|
||||
}
|
||||
_ => unreachable!("Unexpected expression for format_spec"),
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_parameters(&mut self, parameters: &'b Parameters) {
|
||||
// Step 1: Binding.
|
||||
// Bind, but intentionally avoid walking default expressions, as we handle them
|
||||
|
|
@ -1446,6 +1421,16 @@ where
|
|||
.push((bound, self.semantic.snapshot()));
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_f_string_element(&mut self, f_string_element: &'b ast::FStringElement) {
|
||||
// Step 2: Traversal
|
||||
walk_f_string_element(self, f_string_element);
|
||||
|
||||
// Step 4: Analysis
|
||||
if let Some(literal) = f_string_element.as_literal() {
|
||||
analyze::string_like(literal.into(), self);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Checker<'a> {
|
||||
|
|
@ -1522,10 +1507,18 @@ impl<'a> Checker<'a> {
|
|||
self.semantic.flags = snapshot;
|
||||
}
|
||||
|
||||
/// Visit an [`Expr`], and treat it as a runtime-required type annotation.
|
||||
fn visit_runtime_annotation(&mut self, expr: &'a Expr) {
|
||||
/// Visit an [`Expr`], and treat it as a runtime-evaluated type annotation.
|
||||
fn visit_runtime_evaluated_annotation(&mut self, expr: &'a Expr) {
|
||||
let snapshot = self.semantic.flags;
|
||||
self.semantic.flags |= SemanticModelFlags::RUNTIME_ANNOTATION;
|
||||
self.semantic.flags |= SemanticModelFlags::RUNTIME_EVALUATED_ANNOTATION;
|
||||
self.visit_type_definition(expr);
|
||||
self.semantic.flags = snapshot;
|
||||
}
|
||||
|
||||
/// Visit an [`Expr`], and treat it as a runtime-required type annotation.
|
||||
fn visit_runtime_required_annotation(&mut self, expr: &'a Expr) {
|
||||
let snapshot = self.semantic.flags;
|
||||
self.semantic.flags |= SemanticModelFlags::RUNTIME_REQUIRED_ANNOTATION;
|
||||
self.visit_type_definition(expr);
|
||||
self.semantic.flags = snapshot;
|
||||
}
|
||||
|
|
@ -2020,13 +2013,15 @@ pub(crate) fn check_ast(
|
|||
// Iterate over the AST.
|
||||
checker.visit_body(python_ast);
|
||||
|
||||
// Visit any deferred syntax nodes.
|
||||
// Visit any deferred syntax nodes. Take care to visit in order, such that we avoid adding
|
||||
// new deferred nodes after visiting nodes of that kind. For example, visiting a deferred
|
||||
// function can add a deferred lambda, but the opposite is not true.
|
||||
checker.visit_deferred_functions();
|
||||
checker.visit_deferred_lambdas();
|
||||
checker.visit_deferred_future_type_definitions();
|
||||
checker.visit_deferred_type_param_definitions();
|
||||
checker.visit_deferred_future_type_definitions();
|
||||
let allocator = typed_arena::Arena::new();
|
||||
checker.visit_deferred_string_type_definitions(&allocator);
|
||||
checker.visit_deferred_lambdas();
|
||||
checker.visit_exports();
|
||||
|
||||
// Check docstrings, bindings, and unresolved references.
|
||||
|
|
|
|||
|
|
@ -3,10 +3,10 @@
|
|||
use std::path::Path;
|
||||
|
||||
use itertools::Itertools;
|
||||
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
|
||||
use ruff_text_size::{Ranged, TextLen, TextRange};
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Edit, Fix};
|
||||
use ruff_python_trivia::CommentRanges;
|
||||
use ruff_python_trivia::{CommentRanges, PythonWhitespace};
|
||||
use ruff_source_file::Locator;
|
||||
|
||||
use crate::noqa;
|
||||
|
|
@@ -200,17 +200,11 @@ fn delete_noqa(range: TextRange, locator: &Locator) -> Edit {

    // Compute the leading space.
    let prefix = locator.slice(TextRange::new(line_range.start(), range.start()));
    let leading_space = prefix
        .rfind(|c: char| !c.is_whitespace())
        .map_or(prefix.len(), |i| prefix.len() - i - 1);
    let leading_space_len = TextSize::try_from(leading_space).unwrap();
    let leading_space_len = prefix.text_len() - prefix.trim_whitespace_end().text_len();

    // Compute the trailing space.
    let suffix = locator.slice(TextRange::new(range.end(), line_range.end()));
    let trailing_space = suffix
        .find(|c: char| !c.is_whitespace())
        .map_or(suffix.len(), |i| i);
    let trailing_space_len = TextSize::try_from(trailing_space).unwrap();
    let trailing_space_len = suffix.text_len() - suffix.trim_whitespace_start().text_len();

    // Ex) `# noqa`
    if line_range
|
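The rewritten computation uses `PythonWhitespace::trim_whitespace_end`/`trim_whitespace_start` instead of manual character scans; the intent is unchanged. For reference, a hedged sketch of the kind of edit `delete_noqa` produces, where the directive and the whitespace around it are removed together (the rule and snippet are illustrative):

```python
# Before: the suppression comment is unneeded.
x = 1  # noqa: E501

# After the fix, the comment and its leading spaces are gone.
x = 1
```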
||||
|
|
|
|||
|
|
@ -252,8 +252,10 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
|||
(Pylint, "R0911") => (RuleGroup::Stable, rules::pylint::rules::TooManyReturnStatements),
|
||||
(Pylint, "R0912") => (RuleGroup::Stable, rules::pylint::rules::TooManyBranches),
|
||||
(Pylint, "R0913") => (RuleGroup::Stable, rules::pylint::rules::TooManyArguments),
|
||||
(Pylint, "R0914") => (RuleGroup::Preview, rules::pylint::rules::TooManyLocals),
|
||||
(Pylint, "R0915") => (RuleGroup::Stable, rules::pylint::rules::TooManyStatements),
|
||||
(Pylint, "R0916") => (RuleGroup::Preview, rules::pylint::rules::TooManyBooleanExpressions),
|
||||
(Pylint, "R0917") => (RuleGroup::Preview, rules::pylint::rules::TooManyPositional),
|
||||
(Pylint, "R1701") => (RuleGroup::Stable, rules::pylint::rules::RepeatedIsinstanceCalls),
|
||||
(Pylint, "R1704") => (RuleGroup::Preview, rules::pylint::rules::RedefinedArgumentFromLocal),
|
||||
(Pylint, "R1711") => (RuleGroup::Stable, rules::pylint::rules::UselessReturn),
|
||||
|
|
@ -806,6 +808,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
|||
(Flake8TypeChecking, "003") => (RuleGroup::Stable, rules::flake8_type_checking::rules::TypingOnlyStandardLibraryImport),
|
||||
(Flake8TypeChecking, "004") => (RuleGroup::Stable, rules::flake8_type_checking::rules::RuntimeImportInTypeCheckingBlock),
|
||||
(Flake8TypeChecking, "005") => (RuleGroup::Stable, rules::flake8_type_checking::rules::EmptyTypeCheckingBlock),
|
||||
(Flake8TypeChecking, "006") => (RuleGroup::Preview, rules::flake8_type_checking::rules::RuntimeStringUnion),
|
||||
|
||||
// tryceratops
|
||||
(Tryceratops, "002") => (RuleGroup::Stable, rules::tryceratops::rules::RaiseVanillaClass),
|
||||
|
|
@ -950,6 +953,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
|||
(Refurb, "105") => (RuleGroup::Preview, rules::refurb::rules::PrintEmptyString),
|
||||
#[allow(deprecated)]
|
||||
(Refurb, "113") => (RuleGroup::Nursery, rules::refurb::rules::RepeatedAppend),
|
||||
(Refurb, "118") => (RuleGroup::Preview, rules::refurb::rules::ReimplementedOperator),
|
||||
#[allow(deprecated)]
|
||||
(Refurb, "131") => (RuleGroup::Nursery, rules::refurb::rules::DeleteFullSlice),
|
||||
#[allow(deprecated)]
|
||||
|
|
@ -964,6 +968,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
|||
(Refurb, "169") => (RuleGroup::Preview, rules::refurb::rules::TypeNoneComparison),
|
||||
(Refurb, "171") => (RuleGroup::Preview, rules::refurb::rules::SingleItemMembershipTest),
|
||||
(Refurb, "177") => (RuleGroup::Preview, rules::refurb::rules::ImplicitCwd),
|
||||
(Refurb, "181") => (RuleGroup::Preview, rules::refurb::rules::HashlibDigestHex),
|
||||
|
||||
// flake8-logging
|
||||
(Flake8Logging, "001") => (RuleGroup::Preview, rules::flake8_logging::rules::DirectLoggerInstantiation),
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ use ruff_text_size::{Ranged, TextSize};
|
|||
use ruff_diagnostics::Edit;
|
||||
use ruff_python_ast::imports::{AnyImport, Import, ImportFrom};
|
||||
use ruff_python_codegen::Stylist;
|
||||
use ruff_python_semantic::SemanticModel;
|
||||
use ruff_python_semantic::{ImportedName, SemanticModel};
|
||||
use ruff_python_trivia::textwrap::indent;
|
||||
use ruff_source_file::Locator;
|
||||
|
||||
|
|
@ -132,7 +132,48 @@ impl<'a> Importer<'a> {
|
|||
)?;
|
||||
|
||||
// Import the `TYPE_CHECKING` symbol from the typing module.
|
||||
let (type_checking_edit, type_checking) = self.get_or_import_type_checking(at, semantic)?;
|
||||
let (type_checking_edit, type_checking) =
|
||||
if let Some(type_checking) = Self::find_type_checking(at, semantic)? {
|
||||
// Special-case: if the `TYPE_CHECKING` symbol is imported as part of the same
|
||||
// statement that we're modifying, avoid adding a no-op edit. For example, here,
|
||||
// the `TYPE_CHECKING` no-op edit would overlap with the edit to remove `Final`
|
||||
// from the import:
|
||||
// ```python
|
||||
// from __future__ import annotations
|
||||
//
|
||||
// from typing import Final, TYPE_CHECKING
|
||||
//
|
||||
// Const: Final[dict] = {}
|
||||
// ```
|
||||
let edit = if type_checking.statement(semantic) == import.statement {
|
||||
None
|
||||
} else {
|
||||
Some(Edit::range_replacement(
|
||||
self.locator.slice(type_checking.range()).to_string(),
|
||||
type_checking.range(),
|
||||
))
|
||||
};
|
||||
(edit, type_checking.into_name())
|
||||
} else {
|
||||
// Special-case: if the `TYPE_CHECKING` symbol would be added to the same import
|
||||
// we're modifying, import it as a separate import statement. For example, here,
|
||||
// we're concurrently removing `Final` and adding `TYPE_CHECKING`, so it's easier to
|
||||
// use a separate import statement:
|
||||
// ```python
|
||||
// from __future__ import annotations
|
||||
//
|
||||
// from typing import Final
|
||||
//
|
||||
// Const: Final[dict] = {}
|
||||
// ```
|
||||
let (edit, name) = self.import_symbol(
|
||||
&ImportRequest::import_from("typing", "TYPE_CHECKING"),
|
||||
at,
|
||||
Some(import.statement),
|
||||
semantic,
|
||||
)?;
|
||||
(Some(edit), name)
|
||||
};
|
||||
|
||||
// Add the import to a `TYPE_CHECKING` block.
|
||||
let add_import_edit = if let Some(block) = self.preceding_type_checking_block(at) {
|
||||
|
|
@ -157,28 +198,21 @@ impl<'a> Importer<'a> {
|
|||
})
|
||||
}
|
||||
|
||||
/// Generate an [`Edit`] to reference `typing.TYPE_CHECKING`. Returns the [`Edit`] necessary to
|
||||
/// make the symbol available in the current scope along with the bound name of the symbol.
|
||||
fn get_or_import_type_checking(
|
||||
&self,
|
||||
/// Find a reference to `typing.TYPE_CHECKING`.
|
||||
fn find_type_checking(
|
||||
at: TextSize,
|
||||
semantic: &SemanticModel,
|
||||
) -> Result<(Edit, String), ResolutionError> {
|
||||
) -> Result<Option<ImportedName>, ResolutionError> {
|
||||
for module in semantic.typing_modules() {
|
||||
if let Some((edit, name)) = self.get_symbol(
|
||||
if let Some(imported_name) = Self::find_symbol(
|
||||
&ImportRequest::import_from(module, "TYPE_CHECKING"),
|
||||
at,
|
||||
semantic,
|
||||
)? {
|
||||
return Ok((edit, name));
|
||||
return Ok(Some(imported_name));
|
||||
}
|
||||
}
|
||||
|
||||
self.import_symbol(
|
||||
&ImportRequest::import_from("typing", "TYPE_CHECKING"),
|
||||
at,
|
||||
semantic,
|
||||
)
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
/// Generate an [`Edit`] to reference the given symbol. Returns the [`Edit`] necessary to make
|
||||
|
|
@ -192,16 +226,15 @@ impl<'a> Importer<'a> {
|
|||
semantic: &SemanticModel,
|
||||
) -> Result<(Edit, String), ResolutionError> {
|
||||
self.get_symbol(symbol, at, semantic)?
|
||||
.map_or_else(|| self.import_symbol(symbol, at, semantic), Ok)
|
||||
.map_or_else(|| self.import_symbol(symbol, at, None, semantic), Ok)
|
||||
}
|
||||
|
||||
/// Return an [`Edit`] to reference an existing symbol, if it's present in the given [`SemanticModel`].
|
||||
fn get_symbol(
|
||||
&self,
|
||||
/// Return the [`ImportedName`] of an existing symbol, if it's present in the given [`SemanticModel`].
|
||||
fn find_symbol(
|
||||
symbol: &ImportRequest,
|
||||
at: TextSize,
|
||||
semantic: &SemanticModel,
|
||||
) -> Result<Option<(Edit, String)>, ResolutionError> {
|
||||
) -> Result<Option<ImportedName>, ResolutionError> {
|
||||
// If the symbol is already available in the current scope, use it.
|
||||
let Some(imported_name) =
|
||||
semantic.resolve_qualified_import_name(symbol.module, symbol.member)
|
||||
|
|
@ -226,6 +259,21 @@ impl<'a> Importer<'a> {
|
|||
return Err(ResolutionError::IncompatibleContext);
|
||||
}
|
||||
|
||||
Ok(Some(imported_name))
|
||||
}
|
||||
|
||||
/// Return an [`Edit`] to reference an existing symbol, if it's present in the given [`SemanticModel`].
|
||||
fn get_symbol(
|
||||
&self,
|
||||
symbol: &ImportRequest,
|
||||
at: TextSize,
|
||||
semantic: &SemanticModel,
|
||||
) -> Result<Option<(Edit, String)>, ResolutionError> {
|
||||
// Find the symbol in the current scope.
|
||||
let Some(imported_name) = Self::find_symbol(symbol, at, semantic)? else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
// We also add a no-op edit to force conflicts with any other fixes that might try to
|
||||
// remove the import. Consider:
|
||||
//
|
||||
|
|
@ -259,9 +307,13 @@ impl<'a> Importer<'a> {
|
|||
&self,
|
||||
symbol: &ImportRequest,
|
||||
at: TextSize,
|
||||
except: Option<&Stmt>,
|
||||
semantic: &SemanticModel,
|
||||
) -> Result<(Edit, String), ResolutionError> {
|
||||
if let Some(stmt) = self.find_import_from(symbol.module, at) {
|
||||
if let Some(stmt) = self
|
||||
.find_import_from(symbol.module, at)
|
||||
.filter(|stmt| except != Some(stmt))
|
||||
{
|
||||
// Case 1: `from functools import lru_cache` is in scope, and we're trying to reference
|
||||
// `functools.cache`; thus, we add `cache` to the import, and return `"cache"` as the
|
||||
// bound name.
|
||||
|
|
@ -423,14 +475,18 @@ impl RuntimeImportEdit {
|
|||
#[derive(Debug)]
|
||||
pub(crate) struct TypingImportEdit {
|
||||
/// The edit to add the `TYPE_CHECKING` symbol to the module.
|
||||
type_checking_edit: Edit,
|
||||
type_checking_edit: Option<Edit>,
|
||||
/// The edit to add the import to a `TYPE_CHECKING` block.
|
||||
add_import_edit: Edit,
|
||||
}
|
||||
|
||||
impl TypingImportEdit {
|
||||
pub(crate) fn into_edits(self) -> Vec<Edit> {
|
||||
vec![self.type_checking_edit, self.add_import_edit]
|
||||
pub(crate) fn into_edits(self) -> (Edit, Option<Edit>) {
|
||||
if let Some(type_checking_edit) = self.type_checking_edit {
|
||||
(type_checking_edit, Some(self.add_import_edit))
|
||||
} else {
|
||||
(self.add_import_edit, None)
|
||||
}
|
||||
}
|
||||
}
|
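Taken together, these importer changes make the `TYPE_CHECKING` edit optional, skipping or splitting it off when it would collide with the import being modified. The user-visible result is still the familiar flake8-type-checking fix; a hedged before/after sketch:

```python
# Before: `DataFrame` is only used in annotations.
from __future__ import annotations

from pandas import DataFrame


def foo() -> DataFrame:
    ...


# After the fix: the typing-only import moves into a type-checking block.
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from pandas import DataFrame


def foo() -> DataFrame:
    ...
```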
||||
|
||||
|
|
|
|||
|
|
@ -17,6 +17,7 @@ use ruff_diagnostics::{Diagnostic, DiagnosticKind, Fix};
|
|||
use ruff_notebook::NotebookIndex;
|
||||
use ruff_source_file::{SourceFile, SourceLocation};
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
pub use sarif::SarifEmitter;
|
||||
pub use text::TextEmitter;
|
||||
|
||||
mod azure;
|
||||
|
|
@ -28,6 +29,7 @@ mod json;
|
|||
mod json_lines;
|
||||
mod junit;
|
||||
mod pylint;
|
||||
mod sarif;
|
||||
mod text;
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
|
|
|
|||
|
|
@ -0,0 +1,212 @@
|
|||
use std::io::Write;
|
||||
|
||||
use anyhow::Result;
|
||||
use serde::{Serialize, Serializer};
|
||||
use serde_json::json;
|
||||
|
||||
use ruff_source_file::OneIndexed;
|
||||
|
||||
use crate::codes::Rule;
|
||||
use crate::fs::normalize_path;
|
||||
use crate::message::{Emitter, EmitterContext, Message};
|
||||
use crate::registry::{AsRule, Linter, RuleNamespace};
|
||||
use crate::VERSION;
|
||||
|
||||
use strum::IntoEnumIterator;
|
||||
|
||||
pub struct SarifEmitter;
|
||||
|
||||
impl Emitter for SarifEmitter {
|
||||
fn emit(
|
||||
&mut self,
|
||||
writer: &mut dyn Write,
|
||||
messages: &[Message],
|
||||
_context: &EmitterContext,
|
||||
) -> Result<()> {
|
||||
let results = messages
|
||||
.iter()
|
||||
.map(SarifResult::from_message)
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
|
||||
let output = json!({
|
||||
"$schema": "https://json.schemastore.org/sarif-2.1.0.json",
|
||||
"version": "2.1.0",
|
||||
"runs": [{
|
||||
"tool": {
|
||||
"driver": {
|
||||
"name": "ruff",
|
||||
"informationUri": "https://github.com/astral-sh/ruff",
|
||||
"rules": Rule::iter().map(SarifRule::from).collect::<Vec<_>>(),
|
||||
"version": VERSION.to_string(),
|
||||
}
|
||||
},
|
||||
"results": results,
|
||||
}],
|
||||
});
|
||||
serde_json::to_writer_pretty(writer, &output)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct SarifRule<'a> {
|
||||
name: &'a str,
|
||||
code: String,
|
||||
linter: &'a str,
|
||||
summary: &'a str,
|
||||
explanation: Option<&'a str>,
|
||||
url: Option<String>,
|
||||
}
|
||||
|
||||
impl From<Rule> for SarifRule<'_> {
|
||||
fn from(rule: Rule) -> Self {
|
||||
let code = rule.noqa_code().to_string();
|
||||
let (linter, _) = Linter::parse_code(&code).unwrap();
|
||||
Self {
|
||||
name: rule.into(),
|
||||
code,
|
||||
linter: linter.name(),
|
||||
summary: rule.message_formats()[0],
|
||||
explanation: rule.explanation(),
|
||||
url: rule.url(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for SarifRule<'_> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
json!({
|
||||
"id": self.code,
|
||||
"shortDescription": {
|
||||
"text": self.summary,
|
||||
},
|
||||
"fullDescription": {
|
||||
"text": self.explanation,
|
||||
},
|
||||
"help": {
|
||||
"text": self.summary,
|
||||
},
|
||||
"helpUri": self.url,
|
||||
"properties": {
|
||||
"id": self.code,
|
||||
"kind": self.linter,
|
||||
"name": self.name,
|
||||
"problem.severity": "error".to_string(),
|
||||
},
|
||||
})
|
||||
.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct SarifResult {
|
||||
rule: Rule,
|
||||
level: String,
|
||||
message: String,
|
||||
uri: String,
|
||||
start_line: OneIndexed,
|
||||
start_column: OneIndexed,
|
||||
end_line: OneIndexed,
|
||||
end_column: OneIndexed,
|
||||
}
|
||||
|
||||
impl SarifResult {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
fn from_message(message: &Message) -> Result<Self> {
|
||||
let start_location = message.compute_start_location();
|
||||
let end_location = message.compute_end_location();
|
||||
let path = normalize_path(message.filename());
|
||||
Ok(Self {
|
||||
rule: message.kind.rule(),
|
||||
level: "error".to_string(),
|
||||
message: message.kind.name.clone(),
|
||||
uri: url::Url::from_file_path(&path)
|
||||
.map_err(|()| anyhow::anyhow!("Failed to convert path to URL: {}", path.display()))?
|
||||
.to_string(),
|
||||
start_line: start_location.row,
|
||||
start_column: start_location.column,
|
||||
end_line: end_location.row,
|
||||
end_column: end_location.column,
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
#[allow(clippy::unnecessary_wraps)]
|
||||
fn from_message(message: &Message) -> Result<Self> {
|
||||
let start_location = message.compute_start_location();
|
||||
let end_location = message.compute_end_location();
|
||||
let path = normalize_path(message.filename());
|
||||
Ok(Self {
|
||||
rule: message.kind.rule(),
|
||||
level: "error".to_string(),
|
||||
message: message.kind.name.clone(),
|
||||
uri: path.display().to_string(),
|
||||
start_line: start_location.row,
|
||||
start_column: start_location.column,
|
||||
end_line: end_location.row,
|
||||
end_column: end_location.column,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for SarifResult {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
json!({
|
||||
"level": self.level,
|
||||
"message": {
|
||||
"text": self.message,
|
||||
},
|
||||
"locations": [{
|
||||
"physicalLocation": {
|
||||
"artifactLocation": {
|
||||
"uri": self.uri,
|
||||
},
|
||||
"region": {
|
||||
"startLine": self.start_line,
|
||||
"startColumn": self.start_column,
|
||||
"endLine": self.end_line,
|
||||
"endColumn": self.end_column,
|
||||
}
|
||||
}
|
||||
}],
|
||||
"ruleId": self.rule.noqa_code().to_string(),
|
||||
})
|
||||
.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use crate::message::tests::{capture_emitter_output, create_messages};
|
||||
use crate::message::SarifEmitter;
|
||||
|
||||
fn get_output() -> String {
|
||||
let mut emitter = SarifEmitter {};
|
||||
capture_emitter_output(&mut emitter, &create_messages())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn valid_json() {
|
||||
let content = get_output();
|
||||
serde_json::from_str::<serde_json::Value>(&content).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_results() {
|
||||
let content = get_output();
|
||||
let sarif = serde_json::from_str::<serde_json::Value>(content.as_str()).unwrap();
|
||||
let rules = sarif["runs"][0]["tool"]["driver"]["rules"]
|
||||
.as_array()
|
||||
.unwrap();
|
||||
let results = sarif["runs"][0]["results"].as_array().unwrap();
|
||||
assert_eq!(results.len(), 3);
|
||||
assert!(rules.len() > 3);
|
||||
}
|
||||
}
|
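The emitter writes SARIF 2.1.0 with a single run whose `tool.driver.rules` lists every rule and whose `results` carry the diagnostics, as laid out in the `json!` calls above. A small Python sketch of consuming that output; the file name is made up, and this assumes the SARIF document has been written to disk:

```python
import json

with open("ruff.sarif", encoding="utf-8") as f:
    sarif = json.load(f)

run = sarif["runs"][0]
driver = run["tool"]["driver"]
print(driver["name"], driver["version"], len(driver["rules"]), "rules")

for result in run["results"]:
    location = result["locations"][0]["physicalLocation"]
    region = location["region"]
    print(
        f"{location['artifactLocation']['uri']}:{region['startLine']}:{region['startColumn']}",
        result["ruleId"],
        result["message"]["text"],
    )
```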
||||
|
|
@ -3,7 +3,7 @@ use rustc_hash::FxHashSet;
|
|||
|
||||
use ruff_diagnostics::Edit;
|
||||
use ruff_python_ast::helpers::{
|
||||
implicit_return, pep_604_union, typing_optional, typing_union, ReturnStatementVisitor,
|
||||
pep_604_union, typing_optional, typing_union, ReturnStatementVisitor, Terminal,
|
||||
};
|
||||
use ruff_python_ast::visitor::Visitor;
|
||||
use ruff_python_ast::{self as ast, Expr, ExprContext};
|
||||
|
|
@@ -57,6 +57,14 @@ pub(crate) fn auto_return_type(function: &ast::StmtFunctionDef) -> Option<AutoPy
        visitor.returns
    };

    // Determine the terminal behavior (i.e., implicit return, no return, etc.).
    let terminal = Terminal::from_function(function);

    // If every control flow path raises an exception, return `NoReturn`.
    if terminal == Some(Terminal::Raise) {
        return Some(AutoPythonType::Never);
    }

    // Determine the return type of the first `return` statement.
    let Some((return_statement, returns)) = returns.split_first() else {
        return Some(AutoPythonType::Atom(PythonType::None));
|
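With the terminal-behavior check in place, a function in which every path raises gets `Never` suggested as its return type (rendered as `typing.Never` on Python 3.11+ targets and `typing.NoReturn` otherwise, per the version check further down). A condensed Python view of the case shown in the updated snapshots:

```python
def func(x: int):
    if not x:
        raise ValueError
    else:
        raise TypeError


# Suggested fix: `def func(x: int) -> Never:` (or `-> NoReturn` before 3.11).
```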
||||
|
|
@ -80,7 +88,7 @@ pub(crate) fn auto_return_type(function: &ast::StmtFunctionDef) -> Option<AutoPy
|
|||
// if x > 0:
|
||||
// return 1
|
||||
// ```
|
||||
if implicit_return(function) {
|
||||
if terminal.is_none() {
|
||||
return_type = return_type.union(ResolvedPythonType::Atom(PythonType::None));
|
||||
}
|
||||
|
||||
|
|
@ -94,6 +102,7 @@ pub(crate) fn auto_return_type(function: &ast::StmtFunctionDef) -> Option<AutoPy
|
|||
|
||||
#[derive(Debug)]
|
||||
pub(crate) enum AutoPythonType {
|
||||
Never,
|
||||
Atom(PythonType),
|
||||
Union(FxHashSet<PythonType>),
|
||||
}
|
||||
|
|
@ -111,6 +120,28 @@ impl AutoPythonType {
|
|||
target_version: PythonVersion,
|
||||
) -> Option<(Expr, Vec<Edit>)> {
|
||||
match self {
|
||||
AutoPythonType::Never => {
|
||||
let (no_return_edit, binding) = importer
|
||||
.get_or_import_symbol(
|
||||
&ImportRequest::import_from(
|
||||
"typing",
|
||||
if target_version >= PythonVersion::Py311 {
|
||||
"Never"
|
||||
} else {
|
||||
"NoReturn"
|
||||
},
|
||||
),
|
||||
at,
|
||||
semantic,
|
||||
)
|
||||
.ok()?;
|
||||
let expr = Expr::Name(ast::ExprName {
|
||||
id: binding,
|
||||
range: TextRange::default(),
|
||||
ctx: ExprContext::Load,
|
||||
});
|
||||
Some((expr, vec![no_return_edit]))
|
||||
}
|
||||
AutoPythonType::Atom(python_type) => {
|
||||
let expr = type_expr(python_type)?;
|
||||
Some((expr, vec![]))
|
||||
|
|
|
|||
|
|
@@ -537,6 +537,19 @@ fn check_dynamically_typed<F>(
    }
}

fn is_empty_body(body: &[Stmt]) -> bool {
    body.iter().all(|stmt| match stmt {
        Stmt::Pass(_) => true,
        Stmt::Expr(ast::StmtExpr { value, range: _ }) => {
            matches!(
                value.as_ref(),
                Expr::StringLiteral(_) | Expr::EllipsisLiteral(_)
            )
        }
        _ => false,
    })
}

/// Generate flake8-annotation checks for a given `Definition`.
pub(crate) fn definition(
    checker: &Checker,
|
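With `is_empty_body`, the missing-return-type rules stop suggesting an annotation for abstract methods whose body is only `pass`, a docstring, or `...`, while still offering one when the abstract method has real logic. A condensed Python view of the cases from the updated snapshots:

```python
import abc
from abc import abstractmethod


class Foo(abc.ABC):
    @abstractmethod
    def stub(self):          # still reported, but no return type is suggested
        ...

    @abstractmethod
    def documented(self):    # docstring-only body: no suggestion either
        """Docstring."""

    @abstractmethod
    def concrete(self):      # real body: the fix suggests `-> float`
        if self.x > 0:
            return 1
        return 1.5
```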
||||
|
|
@ -725,16 +738,22 @@ pub(crate) fn definition(
|
|||
) {
|
||||
if is_method && visibility::is_classmethod(decorator_list, checker.semantic()) {
|
||||
if checker.enabled(Rule::MissingReturnTypeClassMethod) {
|
||||
let return_type = auto_return_type(function)
|
||||
.and_then(|return_type| {
|
||||
return_type.into_expression(
|
||||
checker.importer(),
|
||||
function.parameters.start(),
|
||||
checker.semantic(),
|
||||
checker.settings.target_version,
|
||||
)
|
||||
})
|
||||
.map(|(return_type, edits)| (checker.generator().expr(&return_type), edits));
|
||||
let return_type = if visibility::is_abstract(decorator_list, checker.semantic())
|
||||
&& is_empty_body(body)
|
||||
{
|
||||
None
|
||||
} else {
|
||||
auto_return_type(function)
|
||||
.and_then(|return_type| {
|
||||
return_type.into_expression(
|
||||
checker.importer(),
|
||||
function.parameters.start(),
|
||||
checker.semantic(),
|
||||
checker.settings.target_version,
|
||||
)
|
||||
})
|
||||
.map(|(return_type, edits)| (checker.generator().expr(&return_type), edits))
|
||||
};
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
MissingReturnTypeClassMethod {
|
||||
name: name.to_string(),
|
||||
|
|
@ -752,16 +771,22 @@ pub(crate) fn definition(
|
|||
}
|
||||
} else if is_method && visibility::is_staticmethod(decorator_list, checker.semantic()) {
|
||||
if checker.enabled(Rule::MissingReturnTypeStaticMethod) {
|
||||
let return_type = auto_return_type(function)
|
||||
.and_then(|return_type| {
|
||||
return_type.into_expression(
|
||||
checker.importer(),
|
||||
function.parameters.start(),
|
||||
checker.semantic(),
|
||||
checker.settings.target_version,
|
||||
)
|
||||
})
|
||||
.map(|(return_type, edits)| (checker.generator().expr(&return_type), edits));
|
||||
let return_type = if visibility::is_abstract(decorator_list, checker.semantic())
|
||||
&& is_empty_body(body)
|
||||
{
|
||||
None
|
||||
} else {
|
||||
auto_return_type(function)
|
||||
.and_then(|return_type| {
|
||||
return_type.into_expression(
|
||||
checker.importer(),
|
||||
function.parameters.start(),
|
||||
checker.semantic(),
|
||||
checker.settings.target_version,
|
||||
)
|
||||
})
|
||||
.map(|(return_type, edits)| (checker.generator().expr(&return_type), edits))
|
||||
};
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
MissingReturnTypeStaticMethod {
|
||||
name: name.to_string(),
|
||||
|
|
@ -818,18 +843,25 @@ pub(crate) fn definition(
|
|||
match visibility {
|
||||
visibility::Visibility::Public => {
|
||||
if checker.enabled(Rule::MissingReturnTypeUndocumentedPublicFunction) {
|
||||
let return_type = auto_return_type(function)
|
||||
.and_then(|return_type| {
|
||||
return_type.into_expression(
|
||||
checker.importer(),
|
||||
function.parameters.start(),
|
||||
checker.semantic(),
|
||||
checker.settings.target_version,
|
||||
)
|
||||
})
|
||||
.map(|(return_type, edits)| {
|
||||
(checker.generator().expr(&return_type), edits)
|
||||
});
|
||||
let return_type =
|
||||
if visibility::is_abstract(decorator_list, checker.semantic())
|
||||
&& is_empty_body(body)
|
||||
{
|
||||
None
|
||||
} else {
|
||||
auto_return_type(function)
|
||||
.and_then(|return_type| {
|
||||
return_type.into_expression(
|
||||
checker.importer(),
|
||||
function.parameters.start(),
|
||||
checker.semantic(),
|
||||
checker.settings.target_version,
|
||||
)
|
||||
})
|
||||
.map(|(return_type, edits)| {
|
||||
(checker.generator().expr(&return_type), edits)
|
||||
})
|
||||
};
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
MissingReturnTypeUndocumentedPublicFunction {
|
||||
name: name.to_string(),
|
||||
|
|
@ -853,18 +885,25 @@ pub(crate) fn definition(
|
|||
}
|
||||
visibility::Visibility::Private => {
|
||||
if checker.enabled(Rule::MissingReturnTypePrivateFunction) {
|
||||
let return_type = auto_return_type(function)
|
||||
.and_then(|return_type| {
|
||||
return_type.into_expression(
|
||||
checker.importer(),
|
||||
function.parameters.start(),
|
||||
checker.semantic(),
|
||||
checker.settings.target_version,
|
||||
)
|
||||
})
|
||||
.map(|(return_type, edits)| {
|
||||
(checker.generator().expr(&return_type), edits)
|
||||
});
|
||||
let return_type =
|
||||
if visibility::is_abstract(decorator_list, checker.semantic())
|
||||
&& is_empty_body(body)
|
||||
{
|
||||
None
|
||||
} else {
|
||||
auto_return_type(function)
|
||||
.and_then(|return_type| {
|
||||
return_type.into_expression(
|
||||
checker.importer(),
|
||||
function.parameters.start(),
|
||||
checker.semantic(),
|
||||
checker.settings.target_version,
|
||||
)
|
||||
})
|
||||
.map(|(return_type, edits)| {
|
||||
(checker.generator().expr(&return_type), edits)
|
||||
})
|
||||
};
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
MissingReturnTypePrivateFunction {
|
||||
name: name.to_string(),
|
||||
|
|
|
|||
|
|
@ -427,4 +427,156 @@ auto_return_type.py:146:5: ANN201 [*] Missing return type annotation for public
|
|||
148 148 | break
|
||||
149 149 | return 1
|
||||
|
||||
auto_return_type.py:158:9: ANN201 Missing return type annotation for public function `method`
|
||||
|
|
||||
156 | class Foo(abc.ABC):
|
||||
157 | @abstractmethod
|
||||
158 | def method(self):
|
||||
| ^^^^^^ ANN201
|
||||
159 | pass
|
||||
|
|
||||
= help: Add return type annotation
|
||||
|
||||
auto_return_type.py:162:9: ANN201 Missing return type annotation for public function `method`
|
||||
|
|
||||
161 | @abc.abstractmethod
|
||||
162 | def method(self):
|
||||
| ^^^^^^ ANN201
|
||||
163 | """Docstring."""
|
||||
|
|
||||
= help: Add return type annotation
|
||||
|
||||
auto_return_type.py:166:9: ANN201 Missing return type annotation for public function `method`
|
||||
|
|
||||
165 | @abc.abstractmethod
|
||||
166 | def method(self):
|
||||
| ^^^^^^ ANN201
|
||||
167 | ...
|
||||
|
|
||||
= help: Add return type annotation
|
||||
|
||||
auto_return_type.py:171:9: ANN205 Missing return type annotation for staticmethod `method`
|
||||
|
|
||||
169 | @staticmethod
|
||||
170 | @abstractmethod
|
||||
171 | def method():
|
||||
| ^^^^^^ ANN205
|
||||
172 | pass
|
||||
|
|
||||
= help: Add return type annotation
|
||||
|
||||
auto_return_type.py:176:9: ANN206 Missing return type annotation for classmethod `method`
|
||||
|
|
||||
174 | @classmethod
|
||||
175 | @abstractmethod
|
||||
176 | def method(cls):
|
||||
| ^^^^^^ ANN206
|
||||
177 | pass
|
||||
|
|
||||
= help: Add return type annotation
|
||||
|
||||
auto_return_type.py:180:9: ANN201 [*] Missing return type annotation for public function `method`
|
||||
|
|
||||
179 | @abstractmethod
|
||||
180 | def method(self):
|
||||
| ^^^^^^ ANN201
|
||||
181 | if self.x > 0:
|
||||
182 | return 1
|
||||
|
|
||||
= help: Add return type annotation: `float`
|
||||
|
||||
ℹ Unsafe fix
|
||||
177 177 | pass
|
||||
178 178 |
|
||||
179 179 | @abstractmethod
|
||||
180 |- def method(self):
|
||||
180 |+ def method(self) -> float:
|
||||
181 181 | if self.x > 0:
|
||||
182 182 | return 1
|
||||
183 183 | else:
|
||||
|
||||
auto_return_type.py:187:5: ANN201 [*] Missing return type annotation for public function `func`
|
||||
|
|
||||
187 | def func(x: int):
|
||||
| ^^^^ ANN201
|
||||
188 | try:
|
||||
189 | pass
|
||||
|
|
||||
= help: Add return type annotation: `int | None`
|
||||
|
||||
ℹ Unsafe fix
|
||||
184 184 | return 1.5
|
||||
185 185 |
|
||||
186 186 |
|
||||
187 |-def func(x: int):
|
||||
187 |+def func(x: int) -> int | None:
|
||||
188 188 | try:
|
||||
189 189 | pass
|
||||
190 190 | except:
|
||||
|
||||
auto_return_type.py:194:5: ANN201 [*] Missing return type annotation for public function `func`
|
||||
|
|
||||
194 | def func(x: int):
|
||||
| ^^^^ ANN201
|
||||
195 | try:
|
||||
196 | pass
|
||||
|
|
||||
= help: Add return type annotation: `int`
|
||||
|
||||
ℹ Unsafe fix
|
||||
191 191 | return 2
|
||||
192 192 |
|
||||
193 193 |
|
||||
194 |-def func(x: int):
|
||||
194 |+def func(x: int) -> int:
|
||||
195 195 | try:
|
||||
196 196 | pass
|
||||
197 197 | except:
|
||||
|
||||
auto_return_type.py:203:5: ANN201 [*] Missing return type annotation for public function `func`
|
||||
|
|
||||
203 | def func(x: int):
|
||||
| ^^^^ ANN201
|
||||
204 | if not x:
|
||||
205 | raise ValueError
|
||||
|
|
||||
= help: Add return type annotation: `Never`
|
||||
|
||||
ℹ Unsafe fix
|
||||
151 151 |
|
||||
152 152 | import abc
|
||||
153 153 | from abc import abstractmethod
|
||||
154 |+from typing import Never
|
||||
154 155 |
|
||||
155 156 |
|
||||
156 157 | class Foo(abc.ABC):
|
||||
--------------------------------------------------------------------------------
|
||||
200 201 | return 3
|
||||
201 202 |
|
||||
202 203 |
|
||||
203 |-def func(x: int):
|
||||
204 |+def func(x: int) -> Never:
|
||||
204 205 | if not x:
|
||||
205 206 | raise ValueError
|
||||
206 207 | else:
|
||||
|
||||
auto_return_type.py:210:5: ANN201 [*] Missing return type annotation for public function `func`
|
||||
|
|
||||
210 | def func(x: int):
|
||||
| ^^^^ ANN201
|
||||
211 | if not x:
|
||||
212 | raise ValueError
|
||||
|
|
||||
= help: Add return type annotation: `int`
|
||||
|
||||
ℹ Unsafe fix
|
||||
207 207 | raise TypeError
|
||||
208 208 |
|
||||
209 209 |
|
||||
210 |-def func(x: int):
|
||||
210 |+def func(x: int) -> int:
|
||||
211 211 | if not x:
|
||||
212 212 | raise ValueError
|
||||
213 213 | else:
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -482,4 +482,164 @@ auto_return_type.py:146:5: ANN201 [*] Missing return type annotation for public
|
|||
148 149 | break
|
||||
149 150 | return 1
|
||||
|
||||
auto_return_type.py:158:9: ANN201 Missing return type annotation for public function `method`
|
||||
|
|
||||
156 | class Foo(abc.ABC):
|
||||
157 | @abstractmethod
|
||||
158 | def method(self):
|
||||
| ^^^^^^ ANN201
|
||||
159 | pass
|
||||
|
|
||||
= help: Add return type annotation
|
||||
|
||||
auto_return_type.py:162:9: ANN201 Missing return type annotation for public function `method`
|
||||
|
|
||||
161 | @abc.abstractmethod
|
||||
162 | def method(self):
|
||||
| ^^^^^^ ANN201
|
||||
163 | """Docstring."""
|
||||
|
|
||||
= help: Add return type annotation
|
||||
|
||||
auto_return_type.py:166:9: ANN201 Missing return type annotation for public function `method`
|
||||
|
|
||||
165 | @abc.abstractmethod
|
||||
166 | def method(self):
|
||||
| ^^^^^^ ANN201
|
||||
167 | ...
|
||||
|
|
||||
= help: Add return type annotation
|
||||
|
||||
auto_return_type.py:171:9: ANN205 Missing return type annotation for staticmethod `method`
|
||||
|
|
||||
169 | @staticmethod
|
||||
170 | @abstractmethod
|
||||
171 | def method():
|
||||
| ^^^^^^ ANN205
|
||||
172 | pass
|
||||
|
|
||||
= help: Add return type annotation
|
||||
|
||||
auto_return_type.py:176:9: ANN206 Missing return type annotation for classmethod `method`
|
||||
|
|
||||
174 | @classmethod
|
||||
175 | @abstractmethod
|
||||
176 | def method(cls):
|
||||
| ^^^^^^ ANN206
|
||||
177 | pass
|
||||
|
|
||||
= help: Add return type annotation
|
||||
|
||||
auto_return_type.py:180:9: ANN201 [*] Missing return type annotation for public function `method`
|
||||
|
|
||||
179 | @abstractmethod
|
||||
180 | def method(self):
|
||||
| ^^^^^^ ANN201
|
||||
181 | if self.x > 0:
|
||||
182 | return 1
|
||||
|
|
||||
= help: Add return type annotation: `float`
|
||||
|
||||
ℹ Unsafe fix
|
||||
177 177 | pass
|
||||
178 178 |
|
||||
179 179 | @abstractmethod
|
||||
180 |- def method(self):
|
||||
180 |+ def method(self) -> float:
|
||||
181 181 | if self.x > 0:
|
||||
182 182 | return 1
|
||||
183 183 | else:
|
||||
|
||||
auto_return_type.py:187:5: ANN201 [*] Missing return type annotation for public function `func`
|
||||
|
|
||||
187 | def func(x: int):
|
||||
| ^^^^ ANN201
|
||||
188 | try:
|
||||
189 | pass
|
||||
|
|
||||
= help: Add return type annotation: `Optional[int]`
|
||||
|
||||
ℹ Unsafe fix
|
||||
151 151 |
|
||||
152 152 | import abc
|
||||
153 153 | from abc import abstractmethod
|
||||
154 |+from typing import Optional
|
||||
154 155 |
|
||||
155 156 |
|
||||
156 157 | class Foo(abc.ABC):
|
||||
--------------------------------------------------------------------------------
|
||||
184 185 | return 1.5
|
||||
185 186 |
|
||||
186 187 |
|
||||
187 |-def func(x: int):
|
||||
188 |+def func(x: int) -> Optional[int]:
|
||||
188 189 | try:
|
||||
189 190 | pass
|
||||
190 191 | except:
|
||||
|
||||
auto_return_type.py:194:5: ANN201 [*] Missing return type annotation for public function `func`
|
||||
|
|
||||
194 | def func(x: int):
|
||||
| ^^^^ ANN201
|
||||
195 | try:
|
||||
196 | pass
|
||||
|
|
||||
= help: Add return type annotation: `int`
|
||||
|
||||
ℹ Unsafe fix
|
||||
191 191 | return 2
|
||||
192 192 |
|
||||
193 193 |
|
||||
194 |-def func(x: int):
|
||||
194 |+def func(x: int) -> int:
|
||||
195 195 | try:
|
||||
196 196 | pass
|
||||
197 197 | except:
|
||||
|
||||
auto_return_type.py:203:5: ANN201 [*] Missing return type annotation for public function `func`
|
||||
|
|
||||
203 | def func(x: int):
|
||||
| ^^^^ ANN201
|
||||
204 | if not x:
|
||||
205 | raise ValueError
|
||||
|
|
||||
= help: Add return type annotation: `NoReturn`
|
||||
|
||||
ℹ Unsafe fix
|
||||
151 151 |
|
||||
152 152 | import abc
|
||||
153 153 | from abc import abstractmethod
|
||||
154 |+from typing import NoReturn
|
||||
154 155 |
|
||||
155 156 |
|
||||
156 157 | class Foo(abc.ABC):
|
||||
--------------------------------------------------------------------------------
|
||||
200 201 | return 3
|
||||
201 202 |
|
||||
202 203 |
|
||||
203 |-def func(x: int):
|
||||
204 |+def func(x: int) -> NoReturn:
|
||||
204 205 | if not x:
|
||||
205 206 | raise ValueError
|
||||
206 207 | else:
|
||||
|
||||
auto_return_type.py:210:5: ANN201 [*] Missing return type annotation for public function `func`
|
||||
|
|
||||
210 | def func(x: int):
|
||||
| ^^^^ ANN201
|
||||
211 | if not x:
|
||||
212 | raise ValueError
|
||||
|
|
||||
= help: Add return type annotation: `int`
|
||||
|
||||
ℹ Unsafe fix
|
||||
207 207 | raise TypeError
|
||||
208 208 |
|
||||
209 209 |
|
||||
210 |-def func(x: int):
|
||||
210 |+def func(x: int) -> int:
|
||||
211 211 | if not x:
|
||||
212 212 | raise ValueError
|
||||
213 213 | else:
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@@ -1,6 +1,9 @@
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::ExprStringLiteral;
use ruff_python_ast::{self as ast, StringLike};
use ruff_text_size::Ranged;

use crate::checkers::ast::Checker;

/// ## What it does
/// Checks for hardcoded bindings to all network interfaces (`0.0.0.0`).

@@ -34,10 +37,16 @@ impl Violation for HardcodedBindAllInterfaces {
}

/// S104
pub(crate) fn hardcoded_bind_all_interfaces(string: &ExprStringLiteral) -> Option<Diagnostic> {
    if string.value.to_str() == "0.0.0.0" {
        Some(Diagnostic::new(HardcodedBindAllInterfaces, string.range))
    } else {
        None
pub(crate) fn hardcoded_bind_all_interfaces(checker: &mut Checker, string: StringLike) {
    let is_bind_all_interface = match string {
        StringLike::StringLiteral(ast::ExprStringLiteral { value, .. }) => value == "0.0.0.0",
        StringLike::FStringLiteral(ast::FStringLiteralElement { value, .. }) => value == "0.0.0.0",
        StringLike::BytesLiteral(_) => return,
    };

    if is_bind_all_interface {
        checker
            .diagnostics
            .push(Diagnostic::new(HardcodedBindAllInterfaces, string.range()));
    }
}
|
||||
|
|
|
|||
|
|
@ -57,7 +57,7 @@ impl Violation for HardcodedSQLExpression {
|
|||
/// becomes `foobar {x}baz`.
|
||||
fn concatenated_f_string(expr: &ast::ExprFString, locator: &Locator) -> String {
|
||||
expr.value
|
||||
.parts()
|
||||
.iter()
|
||||
.filter_map(|part| {
|
||||
raw_contents(locator.slice(part)).map(|s| s.escape_default().to_string())
|
||||
})
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
use ruff_python_ast::{self as ast, Expr};
|
||||
use ruff_python_ast::{self as ast, Expr, StringLike};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use ruff_diagnostics::{Diagnostic, Violation};
|
||||
use ruff_macros::{derive_message_formats, violation};
|
||||
|
|
@ -51,13 +52,19 @@ impl Violation for HardcodedTempFile {
|
|||
}
|
||||
|
||||
/// S108
|
||||
pub(crate) fn hardcoded_tmp_directory(checker: &mut Checker, string: &ast::ExprStringLiteral) {
|
||||
pub(crate) fn hardcoded_tmp_directory(checker: &mut Checker, string: StringLike) {
|
||||
let value = match string {
|
||||
StringLike::StringLiteral(ast::ExprStringLiteral { value, .. }) => value.to_str(),
|
||||
StringLike::FStringLiteral(ast::FStringLiteralElement { value, .. }) => value,
|
||||
StringLike::BytesLiteral(_) => return,
|
||||
};
|
||||
|
||||
if !checker
|
||||
.settings
|
||||
.flake8_bandit
|
||||
.hardcoded_tmp_directory
|
||||
.iter()
|
||||
.any(|prefix| string.value.to_str().starts_with(prefix))
|
||||
.any(|prefix| value.starts_with(prefix))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
|
@ -76,8 +83,8 @@ pub(crate) fn hardcoded_tmp_directory(checker: &mut Checker, string: &ast::ExprS
|
|||
|
||||
checker.diagnostics.push(Diagnostic::new(
|
||||
HardcodedTempFile {
|
||||
string: string.value.to_string(),
|
||||
string: value.to_string(),
|
||||
},
|
||||
string.range,
|
||||
string.range(),
|
||||
));
|
||||
}
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ S104.py:9:1: S104 Possible binding to all interfaces
|
|||
9 | "0.0.0.0"
|
||||
| ^^^^^^^^^ S104
|
||||
10 | '0.0.0.0'
|
||||
11 | f"0.0.0.0"
|
||||
|
|
||||
|
||||
S104.py:10:1: S104 Possible binding to all interfaces
|
||||
|
|
@ -15,21 +16,30 @@ S104.py:10:1: S104 Possible binding to all interfaces
|
|||
9 | "0.0.0.0"
|
||||
10 | '0.0.0.0'
|
||||
| ^^^^^^^^^ S104
|
||||
11 | f"0.0.0.0"
|
||||
|
|
||||
|
||||
S104.py:14:6: S104 Possible binding to all interfaces
|
||||
S104.py:11:3: S104 Possible binding to all interfaces
|
||||
|
|
||||
13 | # Error
|
||||
14 | func("0.0.0.0")
|
||||
9 | "0.0.0.0"
|
||||
10 | '0.0.0.0'
|
||||
11 | f"0.0.0.0"
|
||||
| ^^^^^^^ S104
|
||||
|
|
||||
|
||||
S104.py:15:6: S104 Possible binding to all interfaces
|
||||
|
|
||||
14 | # Error
|
||||
15 | func("0.0.0.0")
|
||||
| ^^^^^^^^^ S104
|
||||
|
|
||||
|
||||
S104.py:18:9: S104 Possible binding to all interfaces
|
||||
S104.py:19:9: S104 Possible binding to all interfaces
|
||||
|
|
||||
17 | def my_func():
|
||||
18 | x = "0.0.0.0"
|
||||
18 | def my_func():
|
||||
19 | x = "0.0.0.0"
|
||||
| ^^^^^^^^^ S104
|
||||
19 | print(x)
|
||||
20 | print(x)
|
||||
|
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -10,22 +10,31 @@ S108.py:5:11: S108 Probable insecure usage of temporary file or directory: "/tmp
|
|||
6 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:8:11: S108 Probable insecure usage of temporary file or directory: "/var/tmp/123"
|
||||
S108.py:8:13: S108 Probable insecure usage of temporary file or directory: "/tmp/abc"
|
||||
|
|
||||
6 | f.write("def")
|
||||
7 |
|
||||
8 | with open("/var/tmp/123", "w") as f:
|
||||
| ^^^^^^^^^^^^^^ S108
|
||||
8 | with open(f"/tmp/abc", "w") as f:
|
||||
| ^^^^^^^^ S108
|
||||
9 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:11:11: S108 Probable insecure usage of temporary file or directory: "/dev/shm/unit/test"
|
||||
S108.py:11:11: S108 Probable insecure usage of temporary file or directory: "/var/tmp/123"
|
||||
|
|
||||
9 | f.write("def")
|
||||
10 |
|
||||
11 | with open("/dev/shm/unit/test", "w") as f:
|
||||
| ^^^^^^^^^^^^^^^^^^^^ S108
|
||||
11 | with open("/var/tmp/123", "w") as f:
|
||||
| ^^^^^^^^^^^^^^ S108
|
||||
12 | f.write("def")
|
||||
|
|
||||
|
||||
S108.py:14:11: S108 Probable insecure usage of temporary file or directory: "/dev/shm/unit/test"
|
||||
|
|
||||
12 | f.write("def")
|
||||
13 |
|
||||
14 | with open("/dev/shm/unit/test", "w") as f:
|
||||
| ^^^^^^^^^^^^^^^^^^^^ S108
|
||||
15 | f.write("def")
|
||||
|
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@@ -10,30 +10,39 @@ S108.py:5:11: S108 Probable insecure usage of temporary file or directory: "/tmp
6 | f.write("def")
|

S108.py:8:11: S108 Probable insecure usage of temporary file or directory: "/var/tmp/123"
S108.py:8:13: S108 Probable insecure usage of temporary file or directory: "/tmp/abc"
|
6 | f.write("def")
7 |
8 | with open("/var/tmp/123", "w") as f:
| ^^^^^^^^^^^^^^ S108
8 | with open(f"/tmp/abc", "w") as f:
| ^^^^^^^^ S108
9 | f.write("def")
|

S108.py:11:11: S108 Probable insecure usage of temporary file or directory: "/dev/shm/unit/test"
S108.py:11:11: S108 Probable insecure usage of temporary file or directory: "/var/tmp/123"
|
9 | f.write("def")
10 |
11 | with open("/dev/shm/unit/test", "w") as f:
| ^^^^^^^^^^^^^^^^^^^^ S108
11 | with open("/var/tmp/123", "w") as f:
| ^^^^^^^^^^^^^^ S108
12 | f.write("def")
|

S108.py:15:11: S108 Probable insecure usage of temporary file or directory: "/foo/bar"
S108.py:14:11: S108 Probable insecure usage of temporary file or directory: "/dev/shm/unit/test"
|
14 | # not ok by config
15 | with open("/foo/bar", "w") as f:
12 | f.write("def")
13 |
14 | with open("/dev/shm/unit/test", "w") as f:
| ^^^^^^^^^^^^^^^^^^^^ S108
15 | f.write("def")
|

S108.py:18:11: S108 Probable insecure usage of temporary file or directory: "/foo/bar"
|
17 | # not ok by config
18 | with open("/foo/bar", "w") as f:
| ^^^^^^^^^^ S108
16 | f.write("def")
19 | f.write("def")
|

@@ -1,4 +1,4 @@
use ruff_diagnostics::{Diagnostic, Violation};
use ruff_diagnostics::{AlwaysFixableViolation, Applicability, Diagnostic, Fix};
use ruff_macros::{derive_message_formats, violation};

use ruff_python_ast::{self as ast, Arguments, Expr};

@@ -6,6 +6,7 @@ use ruff_python_semantic::SemanticModel;
use ruff_text_size::Ranged;

use crate::checkers::ast::Checker;
use crate::fix::edits::add_argument;

/// ## What it does
/// Checks for `zip` calls without an explicit `strict` parameter.

@@ -28,16 +29,25 @@ use crate::checkers::ast::Checker;
/// zip(a, b, strict=True)
/// ```
///
/// ## Fix safety
/// This rule's fix is marked as unsafe for `zip` calls that contain
/// `**kwargs`, as adding a `strict` keyword argument to such a call may lead
/// to a duplicate keyword argument error.
///
/// ## References
/// - [Python documentation: `zip`](https://docs.python.org/3/library/functions.html#zip)
#[violation]
pub struct ZipWithoutExplicitStrict;

impl Violation for ZipWithoutExplicitStrict {
impl AlwaysFixableViolation for ZipWithoutExplicitStrict {
    #[derive_message_formats]
    fn message(&self) -> String {
        format!("`zip()` without an explicit `strict=` parameter")
    }

    fn fix_title(&self) -> String {
        "Add explicit `strict=False`".to_string()
    }
}

/// B905

@@ -52,9 +62,27 @@ pub(crate) fn zip_without_explicit_strict(checker: &mut Checker, call: &ast::Exp
            .iter()
            .any(|arg| is_infinite_iterator(arg, checker.semantic()))
        {
            checker
                .diagnostics
                .push(Diagnostic::new(ZipWithoutExplicitStrict, call.range()));
            let mut diagnostic = Diagnostic::new(ZipWithoutExplicitStrict, call.range());
            diagnostic.set_fix(Fix::applicable_edit(
                add_argument(
                    "strict=False",
                    &call.arguments,
                    checker.indexer().comment_ranges(),
                    checker.locator().contents(),
                ),
                // If the function call contains `**kwargs`, mark the fix as unsafe.
                if call
                    .arguments
                    .keywords
                    .iter()
                    .any(|keyword| keyword.arg.is_none())
                {
                    Applicability::Unsafe
                } else {
                    Applicability::Safe
                },
            ));
            checker.diagnostics.push(diagnostic);
        }
    }
}

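A short, illustrative Python sketch of what the new fix does, based on the rule code above; the calls are arbitrary examples, not project fixtures:

    zip("a", "b")                  # flagged by B905; the fix appends strict=False
    zip("a", "b", strict=False)    # what the fix produces (safe applicability)

    # When the call forwards **kwargs, the added keyword could collide at runtime,
    # so the fix is offered with unsafe applicability (see the Applicability logic above).
    kwargs = {"strict": True}
    zip("a", "b", **kwargs)        # adding strict=False here would duplicate the keyword
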
@@ -1,7 +1,7 @@
---
source: crates/ruff_linter/src/rules/flake8_bugbear/mod.rs
---
B905.py:4:1: B905 `zip()` without an explicit `strict=` parameter
B905.py:4:1: B905 [*] `zip()` without an explicit `strict=` parameter
|
3 | # Errors
4 | zip()

@@ -9,8 +9,19 @@ B905.py:4:1: B905 `zip()` without an explicit `strict=` parameter
5 | zip(range(3))
6 | zip("a", "b")
|
= help: Add explicit `strict=False`

B905.py:5:1: B905 `zip()` without an explicit `strict=` parameter
ℹ Safe fix
1 1 | from itertools import count, cycle, repeat
2 2 |
3 3 | # Errors
4 |-zip()
4 |+zip(strict=False)
5 5 | zip(range(3))
6 6 | zip("a", "b")
7 7 | zip("a", "b", *zip("c"))

B905.py:5:1: B905 [*] `zip()` without an explicit `strict=` parameter
|
3 | # Errors
4 | zip()

@@ -19,8 +30,19 @@ B905.py:5:1: B905 `zip()` without an explicit `strict=` parameter
6 | zip("a", "b")
7 | zip("a", "b", *zip("c"))
|
= help: Add explicit `strict=False`

B905.py:6:1: B905 `zip()` without an explicit `strict=` parameter
ℹ Safe fix
2 2 |
3 3 | # Errors
4 4 | zip()
5 |-zip(range(3))
5 |+zip(range(3), strict=False)
6 6 | zip("a", "b")
7 7 | zip("a", "b", *zip("c"))
8 8 | zip(zip("a"), strict=False)

B905.py:6:1: B905 [*] `zip()` without an explicit `strict=` parameter
|
4 | zip()
5 | zip(range(3))

@@ -29,8 +51,19 @@ B905.py:6:1: B905 `zip()` without an explicit `strict=` parameter
7 | zip("a", "b", *zip("c"))
8 | zip(zip("a"), strict=False)
|
= help: Add explicit `strict=False`

B905.py:7:1: B905 `zip()` without an explicit `strict=` parameter
ℹ Safe fix
3 3 | # Errors
4 4 | zip()
5 5 | zip(range(3))
6 |-zip("a", "b")
6 |+zip("a", "b", strict=False)
7 7 | zip("a", "b", *zip("c"))
8 8 | zip(zip("a"), strict=False)
9 9 | zip(zip("a", strict=True))

B905.py:7:1: B905 [*] `zip()` without an explicit `strict=` parameter
|
5 | zip(range(3))
6 | zip("a", "b")

@@ -39,8 +72,19 @@ B905.py:7:1: B905 `zip()` without an explicit `strict=` parameter
8 | zip(zip("a"), strict=False)
9 | zip(zip("a", strict=True))
|
= help: Add explicit `strict=False`

B905.py:7:16: B905 `zip()` without an explicit `strict=` parameter
ℹ Safe fix
4 4 | zip()
5 5 | zip(range(3))
6 6 | zip("a", "b")
7 |-zip("a", "b", *zip("c"))
7 |+zip("a", "b", *zip("c"), strict=False)
8 8 | zip(zip("a"), strict=False)
9 9 | zip(zip("a", strict=True))
10 10 |

B905.py:7:16: B905 [*] `zip()` without an explicit `strict=` parameter
|
5 | zip(range(3))
6 | zip("a", "b")

@@ -49,8 +93,19 @@ B905.py:7:16: B905 `zip()` without an explicit `strict=` parameter
8 | zip(zip("a"), strict=False)
9 | zip(zip("a", strict=True))
|
= help: Add explicit `strict=False`

B905.py:8:5: B905 `zip()` without an explicit `strict=` parameter
ℹ Safe fix
4 4 | zip()
5 5 | zip(range(3))
6 6 | zip("a", "b")
7 |-zip("a", "b", *zip("c"))
7 |+zip("a", "b", *zip("c", strict=False))
8 8 | zip(zip("a"), strict=False)
9 9 | zip(zip("a", strict=True))
10 10 |

B905.py:8:5: B905 [*] `zip()` without an explicit `strict=` parameter
|
6 | zip("a", "b")
7 | zip("a", "b", *zip("c"))

@@ -58,8 +113,19 @@ B905.py:8:5: B905 `zip()` without an explicit `strict=` parameter
| ^^^^^^^^ B905
9 | zip(zip("a", strict=True))
|
= help: Add explicit `strict=False`

B905.py:9:1: B905 `zip()` without an explicit `strict=` parameter
ℹ Safe fix
5 5 | zip(range(3))
6 6 | zip("a", "b")
7 7 | zip("a", "b", *zip("c"))
8 |-zip(zip("a"), strict=False)
8 |+zip(zip("a", strict=False), strict=False)
9 9 | zip(zip("a", strict=True))
10 10 |
11 11 | # OK

B905.py:9:1: B905 [*] `zip()` without an explicit `strict=` parameter
|
7 | zip("a", "b", *zip("c"))
8 | zip(zip("a"), strict=False)

@@ -68,21 +134,49 @@ B905.py:9:1: B905 `zip()` without an explicit `strict=` parameter
10 |
11 | # OK
|
= help: Add explicit `strict=False`

B905.py:24:1: B905 `zip()` without an explicit `strict=` parameter
ℹ Safe fix
6 6 | zip("a", "b")
7 7 | zip("a", "b", *zip("c"))
8 8 | zip(zip("a"), strict=False)
9 |-zip(zip("a", strict=True))
9 |+zip(zip("a", strict=True), strict=False)
10 10 |
11 11 | # OK
12 12 | zip(range(3), strict=True)

B905.py:24:1: B905 [*] `zip()` without an explicit `strict=` parameter
|
23 | # Errors (limited iterators).
24 | zip([1, 2, 3], repeat(1, 1))
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B905
25 | zip([1, 2, 3], repeat(1, times=4))
|
= help: Add explicit `strict=False`

B905.py:25:1: B905 `zip()` without an explicit `strict=` parameter
ℹ Safe fix
21 21 | zip([1, 2, 3], repeat(1, times=None))
22 22 |
23 23 | # Errors (limited iterators).
24 |-zip([1, 2, 3], repeat(1, 1))
24 |+zip([1, 2, 3], repeat(1, 1), strict=False)
25 25 | zip([1, 2, 3], repeat(1, times=4))

B905.py:25:1: B905 [*] `zip()` without an explicit `strict=` parameter
|
23 | # Errors (limited iterators).
24 | zip([1, 2, 3], repeat(1, 1))
25 | zip([1, 2, 3], repeat(1, times=4))
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B905
|
= help: Add explicit `strict=False`

ℹ Safe fix
22 22 |
23 23 | # Errors (limited iterators).
24 24 | zip([1, 2, 3], repeat(1, 1))
25 |-zip([1, 2, 3], repeat(1, times=4))
25 |+zip([1, 2, 3], repeat(1, times=4), strict=False)

@@ -1083,7 +1083,7 @@ pub(crate) fn fix_unnecessary_map(
    // If the expression is embedded in an f-string, surround it with spaces to avoid
    // syntax errors.
    if matches!(object_type, ObjectType::Set | ObjectType::Dict) {
        if parent.is_some_and(Expr::is_formatted_value_expr) {
        if parent.is_some_and(Expr::is_f_string_expr) {
            content = format!(" {content} ");
        }
    }

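An illustrative Python note, not part of the diff, on why the padding spaces matter once the rewritten comprehension lands inside an f-string replacement field: `{{` is an escaped literal brace, so the comprehension's opening brace must not touch the field's own opening brace.

    x = f"{ {c for c in 'abc'} }"   # a real set comprehension inside the field
    y = f"{{c for c in 'abc'}}"     # literal text "{c for c in 'abc'}", not a set
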
@@ -1,7 +1,6 @@
use ruff_python_ast::{self as ast, Arguments, Expr, Stmt};

use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::{self as ast, Expr, Stmt};
use ruff_text_size::Ranged;

use crate::checkers::ast::Checker;

@@ -48,21 +47,12 @@ impl Violation for DjangoAllWithModelForm {
}

/// DJ007
pub(crate) fn all_with_model_form(
    checker: &Checker,
    arguments: Option<&Arguments>,
    body: &[Stmt],
) -> Option<Diagnostic> {
    if !arguments.is_some_and(|arguments| {
        arguments
            .args
            .iter()
            .any(|base| is_model_form(base, checker.semantic()))
    }) {
        return None;
pub(crate) fn all_with_model_form(checker: &mut Checker, class_def: &ast::StmtClassDef) {
    if !is_model_form(class_def, checker.semantic()) {
        return;
    }

    for element in body {
    for element in &class_def.body {
        let Stmt::ClassDef(ast::StmtClassDef { name, body, .. }) = element else {
            continue;
        };

@@ -83,12 +73,18 @@ pub(crate) fn all_with_model_form(
                match value.as_ref() {
                    Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => {
                        if value == "__all__" {
                            return Some(Diagnostic::new(DjangoAllWithModelForm, element.range()));
                            checker
                                .diagnostics
                                .push(Diagnostic::new(DjangoAllWithModelForm, element.range()));
                            return;
                        }
                    }
                    Expr::BytesLiteral(ast::ExprBytesLiteral { value, .. }) => {
                        if value == "__all__".as_bytes() {
                            return Some(Diagnostic::new(DjangoAllWithModelForm, element.range()));
                            checker
                                .diagnostics
                                .push(Diagnostic::new(DjangoAllWithModelForm, element.range()));
                            return;
                        }
                    }
                    _ => (),

@@ -96,5 +92,4 @@ pub(crate) fn all_with_model_form(
            }
        }
    }
    None
}

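For orientation (illustrative only; the module and model names are hypothetical): the "__all__" checks above correspond to Django forms like the following, which DJ007 reports for both the string and bytes spellings of "__all__".

    from django import forms
    from myapp.models import Post   # hypothetical app and model

    class PostForm(forms.ModelForm):
        class Meta:
            model = Post
            fields = "__all__"      # reported: "__all__" used with a ModelForm
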
@@ -1,7 +1,6 @@
use ruff_python_ast::{self as ast, Arguments, Expr, Stmt};

use ruff_diagnostics::{Diagnostic, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast::{self as ast, Expr, Stmt};
use ruff_text_size::Ranged;

use crate::checkers::ast::Checker;

@@ -46,21 +45,12 @@ impl Violation for DjangoExcludeWithModelForm {
}

/// DJ006
pub(crate) fn exclude_with_model_form(
    checker: &Checker,
    arguments: Option<&Arguments>,
    body: &[Stmt],
) -> Option<Diagnostic> {
    if !arguments.is_some_and(|arguments| {
        arguments
            .args
            .iter()
            .any(|base| is_model_form(base, checker.semantic()))
    }) {
        return None;
pub(crate) fn exclude_with_model_form(checker: &mut Checker, class_def: &ast::StmtClassDef) {
    if !is_model_form(class_def, checker.semantic()) {
        return;
    }

    for element in body {
    for element in &class_def.body {
        let Stmt::ClassDef(ast::StmtClassDef { name, body, .. }) = element else {
            continue;
        };

@@ -76,10 +66,12 @@ pub(crate) fn exclude_with_model_form(
                    continue;
                };
                if id == "exclude" {
                    return Some(Diagnostic::new(DjangoExcludeWithModelForm, target.range()));
                    checker
                        .diagnostics
                        .push(Diagnostic::new(DjangoExcludeWithModelForm, target.range()));
                    return;
                }
            }
        }
    }
    None
}

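Similarly, an illustrative example (hypothetical names) of what DJ006 reports, matching the `exclude` check above:

    from django import forms
    from myapp.models import Post   # hypothetical app and model

    class PostForm(forms.ModelForm):
        class Meta:
            model = Post
            exclude = ["author"]    # reported: `exclude` used with a ModelForm
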
@@ -1,17 +1,17 @@
use ruff_python_ast::Expr;
use ruff_python_ast::{self as ast, Expr};

use ruff_python_semantic::SemanticModel;
use ruff_python_semantic::{analyze, SemanticModel};

/// Return `true` if a Python class appears to be a Django model, based on its base classes.
pub(super) fn is_model(base: &Expr, semantic: &SemanticModel) -> bool {
    semantic.resolve_call_path(base).is_some_and(|call_path| {
pub(super) fn is_model(class_def: &ast::StmtClassDef, semantic: &SemanticModel) -> bool {
    analyze::class::any_over_body(class_def, semantic, &|call_path| {
        matches!(call_path.as_slice(), ["django", "db", "models", "Model"])
    })
}

/// Return `true` if a Python class appears to be a Django model form, based on its base classes.
pub(super) fn is_model_form(base: &Expr, semantic: &SemanticModel) -> bool {
    semantic.resolve_call_path(base).is_some_and(|call_path| {
pub(super) fn is_model_form(class_def: &ast::StmtClassDef, semantic: &SemanticModel) -> bool {
    analyze::class::any_over_body(class_def, semantic, &|call_path| {
        matches!(
            call_path.as_slice(),
            ["django", "forms", "ModelForm"] | ["django", "forms", "models", "ModelForm"]

Some files were not shown because too many files have changed in this diff.