diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 32d803aa14..b271562d66 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -48,8 +48,8 @@ jobs: - "!crates/ruff_dev/**" - "!crates/ruff_shrinking/**" - scripts/* - - .github/workflows/ci.yaml - python/** + - .github/workflows/ci.yaml formatter: - Cargo.toml @@ -68,7 +68,7 @@ jobs: - .github/workflows/ci.yaml code: - - "*/**" + - "**/*" - "!**/*.md" - "!docs/**" - "!assets/**" @@ -86,7 +86,7 @@ jobs: name: "cargo clippy" runs-on: ubuntu-latest needs: determine_changes - if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' + if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }} steps: - uses: actions/checkout@v4 - name: "Install Rust toolchain" @@ -102,7 +102,7 @@ jobs: cargo-test-linux: runs-on: ubuntu-latest needs: determine_changes - if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' + if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }} name: "cargo test (linux)" steps: - uses: actions/checkout@v4 @@ -128,7 +128,7 @@ jobs: cargo-test-windows: runs-on: windows-latest needs: determine_changes - if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' + if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }} name: "cargo test (windows)" steps: - uses: actions/checkout@v4 @@ -147,7 +147,7 @@ jobs: cargo-test-wasm: runs-on: ubuntu-latest needs: determine_changes - if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' + if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }} name: "cargo test (wasm)" steps: - uses: actions/checkout@v4 @@ -168,7 +168,7 @@ jobs: cargo-fuzz: runs-on: ubuntu-latest needs: determine_changes - if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' + if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }} name: "cargo fuzz" steps: - uses: actions/checkout@v4 @@ -187,7 +187,7 @@ jobs: name: "test scripts" runs-on: ubuntu-latest needs: determine_changes - if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' + if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }} steps: - uses: actions/checkout@v4 - name: "Install Rust toolchain" @@ -215,7 +215,7 @@ jobs: }} steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} @@ -226,7 +226,7 @@ jobs: name: ruff path: target/debug - - uses: dawidd6/action-download-artifact@v2 + - uses: dawidd6/action-download-artifact@v3 name: Download baseline Ruff binary with: name: ruff @@ -321,7 +321,7 @@ jobs: name: "cargo udeps" runs-on: ubuntu-latest needs: determine_changes - if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' + if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }} steps: - uses: actions/checkout@v4 - name: "Install nightly Rust toolchain" @@ -338,7 +338,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} architecture: x64 @@ -362,7 +362,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - uses: 
actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} - name: "Install Rust toolchain" @@ -392,7 +392,7 @@ jobs: MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }} steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 - name: "Add SSH key" if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }} uses: webfactory/ssh-agent@v0.8.0 @@ -444,7 +444,7 @@ jobs: needs: - cargo-test-linux - determine_changes - if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' + if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }} steps: - uses: extractions/setup-just@v1 env: @@ -455,7 +455,7 @@ jobs: with: repository: "astral-sh/ruff-lsp" - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} @@ -483,7 +483,7 @@ jobs: benchmarks: runs-on: ubuntu-latest needs: determine_changes - if: needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' + if: ${{ needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main' }} steps: - name: "Checkout Branch" uses: actions/checkout@v4 @@ -502,7 +502,7 @@ jobs: run: cargo codspeed build --features codspeed -p ruff_benchmark - name: "Run benchmarks" - uses: CodSpeedHQ/action@v1 + uses: CodSpeedHQ/action@v2 with: run: cargo codspeed run token: ${{ secrets.CODSPEED_TOKEN }} diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml index 992ca27a93..13bf7a8759 100644 --- a/.github/workflows/docs.yaml +++ b/.github/workflows/docs.yaml @@ -20,7 +20,7 @@ jobs: - uses: actions/checkout@v4 with: ref: ${{ inputs.ref }} - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 - name: "Add SSH key" if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }} uses: webfactory/ssh-agent@v0.8.0 diff --git a/.github/workflows/flake8-to-ruff.yaml b/.github/workflows/flake8-to-ruff.yaml index feb1c2825a..ebc38cb4fe 100644 --- a/.github/workflows/flake8-to-ruff.yaml +++ b/.github/workflows/flake8-to-ruff.yaml @@ -20,7 +20,7 @@ jobs: runs-on: macos-latest steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} architecture: x64 @@ -43,7 +43,7 @@ jobs: runs-on: macos-latest steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} architecture: x64 @@ -69,7 +69,7 @@ jobs: target: [x64, x86] steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} architecture: ${{ matrix.target }} @@ -97,7 +97,7 @@ jobs: target: [x86_64, i686] steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} architecture: x64 @@ -124,7 +124,7 @@ jobs: target: [aarch64, armv7, s390x, ppc64le, ppc64] steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} - name: "Build wheels" @@ -161,7 +161,7 @@ jobs: - i686-unknown-linux-musl steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} architecture: x64 @@ -197,7 +197,7 @@ jobs: arch: armv7 steps: - uses: 
actions/checkout@v4 - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} - name: "Build wheels" @@ -237,7 +237,7 @@ jobs: - uses: actions/download-artifact@v3 with: name: wheels - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 - name: "Publish to PyPi" env: TWINE_USERNAME: __token__ diff --git a/.github/workflows/pr-comment.yaml b/.github/workflows/pr-comment.yaml index 934ad21b6a..34fe20f50c 100644 --- a/.github/workflows/pr-comment.yaml +++ b/.github/workflows/pr-comment.yaml @@ -17,7 +17,7 @@ jobs: comment: runs-on: ubuntu-latest steps: - - uses: dawidd6/action-download-artifact@v2 + - uses: dawidd6/action-download-artifact@v3 name: Download pull request number with: name: pr-number @@ -32,7 +32,7 @@ jobs: echo "pr-number=$(> $GITHUB_OUTPUT fi - - uses: dawidd6/action-download-artifact@v2 + - uses: dawidd6/action-download-artifact@v3 name: "Download ecosystem results" id: download-ecosystem-result if: steps.pr-number.outputs.pr-number diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 2ec8ef052f..368bd3bff0 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -36,7 +36,7 @@ jobs: - uses: actions/checkout@v4 with: ref: ${{ inputs.sha }} - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} - name: "Prep README.md" @@ -63,7 +63,7 @@ jobs: - uses: actions/checkout@v4 with: ref: ${{ inputs.sha }} - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} architecture: x64 @@ -86,7 +86,7 @@ jobs: path: dist - name: "Archive binary" run: | - ARCHIVE_FILE=ruff-x86_64-apple-darwin.tar.gz + ARCHIVE_FILE=ruff-${{ inputs.tag }}-x86_64-apple-darwin.tar.gz tar czvf $ARCHIVE_FILE -C target/x86_64-apple-darwin/release ruff shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256 - name: "Upload binary" @@ -103,7 +103,7 @@ jobs: - uses: actions/checkout@v4 with: ref: ${{ inputs.sha }} - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} architecture: x64 @@ -125,7 +125,7 @@ jobs: path: dist - name: "Archive binary" run: | - ARCHIVE_FILE=ruff-aarch64-apple-darwin.tar.gz + ARCHIVE_FILE=ruff-${{ inputs.tag }}-aarch64-apple-darwin.tar.gz tar czvf $ARCHIVE_FILE -C target/aarch64-apple-darwin/release ruff shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256 - name: "Upload binary" @@ -151,7 +151,7 @@ jobs: - uses: actions/checkout@v4 with: ref: ${{ inputs.sha }} - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} architecture: ${{ matrix.platform.arch }} @@ -177,7 +177,7 @@ jobs: - name: "Archive binary" shell: bash run: | - ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.zip + ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.zip 7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/ruff.exe sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256 - name: "Upload binary" @@ -199,7 +199,7 @@ jobs: - uses: actions/checkout@v4 with: ref: ${{ inputs.sha }} - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} architecture: x64 @@ -224,7 +224,7 @@ jobs: path: dist - name: "Archive binary" run: | - ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz + ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz tar czvf $ARCHIVE_FILE -C target/${{ matrix.target 
}}/release ruff shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256 - name: "Upload binary" @@ -258,7 +258,7 @@ jobs: - uses: actions/checkout@v4 with: ref: ${{ inputs.sha }} - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} - name: "Prep README.md" @@ -291,7 +291,7 @@ jobs: path: dist - name: "Archive binary" run: | - ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz + ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256 - name: "Upload binary" @@ -313,7 +313,7 @@ jobs: - uses: actions/checkout@v4 with: ref: ${{ inputs.sha }} - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} architecture: x64 @@ -332,10 +332,10 @@ jobs: image: alpine:latest options: -v ${{ github.workspace }}:/io -w /io run: | - apk add py3-pip - pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links /io/dist/ --force-reinstall - ruff --help - python -m ruff --help + apk add python3 + python -m venv .venv + .venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall + .venv/bin/ruff check --help - name: "Upload wheels" uses: actions/upload-artifact@v3 with: @@ -343,7 +343,7 @@ jobs: path: dist - name: "Archive binary" run: | - ARCHIVE_FILE=ruff-${{ matrix.target }}.tar.gz + ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.target }}.tar.gz tar czvf $ARCHIVE_FILE -C target/${{ matrix.target }}/release ruff shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256 - name: "Upload binary" @@ -369,7 +369,7 @@ jobs: - uses: actions/checkout@v4 with: ref: ${{ inputs.sha }} - - uses: actions/setup-python@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} - name: "Prep README.md" @@ -388,10 +388,11 @@ jobs: distro: alpine_latest githubToken: ${{ github.token }} install: | - apk add py3-pip + apk add python3 run: | - pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall - ruff check --help + python -m venv .venv + .venv/bin/pip3 install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall + .venv/bin/ruff check --help - name: "Upload wheels" uses: actions/upload-artifact@v3 with: @@ -399,7 +400,7 @@ jobs: path: dist - name: "Archive binary" run: | - ARCHIVE_FILE=ruff-${{ matrix.platform.target }}.tar.gz + ARCHIVE_FILE=ruff-${{ inputs.tag }}-${{ matrix.platform.target }}.tar.gz tar czvf $ARCHIVE_FILE -C target/${{ matrix.platform.target }}/release ruff shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256 - name: "Upload binary" diff --git a/BREAKING_CHANGES.md b/BREAKING_CHANGES.md index 3fcc2337d1..0558ab5cca 100644 --- a/BREAKING_CHANGES.md +++ b/BREAKING_CHANGES.md @@ -1,5 +1,42 @@ # Breaking Changes +## 0.1.9 + +### `site-packages` is now excluded by default ([#5513](https://github.com/astral-sh/ruff/pull/5513)) + +Ruff maintains a list of default exclusions, which now consists of the following patterns: + +- `.bzr` +- `.direnv` +- `.eggs` +- `.git-rewrite` +- `.git` +- `.hg` +- `.ipynb_checkpoints` +- `.mypy_cache` +- `.nox` +- `.pants.d` +- `.pyenv` +- `.pytest_cache` +- `.pytype` +- `.ruff_cache` +- `.svn` +- `.tox` +- `.venv` +- `.vscode` +- `__pypackages__` +- `_build` +- `buck-out` +- `build` +- `dist` +- `node_modules` +- `site-packages` +- `venv` + +Previously, the `site-packages` directory was not excluded by default. 
While `site-packages` tends +to be excluded anyway by virtue of the `.venv` exclusion, this may not be the case when using Ruff +from VS Code outside a virtual environment. + ## 0.1.0 ### The deprecated `format` setting has been removed diff --git a/CHANGELOG.md b/CHANGELOG.md index 766439b59e..cebb80b270 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,156 @@ # Changelog +## 0.1.8 + +This release includes opt-in support for formatting Python snippets within +docstrings via the `docstring-code-format` setting. +[Check out the blog post](https://astral.sh/blog/ruff-v0.1.8) for more details! + +### Preview features + +- Add `"preserve"` quote-style to mimic Black's skip-string-normalization ([#8822](https://github.com/astral-sh/ruff/pull/8822)) +- Implement `prefer_splitting_right_hand_side_of_assignments` preview style ([#8943](https://github.com/astral-sh/ruff/pull/8943)) +- \[`pycodestyle`\] Add fix for `unexpected-spaces-around-keyword-parameter-equals` ([#9072](https://github.com/astral-sh/ruff/pull/9072)) +- \[`pycodestyle`\] Add fix for comment-related whitespace rules ([#9075](https://github.com/astral-sh/ruff/pull/9075)) +- \[`pycodestyle`\] Allow `sys.path` modifications between imports ([#9047](https://github.com/astral-sh/ruff/pull/9047)) +- \[`refurb`\] Implement `hashlib-digest-hex` (`FURB181`) ([#9077](https://github.com/astral-sh/ruff/pull/9077)) + +### Rule changes + +- Allow `flake8-type-checking` rules to automatically quote runtime-evaluated references ([#6001](https://github.com/astral-sh/ruff/pull/6001)) +- Allow transparent cell magics in Jupyter Notebooks ([#8911](https://github.com/astral-sh/ruff/pull/8911)) +- \[`flake8-annotations`\] Avoid `ANN2xx` fixes for abstract methods with empty bodies ([#9034](https://github.com/astral-sh/ruff/pull/9034)) +- \[`flake8-self`\] Ignore underscore references in type annotations ([#9036](https://github.com/astral-sh/ruff/pull/9036)) +- \[`pep8-naming`\] Allow class names when `apps.get_model` is a non-string ([#9065](https://github.com/astral-sh/ruff/pull/9065)) +- \[`pycodestyle`\] Allow `matplotlib.use` calls to intersperse imports ([#9094](https://github.com/astral-sh/ruff/pull/9094)) +- \[`pyflakes`\] Support fixing unused assignments in tuples by renaming variables (`F841`) ([#9107](https://github.com/astral-sh/ruff/pull/9107)) +- \[`pylint`\] Add fix for `subprocess-run-without-check` (`PLW1510`) ([#6708](https://github.com/astral-sh/ruff/pull/6708)) + +### Formatter + +- Add `docstring-code-format` knob to enable docstring snippet formatting ([#8854](https://github.com/astral-sh/ruff/pull/8854)) +- Use double quotes for all docstrings, including single-quoted docstrings ([#9020](https://github.com/astral-sh/ruff/pull/9020)) +- Implement "dynamic" line width mode for docstring code formatting ([#9098](https://github.com/astral-sh/ruff/pull/9098)) +- Support reformatting Markdown code blocks ([#9030](https://github.com/astral-sh/ruff/pull/9030)) +- add support for formatting reStructuredText code snippets ([#9003](https://github.com/astral-sh/ruff/pull/9003)) +- Avoid trailing comma for single-argument with positional separator ([#9076](https://github.com/astral-sh/ruff/pull/9076)) +- Fix handling of trailing target comment ([#9051](https://github.com/astral-sh/ruff/pull/9051)) + +### CLI + +- Hide unsafe fix suggestions when explicitly disabled ([#9095](https://github.com/astral-sh/ruff/pull/9095)) +- Add SARIF support to `--output-format` ([#9078](https://github.com/astral-sh/ruff/pull/9078)) + +### Bug fixes + +- 
Apply unnecessary index rule prior to enumerate rewrite ([#9012](https://github.com/astral-sh/ruff/pull/9012)) +- \[`flake8-err-msg`\] Allow `EM` fixes even if `msg` variable is defined ([#9059](https://github.com/astral-sh/ruff/pull/9059)) +- \[`flake8-pie`\] Prevent keyword arguments duplication ([#8450](https://github.com/astral-sh/ruff/pull/8450)) +- \[`flake8-pie`\] Respect trailing comma in `unnecessary-dict-kwargs` (`PIE804`) ([#9015](https://github.com/astral-sh/ruff/pull/9015)) +- \[`flake8-raise`\] Avoid removing parentheses on ctypes.WinError ([#9027](https://github.com/astral-sh/ruff/pull/9027)) +- \[`isort`\] Avoid invalid combination of `force-sort-within-types` and `lines-between-types` ([#9041](https://github.com/astral-sh/ruff/pull/9041)) +- \[`isort`\] Ensure that from-style imports are always ordered first in `__future__` ([#9039](https://github.com/astral-sh/ruff/pull/9039)) +- \[`pycodestyle`\] Allow tab indentation before keyword ([#9099](https://github.com/astral-sh/ruff/pull/9099)) +- \[`pylint`\] Ignore `@overrides` and `@overloads` for `too-many-positional` ([#9000](https://github.com/astral-sh/ruff/pull/9000)) +- \[`pyupgrade`\] Enable `printf-string-formatting` fix with comments on right-hand side ([#9037](https://github.com/astral-sh/ruff/pull/9037)) +- \[`refurb`\] Make `math-constant` (`FURB152`) rule more targeted ([#9054](https://github.com/astral-sh/ruff/pull/9054)) +- \[`refurb`\] Support floating-point base in `redundant-log-base` (`FURB163`) ([#9100](https://github.com/astral-sh/ruff/pull/9100)) +- \[`ruff`\] Detect `unused-asyncio-dangling-task` (`RUF006`) on unused assignments ([#9060](https://github.com/astral-sh/ruff/pull/9060)) + +## 0.1.7 + +### Preview features + +- Implement multiline dictionary and list hugging for preview style ([#8293](https://github.com/astral-sh/ruff/pull/8293)) +- Implement the `fix_power_op_line_length` preview style ([#8947](https://github.com/astral-sh/ruff/pull/8947)) +- Use Python version to determine typing rewrite safety ([#8919](https://github.com/astral-sh/ruff/pull/8919)) +- \[`flake8-annotations`\] Enable auto-return-type involving `Optional` and `Union` annotations ([#8885](https://github.com/astral-sh/ruff/pull/8885)) +- \[`flake8-bandit`\] Implement `django-raw-sql` (`S611`) ([#8651](https://github.com/astral-sh/ruff/pull/8651)) +- \[`flake8-bandit`\] Implement `tarfile-unsafe-members` (`S202`) ([#8829](https://github.com/astral-sh/ruff/pull/8829)) +- \[`flake8-pyi`\] Implement fix for `unnecessary-literal-union` (`PYI030`) ([#7934](https://github.com/astral-sh/ruff/pull/7934)) +- \[`flake8-simplify`\] Extend `dict-get-with-none-default` (`SIM910`) to non-literals ([#8762](https://github.com/astral-sh/ruff/pull/8762)) +- \[`pylint`\] - add `unnecessary-list-index-lookup` (`PLR1736`) + autofix ([#7999](https://github.com/astral-sh/ruff/pull/7999)) +- \[`pylint`\] - implement R0202 and R0203 with autofixes ([#8335](https://github.com/astral-sh/ruff/pull/8335)) +- \[`pylint`\] Implement `repeated-keyword` (`PLe1132`) ([#8706](https://github.com/astral-sh/ruff/pull/8706)) +- \[`pylint`\] Implement `too-many-positional` (`PLR0917`) ([#8995](https://github.com/astral-sh/ruff/pull/8995)) +- \[`pylint`\] Implement `unnecessary-dict-index-lookup` (`PLR1733`) ([#8036](https://github.com/astral-sh/ruff/pull/8036)) +- \[`refurb`\] Implement `redundant-log-base` (`FURB163`) ([#8842](https://github.com/astral-sh/ruff/pull/8842)) + +### Rule changes + +- \[`flake8-boolean-trap`\] Allow booleans in `@override` methods 
([#8882](https://github.com/astral-sh/ruff/pull/8882)) +- \[`flake8-bugbear`\] Avoid `B015`,`B018` for last expression in a cell ([#8815](https://github.com/astral-sh/ruff/pull/8815)) +- \[`flake8-pie`\] Allow ellipses for enum values in stub files ([#8825](https://github.com/astral-sh/ruff/pull/8825)) +- \[`flake8-pyi`\] Check PEP 695 type aliases for `snake-case-type-alias` and `t-suffixed-type-alias` ([#8966](https://github.com/astral-sh/ruff/pull/8966)) +- \[`flake8-pyi`\] Check for kwarg and vararg `NoReturn` type annotations ([#8948](https://github.com/astral-sh/ruff/pull/8948)) +- \[`flake8-simplify`\] Omit select context managers from `SIM117` ([#8801](https://github.com/astral-sh/ruff/pull/8801)) +- \[`pep8-naming`\] Allow Django model loads in `non-lowercase-variable-in-function` (`N806`) ([#8917](https://github.com/astral-sh/ruff/pull/8917)) +- \[`pycodestyle`\] Avoid `E703` for last expression in a cell ([#8821](https://github.com/astral-sh/ruff/pull/8821)) +- \[`pycodestyle`\] Update `E402` to work at cell level for notebooks ([#8872](https://github.com/astral-sh/ruff/pull/8872)) +- \[`pydocstyle`\] Avoid `D100` for Jupyter Notebooks ([#8816](https://github.com/astral-sh/ruff/pull/8816)) +- \[`pylint`\] Implement fix for `unspecified-encoding` (`PLW1514`) ([#8928](https://github.com/astral-sh/ruff/pull/8928)) + +### Formatter + +- Avoid unstable formatting in ellipsis-only body with trailing comment ([#8984](https://github.com/astral-sh/ruff/pull/8984)) +- Inline trailing comments for type alias similar to assignments ([#8941](https://github.com/astral-sh/ruff/pull/8941)) +- Insert trailing comma when function breaks with single argument ([#8921](https://github.com/astral-sh/ruff/pull/8921)) + +### CLI + +- Update `ruff check` and `ruff format` to default to the current directory ([#8791](https://github.com/astral-sh/ruff/pull/8791)) +- Stop at the first resolved parent configuration ([#8864](https://github.com/astral-sh/ruff/pull/8864)) + +### Configuration + +- \[`pylint`\] Default `max-positional-args` to `max-args` ([#8998](https://github.com/astral-sh/ruff/pull/8998)) +- \[`pylint`\] Add `allow-dunder-method-names` setting for `bad-dunder-method-name` (`PLW3201`) ([#8812](https://github.com/astral-sh/ruff/pull/8812)) +- \[`isort`\] Add support for `from-first` setting ([#8663](https://github.com/astral-sh/ruff/pull/8663)) +- \[`isort`\] Add support for `length-sort` settings ([#8841](https://github.com/astral-sh/ruff/pull/8841)) + +### Bug fixes + +- Add support for `@functools.singledispatch` ([#8934](https://github.com/astral-sh/ruff/pull/8934)) +- Avoid off-by-one error in stripping noqa following multi-byte char ([#8979](https://github.com/astral-sh/ruff/pull/8979)) +- Avoid off-by-one error in with-item named expressions ([#8915](https://github.com/astral-sh/ruff/pull/8915)) +- Avoid syntax error via invalid ur string prefix ([#8971](https://github.com/astral-sh/ruff/pull/8971)) +- Avoid underflow in `get_model` matching ([#8965](https://github.com/astral-sh/ruff/pull/8965)) +- Avoid unnecessary index diagnostics when value is modified ([#8970](https://github.com/astral-sh/ruff/pull/8970)) +- Convert over-indentation rule to use number of characters ([#8983](https://github.com/astral-sh/ruff/pull/8983)) +- Detect implicit returns in auto-return-types ([#8952](https://github.com/astral-sh/ruff/pull/8952)) +- Fix start >= end error in over-indentation ([#8982](https://github.com/astral-sh/ruff/pull/8982)) +- Ignore `@overload` and `@override` methods for 
too-many-arguments checks ([#8954](https://github.com/astral-sh/ruff/pull/8954)) +- Lexer start of line is false only for `Mode::Expression` ([#8880](https://github.com/astral-sh/ruff/pull/8880)) +- Mark `pydantic_settings.BaseSettings` as having default copy semantics ([#8793](https://github.com/astral-sh/ruff/pull/8793)) +- Respect dictionary unpacking in `NamedTuple` assignments ([#8810](https://github.com/astral-sh/ruff/pull/8810)) +- Respect local subclasses in `flake8-type-checking` ([#8768](https://github.com/astral-sh/ruff/pull/8768)) +- Support type alias statements in simple statement positions ([#8916](https://github.com/astral-sh/ruff/pull/8916)) +- \[`flake8-annotations`\] Avoid filtering out un-representable types in return annotation ([#8881](https://github.com/astral-sh/ruff/pull/8881)) +- \[`flake8-pie`\] Retain extra ellipses in protocols and abstract methods ([#8769](https://github.com/astral-sh/ruff/pull/8769)) +- \[`flake8-pyi`\] Respect local enum subclasses in `simple-defaults` (`PYI052`) ([#8767](https://github.com/astral-sh/ruff/pull/8767)) +- \[`flake8-trio`\] Use correct range for `TRIO115` fix ([#8933](https://github.com/astral-sh/ruff/pull/8933)) +- \[`flake8-trio`\] Use full arguments range for zero-sleep-call ([#8936](https://github.com/astral-sh/ruff/pull/8936)) +- \[`isort`\] fix: mark `__main__` as first-party import ([#8805](https://github.com/astral-sh/ruff/pull/8805)) +- \[`pep8-naming`\] Avoid `N806` errors for type alias statements ([#8785](https://github.com/astral-sh/ruff/pull/8785)) +- \[`perflint`\] Avoid `PERF101` if there's an append in loop body ([#8809](https://github.com/astral-sh/ruff/pull/8809)) +- \[`pycodestyle`\] Allow space-before-colon after end-of-slice ([#8838](https://github.com/astral-sh/ruff/pull/8838)) +- \[`pydocstyle`\] Avoid non-character breaks in `over-indentation` (`D208`) ([#8866](https://github.com/astral-sh/ruff/pull/8866)) +- \[`pydocstyle`\] Ignore underlines when determining docstring logical lines ([#8929](https://github.com/astral-sh/ruff/pull/8929)) +- \[`pylint`\] Extend `self-assigning-variable` to multi-target assignments ([#8839](https://github.com/astral-sh/ruff/pull/8839)) +- \[`tryceratops`\] Avoid repeated triggers in nested `tryceratops` diagnostics ([#8772](https://github.com/astral-sh/ruff/pull/8772)) + +### Documentation + +- Add advice for fixing RUF008 when mutability is not desired ([#8853](https://github.com/astral-sh/ruff/pull/8853)) +- Added the command to run ruff using pkgx to the installation.md ([#8955](https://github.com/astral-sh/ruff/pull/8955)) +- Document fix safety for flake8-comprehensions and some pyupgrade rules ([#8918](https://github.com/astral-sh/ruff/pull/8918)) +- Fix doc formatting for zero-sleep-call ([#8937](https://github.com/astral-sh/ruff/pull/8937)) +- Remove duplicate imports from os-stat documentation ([#8930](https://github.com/astral-sh/ruff/pull/8930)) +- Replace generated reference to MkDocs ([#8806](https://github.com/astral-sh/ruff/pull/8806)) +- Update Arch Linux package URL in installation.md ([#8802](https://github.com/astral-sh/ruff/pull/8802)) +- \[`flake8-pyi`\] Fix error in `t-suffixed-type-alias` (`PYI043`) example ([#8963](https://github.com/astral-sh/ruff/pull/8963)) +- \[`flake8-pyi`\] Improve motivation for `custom-type-var-return-type` (`PYI019`) ([#8766](https://github.com/astral-sh/ruff/pull/8766)) + ## 0.1.6 ### Preview features diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2d4ecb9b44..d79da233eb 100644 --- a/CONTRIBUTING.md +++ 
b/CONTRIBUTING.md @@ -556,10 +556,10 @@ examples. #### Linux -Install `perf` and build `ruff_benchmark` with the `release-debug` profile and then run it with perf +Install `perf` and build `ruff_benchmark` with the `profiling` profile and then run it with perf ```shell -cargo bench -p ruff_benchmark --no-run --profile=release-debug && perf record --call-graph dwarf -F 9999 cargo bench -p ruff_benchmark --profile=release-debug -- --profile-time=1 +cargo bench -p ruff_benchmark --no-run --profile=profiling && perf record --call-graph dwarf -F 9999 cargo bench -p ruff_benchmark --profile=profiling -- --profile-time=1 ``` You can also use the `ruff_dev` launcher to run `ruff check` multiple times on a repository to @@ -567,8 +567,8 @@ gather enough samples for a good flamegraph (change the 999, the sample rate, an of checks, to your liking) ```shell -cargo build --bin ruff_dev --profile=release-debug -perf record -g -F 999 target/release-debug/ruff_dev repeat --repeat 30 --exit-zero --no-cache path/to/cpython > /dev/null +cargo build --bin ruff_dev --profile=profiling +perf record -g -F 999 target/profiling/ruff_dev repeat --repeat 30 --exit-zero --no-cache path/to/cpython > /dev/null ``` Then convert the recorded profile @@ -598,7 +598,7 @@ cargo install cargo-instruments Then run the profiler with ```shell -cargo instruments -t time --bench linter --profile release-debug -p ruff_benchmark -- --profile-time=1 +cargo instruments -t time --bench linter --profile profiling -p ruff_benchmark -- --profile-time=1 ``` - `-t`: Specifies what to profile. Useful options are `time` to profile the wall time and `alloc` diff --git a/Cargo.lock b/Cargo.lock index ee526538f9..87a42ff98d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -16,14 +16,15 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "ahash" -version = "0.8.3" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" +checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a" dependencies = [ "cfg-if", "getrandom", "once_cell", "version_check", + "zerocopy", ] [[package]] @@ -381,7 +382,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.40", ] [[package]] @@ -606,7 +607,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.39", + "syn 2.0.40", ] [[package]] @@ -617,7 +618,7 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ "darling_core", "quote", - "syn 2.0.39", + "syn 2.0.40", ] [[package]] @@ -790,14 +791,14 @@ dependencies = [ [[package]] name = "filetime" -version = "0.2.22" +version = "0.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0" +checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.3.5", - "windows-sys 0.48.0", + "redox_syscall 0.4.1", + "windows-sys 0.52.0", ] [[package]] @@ -808,7 +809,7 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flake8-to-ruff" -version = "0.1.6" +version = "0.1.8" dependencies = [ "anyhow", "clap", @@ -827,7 +828,7 @@ dependencies = [ "serde_json", "strum", "strum_macros", - "toml 0.7.8", + "toml", ] [[package]] @@ -848,18 +849,18 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] 
name = "form_urlencoded" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] [[package]] name = "fs-err" -version = "2.10.0" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb5fd9bcbe8b1087cbd395b51498c01bc997cef73e778a80b77a811af5e2d29f" +checksum = "88a41f105fe1d5b6b34b2055e3dc59bb79b46b48b2040b9e6c7b4b5de097aa41" dependencies = [ "autocfg", ] @@ -987,9 +988,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" dependencies = [ "unicode-bidi", "unicode-normalization", @@ -1122,15 +1123,15 @@ dependencies = [ [[package]] name = "is-macro" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4467ed1321b310c2625c5aa6c1b1ffc5de4d9e42668cf697a08fb033ee8265e" +checksum = "bc74b7abae208af9314a406bd7dcc65091230b6e749c09e07a645885fecf34f9" dependencies = [ "Inflector", "pmutil 0.6.1", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.40", ] [[package]] @@ -1170,9 +1171,9 @@ checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "js-sys" -version = "0.3.65" +version = "0.3.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54c0c35952f67de54bb584e9fd912b3023117cbafc0a77d8f3dee1fb5f572fe8" +checksum = "cee9c64da59eae3b50095c18d3e74f8b73c0b86d2792824ff01bbce68ba229ca" dependencies = [ "wasm-bindgen", ] @@ -1481,9 +1482,9 @@ checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" [[package]] name = "once_cell" -version = "1.18.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "oorandom" @@ -1622,9 +1623,9 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "petgraph" @@ -1708,7 +1709,7 @@ checksum = "52a40bc70c2c58040d2d8b167ba9a5ff59fc9dab7ad44771cfde3dcfde7a09c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.40", ] [[package]] @@ -1810,7 +1811,7 @@ dependencies = [ "pep440_rs", "pep508_rs", "serde", - "toml 0.8.2", + "toml", ] [[package]] @@ -2062,7 +2063,7 @@ dependencies = [ [[package]] name = "ruff_cli" -version = "0.1.6" +version = "0.1.8" dependencies = [ "annotate-snippets 0.9.2", "anyhow", @@ -2154,7 +2155,7 @@ dependencies = [ "strum", "strum_macros", "tempfile", - "toml 0.7.8", + "toml", "tracing", "tracing-indicatif", "tracing-subscriber", @@ -2198,7 +2199,7 @@ dependencies = [ [[package]] name = "ruff_linter" -version = "0.1.6" +version = "0.1.8" dependencies = [ "aho-corasick", "annotate-snippets 0.9.2", @@ -2254,10 
+2255,11 @@ dependencies = [ "tempfile", "test-case", "thiserror", - "toml 0.7.8", + "toml", "typed-arena", "unicode-width", "unicode_names2", + "url", "wsl", ] @@ -2269,7 +2271,7 @@ dependencies = [ "proc-macro2", "quote", "ruff_python_trivia", - "syn 2.0.39", + "syn 2.0.40", ] [[package]] @@ -2450,7 +2452,7 @@ dependencies = [ [[package]] name = "ruff_shrinking" -version = "0.1.6" +version = "0.1.8" dependencies = [ "anyhow", "clap", @@ -2541,7 +2543,7 @@ dependencies = [ "shellexpand", "strum", "tempfile", - "toml 0.7.8", + "toml", ] [[package]] @@ -2618,9 +2620,9 @@ dependencies = [ [[package]] name = "schemars" -version = "0.8.15" +version = "0.8.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f7b0ce13155372a76ee2e1c5ffba1fe61ede73fbea5630d61eee6fac4929c0c" +checksum = "45a28f4c49489add4ce10783f7911893516f15afe45d015608d41faca6bc4d29" dependencies = [ "dyn-clone", "schemars_derive", @@ -2630,9 +2632,9 @@ dependencies = [ [[package]] name = "schemars_derive" -version = "0.8.15" +version = "0.8.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e85e2a16b12bdb763244c69ab79363d71db2b4b918a2def53f80b02e0574b13c" +checksum = "c767fd6fa65d9ccf9cf026122c1b555f2ef9a4f0cea69da4d7dbc3e258d30967" dependencies = [ "proc-macro2", "quote", @@ -2676,18 +2678,18 @@ checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090" [[package]] name = "serde" -version = "1.0.190" +version = "1.0.193" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91d3c334ca1ee894a2c6f6ad698fe8c435b76d504b13d436f0685d648d6d96f7" +checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89" dependencies = [ "serde_derive", ] [[package]] name = "serde-wasm-bindgen" -version = "0.6.1" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17ba92964781421b6cef36bf0d7da26d201e96d84e1b10e7ae6ed416e516906d" +checksum = "b9b713f70513ae1f8d92665bbbbda5c295c2cf1da5542881ae5eefe20c9af132" dependencies = [ "js-sys", "serde", @@ -2696,13 +2698,13 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.190" +version = "1.0.193" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67c5609f394e5c2bd7fc51efda478004ea80ef42fee983d5c67a65e34f32c0e3" +checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.40", ] [[package]] @@ -2764,7 +2766,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.40", ] [[package]] @@ -2868,7 +2870,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.39", + "syn 2.0.40", ] [[package]] @@ -2884,9 +2886,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.39" +version = "2.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a" +checksum = "13fa70a4ee923979ffb522cacce59d34421ebdea5625e1073c4326ef9d2dd42e" dependencies = [ "proc-macro2", "quote", @@ -2973,7 +2975,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.40", ] [[package]] @@ -2985,7 +2987,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.40", "test-case-core", ] @@ -3006,7 +3008,7 @@ checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 
2.0.40", ] [[package]] @@ -3089,18 +3091,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" -[[package]] -name = "toml" -version = "0.7.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257" -dependencies = [ - "serde", - "serde_spanned", - "toml_datetime", - "toml_edit 0.19.15", -] - [[package]] name = "toml" version = "0.8.2" @@ -3110,7 +3100,7 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.20.2", + "toml_edit", ] [[package]] @@ -3122,19 +3112,6 @@ dependencies = [ "serde", ] -[[package]] -name = "toml_edit" -version = "0.19.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" -dependencies = [ - "indexmap", - "serde", - "serde_spanned", - "toml_datetime", - "winnow", -] - [[package]] name = "toml_edit" version = "0.20.2" @@ -3168,7 +3145,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.40", ] [[package]] @@ -3183,9 +3160,9 @@ dependencies = [ [[package]] name = "tracing-indicatif" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57e05fe4a1c906d94b275d8aeb8ff8b9deaca502aeb59ae8ab500a92b8032ac8" +checksum = "069580424efe11d97c3fef4197fa98c004fa26672cc71ad8770d224e23b1951d" dependencies = [ "indicatif", "tracing", @@ -3305,9 +3282,9 @@ checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" [[package]] name = "unicode_names2" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d5506ae2c3c1ccbdf468e52fc5ef536c2ccd981f01273a4cb81aa61021f3a5f" +checksum = "ac64ef2f016dc69dfa8283394a70b057066eb054d5fcb6b9eb17bd2ec5097211" dependencies = [ "phf", "unicode_names2_generator", @@ -3315,9 +3292,9 @@ dependencies = [ [[package]] name = "unicode_names2_generator" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6dfc680313e95bc6637fa278cd7a22390c3c2cd7b8b2bd28755bc6c0fc811e7" +checksum = "013f6a731e80f3930de580e55ba41dfa846de4e0fdee4a701f97989cb1597d6a" dependencies = [ "getopts", "log", @@ -3334,9 +3311,9 @@ checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" [[package]] name = "ureq" -version = "2.8.0" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5ccd538d4a604753ebc2f17cd9946e89b77bf87f6a8e2309667c6f2e87855e3" +checksum = "f8cdd25c339e200129fe4de81451814e5228c9b771d57378817d6117cc2b3f97" dependencies = [ "base64", "flate2", @@ -3350,9 +3327,9 @@ dependencies = [ [[package]] name = "url" -version = "2.4.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" dependencies = [ "form_urlencoded", "idna", @@ -3386,7 +3363,7 @@ checksum = "f49e7f3f3db8040a100710a11932239fd30697115e2ba4107080d8252939845e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.40", ] [[package]] @@ -3461,9 +3438,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" 
[[package]] name = "wasm-bindgen" -version = "0.2.88" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7daec296f25a1bae309c0cd5c29c4b260e510e6d813c286b19eaadf409d40fce" +checksum = "0ed0d4f68a3015cc185aff4db9506a015f4b96f95303897bfa23f846db54064e" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -3471,24 +3448,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.88" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e397f4664c0e4e428e8313a469aaa58310d302159845980fd23b0f22a847f217" +checksum = "1b56f625e64f3a1084ded111c4d5f477df9f8c92df113852fa5a374dbda78826" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.40", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.38" +version = "0.4.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9afec9963e3d0994cac82455b2b3502b81a7f40f9a0d32181f7528d9f4b43e02" +checksum = "ac36a15a220124ac510204aec1c3e5db8a22ab06fd6706d881dc6149f8ed9a12" dependencies = [ "cfg-if", "js-sys", @@ -3498,9 +3475,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.88" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5961017b3b08ad5f3fe39f1e79877f8ee7c23c5e5fd5eb80de95abc41f1f16b2" +checksum = "0162dbf37223cd2afce98f3d0785506dcb8d266223983e4b5b525859e6e182b2" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -3508,28 +3485,28 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.88" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5353b8dab669f5e10f5bd76df26a9360c748f054f862ff5f3f8aae0c7fb3907" +checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.40", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.88" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d046c5d029ba91a1ed14da14dca44b68bf2f124cfbaf741c54151fdb3e0750b" +checksum = "7ab9b36309365056cd639da3134bf87fa8f3d86008abf99e612384a6eecd459f" [[package]] name = "wasm-bindgen-test" -version = "0.3.38" +version = "0.3.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6433b7c56db97397842c46b67e11873eda263170afeb3a2dc74a7cb370fee0d" +checksum = "2cf9242c0d27999b831eae4767b2a146feb0b27d332d553e605864acd2afd403" dependencies = [ "console_error_panic_hook", "js-sys", @@ -3541,13 +3518,13 @@ dependencies = [ [[package]] name = "wasm-bindgen-test-macro" -version = "0.3.38" +version = "0.3.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "493fcbab756bb764fa37e6bee8cec2dd709eb4273d06d0c282a5e74275ded735" +checksum = "794645f5408c9a039fd09f4d113cdfb2e7eba5ff1956b07bcf701cf4b394fe89" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.40", ] [[package]] @@ -3644,6 +3621,15 @@ dependencies = [ "windows-targets 0.48.5", ] +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.0", +] + [[package]] name = "windows-targets" version = "0.42.2" @@ -3674,6 +3660,21 @@ dependencies = [ 
"windows_x86_64_msvc 0.48.5", ] +[[package]] +name = "windows-targets" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" +dependencies = [ + "windows_aarch64_gnullvm 0.52.0", + "windows_aarch64_msvc 0.52.0", + "windows_i686_gnu 0.52.0", + "windows_i686_msvc 0.52.0", + "windows_x86_64_gnu 0.52.0", + "windows_x86_64_gnullvm 0.52.0", + "windows_x86_64_msvc 0.52.0", +] + [[package]] name = "windows_aarch64_gnullvm" version = "0.42.2" @@ -3686,6 +3687,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" + [[package]] name = "windows_aarch64_msvc" version = "0.42.2" @@ -3698,6 +3705,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" + [[package]] name = "windows_i686_gnu" version = "0.42.2" @@ -3710,6 +3723,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" +[[package]] +name = "windows_i686_gnu" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" + [[package]] name = "windows_i686_msvc" version = "0.42.2" @@ -3722,6 +3741,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" +[[package]] +name = "windows_i686_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" + [[package]] name = "windows_x86_64_gnu" version = "0.42.2" @@ -3734,6 +3759,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" + [[package]] name = "windows_x86_64_gnullvm" version = "0.42.2" @@ -3746,6 +3777,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" + [[package]] name = "windows_x86_64_msvc" version = "0.42.2" @@ -3758,6 +3795,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" + [[package]] name = "winnow" version = "0.5.15" @@ -3796,3 +3839,23 @@ checksum = "fe5c30ade05e61656247b2e334a031dfd0cc466fadef865bdcdea8d537951bf1" dependencies = [ "winapi", ] + +[[package]] +name = "zerocopy" +version = "0.7.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "306dca4455518f1f31635ec308b6b3e4eb1b11758cefafc782827d0aa7acb5c7" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be912bf68235a88fbefd1b73415cb218405958d1655b2ece9035a19920bdf6ba" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.40", +] diff --git a/Cargo.toml b/Cargo.toml index 6067694992..0c57416eae 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,24 +17,24 @@ bitflags = { version = "2.4.1" } chrono = { version = "0.4.31", default-features = false, features = ["clock"] } clap = { version = "4.4.7", features = ["derive"] } colored = { version = "2.0.0" } -filetime = { version = "0.2.20" } +filetime = { version = "0.2.23" } glob = { version = "0.3.1" } globset = { version = "0.4.14" } ignore = { version = "0.4.20" } insta = { version = "1.34.0", feature = ["filters", "glob"] } -is-macro = { version = "0.3.0" } +is-macro = { version = "0.3.1" } itertools = { version = "0.11.0" } libcst = { version = "1.1.0", default-features = false } log = { version = "0.4.17" } memchr = { version = "2.6.4" } -once_cell = { version = "1.17.1" } +once_cell = { version = "1.19.0" } path-absolutize = { version = "3.1.1" } proc-macro2 = { version = "1.0.70" } quote = { version = "1.0.23" } regex = { version = "1.10.2" } rustc-hash = { version = "1.1.0" } -schemars = { version = "0.8.15" } -serde = { version = "1.0.190", features = ["derive"] } +schemars = { version = "0.8.16" } +serde = { version = "1.0.193", features = ["derive"] } serde_json = { version = "1.0.108" } shellexpand = { version = "3.0.0" } similar = { version = "2.3.0", features = ["inline"] } @@ -42,15 +42,15 @@ smallvec = { version = "1.11.2" } static_assertions = "1.1.0" strum = { version = "0.25.0", features = ["strum_macros"] } strum_macros = { version = "0.25.3" } -syn = { version = "2.0.39" } +syn = { version = "2.0.40" } test-case = { version = "3.2.1" } thiserror = { version = "1.0.50" } -toml = { version = "0.7.8" } +toml = { version = "0.8.2" } tracing = { version = "0.1.40" } -tracing-indicatif = { version = "0.3.4" } +tracing-indicatif = { version = "0.3.6" } tracing-subscriber = { version = "0.3.18", features = ["env-filter"] } unicode-ident = { version = "1.0.12" } -unicode_names2 = { version = "1.2.0" } +unicode_names2 = { version = "1.2.1" } unicode-width = { version = "0.1.11" } uuid = { version = "1.6.1", features = ["v4", "fast-rng", "macro-diagnostics", "js"] } wsl = { version = "0.1.0" } @@ -88,7 +88,20 @@ rc_mutex = "warn" rest_pat_in_fully_bound_structs = "warn" [profile.release] -lto = "fat" +# Note that we set these explicitly, and these values +# were chosen based on a trade-off between compile times +# and runtime performance[1]. +# +# [1]: https://github.com/astral-sh/ruff/pull/9031 +lto = "thin" +codegen-units = 16 + +# Some crates don't change as much but benefit more from +# more expensive optimization passes, so we selectively +# decrease codegen-units in some cases. 
+[profile.release.package.ruff_python_parser] +codegen-units = 1 +[profile.release.package.ruff_python_ast] codegen-units = 1 [profile.dev.package.insta] @@ -102,8 +115,8 @@ opt-level = 3 [profile.dev.package.ruff_python_parser] opt-level = 1 -# Use the `--profile release-debug` flag to show symbols in release mode. -# e.g. `cargo build --profile release-debug` -[profile.release-debug] +# Use the `--profile profiling` flag to show symbols in release mode. +# e.g. `cargo build --profile profiling` +[profile.profiling] inherits = "release" debug = 1 diff --git a/README.md b/README.md index 2cb126aea6..588539c85c 100644 --- a/README.md +++ b/README.md @@ -150,7 +150,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff ```yaml - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.1.6 + rev: v0.1.8 hooks: # Run the linter. - id: ruff @@ -194,20 +194,25 @@ exclude = [ ".git", ".git-rewrite", ".hg", + ".ipynb_checkpoints", ".mypy_cache", ".nox", ".pants.d", + ".pyenv", + ".pytest_cache", ".pytype", ".ruff_cache", ".svn", ".tox", ".venv", + ".vscode", "__pypackages__", "_build", "buck-out", "build", "dist", "node_modules", + "site-packages", "venv", ] diff --git a/crates/flake8_to_ruff/Cargo.toml b/crates/flake8_to_ruff/Cargo.toml index b17abde1a8..71e5032e76 100644 --- a/crates/flake8_to_ruff/Cargo.toml +++ b/crates/flake8_to_ruff/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "flake8-to-ruff" -version = "0.1.6" +version = "0.1.8" description = """ Convert Flake8 configuration files to Ruff configuration files. """ diff --git a/crates/ruff_benchmark/Cargo.toml b/crates/ruff_benchmark/Cargo.toml index 96d3476da6..60f1b5011e 100644 --- a/crates/ruff_benchmark/Cargo.toml +++ b/crates/ruff_benchmark/Cargo.toml @@ -34,8 +34,8 @@ harness = false once_cell.workspace = true serde.workspace = true serde_json.workspace = true -url = "2.3.1" -ureq = "2.8.0" +url = "2.5.0" +ureq = "2.9.1" criterion = { version = "0.5.1", default-features = false } codspeed-criterion-compat = { version="2.3.3", default-features = false, optional = true} diff --git a/crates/ruff_cli/Cargo.toml b/crates/ruff_cli/Cargo.toml index 84bfcb5f66..ded05207e0 100644 --- a/crates/ruff_cli/Cargo.toml +++ b/crates/ruff_cli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ruff_cli" -version = "0.1.6" +version = "0.1.8" publish = false authors = { workspace = true } edition = { workspace = true } @@ -69,7 +69,7 @@ insta = { workspace = true, features = ["filters", "json"] } insta-cmd = { version = "0.4.0" } tempfile = "3.8.1" test-case = { workspace = true } -ureq = { version = "2.8.0", features = [] } +ureq = { version = "2.9.1", features = [] } [target.'cfg(target_os = "windows")'.dependencies] mimalloc = "0.1.39" diff --git a/crates/ruff_cli/src/commands/format.rs b/crates/ruff_cli/src/commands/format.rs index 697c412163..2b9144ad5b 100644 --- a/crates/ruff_cli/src/commands/format.rs +++ b/crates/ruff_cli/src/commands/format.rs @@ -515,7 +515,7 @@ impl<'a> FormatResults<'a> { if changed > 0 && unchanged > 0 { writeln!( f, - "{} file{} {}, {} file{} left unchanged", + "{} file{} {}, {} file{} {}", changed, if changed == 1 { "" } else { "s" }, match self.mode { @@ -524,6 +524,10 @@ impl<'a> FormatResults<'a> { }, unchanged, if unchanged == 1 { "" } else { "s" }, + match self.mode { + FormatMode::Write => "left unchanged", + FormatMode::Check | FormatMode::Diff => "already formatted", + }, ) } else if changed > 0 { writeln!( @@ -539,9 +543,13 @@ impl<'a> FormatResults<'a> { } else if 
unchanged > 0 { writeln!( f, - "{} file{} left unchanged", + "{} file{} {}", unchanged, if unchanged == 1 { "" } else { "s" }, + match self.mode { + FormatMode::Write => "left unchanged", + FormatMode::Check | FormatMode::Diff => "already formatted", + }, ) } else { Ok(()) diff --git a/crates/ruff_cli/src/printer.rs b/crates/ruff_cli/src/printer.rs index 6f22f37d16..5bfdece63b 100644 --- a/crates/ruff_cli/src/printer.rs +++ b/crates/ruff_cli/src/printer.rs @@ -13,7 +13,7 @@ use ruff_linter::fs::relativize_path; use ruff_linter::logging::LogLevel; use ruff_linter::message::{ AzureEmitter, Emitter, EmitterContext, GithubEmitter, GitlabEmitter, GroupedEmitter, - JsonEmitter, JsonLinesEmitter, JunitEmitter, PylintEmitter, TextEmitter, + JsonEmitter, JsonLinesEmitter, JunitEmitter, PylintEmitter, SarifEmitter, TextEmitter, }; use ruff_linter::notify_user; use ruff_linter::registry::{AsRule, Rule}; @@ -125,15 +125,7 @@ impl Printer { if let Some(fixables) = fixables { let fix_prefix = format!("[{}]", "*".cyan()); - if self.unsafe_fixes.is_enabled() { - if fixables.applicable > 0 { - writeln!( - writer, - "{fix_prefix} {} fixable with the --fix option.", - fixables.applicable - )?; - } - } else { + if self.unsafe_fixes.is_hint() { if fixables.applicable > 0 && fixables.unapplicable_unsafe > 0 { let es = if fixables.unapplicable_unsafe == 1 { "" @@ -163,6 +155,14 @@ impl Printer { fixables.unapplicable_unsafe )?; } + } else { + if fixables.applicable > 0 { + writeln!( + writer, + "{fix_prefix} {} fixable with the --fix option.", + fixables.applicable + )?; + } } } } else { @@ -291,6 +291,9 @@ impl Printer { SerializationFormat::Azure => { AzureEmitter.emit(writer, &diagnostics.messages, &context)?; } + SerializationFormat::Sarif => { + SarifEmitter.emit(writer, &diagnostics.messages, &context)?; + } } writer.flush()?; diff --git a/crates/ruff_cli/tests/format.rs b/crates/ruff_cli/tests/format.rs index e9970645d7..09d05d12b0 100644 --- a/crates/ruff_cli/tests/format.rs +++ b/crates/ruff_cli/tests/format.rs @@ -139,6 +139,99 @@ if condition: Ok(()) } +#[test] +fn docstring_options() -> Result<()> { + let tempdir = TempDir::new()?; + let ruff_toml = tempdir.path().join("ruff.toml"); + fs::write( + &ruff_toml, + r#" +[format] +docstring-code-format = true +docstring-code-line-length = 20 +"#, + )?; + + assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) + .args(["format", "--config"]) + .arg(&ruff_toml) + .arg("-") + .pass_stdin(r#" +def f(x): + ''' + Something about `f`. And an example: + + .. code-block:: python + + foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear) + + Another example: + + ```py + foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear) + ``` + + And another: + + >>> foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear) + ''' + pass +"#), @r###" +success: true +exit_code: 0 +----- stdout ----- +def f(x): + """ + Something about `f`. And an example: + + .. code-block:: python + + ( + foo, + bar, + quux, + ) = this_is_a_long_line( + lion, + hippo, + lemur, + bear, + ) + + Another example: + + ```py + ( + foo, + bar, + quux, + ) = this_is_a_long_line( + lion, + hippo, + lemur, + bear, + ) + ``` + + And another: + + >>> ( + ... foo, + ... bar, + ... quux, + ... ) = this_is_a_long_line( + ... lion, + ... hippo, + ... lemur, + ... bear, + ... 
) + """ + pass + +----- stderr ----- +"###); + Ok(()) +} + #[test] fn mixed_line_endings() -> Result<()> { let tempdir = TempDir::new()?; @@ -162,7 +255,7 @@ fn mixed_line_endings() -> Result<()> { ----- stdout ----- ----- stderr ----- - 2 files left unchanged + 2 files already formatted "###); Ok(()) } @@ -235,6 +328,60 @@ OTHER = "OTHER" Ok(()) } +#[test] +fn messages() -> Result<()> { + let tempdir = TempDir::new()?; + + fs::write( + tempdir.path().join("main.py"), + r#" +from test import say_hy + +if __name__ == "__main__": + say_hy("dear Ruff contributor") +"#, + )?; + + assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) + .current_dir(tempdir.path()) + .args(["format", "--no-cache", "--isolated", "--check"]) + .arg("main.py"), @r###" + success: false + exit_code: 1 + ----- stdout ----- + Would reformat: main.py + 1 file would be reformatted + + ----- stderr ----- + "###); + + assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) + .current_dir(tempdir.path()) + .args(["format", "--no-cache", "--isolated"]) + .arg("main.py"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + 1 file reformatted + + ----- stderr ----- + "###); + + assert_cmd_snapshot!(Command::new(get_cargo_bin(BIN_NAME)) + .current_dir(tempdir.path()) + .args(["format", "--no-cache", "--isolated"]) + .arg("main.py"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + 1 file left unchanged + + ----- stderr ----- + "###); + + Ok(()) +} + #[test] fn force_exclude() -> Result<()> { let tempdir = TempDir::new()?; @@ -783,7 +930,7 @@ fn test_diff() { ----- stderr ----- - 2 files would be reformatted, 1 file left unchanged + 2 files would be reformatted, 1 file already formatted "###); }); } diff --git a/crates/ruff_cli/tests/integration_test.rs b/crates/ruff_cli/tests/integration_test.rs index 37c18551c2..1ee634f6c0 100644 --- a/crates/ruff_cli/tests/integration_test.rs +++ b/crates/ruff_cli/tests/integration_test.rs @@ -1158,6 +1158,44 @@ fn check_hints_hidden_unsafe_fixes_with_no_safe_fixes() { "###); } +#[test] +fn check_no_hint_for_hidden_unsafe_fixes_when_disabled() { + let mut cmd = RuffCheck::default() + .args(["--select", "F601,UP034", "--no-unsafe-fixes"]) + .build(); + assert_cmd_snapshot!(cmd + .pass_stdin("x = {'a': 1, 'a': 1}\nprint(('foo'))\n"), + @r###" + success: false + exit_code: 1 + ----- stdout ----- + -:1:14: F601 Dictionary key literal `'a'` repeated + -:2:7: UP034 [*] Avoid extraneous parentheses + Found 2 errors. + [*] 1 fixable with the --fix option. + + ----- stderr ----- + "###); +} + +#[test] +fn check_no_hint_for_hidden_unsafe_fixes_with_no_safe_fixes_when_disabled() { + let mut cmd = RuffCheck::default() + .args(["--select", "F601", "--no-unsafe-fixes"]) + .build(); + assert_cmd_snapshot!(cmd + .pass_stdin("x = {'a': 1, 'a': 1}\n"), + @r###" + success: false + exit_code: 1 + ----- stdout ----- + -:1:14: F601 Dictionary key literal `'a'` repeated + Found 1 error. + + ----- stderr ----- + "###); +} + #[test] fn check_shows_unsafe_fixes_with_opt_in() { let mut cmd = RuffCheck::default() diff --git a/crates/ruff_diagnostics/src/edit.rs b/crates/ruff_diagnostics/src/edit.rs index 8c05474d21..5bd4e629b4 100644 --- a/crates/ruff_diagnostics/src/edit.rs +++ b/crates/ruff_diagnostics/src/edit.rs @@ -7,7 +7,7 @@ use ruff_text_size::{Ranged, TextRange, TextSize}; /// A text edit to be applied to a source file. Inserts, deletes, or replaces /// content at a given location. 
-#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Clone, Debug, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct Edit { /// The start location of the edit. diff --git a/crates/ruff_linter/Cargo.toml b/crates/ruff_linter/Cargo.toml index 6f129984ff..ea1cb6570b 100644 --- a/crates/ruff_linter/Cargo.toml +++ b/crates/ruff_linter/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ruff_linter" -version = "0.1.6" +version = "0.1.8" publish = false authors = { workspace = true } edition = { workspace = true } @@ -71,6 +71,7 @@ toml = { workspace = true } typed-arena = { version = "2.0.2" } unicode-width = { workspace = true } unicode_names2 = { workspace = true } +url = { version = "2.2.2" } wsl = { version = "0.1.0" } [dev-dependencies] diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_annotations/auto_return_type.py b/crates/ruff_linter/resources/test/fixtures/flake8_annotations/auto_return_type.py index ab75a50da7..a1df0985b0 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_annotations/auto_return_type.py +++ b/crates/ruff_linter/resources/test/fixtures/flake8_annotations/auto_return_type.py @@ -147,3 +147,68 @@ def func(x: int): while x > 0: break return 1 + + +import abc +from abc import abstractmethod + + +class Foo(abc.ABC): + @abstractmethod + def method(self): + pass + + @abc.abstractmethod + def method(self): + """Docstring.""" + + @abc.abstractmethod + def method(self): + ... + + @staticmethod + @abstractmethod + def method(): + pass + + @classmethod + @abstractmethod + def method(cls): + pass + + @abstractmethod + def method(self): + if self.x > 0: + return 1 + else: + return 1.5 + + +def func(x: int): + try: + pass + except: + return 2 + + +def func(x: int): + try: + pass + except: + return 2 + else: + return 3 + + +def func(x: int): + if not x: + raise ValueError + else: + raise TypeError + + +def func(x: int): + if not x: + raise ValueError + else: + return 1 diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_bandit/S104.py b/crates/ruff_linter/resources/test/fixtures/flake8_bandit/S104.py index 3bbab01871..7e50db0076 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_bandit/S104.py +++ b/crates/ruff_linter/resources/test/fixtures/flake8_bandit/S104.py @@ -8,6 +8,7 @@ def func(address): # Error "0.0.0.0" '0.0.0.0' +f"0.0.0.0" # Error diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_bandit/S108.py b/crates/ruff_linter/resources/test/fixtures/flake8_bandit/S108.py index 1689af66e6..c7cc7dd480 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_bandit/S108.py +++ b/crates/ruff_linter/resources/test/fixtures/flake8_bandit/S108.py @@ -5,6 +5,9 @@ with open("/abc/tmp", "w") as f: with open("/tmp/abc", "w") as f: f.write("def") +with open(f"/tmp/abc", "w") as f: + f.write("def") + with open("/var/tmp/123", "w") as f: f.write("def") diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_django/DJ012.py b/crates/ruff_linter/resources/test/fixtures/flake8_django/DJ012.py index a0f8d9da22..20d3e4078b 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_django/DJ012.py +++ b/crates/ruff_linter/resources/test/fixtures/flake8_django/DJ012.py @@ -127,3 +127,21 @@ class MultipleConsecutiveFields(models.Model): pass middle_name = models.CharField(max_length=32) + + +class BaseModel(models.Model): + pass + + +class StrBeforeFieldInheritedModel(BaseModel): + """Model with `__str__` before fields.""" + + class Meta: + verbose_name = "test" + verbose_name_plural = 
"tests" + + def __str__(self): + return "foobar" + + first_name = models.CharField(max_length=32) + diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_errmsg/EM.py b/crates/ruff_linter/resources/test/fixtures/flake8_errmsg/EM.py index 01e53ac3dd..fbf6eb464a 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_errmsg/EM.py +++ b/crates/ruff_linter/resources/test/fixtures/flake8_errmsg/EM.py @@ -27,7 +27,7 @@ def f_ok(): raise RuntimeError(msg) -def f_unfixable(): +def f_msg_defined(): msg = "hello" raise RuntimeError("This is an example exception") diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_pie/PIE804.py b/crates/ruff_linter/resources/test/fixtures/flake8_pie/PIE804.py index 84274c853a..a5b3674422 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_pie/PIE804.py +++ b/crates/ruff_linter/resources/test/fixtures/flake8_pie/PIE804.py @@ -10,7 +10,6 @@ Foo.objects.create(**{**bar}) # PIE804 foo(**{}) - foo(**{**data, "foo": "buzz"}) foo(**buzz) foo(**{"bar-foo": True}) @@ -20,3 +19,8 @@ foo(**{buzz: True}) foo(**{"": True}) foo(**{f"buzz__{bar}": True}) abc(**{"for": 3}) +foo(**{},) + +# Duplicated key names won't be fixed, to avoid syntax errors. +abc(**{'a': b}, **{'a': c}) # PIE804 +abc(a=1, **{'a': c}, **{'b': c}) # PIE804 diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI018.py b/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI018.py index 75dbf08dd6..402c17d7ba 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI018.py +++ b/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI018.py @@ -1,8 +1,13 @@ import typing +import typing_extensions from typing import TypeVar +from typing_extensions import ParamSpec, TypeVarTuple _T = typing.TypeVar("_T") -_P = TypeVar("_P") +_Ts = typing_extensions.TypeVarTuple("_Ts") +_P = ParamSpec("_P") +_P2 = typing.ParamSpec("_P2") +_Ts2 = TypeVarTuple("_Ts2") # OK _UsedTypeVar = TypeVar("_UsedTypeVar") diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI018.pyi b/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI018.pyi index 75dbf08dd6..402c17d7ba 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI018.pyi +++ b/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI018.pyi @@ -1,8 +1,13 @@ import typing +import typing_extensions from typing import TypeVar +from typing_extensions import ParamSpec, TypeVarTuple _T = typing.TypeVar("_T") -_P = TypeVar("_P") +_Ts = typing_extensions.TypeVarTuple("_Ts") +_P = ParamSpec("_P") +_P2 = typing.ParamSpec("_P2") +_Ts2 = TypeVarTuple("_Ts2") # OK _UsedTypeVar = TypeVar("_UsedTypeVar") diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI042.py b/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI042.py index 2936c39c6c..2d2b932e42 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI042.py +++ b/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI042.py @@ -22,3 +22,7 @@ Snake_case_alias: TypeAlias = int | float # PYI042, since not camel case # check that this edge case doesn't crash _: TypeAlias = str | int + +# PEP 695 +type foo_bar = int | str +type FooBar = int | str diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI042.pyi b/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI042.pyi index 2936c39c6c..2d2b932e42 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI042.pyi +++ b/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI042.pyi @@ -22,3 +22,7 @@ Snake_case_alias: 
TypeAlias = int | float # PYI042, since not camel case # check that this edge case doesn't crash _: TypeAlias = str | int + +# PEP 695 +type foo_bar = int | str +type FooBar = int | str diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI043.py b/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI043.py index b48f5e0fa8..b8107662f9 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI043.py +++ b/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI043.py @@ -21,3 +21,7 @@ _PrivateAliasS2: TypeAlias = Annotated[str, "also okay"] # check that this edge case doesn't crash _: TypeAlias = str | int + +# PEP 695 +type _FooT = str | int +type Foo = str | int diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI043.pyi b/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI043.pyi index b48f5e0fa8..b8107662f9 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI043.pyi +++ b/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI043.pyi @@ -21,3 +21,7 @@ _PrivateAliasS2: TypeAlias = Annotated[str, "also okay"] # check that this edge case doesn't crash _: TypeAlias = str | int + +# PEP 695 +type _FooT = str | int +type Foo = str | int diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI053.py b/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI053.py index 8b2811eb63..640d1fb42b 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI053.py +++ b/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI053.py @@ -32,6 +32,7 @@ def f8(x: bytes = b"50 character byte stringgggggggggggggggggggggggggg\xff") -> foo: str = "50 character stringggggggggggggggggggggggggggggggg" bar: str = "51 character stringgggggggggggggggggggggggggggggggg" +baz: str = f"51 character stringgggggggggggggggggggggggggggggggg" baz: bytes = b"50 character byte stringgggggggggggggggggggggggggg" diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI053.pyi b/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI053.pyi index 71064d9bdb..e87388ec9a 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI053.pyi +++ b/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI053.pyi @@ -29,6 +29,10 @@ baz: bytes = b"50 character byte stringgggggggggggggggggggggggggg" # OK qux: bytes = b"51 character byte stringggggggggggggggggggggggggggg\xff" # Error: PYI053 +ffoo: str = f"50 character stringggggggggggggggggggggggggggggggg" # OK + +fbar: str = f"51 character stringgggggggggggggggggggggggggggggggg" # Error: PYI053 + class Demo: """Docstrings are excluded from this rule. Some padding.""" # OK diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI055.py b/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI055.py index 6471613f98..2a9c2d7787 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI055.py +++ b/crates/ruff_linter/resources/test/fixtures/flake8_pyi/PYI055.py @@ -37,3 +37,28 @@ def func(): # PYI055 x: Union[type[requests_mock.Mocker], type[httpretty], type[str]] = requests_mock.Mocker + + +def convert_union(union: UnionType) -> _T | None: + converters: tuple[ + type[_T] | type[Converter[_T]] | Converter[_T] | Callable[[str], _T], ... # PYI055 + ] = union.__args__ + ... + +def convert_union(union: UnionType) -> _T | None: + converters: tuple[ + Union[type[_T] | type[Converter[_T]] | Converter[_T] | Callable[[str], _T]], ... # PYI055 + ] = union.__args__ + ... 
+ +def convert_union(union: UnionType) -> _T | None: + converters: tuple[ + Union[type[_T] | type[Converter[_T]]] | Converter[_T] | Callable[[str], _T], ... # PYI055 + ] = union.__args__ + ... + +def convert_union(union: UnionType) -> _T | None: + converters: tuple[ + Union[type[_T] | type[Converter[_T]] | str] | Converter[_T] | Callable[[str], _T], ... # PYI055 + ] = union.__args__ + ... diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_raise/RSE102.py b/crates/ruff_linter/resources/test/fixtures/flake8_raise/RSE102.py index 0a750b97cb..bba0e98b17 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_raise/RSE102.py +++ b/crates/ruff_linter/resources/test/fixtures/flake8_raise/RSE102.py @@ -82,3 +82,14 @@ raise IndexError(); # RSE102 raise Foo() + +# OK +raise ctypes.WinError() + + +def func(): + pass + + +# OK +raise func() diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_trio/TRIO115.py b/crates/ruff_linter/resources/test/fixtures/flake8_trio/TRIO115.py index aa25cb8e5a..764b5c1d6e 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_trio/TRIO115.py +++ b/crates/ruff_linter/resources/test/fixtures/flake8_trio/TRIO115.py @@ -19,8 +19,32 @@ async def func(): bar = "bar" trio.sleep(bar) + x, y = 0, 2000 + trio.sleep(x) # TRIO115 + trio.sleep(y) # OK + + (a, b, [c, (d, e)]) = (1, 2, (0, [4, 0])) + trio.sleep(c) # TRIO115 + trio.sleep(d) # OK + trio.sleep(e) # TRIO115 + + m_x, m_y = 0 + trio.sleep(m_y) # OK + trio.sleep(m_x) # OK + + m_a = m_b = 0 + trio.sleep(m_a) # TRIO115 + trio.sleep(m_b) # TRIO115 + + m_c = (m_d, m_e) = (0, 0) + trio.sleep(m_c) # OK + trio.sleep(m_d) # TRIO115 + trio.sleep(m_e) # TRIO115 + def func(): + import trio + trio.run(trio.sleep(0)) # TRIO115 @@ -33,3 +57,10 @@ def func(): async def func(): await sleep(seconds=0) # TRIO115 + + +def func(): + import trio + + if (walrus := 0) == 0: + trio.sleep(walrus) # TRIO115 diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_type_checking/TCH006_1.py b/crates/ruff_linter/resources/test/fixtures/flake8_type_checking/TCH006_1.py new file mode 100644 index 0000000000..7e8df4ce10 --- /dev/null +++ b/crates/ruff_linter/resources/test/fixtures/flake8_type_checking/TCH006_1.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from typing import TypeVar + + +x: "int" | str # TCH006 +x: ("int" | str) | "bool" # TCH006 + + +def func(): + x: "int" | str # OK + + +z: list[str, str | "int"] = [] # TCH006 + +type A = Value["int" | str] # OK + +OldS = TypeVar('OldS', int | 'str', str) # TCH006 diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_type_checking/TCH006_2.py b/crates/ruff_linter/resources/test/fixtures/flake8_type_checking/TCH006_2.py new file mode 100644 index 0000000000..0c7d5915b7 --- /dev/null +++ b/crates/ruff_linter/resources/test/fixtures/flake8_type_checking/TCH006_2.py @@ -0,0 +1,16 @@ +from typing import TypeVar + + +x: "int" | str # TCH006 +x: ("int" | str) | "bool" # TCH006 + + +def func(): + x: "int" | str # OK + + +z: list[str, str | "int"] = [] # TCH006 + +type A = Value["int" | str] # OK + +OldS = TypeVar('OldS', int | 'str', str) # TCH006 diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_type_checking/exempt_type_checking_1.py b/crates/ruff_linter/resources/test/fixtures/flake8_type_checking/exempt_type_checking_1.py new file mode 100644 index 0000000000..b29425b178 --- /dev/null +++ b/crates/ruff_linter/resources/test/fixtures/flake8_type_checking/exempt_type_checking_1.py @@ -0,0 +1,7 @@ +"""Add `TYPE_CHECKING` to an 
existing `typing` import. Another member is moved.""" + +from __future__ import annotations + +from typing import Final + +Const: Final[dict] = {} diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_type_checking/exempt_type_checking_2.py b/crates/ruff_linter/resources/test/fixtures/flake8_type_checking/exempt_type_checking_2.py new file mode 100644 index 0000000000..24e0534746 --- /dev/null +++ b/crates/ruff_linter/resources/test/fixtures/flake8_type_checking/exempt_type_checking_2.py @@ -0,0 +1,7 @@ +"""Using `TYPE_CHECKING` from an existing `typing` import. Another member is moved.""" + +from __future__ import annotations + +from typing import Final, TYPE_CHECKING + +Const: Final[dict] = {} diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_type_checking/exempt_type_checking_3.py b/crates/ruff_linter/resources/test/fixtures/flake8_type_checking/exempt_type_checking_3.py new file mode 100644 index 0000000000..eff6d5efac --- /dev/null +++ b/crates/ruff_linter/resources/test/fixtures/flake8_type_checking/exempt_type_checking_3.py @@ -0,0 +1,7 @@ +"""Using `TYPE_CHECKING` from an existing `typing` import. Another member is moved.""" + +from __future__ import annotations + +from typing import Final, Mapping + +Const: Final[dict] = {} diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_type_checking/quote.py b/crates/ruff_linter/resources/test/fixtures/flake8_type_checking/quote.py new file mode 100644 index 0000000000..77075dae2f --- /dev/null +++ b/crates/ruff_linter/resources/test/fixtures/flake8_type_checking/quote.py @@ -0,0 +1,92 @@ +def f(): + from pandas import DataFrame + + def baz() -> DataFrame: + ... + + +def f(): + from pandas import DataFrame + + def baz() -> DataFrame[int]: + ... + + +def f(): + from pandas import DataFrame + + def baz() -> DataFrame["int"]: + ... + + +def f(): + import pandas as pd + + def baz() -> pd.DataFrame: + ... + + +def f(): + import pandas as pd + + def baz() -> pd.DataFrame.Extra: + ... + + +def f(): + import pandas as pd + + def baz() -> pd.DataFrame | int: + ... + + + +def f(): + from pandas import DataFrame + + def baz() -> DataFrame(): + ... + + +def f(): + from typing import Literal + + from pandas import DataFrame + + def baz() -> DataFrame[Literal["int"]]: + ... + + +def f(): + from typing import TYPE_CHECKING + + if TYPE_CHECKING: + from pandas import DataFrame + + def func(value: DataFrame): + ... + + +def f(): + from pandas import DataFrame, Series + + def baz() -> DataFrame | Series: + ... + + +def f(): + from pandas import DataFrame, Series + + def baz() -> ( + DataFrame | + Series + ): + ... + + class C: + x: DataFrame[ + int + ] = 1 + + def func() -> DataFrame[[DataFrame[_P, _R]], DataFrame[_P, _R]]: + ... 
diff --git a/crates/ruff_linter/resources/test/fixtures/isort/force_sort_within_sections_future.py b/crates/ruff_linter/resources/test/fixtures/isort/force_sort_within_sections_future.py new file mode 100644 index 0000000000..3698d72a9c --- /dev/null +++ b/crates/ruff_linter/resources/test/fixtures/isort/force_sort_within_sections_future.py @@ -0,0 +1,2 @@ +import __future__ +from __future__ import annotations diff --git a/crates/ruff_linter/resources/test/fixtures/isort/force_sort_within_sections_lines_between.py b/crates/ruff_linter/resources/test/fixtures/isort/force_sort_within_sections_lines_between.py new file mode 100644 index 0000000000..5459d78bfd --- /dev/null +++ b/crates/ruff_linter/resources/test/fixtures/isort/force_sort_within_sections_lines_between.py @@ -0,0 +1,4 @@ +from a import x +import b +from c import y +import d diff --git a/crates/ruff_linter/resources/test/fixtures/isort/future_from.py b/crates/ruff_linter/resources/test/fixtures/isort/future_from.py new file mode 100644 index 0000000000..3698d72a9c --- /dev/null +++ b/crates/ruff_linter/resources/test/fixtures/isort/future_from.py @@ -0,0 +1,2 @@ +import __future__ +from __future__ import annotations diff --git a/crates/ruff_linter/resources/test/fixtures/pep8_naming/N806.py b/crates/ruff_linter/resources/test/fixtures/pep8_naming/N806.py index bbaf2b785f..8011433130 100644 --- a/crates/ruff_linter/resources/test/fixtures/pep8_naming/N806.py +++ b/crates/ruff_linter/resources/test/fixtures/pep8_naming/N806.py @@ -52,3 +52,9 @@ def model_assign() -> None: Bad = import_string("django.core.exceptions.ValidationError") # N806 ValidationError = import_string("django.core.exceptions.ValidationError") # OK + + Bad = apps.get_model() # N806 + Bad = apps.get_model(model_name="Stream") # N806 + + Address: Type = apps.get_model("zerver", variable) # OK + ValidationError = import_string(variable) # N806 diff --git a/crates/ruff_linter/resources/test/fixtures/perflint/PERF101.py b/crates/ruff_linter/resources/test/fixtures/perflint/PERF101.py index fbef6a7b2a..e6ae0b8f25 100644 --- a/crates/ruff_linter/resources/test/fixtures/perflint/PERF101.py +++ b/crates/ruff_linter/resources/test/fixtures/perflint/PERF101.py @@ -63,3 +63,8 @@ for i in list(foo_tuple): # Ok for i in list(foo_set): # Ok foo_set.append(i + 1) + +x, y, nested_tuple = (1, 2, (3, 4, 5)) + +for i in list(nested_tuple): # PERF101 + pass diff --git a/crates/ruff_linter/resources/test/fixtures/pycodestyle/E26.py b/crates/ruff_linter/resources/test/fixtures/pycodestyle/E26.py index 9d35553dc5..052baafcab 100644 --- a/crates/ruff_linter/resources/test/fixtures/pycodestyle/E26.py +++ b/crates/ruff_linter/resources/test/fixtures/pycodestyle/E26.py @@ -72,3 +72,15 @@ a = 42 # (Two spaces) # EF Means test is giving error and Failing #! Means test is segfaulting # 8 Means test runs forever + +#: Colon prefix is okay + +###This is a variable ### + +# We should strip the space, but preserve the hashes. 
+#: E266:1:3 +## Foo + +a = 1 ## Foo + +a = 1 #:Foo diff --git a/crates/ruff_linter/resources/test/fixtures/pycodestyle/E27.py b/crates/ruff_linter/resources/test/fixtures/pycodestyle/E27.py index 576e43ae01..7fb6fbb4f0 100644 --- a/crates/ruff_linter/resources/test/fixtures/pycodestyle/E27.py +++ b/crates/ruff_linter/resources/test/fixtures/pycodestyle/E27.py @@ -60,3 +60,6 @@ def f(): if (a and b): pass +#: Okay +def f(): + return 1 diff --git a/crates/ruff_linter/resources/test/fixtures/pycodestyle/E402.py b/crates/ruff_linter/resources/test/fixtures/pycodestyle/E402.py index d594340785..fdd0e32ee2 100644 --- a/crates/ruff_linter/resources/test/fixtures/pycodestyle/E402.py +++ b/crates/ruff_linter/resources/test/fixtures/pycodestyle/E402.py @@ -19,21 +19,32 @@ if x > 0: else: import e -__some__magic = 1 +import sys +sys.path.insert(0, "some/path") import f +import matplotlib + +matplotlib.use("Agg") + +import g + +__some__magic = 1 + +import h + def foo() -> None: - import e + import i if __name__ == "__main__": - import g + import j -import h; import i +import k; import l if __name__ == "__main__": - import j; \ -import k + import m; \ +import n diff --git a/crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_0.py b/crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_0.py index 967ed5bc4d..85ec535e22 100644 --- a/crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_0.py +++ b/crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_0.py @@ -43,3 +43,6 @@ regex = ''' ''' # noqa regex = '\\\_' + +#: W605:1:7 +u'foo\ bar' diff --git a/crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py b/crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py index 20bf0ea14c..b34ad587c4 100644 --- a/crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py +++ b/crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_1.py @@ -1,40 +1,54 @@ +# Same as `W605_0.py` but using f-strings instead. + #: W605:1:10 -regex = '\.png$' +regex = f'\.png$' #: W605:2:1 -regex = ''' +regex = f''' \.png$ ''' #: W605:2:6 f( - '\_' + f'\_' ) #: W605:4:6 -""" +f""" multi-line literal with \_ somewhere in the middle """ +#: W605:1:38 +value = f'new line\nand invalid escape \_ here' -def f(): - #: W605:1:11 - return'\.png$' #: Okay -regex = r'\.png$' -regex = '\\.png$' -regex = r''' +regex = fr'\.png$' +regex = f'\\.png$' +regex = fr''' \.png$ ''' -regex = r''' +regex = fr''' \\.png$ ''' -s = '\\' -regex = '\w' # noqa -regex = ''' +s = f'\\' +regex = f'\w' # noqa +regex = f''' \w ''' # noqa + +regex = f'\\\_' +value = f'\{{1}}' +value = f'\{1}' +value = f'{1:\}' +value = f"{f"\{1}"}" +value = rf"{f"\{1}"}" + +# Okay +value = rf'\{{1}}' +value = rf'\{1}' +value = rf'{1:\}' +value = f"{rf"\{1}"}" diff --git a/crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_2.py b/crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_2.py deleted file mode 100644 index b34ad587c4..0000000000 --- a/crates/ruff_linter/resources/test/fixtures/pycodestyle/W605_2.py +++ /dev/null @@ -1,54 +0,0 @@ -# Same as `W605_0.py` but using f-strings instead. 
- -#: W605:1:10 -regex = f'\.png$' - -#: W605:2:1 -regex = f''' -\.png$ -''' - -#: W605:2:6 -f( - f'\_' -) - -#: W605:4:6 -f""" -multi-line -literal -with \_ somewhere -in the middle -""" - -#: W605:1:38 -value = f'new line\nand invalid escape \_ here' - - -#: Okay -regex = fr'\.png$' -regex = f'\\.png$' -regex = fr''' -\.png$ -''' -regex = fr''' -\\.png$ -''' -s = f'\\' -regex = f'\w' # noqa -regex = f''' -\w -''' # noqa - -regex = f'\\\_' -value = f'\{{1}}' -value = f'\{1}' -value = f'{1:\}' -value = f"{f"\{1}"}" -value = rf"{f"\{1}"}" - -# Okay -value = rf'\{{1}}' -value = rf'\{1}' -value = rf'{1:\}' -value = f"{rf"\{1}"}" diff --git a/crates/ruff_linter/resources/test/fixtures/pydocstyle/D.py b/crates/ruff_linter/resources/test/fixtures/pydocstyle/D.py index 724490934f..617231c5c8 100644 --- a/crates/ruff_linter/resources/test/fixtures/pydocstyle/D.py +++ b/crates/ruff_linter/resources/test/fixtures/pydocstyle/D.py @@ -713,5 +713,12 @@ def retain_extra_whitespace_not_overindented(): This is not overindented This is overindented, but since one line is not overindented this should not raise - And so is this, but it we should preserve the extra space on this line relative + And so is this, but it we should preserve the extra space on this line relative + """ + + +def inconsistent_indent_byte_size(): + """There's a non-breaking space (2-bytes) after 3 spaces (https://github.com/astral-sh/ruff/issues/9080). + +     Returns: """ diff --git a/crates/ruff_linter/resources/test/fixtures/pydocstyle/D208.py b/crates/ruff_linter/resources/test/fixtures/pydocstyle/D208.py index f0515248ab..4e99cf4b7f 100644 --- a/crates/ruff_linter/resources/test/fixtures/pydocstyle/D208.py +++ b/crates/ruff_linter/resources/test/fixtures/pydocstyle/D208.py @@ -1,5 +1,16 @@ +""" + Author +""" + + class Platform: """ Remove sampler Args:     Returns: """ + + +def memory_test(): + """ +   参数含义:precision:精确到小数点后几位 + """ diff --git a/crates/ruff_linter/resources/test/fixtures/pyflakes/F401_20.py b/crates/ruff_linter/resources/test/fixtures/pyflakes/F401_20.py new file mode 100644 index 0000000000..3ab759cb61 --- /dev/null +++ b/crates/ruff_linter/resources/test/fixtures/pyflakes/F401_20.py @@ -0,0 +1,4 @@ +import re +from typing import Annotated + +type X = Annotated[int, lambda: re.compile("x")] diff --git a/crates/ruff_linter/resources/test/fixtures/pylint/subprocess_run_without_check.py b/crates/ruff_linter/resources/test/fixtures/pylint/subprocess_run_without_check.py index b329ba4510..af5dbbc4d0 100644 --- a/crates/ruff_linter/resources/test/fixtures/pylint/subprocess_run_without_check.py +++ b/crates/ruff_linter/resources/test/fixtures/pylint/subprocess_run_without_check.py @@ -3,6 +3,11 @@ import subprocess # Errors. subprocess.run("ls") subprocess.run("ls", shell=True) +subprocess.run( + ["ls"], + shell=False, +) +subprocess.run(["ls"], **kwargs) # Non-errors. 
subprocess.run("ls", check=True) diff --git a/crates/ruff_linter/resources/test/fixtures/pylint/too_many_locals.py b/crates/ruff_linter/resources/test/fixtures/pylint/too_many_locals.py new file mode 100644 index 0000000000..ffb30d7607 --- /dev/null +++ b/crates/ruff_linter/resources/test/fixtures/pylint/too_many_locals.py @@ -0,0 +1,36 @@ +def func() -> None: # OK + # 15 is max default + first = 1 + second = 2 + third = 3 + fourth = 4 + fifth = 5 + sixth = 6 + seventh = 7 + eighth = 8 + ninth = 9 + tenth = 10 + eleventh = 11 + twelveth = 12 + thirteenth = 13 + fourteenth = 14 + fifteenth = 15 + + +def func() -> None: # PLR0914 + first = 1 + second = 2 + third = 3 + fourth = 4 + fifth = 5 + sixth = 6 + seventh = 7 + eighth = 8 + ninth = 9 + tenth = 10 + eleventh = 11 + twelfth = 12 + thirteenth = 13 + fourteenth = 14 + fifteenth = 15 + sixteenth = 16 diff --git a/crates/ruff_linter/resources/test/fixtures/pylint/too_many_positional.py b/crates/ruff_linter/resources/test/fixtures/pylint/too_many_positional.py new file mode 100644 index 0000000000..3c0fe2a2f5 --- /dev/null +++ b/crates/ruff_linter/resources/test/fixtures/pylint/too_many_positional.py @@ -0,0 +1,30 @@ +def f(x, y, z, t, u, v, w, r): # Too many positional arguments (8/3) + pass + + +def f(x): # OK + pass + + +def f(x, y, z, _t, _u, _v, _w, r): # OK (underscore-prefixed names are ignored + pass + + +def f(x, y, z, *, u=1, v=1, r=1): # OK + pass + + +def f(x=1, y=1, z=1): # OK + pass + + +def f(x, y, z, /, u, v, w): # Too many positional arguments (6/3) + pass + + +def f(x, y, z, *, u, v, w): # OK + pass + + +def f(x, y, z, a, b, c, *, u, v, w): # Too many positional arguments (6/3) + pass diff --git a/crates/ruff_linter/resources/test/fixtures/pylint/too_many_positional_params.py b/crates/ruff_linter/resources/test/fixtures/pylint/too_many_positional_params.py new file mode 100644 index 0000000000..dae87b5737 --- /dev/null +++ b/crates/ruff_linter/resources/test/fixtures/pylint/too_many_positional_params.py @@ -0,0 +1,10 @@ +# Too many positional arguments (7/4) for max_positional=4 +# OK for dummy_variable_rgx ~ "skip_.*" +def f(w, x, y, z, skip_t, skip_u, skip_v): + pass + + +# Too many positional arguments (7/4) for max_args=4 +# Too many positional arguments (7/3) for dummy_variable_rgx ~ "skip_.*" +def f(w, x, y, z, t, u, v): + pass diff --git a/crates/ruff_linter/resources/test/fixtures/pylint/unnecessary_dict_index_lookup.py b/crates/ruff_linter/resources/test/fixtures/pylint/unnecessary_dict_index_lookup.py index cfdd9fc42a..d3daeb83c9 100644 --- a/crates/ruff_linter/resources/test/fixtures/pylint/unnecessary_dict_index_lookup.py +++ b/crates/ruff_linter/resources/test/fixtures/pylint/unnecessary_dict_index_lookup.py @@ -14,16 +14,27 @@ def fix_these(): def dont_fix_these(): # once there is an assignment to the dict[index], we stop emitting diagnostics for fruit_name, fruit_count in FRUITS.items(): - FRUITS[fruit_name] = 0 # Ok - assert FRUITS[fruit_name] == 0 # Ok + FRUITS[fruit_name] = 0 # OK + assert FRUITS[fruit_name] == 0 # OK + + # once there is an assignment to the key, we stop emitting diagnostics + for fruit_name, fruit_count in FRUITS.items(): + fruit_name = 0 # OK + assert FRUITS[fruit_name] == 0 # OK + + # once there is an assignment to the value, we stop emitting diagnostics + for fruit_name, fruit_count in FRUITS.items(): + if fruit_count < 5: + fruit_count = -fruit_count + assert FRUITS[fruit_name] == 0 # OK def value_intentionally_unused(): - [FRUITS[fruit_name] for fruit_name, _ in FRUITS.items()] # Ok 
- {FRUITS[fruit_name] for fruit_name, _ in FRUITS.items()} # Ok - {fruit_name: FRUITS[fruit_name] for fruit_name, _ in FRUITS.items()} # Ok + [FRUITS[fruit_name] for fruit_name, _ in FRUITS.items()] # OK + {FRUITS[fruit_name] for fruit_name, _ in FRUITS.items()} # OK + {fruit_name: FRUITS[fruit_name] for fruit_name, _ in FRUITS.items()} # OK for fruit_name, _ in FRUITS.items(): - print(FRUITS[fruit_name]) # Ok - blah = FRUITS[fruit_name] # Ok - assert FRUITS[fruit_name] == "pear" # Ok + print(FRUITS[fruit_name]) # OK + blah = FRUITS[fruit_name] # OK + assert FRUITS[fruit_name] == "pear" # OK diff --git a/crates/ruff_linter/resources/test/fixtures/pylint/unnecessary_list_index_lookup.py b/crates/ruff_linter/resources/test/fixtures/pylint/unnecessary_list_index_lookup.py index 182e63cb7b..8911c8bd26 100644 --- a/crates/ruff_linter/resources/test/fixtures/pylint/unnecessary_list_index_lookup.py +++ b/crates/ruff_linter/resources/test/fixtures/pylint/unnecessary_list_index_lookup.py @@ -12,7 +12,7 @@ def fix_these(): print(letters[index]) # PLR1736 blah = letters[index] # PLR1736 assert letters[index] == "d" # PLR1736 - + for index, letter in builtins.enumerate(letters): print(letters[index]) # PLR1736 blah = letters[index] # PLR1736 @@ -22,38 +22,43 @@ def fix_these(): def dont_fix_these(): # once there is an assignment to the sequence[index], we stop emitting diagnostics for index, letter in enumerate(letters): - letters[index] = "d" # Ok - letters[index] += "e" # Ok - assert letters[index] == "de" # Ok - + letters[index] = "d" # OK + letters[index] += "e" # OK + assert letters[index] == "de" # OK + # once there is an assignment to the index, we stop emitting diagnostics for index, letter in enumerate(letters): - index += 1 # Ok - print(letters[index]) # Ok - + index += 1 # OK + print(letters[index]) # OK + # once there is an assignment to the sequence, we stop emitting diagnostics for index, letter in enumerate(letters): - letters = ["d", "e", "f"] # Ok - print(letters[index]) # Ok + letters = ["d", "e", "f"] # OK + print(letters[index]) # OK + + # once there is an assignment to the value, we stop emitting diagnostics + for index, letter in enumerate(letters): + letter = "d" + print(letters[index]) # OK # once there is an deletion from or of the sequence or index, we stop emitting diagnostics for index, letter in enumerate(letters): - del letters[index] # Ok - print(letters[index]) # Ok + del letters[index] # OK + print(letters[index]) # OK for index, letter in enumerate(letters): - del letters # Ok - print(letters[index]) # Ok + del letters # OK + print(letters[index]) # OK for index, letter in enumerate(letters): - del index # Ok - print(letters[index]) # Ok + del index # OK + print(letters[index]) # OK def value_intentionally_unused(): - [letters[index] for index, _ in enumerate(letters)] # Ok - {letters[index] for index, _ in enumerate(letters)} # Ok - {index: letters[index] for index, _ in enumerate(letters)} # Ok + [letters[index] for index, _ in enumerate(letters)] # OK + {letters[index] for index, _ in enumerate(letters)} # OK + {index: letters[index] for index, _ in enumerate(letters)} # OK for index, _ in enumerate(letters): - print(letters[index]) # Ok - blah = letters[index] # Ok - letters[index] = "d" # Ok + print(letters[index]) # OK + blah = letters[index] # OK + letters[index] = "d" # OK diff --git a/crates/ruff_linter/resources/test/fixtures/pyupgrade/UP031_0.py b/crates/ruff_linter/resources/test/fixtures/pyupgrade/UP031_0.py index 9544e11c4c..00fef079b6 100644 --- 
a/crates/ruff_linter/resources/test/fixtures/pyupgrade/UP031_0.py +++ b/crates/ruff_linter/resources/test/fixtures/pyupgrade/UP031_0.py @@ -110,3 +110,10 @@ print('Hello %(arg)s' % bar['bop']) "%s" % ( x, # comment ) + + +path = "%s-%s-%s.pem" % ( + safe_domain_name(cn), # common name, which should be filename safe because it is IDNA-encoded, but in case of a malformed cert make sure it's ok to use as a filename + cert.not_valid_after.date().isoformat().replace("-", ""), # expiration date + hexlify(cert.fingerprint(hashes.SHA256())).decode("ascii")[0:8], # fingerprint prefix +) diff --git a/crates/ruff_linter/resources/test/fixtures/refurb/FURB118.py b/crates/ruff_linter/resources/test/fixtures/refurb/FURB118.py new file mode 100644 index 0000000000..51c136f976 --- /dev/null +++ b/crates/ruff_linter/resources/test/fixtures/refurb/FURB118.py @@ -0,0 +1,61 @@ +# Errors. +op_bitnot = lambda x: ~x +op_not = lambda x: not x +op_pos = lambda x: +x +op_neg = lambda x: -x + +op_add = lambda x, y: x + y +op_sub = lambda x, y: x - y +op_mult = lambda x, y: x * y +op_matmutl = lambda x, y: x @ y +op_truediv = lambda x, y: x / y +op_mod = lambda x, y: x % y +op_pow = lambda x, y: x ** y +op_lshift = lambda x, y: x << y +op_rshift = lambda x, y: x >> y +op_bitor = lambda x, y: x | y +op_xor = lambda x, y: x ^ y +op_bitand = lambda x, y: x & y +op_floordiv = lambda x, y: x // y + +op_eq = lambda x, y: x == y +op_ne = lambda x, y: x != y +op_lt = lambda x, y: x < y +op_lte = lambda x, y: x <= y +op_gt = lambda x, y: x > y +op_gte = lambda x, y: x >= y +op_is = lambda x, y: x is y +op_isnot = lambda x, y: x is not y +op_in = lambda x, y: y in x + + +def op_not2(x): + return not x + + +def op_add2(x, y): + return x + y + + +class Adder: + def add(x, y): + return x + y + +# OK. +op_add3 = lambda x, y = 1: x + y +op_neg2 = lambda x, y: y - x +op_notin = lambda x, y: y not in x +op_and = lambda x, y: y and x +op_or = lambda x, y: y or x +op_in = lambda x, y: x in y + + +def op_neg3(x, y): + return y - x + +def op_add4(x, y = 1): + return x + y + +def op_add5(x, y): + print("op_add5") + return x + y diff --git a/crates/ruff_linter/resources/test/fixtures/refurb/FURB152.py b/crates/ruff_linter/resources/test/fixtures/refurb/FURB152.py index 5e1bfbb166..e9339a86fc 100644 --- a/crates/ruff_linter/resources/test/fixtures/refurb/FURB152.py +++ b/crates/ruff_linter/resources/test/fixtures/refurb/FURB152.py @@ -5,3 +5,11 @@ A = 3.14 * r ** 2 # FURB152 C = 6.28 * r # FURB152 e = 2.71 # FURB152 + +r = 3.15 # OK + +r = 3.141 # FURB152 + +r = 3.1415 # FURB152 + +e = 2.7 # OK diff --git a/crates/ruff_linter/resources/test/fixtures/refurb/FURB163.py b/crates/ruff_linter/resources/test/fixtures/refurb/FURB163.py index 1f2255be8e..52b6619bad 100644 --- a/crates/ruff_linter/resources/test/fixtures/refurb/FURB163.py +++ b/crates/ruff_linter/resources/test/fixtures/refurb/FURB163.py @@ -16,6 +16,8 @@ special_log(1, 2) special_log(1, 10) special_log(1, math.e) special_log(1, special_e) +math.log(1, 2.0) +math.log(1, 10.0) # Ok. 
math.log2(1) @@ -45,3 +47,6 @@ def log(*args): log(1, 2) log(1, 10) log(1, math.e) + +math.log(1, 2.0001) +math.log(1, 10.0001) diff --git a/crates/ruff_linter/resources/test/fixtures/refurb/FURB181.py b/crates/ruff_linter/resources/test/fixtures/refurb/FURB181.py new file mode 100644 index 0000000000..559f67ee30 --- /dev/null +++ b/crates/ruff_linter/resources/test/fixtures/refurb/FURB181.py @@ -0,0 +1,57 @@ +import hashlib +from hashlib import ( + blake2b, + blake2s, + md5, + sha1, + sha3_224, + sha3_256, + sha3_384, + sha3_512, + sha224, +) +from hashlib import sha256 +from hashlib import sha256 as hash_algo +from hashlib import sha384, sha512, shake_128, shake_256 + +# these will match + +blake2b().digest().hex() +blake2s().digest().hex() +md5().digest().hex() +sha1().digest().hex() +sha224().digest().hex() +sha256().digest().hex() +sha384().digest().hex() +sha3_224().digest().hex() +sha3_256().digest().hex() +sha3_384().digest().hex() +sha3_512().digest().hex() +sha512().digest().hex() +shake_128().digest(10).hex() +shake_256().digest(10).hex() + +hashlib.sha256().digest().hex() + +sha256(b"text").digest().hex() + +hash_algo().digest().hex() + +# not yet supported +h = sha256() +h.digest().hex() + + +# these will not + +sha256().digest() +sha256().digest().hex("_") +sha256().digest().hex(bytes_per_sep=4) +sha256().hexdigest() + +class Hash: + def digest(self) -> bytes: + return b"" + + +Hash().digest().hex() diff --git a/crates/ruff_linter/resources/test/fixtures/ruff/RUF006.py b/crates/ruff_linter/resources/test/fixtures/ruff/RUF006.py index eedce25631..f11ecd5400 100644 --- a/crates/ruff_linter/resources/test/fixtures/ruff/RUF006.py +++ b/crates/ruff_linter/resources/test/fixtures/ruff/RUF006.py @@ -63,11 +63,29 @@ def f(): tasks = [asyncio.create_task(task) for task in tasks] -# OK (false negative) +# Error def f(): task = asyncio.create_task(coordinator.ws_connect()) +# Error +def f(): + loop = asyncio.get_running_loop() + task: asyncio.Task = loop.create_task(coordinator.ws_connect()) + + +# OK (potential false negative) +def f(): + task = asyncio.create_task(coordinator.ws_connect()) + background_tasks.add(task) + + +# OK +async def f(): + task = asyncio.create_task(coordinator.ws_connect()) + await task + + # OK (potential false negative) def f(): do_nothing_with_the_task(asyncio.create_task(coordinator.ws_connect())) @@ -88,3 +106,49 @@ def f(): def f(): loop = asyncio.get_running_loop() loop.do_thing(coordinator.ws_connect()) + + +# OK +async def f(): + task = unused = asyncio.create_task(coordinator.ws_connect()) + await task + + +# OK (false negative) +async def f(): + task = unused = asyncio.create_task(coordinator.ws_connect()) + + +# OK +async def f(): + task[i] = asyncio.create_task(coordinator.ws_connect()) + + +# OK +async def f(x: int): + if x > 0: + task = asyncio.create_task(make_request()) + else: + task = asyncio.create_task(make_request()) + await task + + +# OK +async def f(x: bool): + if x: + t = asyncio.create_task(asyncio.sleep(1)) + else: + t = None + try: + await asyncio.sleep(1) + finally: + if t: + await t + + +# Error +async def f(x: bool): + if x: + t = asyncio.create_task(asyncio.sleep(1)) + else: + t = None diff --git a/crates/ruff_linter/resources/test/fixtures/ruff/RUF012.py b/crates/ruff_linter/resources/test/fixtures/ruff/RUF012.py index aa9d54d8b7..c1e84fb080 100644 --- a/crates/ruff_linter/resources/test/fixtures/ruff/RUF012.py +++ b/crates/ruff_linter/resources/test/fixtures/ruff/RUF012.py @@ -59,3 +59,11 @@ class F(BaseSettings): 
without_annotation = [] class_variable: ClassVar[list[int]] = [] final_variable: Final[list[int]] = [] + + +class G(F): + mutable_default: list[int] = [] + immutable_annotation: Sequence[int] = [] + without_annotation = [] + class_variable: ClassVar[list[int]] = [] + final_variable: Final[list[int]] = [] diff --git a/crates/ruff_linter/resources/test/fixtures/ruff/RUF100_3.py b/crates/ruff_linter/resources/test/fixtures/ruff/RUF100_3.py index 64f9cd12c4..17e6367ad0 100644 --- a/crates/ruff_linter/resources/test/fixtures/ruff/RUF100_3.py +++ b/crates/ruff_linter/resources/test/fixtures/ruff/RUF100_3.py @@ -23,3 +23,6 @@ print(a) # noqa: E501, F821 # comment print(a) # noqa: E501, F821 # comment print(a) # noqa: E501, F821 comment print(a) # noqa: E501, F821 comment + +print(a) # comment with unicode µ # noqa: E501 +print(a) # comment with unicode µ # noqa: E501, F821 diff --git a/crates/ruff_linter/src/checkers/ast/analyze/deferred_lambdas.rs b/crates/ruff_linter/src/checkers/ast/analyze/deferred_lambdas.rs index b3aa849f8f..f331ef113e 100644 --- a/crates/ruff_linter/src/checkers/ast/analyze/deferred_lambdas.rs +++ b/crates/ruff_linter/src/checkers/ast/analyze/deferred_lambdas.rs @@ -2,7 +2,7 @@ use ruff_python_ast::Expr; use crate::checkers::ast::Checker; use crate::codes::Rule; -use crate::rules::{flake8_pie, pylint}; +use crate::rules::{flake8_pie, pylint, refurb}; /// Run lint rules over all deferred lambdas in the [`SemanticModel`]. pub(crate) fn deferred_lambdas(checker: &mut Checker) { @@ -21,6 +21,9 @@ pub(crate) fn deferred_lambdas(checker: &mut Checker) { if checker.enabled(Rule::ReimplementedContainerBuiltin) { flake8_pie::rules::reimplemented_container_builtin(checker, lambda); } + if checker.enabled(Rule::ReimplementedOperator) { + refurb::rules::reimplemented_operator(checker, &lambda.into()); + } } } } diff --git a/crates/ruff_linter/src/checkers/ast/analyze/deferred_scopes.rs b/crates/ruff_linter/src/checkers/ast/analyze/deferred_scopes.rs index 27c9a6b7c7..fad4926774 100644 --- a/crates/ruff_linter/src/checkers/ast/analyze/deferred_scopes.rs +++ b/crates/ruff_linter/src/checkers/ast/analyze/deferred_scopes.rs @@ -5,16 +5,21 @@ use ruff_text_size::Ranged; use crate::checkers::ast::Checker; use crate::codes::Rule; -use crate::rules::{flake8_pyi, flake8_type_checking, flake8_unused_arguments, pyflakes, pylint}; +use crate::rules::{ + flake8_pyi, flake8_type_checking, flake8_unused_arguments, pyflakes, pylint, ruff, +}; /// Run lint rules over all deferred scopes in the [`SemanticModel`]. 
pub(crate) fn deferred_scopes(checker: &mut Checker) { if !checker.any_enabled(&[ + Rule::AsyncioDanglingTask, Rule::GlobalVariableNotAssigned, Rule::ImportShadowedByLoopVar, + Rule::NoSelfUse, Rule::RedefinedArgumentFromLocal, Rule::RedefinedWhileUnused, Rule::RuntimeImportInTypeCheckingBlock, + Rule::TooManyLocals, Rule::TypingOnlyFirstPartyImport, Rule::TypingOnlyStandardLibraryImport, Rule::TypingOnlyThirdPartyImport, @@ -31,7 +36,6 @@ pub(crate) fn deferred_scopes(checker: &mut Checker) { Rule::UnusedPrivateTypedDict, Rule::UnusedStaticMethodArgument, Rule::UnusedVariable, - Rule::NoSelfUse, ]) { return; } @@ -59,6 +63,7 @@ pub(crate) fn deferred_scopes(checker: &mut Checker) { flake8_type_checking::helpers::is_valid_runtime_import( binding, &checker.semantic, + &checker.settings.flake8_type_checking, ) }) .collect() @@ -268,6 +273,10 @@ pub(crate) fn deferred_scopes(checker: &mut Checker) { flake8_pyi::rules::unused_private_typed_dict(checker, scope, &mut diagnostics); } + if checker.enabled(Rule::AsyncioDanglingTask) { + ruff::rules::asyncio_dangling_binding(scope, &checker.semantic, &mut diagnostics); + } + if matches!(scope.kind, ScopeKind::Function(_) | ScopeKind::Lambda(_)) { if checker.enabled(Rule::UnusedVariable) { pyflakes::rules::unused_variable(checker, scope, &mut diagnostics); @@ -335,6 +344,10 @@ pub(crate) fn deferred_scopes(checker: &mut Checker) { if checker.enabled(Rule::NoSelfUse) { pylint::rules::no_self_use(checker, scope_id, scope, &mut diagnostics); } + + if checker.enabled(Rule::TooManyLocals) { + pylint::rules::too_many_locals(checker, scope, &mut diagnostics); + } } } checker.diagnostics.extend(diagnostics); diff --git a/crates/ruff_linter/src/checkers/ast/analyze/expression.rs b/crates/ruff_linter/src/checkers/ast/analyze/expression.rs index 87a354b964..c8b602939d 100644 --- a/crates/ruff_linter/src/checkers/ast/analyze/expression.rs +++ b/crates/ruff_linter/src/checkers/ast/analyze/expression.rs @@ -15,8 +15,9 @@ use crate::rules::{ flake8_comprehensions, flake8_datetimez, flake8_debugger, flake8_django, flake8_future_annotations, flake8_gettext, flake8_implicit_str_concat, flake8_logging, flake8_logging_format, flake8_pie, flake8_print, flake8_pyi, flake8_pytest_style, flake8_self, - flake8_simplify, flake8_tidy_imports, flake8_trio, flake8_use_pathlib, flynt, numpy, - pandas_vet, pep8_naming, pycodestyle, pyflakes, pygrep_hooks, pylint, pyupgrade, refurb, ruff, + flake8_simplify, flake8_tidy_imports, flake8_trio, flake8_type_checking, flake8_use_pathlib, + flynt, numpy, pandas_vet, pep8_naming, pycodestyle, pyflakes, pygrep_hooks, pylint, pyupgrade, + refurb, ruff, }; use crate::settings::types::PythonVersion; @@ -356,6 +357,8 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) { Rule::FString, // flynt Rule::StaticJoinToFString, + // refurb + Rule::HashlibDigestHex, ]) { if let Expr::Attribute(ast::ExprAttribute { value, attr, .. 
}) = func.as_ref() { let attr = attr.as_str(); @@ -543,7 +546,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) { flake8_bugbear::rules::no_explicit_stacklevel(checker, call); } if checker.enabled(Rule::UnnecessaryDictKwargs) { - flake8_pie::rules::unnecessary_dict_kwargs(checker, expr, keywords); + flake8_pie::rules::unnecessary_dict_kwargs(checker, call); } if checker.enabled(Rule::UnnecessaryRangeStart) { flake8_pie::rules::unnecessary_range_start(checker, call); @@ -581,6 +584,9 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) { if checker.enabled(Rule::HashlibInsecureHashFunction) { flake8_bandit::rules::hashlib_insecure_hash_functions(checker, call); } + if checker.enabled(Rule::HashlibDigestHex) { + refurb::rules::hashlib_digest_hex(checker, call); + } if checker.enabled(Rule::RequestWithoutTimeout) { flake8_bandit::rules::request_without_timeout(checker, call); } @@ -1165,6 +1171,9 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) { if checker.enabled(Rule::UnnecessaryTypeUnion) { flake8_pyi::rules::unnecessary_type_union(checker, expr); } + if checker.enabled(Rule::RuntimeStringUnion) { + flake8_type_checking::rules::runtime_string_union(checker, expr); + } } } Expr::UnaryOp( @@ -1270,32 +1279,12 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) { refurb::rules::math_constant(checker, number_literal); } } - Expr::BytesLiteral(_) => { - if checker.source_type.is_stub() && checker.enabled(Rule::StringOrBytesTooLong) { - flake8_pyi::rules::string_or_bytes_too_long(checker, expr); - } - } - Expr::StringLiteral(string) => { - if checker.enabled(Rule::HardcodedBindAllInterfaces) { - if let Some(diagnostic) = - flake8_bandit::rules::hardcoded_bind_all_interfaces(string) - { - checker.diagnostics.push(diagnostic); - } - } - if checker.enabled(Rule::HardcodedTempFile) { - flake8_bandit::rules::hardcoded_tmp_directory(checker, string); - } + Expr::StringLiteral(ast::ExprStringLiteral { value, .. 
}) => { if checker.enabled(Rule::UnicodeKindPrefix) { - for string_part in string.value.parts() { + for string_part in value { pyupgrade::rules::unicode_kind_prefix(checker, string_part); } } - if checker.source_type.is_stub() { - if checker.enabled(Rule::StringOrBytesTooLong) { - flake8_pyi::rules::string_or_bytes_too_long(checker, expr); - } - } } Expr::IfExp( if_exp @ ast::ExprIfExp { diff --git a/crates/ruff_linter/src/checkers/ast/analyze/mod.rs b/crates/ruff_linter/src/checkers/ast/analyze/mod.rs index dd9ec2fbfd..deeb55864b 100644 --- a/crates/ruff_linter/src/checkers/ast/analyze/mod.rs +++ b/crates/ruff_linter/src/checkers/ast/analyze/mod.rs @@ -10,6 +10,7 @@ pub(super) use module::module; pub(super) use parameter::parameter; pub(super) use parameters::parameters; pub(super) use statement::statement; +pub(super) use string_like::string_like; pub(super) use suite::suite; pub(super) use unresolved_references::unresolved_references; @@ -25,5 +26,6 @@ mod module; mod parameter; mod parameters; mod statement; +mod string_like; mod suite; mod unresolved_references; diff --git a/crates/ruff_linter/src/checkers/ast/analyze/statement.rs b/crates/ruff_linter/src/checkers/ast/analyze/statement.rs index 99ea30595f..c803562bd5 100644 --- a/crates/ruff_linter/src/checkers/ast/analyze/statement.rs +++ b/crates/ruff_linter/src/checkers/ast/analyze/statement.rs @@ -250,6 +250,9 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { if checker.enabled(Rule::TooManyArguments) { pylint::rules::too_many_arguments(checker, function_def); } + if checker.enabled(Rule::TooManyPositional) { + pylint::rules::too_many_positional(checker, function_def); + } if checker.enabled(Rule::TooManyReturnStatements) { if let Some(diagnostic) = pylint::rules::too_many_return_statements( stmt, @@ -365,6 +368,9 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { .diagnostics .extend(ruff::rules::unreachable::in_function(name, body)); } + if checker.enabled(Rule::ReimplementedOperator) { + refurb::rules::reimplemented_operator(checker, &function_def.into()); + } } Stmt::Return(_) => { if checker.enabled(Rule::ReturnOutsideFunction) { @@ -394,27 +400,13 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { flake8_django::rules::nullable_model_string_field(checker, body); } if checker.enabled(Rule::DjangoExcludeWithModelForm) { - if let Some(diagnostic) = flake8_django::rules::exclude_with_model_form( - checker, - arguments.as_deref(), - body, - ) { - checker.diagnostics.push(diagnostic); - } + flake8_django::rules::exclude_with_model_form(checker, class_def); } if checker.enabled(Rule::DjangoAllWithModelForm) { - if let Some(diagnostic) = - flake8_django::rules::all_with_model_form(checker, arguments.as_deref(), body) - { - checker.diagnostics.push(diagnostic); - } + flake8_django::rules::all_with_model_form(checker, class_def); } if checker.enabled(Rule::DjangoUnorderedBodyContentInModel) { - flake8_django::rules::unordered_body_content_in_model( - checker, - arguments.as_deref(), - body, - ); + flake8_django::rules::unordered_body_content_in_model(checker, class_def); } if !checker.source_type.is_stub() { if checker.enabled(Rule::DjangoModelWithoutDunderStr) { @@ -1534,6 +1526,14 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { } } } + Stmt::TypeAlias(ast::StmtTypeAlias { name, .. 
}) => { + if checker.enabled(Rule::SnakeCaseTypeAlias) { + flake8_pyi::rules::snake_case_type_alias(checker, name); + } + if checker.enabled(Rule::TSuffixedTypeAlias) { + flake8_pyi::rules::t_suffixed_type_alias(checker, name); + } + } Stmt::Delete(delete @ ast::StmtDelete { targets, range: _ }) => { if checker.enabled(Rule::GlobalStatement) { for target in targets { @@ -1560,7 +1560,11 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { pylint::rules::named_expr_without_context(checker, value); } if checker.enabled(Rule::AsyncioDanglingTask) { - ruff::rules::asyncio_dangling_task(checker, value); + if let Some(diagnostic) = + ruff::rules::asyncio_dangling_task(value, checker.semantic()) + { + checker.diagnostics.push(diagnostic); + } } if checker.enabled(Rule::RepeatedAppend) { refurb::rules::repeated_append(checker, stmt); diff --git a/crates/ruff_linter/src/checkers/ast/analyze/string_like.rs b/crates/ruff_linter/src/checkers/ast/analyze/string_like.rs new file mode 100644 index 0000000000..c3c8fb3367 --- /dev/null +++ b/crates/ruff_linter/src/checkers/ast/analyze/string_like.rs @@ -0,0 +1,20 @@ +use ruff_python_ast::StringLike; + +use crate::checkers::ast::Checker; +use crate::codes::Rule; +use crate::rules::{flake8_bandit, flake8_pyi}; + +/// Run lint rules over a [`StringLike`] syntax nodes. +pub(crate) fn string_like(string_like: StringLike, checker: &mut Checker) { + if checker.enabled(Rule::HardcodedBindAllInterfaces) { + flake8_bandit::rules::hardcoded_bind_all_interfaces(checker, string_like); + } + if checker.enabled(Rule::HardcodedTempFile) { + flake8_bandit::rules::hardcoded_tmp_directory(checker, string_like); + } + if checker.source_type.is_stub() { + if checker.enabled(Rule::StringOrBytesTooLong) { + flake8_pyi::rules::string_or_bytes_too_long(checker, string_like); + } + } +} diff --git a/crates/ruff_linter/src/checkers/ast/annotation.rs b/crates/ruff_linter/src/checkers/ast/annotation.rs new file mode 100644 index 0000000000..aca5fc6c62 --- /dev/null +++ b/crates/ruff_linter/src/checkers/ast/annotation.rs @@ -0,0 +1,66 @@ +use ruff_python_semantic::{ScopeKind, SemanticModel}; + +use crate::rules::flake8_type_checking; +use crate::settings::LinterSettings; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(super) enum AnnotationContext { + /// Python will evaluate the annotation at runtime, but it's not _required_ and, as such, could + /// be quoted to convert it into a typing-only annotation. + /// + /// For example: + /// ```python + /// from pandas import DataFrame + /// + /// def foo() -> DataFrame: + /// ... + /// ``` + /// + /// Above, Python will evaluate `DataFrame` at runtime in order to add it to `__annotations__`. + RuntimeEvaluated, + /// Python will evaluate the annotation at runtime, and it's required to be available at + /// runtime, as a library (like Pydantic) needs access to it. + RuntimeRequired, + /// The annotation is only evaluated at type-checking time. + TypingOnly, +} + +impl AnnotationContext { + pub(super) fn from_model(semantic: &SemanticModel, settings: &LinterSettings) -> Self { + // If the annotation is in a class scope (e.g., an annotated assignment for a + // class field), and that class is marked as annotation as runtime-required. 
+ if semantic + .current_scope() + .kind + .as_class() + .is_some_and(|class_def| { + flake8_type_checking::helpers::runtime_required_class( + class_def, + &settings.flake8_type_checking.runtime_required_base_classes, + &settings.flake8_type_checking.runtime_required_decorators, + semantic, + ) + }) + { + return Self::RuntimeRequired; + } + + // If `__future__` annotations are enabled, then annotations are never evaluated + // at runtime, so we can treat them as typing-only. + if semantic.future_annotations() { + return Self::TypingOnly; + } + + // Otherwise, if we're in a class or module scope, then the annotation needs to + // be available at runtime. + // See: https://docs.python.org/3/reference/simple_stmts.html#annotated-assignment-statements + if matches!( + semantic.current_scope().kind, + ScopeKind::Class(_) | ScopeKind::Module + ) { + return Self::RuntimeEvaluated; + } + + Self::TypingOnly + } +} diff --git a/crates/ruff_linter/src/checkers/ast/mod.rs b/crates/ruff_linter/src/checkers/ast/mod.rs index 11e8e704aa..2cc13787d7 100644 --- a/crates/ruff_linter/src/checkers/ast/mod.rs +++ b/crates/ruff_linter/src/checkers/ast/mod.rs @@ -44,12 +44,12 @@ use ruff_python_ast::helpers::{ }; use ruff_python_ast::identifier::Identifier; use ruff_python_ast::str::trailing_quote; -use ruff_python_ast::visitor::{walk_except_handler, walk_pattern, Visitor}; +use ruff_python_ast::visitor::{walk_except_handler, walk_f_string_element, walk_pattern, Visitor}; use ruff_python_ast::{helpers, str, visitor, PySourceType}; use ruff_python_codegen::{Generator, Quote, Stylist}; use ruff_python_index::Indexer; use ruff_python_parser::typing::{parse_type_annotation, AnnotationKind}; -use ruff_python_semantic::analyze::{typing, visibility}; +use ruff_python_semantic::analyze::{imports, typing, visibility}; use ruff_python_semantic::{ BindingFlags, BindingId, BindingKind, Exceptions, Export, FromImport, Globals, Import, Module, ModuleKind, NodeId, ScopeId, ScopeKind, SemanticModel, SemanticModelFlags, Snapshot, @@ -58,6 +58,7 @@ use ruff_python_semantic::{ use ruff_python_stdlib::builtins::{IPYTHON_BUILTINS, MAGIC_GLOBALS, PYTHON_BUILTINS}; use ruff_source_file::Locator; +use crate::checkers::ast::annotation::AnnotationContext; use crate::checkers::ast::deferred::Deferred; use crate::docstrings::extraction::ExtractionTarget; use crate::importer::Importer; @@ -68,6 +69,7 @@ use crate::settings::{flags, LinterSettings}; use crate::{docstrings, noqa}; mod analyze; +mod annotation; mod deferred; pub(crate) struct Checker<'a> { @@ -303,9 +305,12 @@ where } _ => { self.semantic.flags |= SemanticModelFlags::FUTURES_BOUNDARY; - if !self.semantic.seen_import_boundary() - && !helpers::is_assignment_to_a_dunder(stmt) - && !helpers::in_nested_block(self.semantic.current_statements()) + if !(self.semantic.seen_import_boundary() + || helpers::is_assignment_to_a_dunder(stmt) + || helpers::in_nested_block(self.semantic.current_statements()) + || imports::is_matplotlib_activation(stmt, self.semantic()) + || self.settings.preview.is_enabled() + && imports::is_sys_path_modification(stmt, self.semantic())) { self.semantic.flags |= SemanticModelFlags::IMPORT_BOUNDARY; } @@ -512,8 +517,10 @@ where .chain(¶meters.kwonlyargs) { if let Some(expr) = ¶meter_with_default.parameter.annotation { - if runtime_annotation || singledispatch { - self.visit_runtime_annotation(expr); + if singledispatch { + self.visit_runtime_required_annotation(expr); + } else if runtime_annotation { + self.visit_runtime_evaluated_annotation(expr); } else { 
self.visit_annotation(expr); }; @@ -526,7 +533,7 @@ where if let Some(arg) = ¶meters.vararg { if let Some(expr) = &arg.annotation { if runtime_annotation { - self.visit_runtime_annotation(expr); + self.visit_runtime_evaluated_annotation(expr); } else { self.visit_annotation(expr); }; @@ -535,7 +542,7 @@ where if let Some(arg) = ¶meters.kwarg { if let Some(expr) = &arg.annotation { if runtime_annotation { - self.visit_runtime_annotation(expr); + self.visit_runtime_evaluated_annotation(expr); } else { self.visit_annotation(expr); }; @@ -543,7 +550,7 @@ where } for expr in returns { if runtime_annotation { - self.visit_runtime_annotation(expr); + self.visit_runtime_evaluated_annotation(expr); } else { self.visit_annotation(expr); }; @@ -674,40 +681,16 @@ where value, .. }) => { - // If we're in a class or module scope, then the annotation needs to be - // available at runtime. - // See: https://docs.python.org/3/reference/simple_stmts.html#annotated-assignment-statements - let runtime_annotation = if self.semantic.future_annotations() { - self.semantic - .current_scope() - .kind - .as_class() - .is_some_and(|class_def| { - flake8_type_checking::helpers::runtime_evaluated_class( - class_def, - &self - .settings - .flake8_type_checking - .runtime_evaluated_base_classes, - &self - .settings - .flake8_type_checking - .runtime_evaluated_decorators, - &self.semantic, - ) - }) - } else { - matches!( - self.semantic.current_scope().kind, - ScopeKind::Class(_) | ScopeKind::Module - ) - }; - - if runtime_annotation { - self.visit_runtime_annotation(annotation); - } else { - self.visit_annotation(annotation); + match AnnotationContext::from_model(&self.semantic, self.settings) { + AnnotationContext::RuntimeRequired => { + self.visit_runtime_required_annotation(annotation); + } + AnnotationContext::RuntimeEvaluated => { + self.visit_runtime_evaluated_annotation(annotation); + } + AnnotationContext::TypingOnly => self.visit_annotation(annotation), } + if let Some(expr) = value { if self.semantic.match_typing_expr(annotation, "TypeAlias") { self.visit_type_definition(expr); @@ -815,8 +798,7 @@ where fn visit_expr(&mut self, expr: &'b Expr) { // Step 0: Pre-processing - if !self.semantic.in_f_string() - && !self.semantic.in_literal() + if !self.semantic.in_typing_literal() && !self.semantic.in_deferred_type_definition() && self.semantic.in_type_definition() && self.semantic.future_annotations() @@ -1198,7 +1180,7 @@ where ) { // Ex) Literal["Class"] Some(typing::SubscriptKind::Literal) => { - self.semantic.flags |= SemanticModelFlags::LITERAL; + self.semantic.flags |= SemanticModelFlags::TYPING_LITERAL; self.visit_expr(slice); self.visit_expr_context(ctx); @@ -1238,10 +1220,7 @@ where } } Expr::StringLiteral(ast::ExprStringLiteral { value, .. 
}) => { - if self.semantic.in_type_definition() - && !self.semantic.in_literal() - && !self.semantic.in_f_string() - { + if self.semantic.in_type_definition() && !self.semantic.in_typing_literal() { self.deferred.string_type_definitions.push(( expr.range(), value.to_str(), @@ -1271,6 +1250,13 @@ where // Step 4: Analysis analyze::expression(expr, self); + match expr { + Expr::StringLiteral(string_literal) => { + analyze::string_like(string_literal.into(), self); + } + Expr::BytesLiteral(bytes_literal) => analyze::string_like(bytes_literal.into(), self), + _ => {} + } self.semantic.flags = flags_snapshot; self.semantic.pop_node(); @@ -1326,17 +1312,6 @@ where self.semantic.flags = flags_snapshot; } - fn visit_format_spec(&mut self, format_spec: &'b Expr) { - match format_spec { - Expr::FString(ast::ExprFString { value, .. }) => { - for expr in value.elements() { - self.visit_expr(expr); - } - } - _ => unreachable!("Unexpected expression for format_spec"), - } - } - fn visit_parameters(&mut self, parameters: &'b Parameters) { // Step 1: Binding. // Bind, but intentionally avoid walking default expressions, as we handle them @@ -1446,6 +1421,16 @@ where .push((bound, self.semantic.snapshot())); } } + + fn visit_f_string_element(&mut self, f_string_element: &'b ast::FStringElement) { + // Step 2: Traversal + walk_f_string_element(self, f_string_element); + + // Step 4: Analysis + if let Some(literal) = f_string_element.as_literal() { + analyze::string_like(literal.into(), self); + } + } } impl<'a> Checker<'a> { @@ -1522,10 +1507,18 @@ impl<'a> Checker<'a> { self.semantic.flags = snapshot; } - /// Visit an [`Expr`], and treat it as a runtime-required type annotation. - fn visit_runtime_annotation(&mut self, expr: &'a Expr) { + /// Visit an [`Expr`], and treat it as a runtime-evaluated type annotation. + fn visit_runtime_evaluated_annotation(&mut self, expr: &'a Expr) { let snapshot = self.semantic.flags; - self.semantic.flags |= SemanticModelFlags::RUNTIME_ANNOTATION; + self.semantic.flags |= SemanticModelFlags::RUNTIME_EVALUATED_ANNOTATION; + self.visit_type_definition(expr); + self.semantic.flags = snapshot; + } + + /// Visit an [`Expr`], and treat it as a runtime-required type annotation. + fn visit_runtime_required_annotation(&mut self, expr: &'a Expr) { + let snapshot = self.semantic.flags; + self.semantic.flags |= SemanticModelFlags::RUNTIME_REQUIRED_ANNOTATION; self.visit_type_definition(expr); self.semantic.flags = snapshot; } @@ -2020,13 +2013,15 @@ pub(crate) fn check_ast( // Iterate over the AST. checker.visit_body(python_ast); - // Visit any deferred syntax nodes. + // Visit any deferred syntax nodes. Take care to visit in order, such that we avoid adding + // new deferred nodes after visiting nodes of that kind. For example, visiting a deferred + // function can add a deferred lambda, but the opposite is not true. checker.visit_deferred_functions(); - checker.visit_deferred_lambdas(); - checker.visit_deferred_future_type_definitions(); checker.visit_deferred_type_param_definitions(); + checker.visit_deferred_future_type_definitions(); let allocator = typed_arena::Arena::new(); checker.visit_deferred_string_type_definitions(&allocator); + checker.visit_deferred_lambdas(); checker.visit_exports(); // Check docstrings, bindings, and unresolved references. 
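For orientation, the `string_like` pass introduced above funnels string literals, bytes literals, and f-string literal elements through a single analyzer, so S104, S108, and the stub-only PYI053 check no longer need per-node special cases. A minimal Python illustration (not part of the patch; the path and values are examples only) of code the pass now reaches — note the f-string cases, which the updated S104/S108 snapshots later in this diff exercise:

```python
# Plain string literals were already analyzed; after this change the literal
# parts of f-strings are visited as well.
host = "0.0.0.0"        # S104: hardcoded binding to all interfaces
host_f = f"0.0.0.0"     # S104: now reported inside an f-string too

with open(f"/tmp/abc", "w") as f:  # S108: hardcoded temporary directory
    f.write("def")
```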
diff --git a/crates/ruff_linter/src/checkers/noqa.rs b/crates/ruff_linter/src/checkers/noqa.rs index 7b4224c4e1..055f802ccc 100644 --- a/crates/ruff_linter/src/checkers/noqa.rs +++ b/crates/ruff_linter/src/checkers/noqa.rs @@ -3,10 +3,10 @@ use std::path::Path; use itertools::Itertools; -use ruff_text_size::{Ranged, TextLen, TextRange, TextSize}; +use ruff_text_size::{Ranged, TextLen, TextRange}; use ruff_diagnostics::{Diagnostic, Edit, Fix}; -use ruff_python_trivia::CommentRanges; +use ruff_python_trivia::{CommentRanges, PythonWhitespace}; use ruff_source_file::Locator; use crate::noqa; @@ -200,17 +200,11 @@ fn delete_noqa(range: TextRange, locator: &Locator) -> Edit { // Compute the leading space. let prefix = locator.slice(TextRange::new(line_range.start(), range.start())); - let leading_space = prefix - .rfind(|c: char| !c.is_whitespace()) - .map_or(prefix.len(), |i| prefix.len() - i - 1); - let leading_space_len = TextSize::try_from(leading_space).unwrap(); + let leading_space_len = prefix.text_len() - prefix.trim_whitespace_end().text_len(); // Compute the trailing space. let suffix = locator.slice(TextRange::new(range.end(), line_range.end())); - let trailing_space = suffix - .find(|c: char| !c.is_whitespace()) - .map_or(suffix.len(), |i| i); - let trailing_space_len = TextSize::try_from(trailing_space).unwrap(); + let trailing_space_len = suffix.text_len() - suffix.trim_whitespace_start().text_len(); // Ex) `# noqa` if line_range diff --git a/crates/ruff_linter/src/codes.rs b/crates/ruff_linter/src/codes.rs index cb1f907b65..158d60d4a3 100644 --- a/crates/ruff_linter/src/codes.rs +++ b/crates/ruff_linter/src/codes.rs @@ -252,8 +252,10 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> { (Pylint, "R0911") => (RuleGroup::Stable, rules::pylint::rules::TooManyReturnStatements), (Pylint, "R0912") => (RuleGroup::Stable, rules::pylint::rules::TooManyBranches), (Pylint, "R0913") => (RuleGroup::Stable, rules::pylint::rules::TooManyArguments), + (Pylint, "R0914") => (RuleGroup::Preview, rules::pylint::rules::TooManyLocals), (Pylint, "R0915") => (RuleGroup::Stable, rules::pylint::rules::TooManyStatements), (Pylint, "R0916") => (RuleGroup::Preview, rules::pylint::rules::TooManyBooleanExpressions), + (Pylint, "R0917") => (RuleGroup::Preview, rules::pylint::rules::TooManyPositional), (Pylint, "R1701") => (RuleGroup::Stable, rules::pylint::rules::RepeatedIsinstanceCalls), (Pylint, "R1704") => (RuleGroup::Preview, rules::pylint::rules::RedefinedArgumentFromLocal), (Pylint, "R1711") => (RuleGroup::Stable, rules::pylint::rules::UselessReturn), @@ -806,6 +808,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> { (Flake8TypeChecking, "003") => (RuleGroup::Stable, rules::flake8_type_checking::rules::TypingOnlyStandardLibraryImport), (Flake8TypeChecking, "004") => (RuleGroup::Stable, rules::flake8_type_checking::rules::RuntimeImportInTypeCheckingBlock), (Flake8TypeChecking, "005") => (RuleGroup::Stable, rules::flake8_type_checking::rules::EmptyTypeCheckingBlock), + (Flake8TypeChecking, "006") => (RuleGroup::Preview, rules::flake8_type_checking::rules::RuntimeStringUnion), // tryceratops (Tryceratops, "002") => (RuleGroup::Stable, rules::tryceratops::rules::RaiseVanillaClass), @@ -950,6 +953,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> { (Refurb, "105") => (RuleGroup::Preview, rules::refurb::rules::PrintEmptyString), #[allow(deprecated)] (Refurb, "113") => (RuleGroup::Nursery, 
rules::refurb::rules::RepeatedAppend), + (Refurb, "118") => (RuleGroup::Preview, rules::refurb::rules::ReimplementedOperator), #[allow(deprecated)] (Refurb, "131") => (RuleGroup::Nursery, rules::refurb::rules::DeleteFullSlice), #[allow(deprecated)] @@ -964,6 +968,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> { (Refurb, "169") => (RuleGroup::Preview, rules::refurb::rules::TypeNoneComparison), (Refurb, "171") => (RuleGroup::Preview, rules::refurb::rules::SingleItemMembershipTest), (Refurb, "177") => (RuleGroup::Preview, rules::refurb::rules::ImplicitCwd), + (Refurb, "181") => (RuleGroup::Preview, rules::refurb::rules::HashlibDigestHex), // flake8-logging (Flake8Logging, "001") => (RuleGroup::Preview, rules::flake8_logging::rules::DirectLoggerInstantiation), diff --git a/crates/ruff_linter/src/importer/mod.rs b/crates/ruff_linter/src/importer/mod.rs index cb1c4b1ea6..6dce7a35a1 100644 --- a/crates/ruff_linter/src/importer/mod.rs +++ b/crates/ruff_linter/src/importer/mod.rs @@ -13,7 +13,7 @@ use ruff_text_size::{Ranged, TextSize}; use ruff_diagnostics::Edit; use ruff_python_ast::imports::{AnyImport, Import, ImportFrom}; use ruff_python_codegen::Stylist; -use ruff_python_semantic::SemanticModel; +use ruff_python_semantic::{ImportedName, SemanticModel}; use ruff_python_trivia::textwrap::indent; use ruff_source_file::Locator; @@ -132,7 +132,48 @@ impl<'a> Importer<'a> { )?; // Import the `TYPE_CHECKING` symbol from the typing module. - let (type_checking_edit, type_checking) = self.get_or_import_type_checking(at, semantic)?; + let (type_checking_edit, type_checking) = + if let Some(type_checking) = Self::find_type_checking(at, semantic)? { + // Special-case: if the `TYPE_CHECKING` symbol is imported as part of the same + // statement that we're modifying, avoid adding a no-op edit. For example, here, + // the `TYPE_CHECKING` no-op edit would overlap with the edit to remove `Final` + // from the import: + // ```python + // from __future__ import annotations + // + // from typing import Final, TYPE_CHECKING + // + // Const: Final[dict] = {} + // ``` + let edit = if type_checking.statement(semantic) == import.statement { + None + } else { + Some(Edit::range_replacement( + self.locator.slice(type_checking.range()).to_string(), + type_checking.range(), + )) + }; + (edit, type_checking.into_name()) + } else { + // Special-case: if the `TYPE_CHECKING` symbol would be added to the same import + // we're modifying, import it as a separate import statement. For example, here, + // we're concurrently removing `Final` and adding `TYPE_CHECKING`, so it's easier to + // use a separate import statement: + // ```python + // from __future__ import annotations + // + // from typing import Final + // + // Const: Final[dict] = {} + // ``` + let (edit, name) = self.import_symbol( + &ImportRequest::import_from("typing", "TYPE_CHECKING"), + at, + Some(import.statement), + semantic, + )?; + (Some(edit), name) + }; // Add the import to a `TYPE_CHECKING` block. let add_import_edit = if let Some(block) = self.preceding_type_checking_block(at) { @@ -157,28 +198,21 @@ impl<'a> Importer<'a> { }) } - /// Generate an [`Edit`] to reference `typing.TYPE_CHECKING`. Returns the [`Edit`] necessary to - /// make the symbol available in the current scope along with the bound name of the symbol. - fn get_or_import_type_checking( - &self, + /// Find a reference to `typing.TYPE_CHECKING`. 
+ fn find_type_checking( at: TextSize, semantic: &SemanticModel, - ) -> Result<(Edit, String), ResolutionError> { + ) -> Result<Option<ImportedName>, ResolutionError> { for module in semantic.typing_modules() { - if let Some((edit, name)) = self.get_symbol( + if let Some(imported_name) = Self::find_symbol( &ImportRequest::import_from(module, "TYPE_CHECKING"), at, semantic, )? { - return Ok((edit, name)); + return Ok(Some(imported_name)); } } - - self.import_symbol( - &ImportRequest::import_from("typing", "TYPE_CHECKING"), - at, - semantic, - ) + Ok(None) } /// Generate an [`Edit`] to reference the given symbol. Returns the [`Edit`] necessary to make @@ -192,16 +226,15 @@ impl<'a> Importer<'a> { semantic: &SemanticModel, ) -> Result<(Edit, String), ResolutionError> { self.get_symbol(symbol, at, semantic)? - .map_or_else(|| self.import_symbol(symbol, at, semantic), Ok) + .map_or_else(|| self.import_symbol(symbol, at, None, semantic), Ok) } - /// Return an [`Edit`] to reference an existing symbol, if it's present in the given [`SemanticModel`]. - fn get_symbol( - &self, + /// Return the [`ImportedName`] to for existing symbol, if it's present in the given [`SemanticModel`]. + fn find_symbol( symbol: &ImportRequest, at: TextSize, semantic: &SemanticModel, - ) -> Result<Option<(Edit, String)>, ResolutionError> { + ) -> Result<Option<ImportedName>, ResolutionError> { // If the symbol is already available in the current scope, use it. let Some(imported_name) = semantic.resolve_qualified_import_name(symbol.module, symbol.member) @@ -226,6 +259,21 @@ impl<'a> Importer<'a> { return Err(ResolutionError::IncompatibleContext); } + Ok(Some(imported_name)) + } + + /// Return an [`Edit`] to reference an existing symbol, if it's present in the given [`SemanticModel`]. + fn get_symbol( + &self, + symbol: &ImportRequest, + at: TextSize, + semantic: &SemanticModel, + ) -> Result<Option<(Edit, String)>, ResolutionError> { + // Find the symbol in the current scope. + let Some(imported_name) = Self::find_symbol(symbol, at, semantic)? else { + return Ok(None); + }; + // We also add a no-op edit to force conflicts with any other fixes that might try to // remove the import. Consider: // @@ -259,9 +307,13 @@ impl<'a> Importer<'a> { &self, symbol: &ImportRequest, at: TextSize, + except: Option<&Stmt>, semantic: &SemanticModel, ) -> Result<(Edit, String), ResolutionError> { - if let Some(stmt) = self.find_import_from(symbol.module, at) { + if let Some(stmt) = self + .find_import_from(symbol.module, at) + .filter(|stmt| except != Some(stmt)) + { // Case 1: `from functools import lru_cache` is in scope, and we're trying to reference // `functools.cache`; thus, we add `cache` to the import, and return `"cache"` as the // bound name. @@ -423,14 +475,18 @@ impl RuntimeImportEdit { #[derive(Debug)] pub(crate) struct TypingImportEdit { /// The edit to add the `TYPE_CHECKING` symbol to the module. - type_checking_edit: Edit, + type_checking_edit: Option<Edit>, /// The edit to add the import to a `TYPE_CHECKING` block.
add_import_edit: Edit, } impl TypingImportEdit { - pub(crate) fn into_edits(self) -> Vec<Edit> { - vec![self.type_checking_edit, self.add_import_edit] + pub(crate) fn into_edits(self) -> (Edit, Option<Edit>) { + if let Some(type_checking_edit) = self.type_checking_edit { + (type_checking_edit, Some(self.add_import_edit)) + } else { + (self.add_import_edit, None) + } } } diff --git a/crates/ruff_linter/src/message/mod.rs b/crates/ruff_linter/src/message/mod.rs index 69f7241b04..2f44de44ed 100644 --- a/crates/ruff_linter/src/message/mod.rs +++ b/crates/ruff_linter/src/message/mod.rs @@ -17,6 +17,7 @@ use ruff_diagnostics::{Diagnostic, DiagnosticKind, Fix}; use ruff_notebook::NotebookIndex; use ruff_source_file::{SourceFile, SourceLocation}; use ruff_text_size::{Ranged, TextRange, TextSize}; +pub use sarif::SarifEmitter; pub use text::TextEmitter; mod azure; @@ -28,6 +29,7 @@ mod json; mod json_lines; mod junit; mod pylint; +mod sarif; mod text; #[derive(Debug, PartialEq, Eq)] diff --git a/crates/ruff_linter/src/message/sarif.rs b/crates/ruff_linter/src/message/sarif.rs new file mode 100644 index 0000000000..3517c0eee3 --- /dev/null +++ b/crates/ruff_linter/src/message/sarif.rs @@ -0,0 +1,212 @@ +use std::io::Write; + +use anyhow::Result; +use serde::{Serialize, Serializer}; +use serde_json::json; + +use ruff_source_file::OneIndexed; + +use crate::codes::Rule; +use crate::fs::normalize_path; +use crate::message::{Emitter, EmitterContext, Message}; +use crate::registry::{AsRule, Linter, RuleNamespace}; +use crate::VERSION; + +use strum::IntoEnumIterator; + +pub struct SarifEmitter; + +impl Emitter for SarifEmitter { + fn emit( + &mut self, + writer: &mut dyn Write, + messages: &[Message], + _context: &EmitterContext, + ) -> Result<()> { + let results = messages + .iter() + .map(SarifResult::from_message) + .collect::<Result<Vec<_>>>()?; + + let output = json!({ + "$schema": "https://json.schemastore.org/sarif-2.1.0.json", + "version": "2.1.0", + "runs": [{ + "tool": { + "driver": { + "name": "ruff", + "informationUri": "https://github.com/astral-sh/ruff", + "rules": Rule::iter().map(SarifRule::from).collect::<Vec<_>>(), + "version": VERSION.to_string(), + } + }, + "results": results, + }], + }); + serde_json::to_writer_pretty(writer, &output)?; + Ok(()) + } +} + +#[derive(Debug, Clone)] +struct SarifRule<'a> { + name: &'a str, + code: String, + linter: &'a str, + summary: &'a str, + explanation: Option<&'a str>, + url: Option<String>, +} + +impl From<Rule> for SarifRule<'_> { + fn from(rule: Rule) -> Self { + let code = rule.noqa_code().to_string(); + let (linter, _) = Linter::parse_code(&code).unwrap(); + Self { + name: rule.into(), + code, + linter: linter.name(), + summary: rule.message_formats()[0], + explanation: rule.explanation(), + url: rule.url(), + } + } } + +impl Serialize for SarifRule<'_> { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + { + json!({ + "id": self.code, + "shortDescription": { + "text": self.summary, + }, + "fullDescription": { + "text": self.explanation, + }, + "help": { + "text": self.summary, + }, + "helpUri": self.url, + "properties": { + "id": self.code, + "kind": self.linter, + "name": self.name, + "problem.severity": "error".to_string(), + }, + }) + .serialize(serializer) + } +} + +#[derive(Debug)] +struct SarifResult { + rule: Rule, + level: String, + message: String, + uri: String, + start_line: OneIndexed, + start_column: OneIndexed, + end_line: OneIndexed, + end_column: OneIndexed, +} + +impl SarifResult { + #[cfg(not(target_arch = "wasm32"))] + fn from_message(message:
&Message) -> Result<Self> { + let start_location = message.compute_start_location(); + let end_location = message.compute_end_location(); + let path = normalize_path(message.filename()); + Ok(Self { + rule: message.kind.rule(), + level: "error".to_string(), + message: message.kind.name.clone(), + uri: url::Url::from_file_path(&path) + .map_err(|()| anyhow::anyhow!("Failed to convert path to URL: {}", path.display()))? + .to_string(), + start_line: start_location.row, + start_column: start_location.column, + end_line: end_location.row, + end_column: end_location.column, + }) + } + + #[cfg(target_arch = "wasm32")] + #[allow(clippy::unnecessary_wraps)] + fn from_message(message: &Message) -> Result<Self> { + let start_location = message.compute_start_location(); + let end_location = message.compute_end_location(); + let path = normalize_path(message.filename()); + Ok(Self { + rule: message.kind.rule(), + level: "error".to_string(), + message: message.kind.name.clone(), + uri: path.display().to_string(), + start_line: start_location.row, + start_column: start_location.column, + end_line: end_location.row, + end_column: end_location.column, + }) + } +} + +impl Serialize for SarifResult { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + { + json!({ + "level": self.level, + "message": { + "text": self.message, + }, + "locations": [{ + "physicalLocation": { + "artifactLocation": { + "uri": self.uri, + }, + "region": { + "startLine": self.start_line, + "startColumn": self.start_column, + "endLine": self.end_line, + "endColumn": self.end_column, + } + } + }], + "ruleId": self.rule.noqa_code().to_string(), + }) + .serialize(serializer) + } +} + +#[cfg(test)] +mod tests { + + use crate::message::tests::{capture_emitter_output, create_messages}; + use crate::message::SarifEmitter; + + fn get_output() -> String { + let mut emitter = SarifEmitter {}; + capture_emitter_output(&mut emitter, &create_messages()) + } + + #[test] + fn valid_json() { + let content = get_output(); + serde_json::from_str::<serde_json::Value>(&content).unwrap(); + } + + #[test] + fn test_results() { + let content = get_output(); + let sarif = serde_json::from_str::<serde_json::Value>(content.as_str()).unwrap(); + let rules = sarif["runs"][0]["tool"]["driver"]["rules"] + .as_array() + .unwrap(); + let results = sarif["runs"][0]["results"].as_array().unwrap(); + assert_eq!(results.len(), 3); + assert!(rules.len() > 3); + } +} diff --git a/crates/ruff_linter/src/rules/flake8_annotations/helpers.rs b/crates/ruff_linter/src/rules/flake8_annotations/helpers.rs index 41cff286d7..0ef1bcf262 100644 --- a/crates/ruff_linter/src/rules/flake8_annotations/helpers.rs +++ b/crates/ruff_linter/src/rules/flake8_annotations/helpers.rs @@ -3,7 +3,7 @@ use rustc_hash::FxHashSet; use ruff_diagnostics::Edit; use ruff_python_ast::helpers::{ - implicit_return, pep_604_union, typing_optional, typing_union, ReturnStatementVisitor, + pep_604_union, typing_optional, typing_union, ReturnStatementVisitor, Terminal, }; use ruff_python_ast::visitor::Visitor; use ruff_python_ast::{self as ast, Expr, ExprContext}; @@ -57,6 +57,14 @@ pub(crate) fn auto_return_type(function: &ast::StmtFunctionDef) -> Option Option 0: // return 1 // ``` - if implicit_return(function) { + if terminal.is_none() { return_type = return_type.union(ResolvedPythonType::Atom(PythonType::None)); } @@ -94,6 +102,7 @@ pub(crate) fn auto_return_type(function: &ast::StmtFunctionDef) -> Option), } @@ -111,6 +120,28 @@ impl AutoPythonType { target_version: PythonVersion, ) -> Option<(Expr, Vec<Edit>)> { match self { +
AutoPythonType::Never => { + let (no_return_edit, binding) = importer + .get_or_import_symbol( + &ImportRequest::import_from( + "typing", + if target_version >= PythonVersion::Py311 { + "Never" + } else { + "NoReturn" + }, + ), + at, + semantic, + ) + .ok()?; + let expr = Expr::Name(ast::ExprName { + id: binding, + range: TextRange::default(), + ctx: ExprContext::Load, + }); + Some((expr, vec![no_return_edit])) + } AutoPythonType::Atom(python_type) => { let expr = type_expr(python_type)?; Some((expr, vec![])) diff --git a/crates/ruff_linter/src/rules/flake8_annotations/rules/definition.rs b/crates/ruff_linter/src/rules/flake8_annotations/rules/definition.rs index 9360d0dfdd..dee00d6667 100644 --- a/crates/ruff_linter/src/rules/flake8_annotations/rules/definition.rs +++ b/crates/ruff_linter/src/rules/flake8_annotations/rules/definition.rs @@ -537,6 +537,19 @@ fn check_dynamically_typed( } } +fn is_empty_body(body: &[Stmt]) -> bool { + body.iter().all(|stmt| match stmt { + Stmt::Pass(_) => true, + Stmt::Expr(ast::StmtExpr { value, range: _ }) => { + matches!( + value.as_ref(), + Expr::StringLiteral(_) | Expr::EllipsisLiteral(_) + ) + } + _ => false, + }) +} + /// Generate flake8-annotation checks for a given `Definition`. pub(crate) fn definition( checker: &Checker, @@ -725,16 +738,22 @@ pub(crate) fn definition( ) { if is_method && visibility::is_classmethod(decorator_list, checker.semantic()) { if checker.enabled(Rule::MissingReturnTypeClassMethod) { - let return_type = auto_return_type(function) - .and_then(|return_type| { - return_type.into_expression( - checker.importer(), - function.parameters.start(), - checker.semantic(), - checker.settings.target_version, - ) - }) - .map(|(return_type, edits)| (checker.generator().expr(&return_type), edits)); + let return_type = if visibility::is_abstract(decorator_list, checker.semantic()) + && is_empty_body(body) + { + None + } else { + auto_return_type(function) + .and_then(|return_type| { + return_type.into_expression( + checker.importer(), + function.parameters.start(), + checker.semantic(), + checker.settings.target_version, + ) + }) + .map(|(return_type, edits)| (checker.generator().expr(&return_type), edits)) + }; let mut diagnostic = Diagnostic::new( MissingReturnTypeClassMethod { name: name.to_string(), @@ -752,16 +771,22 @@ pub(crate) fn definition( } } else if is_method && visibility::is_staticmethod(decorator_list, checker.semantic()) { if checker.enabled(Rule::MissingReturnTypeStaticMethod) { - let return_type = auto_return_type(function) - .and_then(|return_type| { - return_type.into_expression( - checker.importer(), - function.parameters.start(), - checker.semantic(), - checker.settings.target_version, - ) - }) - .map(|(return_type, edits)| (checker.generator().expr(&return_type), edits)); + let return_type = if visibility::is_abstract(decorator_list, checker.semantic()) + && is_empty_body(body) + { + None + } else { + auto_return_type(function) + .and_then(|return_type| { + return_type.into_expression( + checker.importer(), + function.parameters.start(), + checker.semantic(), + checker.settings.target_version, + ) + }) + .map(|(return_type, edits)| (checker.generator().expr(&return_type), edits)) + }; let mut diagnostic = Diagnostic::new( MissingReturnTypeStaticMethod { name: name.to_string(), @@ -818,18 +843,25 @@ pub(crate) fn definition( match visibility { visibility::Visibility::Public => { if checker.enabled(Rule::MissingReturnTypeUndocumentedPublicFunction) { - let return_type = auto_return_type(function) - 
.and_then(|return_type| { - return_type.into_expression( - checker.importer(), - function.parameters.start(), - checker.semantic(), - checker.settings.target_version, - ) - }) - .map(|(return_type, edits)| { - (checker.generator().expr(&return_type), edits) - }); + let return_type = + if visibility::is_abstract(decorator_list, checker.semantic()) + && is_empty_body(body) + { + None + } else { + auto_return_type(function) + .and_then(|return_type| { + return_type.into_expression( + checker.importer(), + function.parameters.start(), + checker.semantic(), + checker.settings.target_version, + ) + }) + .map(|(return_type, edits)| { + (checker.generator().expr(&return_type), edits) + }) + }; let mut diagnostic = Diagnostic::new( MissingReturnTypeUndocumentedPublicFunction { name: name.to_string(), @@ -853,18 +885,25 @@ pub(crate) fn definition( } visibility::Visibility::Private => { if checker.enabled(Rule::MissingReturnTypePrivateFunction) { - let return_type = auto_return_type(function) - .and_then(|return_type| { - return_type.into_expression( - checker.importer(), - function.parameters.start(), - checker.semantic(), - checker.settings.target_version, - ) - }) - .map(|(return_type, edits)| { - (checker.generator().expr(&return_type), edits) - }); + let return_type = + if visibility::is_abstract(decorator_list, checker.semantic()) + && is_empty_body(body) + { + None + } else { + auto_return_type(function) + .and_then(|return_type| { + return_type.into_expression( + checker.importer(), + function.parameters.start(), + checker.semantic(), + checker.settings.target_version, + ) + }) + .map(|(return_type, edits)| { + (checker.generator().expr(&return_type), edits) + }) + }; let mut diagnostic = Diagnostic::new( MissingReturnTypePrivateFunction { name: name.to_string(), diff --git a/crates/ruff_linter/src/rules/flake8_annotations/snapshots/ruff_linter__rules__flake8_annotations__tests__auto_return_type.snap b/crates/ruff_linter/src/rules/flake8_annotations/snapshots/ruff_linter__rules__flake8_annotations__tests__auto_return_type.snap index 6fcbb95d02..d374776949 100644 --- a/crates/ruff_linter/src/rules/flake8_annotations/snapshots/ruff_linter__rules__flake8_annotations__tests__auto_return_type.snap +++ b/crates/ruff_linter/src/rules/flake8_annotations/snapshots/ruff_linter__rules__flake8_annotations__tests__auto_return_type.snap @@ -427,4 +427,156 @@ auto_return_type.py:146:5: ANN201 [*] Missing return type annotation for public 148 148 | break 149 149 | return 1 +auto_return_type.py:158:9: ANN201 Missing return type annotation for public function `method` + | +156 | class Foo(abc.ABC): +157 | @abstractmethod +158 | def method(self): + | ^^^^^^ ANN201 +159 | pass + | + = help: Add return type annotation + +auto_return_type.py:162:9: ANN201 Missing return type annotation for public function `method` + | +161 | @abc.abstractmethod +162 | def method(self): + | ^^^^^^ ANN201 +163 | """Docstring.""" + | + = help: Add return type annotation + +auto_return_type.py:166:9: ANN201 Missing return type annotation for public function `method` + | +165 | @abc.abstractmethod +166 | def method(self): + | ^^^^^^ ANN201 +167 | ... 
+ | + = help: Add return type annotation + +auto_return_type.py:171:9: ANN205 Missing return type annotation for staticmethod `method` + | +169 | @staticmethod +170 | @abstractmethod +171 | def method(): + | ^^^^^^ ANN205 +172 | pass + | + = help: Add return type annotation + +auto_return_type.py:176:9: ANN206 Missing return type annotation for classmethod `method` + | +174 | @classmethod +175 | @abstractmethod +176 | def method(cls): + | ^^^^^^ ANN206 +177 | pass + | + = help: Add return type annotation + +auto_return_type.py:180:9: ANN201 [*] Missing return type annotation for public function `method` + | +179 | @abstractmethod +180 | def method(self): + | ^^^^^^ ANN201 +181 | if self.x > 0: +182 | return 1 + | + = help: Add return type annotation: `float` + +ℹ Unsafe fix +177 177 | pass +178 178 | +179 179 | @abstractmethod +180 |- def method(self): + 180 |+ def method(self) -> float: +181 181 | if self.x > 0: +182 182 | return 1 +183 183 | else: + +auto_return_type.py:187:5: ANN201 [*] Missing return type annotation for public function `func` + | +187 | def func(x: int): + | ^^^^ ANN201 +188 | try: +189 | pass + | + = help: Add return type annotation: `int | None` + +ℹ Unsafe fix +184 184 | return 1.5 +185 185 | +186 186 | +187 |-def func(x: int): + 187 |+def func(x: int) -> int | None: +188 188 | try: +189 189 | pass +190 190 | except: + +auto_return_type.py:194:5: ANN201 [*] Missing return type annotation for public function `func` + | +194 | def func(x: int): + | ^^^^ ANN201 +195 | try: +196 | pass + | + = help: Add return type annotation: `int` + +ℹ Unsafe fix +191 191 | return 2 +192 192 | +193 193 | +194 |-def func(x: int): + 194 |+def func(x: int) -> int: +195 195 | try: +196 196 | pass +197 197 | except: + +auto_return_type.py:203:5: ANN201 [*] Missing return type annotation for public function `func` + | +203 | def func(x: int): + | ^^^^ ANN201 +204 | if not x: +205 | raise ValueError + | + = help: Add return type annotation: `Never` + +ℹ Unsafe fix +151 151 | +152 152 | import abc +153 153 | from abc import abstractmethod + 154 |+from typing import Never +154 155 | +155 156 | +156 157 | class Foo(abc.ABC): +-------------------------------------------------------------------------------- +200 201 | return 3 +201 202 | +202 203 | +203 |-def func(x: int): + 204 |+def func(x: int) -> Never: +204 205 | if not x: +205 206 | raise ValueError +206 207 | else: + +auto_return_type.py:210:5: ANN201 [*] Missing return type annotation for public function `func` + | +210 | def func(x: int): + | ^^^^ ANN201 +211 | if not x: +212 | raise ValueError + | + = help: Add return type annotation: `int` + +ℹ Unsafe fix +207 207 | raise TypeError +208 208 | +209 209 | +210 |-def func(x: int): + 210 |+def func(x: int) -> int: +211 211 | if not x: +212 212 | raise ValueError +213 213 | else: + diff --git a/crates/ruff_linter/src/rules/flake8_annotations/snapshots/ruff_linter__rules__flake8_annotations__tests__auto_return_type_py38.snap b/crates/ruff_linter/src/rules/flake8_annotations/snapshots/ruff_linter__rules__flake8_annotations__tests__auto_return_type_py38.snap index d91484ca3f..a2fb6448f7 100644 --- a/crates/ruff_linter/src/rules/flake8_annotations/snapshots/ruff_linter__rules__flake8_annotations__tests__auto_return_type_py38.snap +++ b/crates/ruff_linter/src/rules/flake8_annotations/snapshots/ruff_linter__rules__flake8_annotations__tests__auto_return_type_py38.snap @@ -482,4 +482,164 @@ auto_return_type.py:146:5: ANN201 [*] Missing return type annotation for public 148 149 | break 149 150 | 
return 1 +auto_return_type.py:158:9: ANN201 Missing return type annotation for public function `method` + | +156 | class Foo(abc.ABC): +157 | @abstractmethod +158 | def method(self): + | ^^^^^^ ANN201 +159 | pass + | + = help: Add return type annotation + +auto_return_type.py:162:9: ANN201 Missing return type annotation for public function `method` + | +161 | @abc.abstractmethod +162 | def method(self): + | ^^^^^^ ANN201 +163 | """Docstring.""" + | + = help: Add return type annotation + +auto_return_type.py:166:9: ANN201 Missing return type annotation for public function `method` + | +165 | @abc.abstractmethod +166 | def method(self): + | ^^^^^^ ANN201 +167 | ... + | + = help: Add return type annotation + +auto_return_type.py:171:9: ANN205 Missing return type annotation for staticmethod `method` + | +169 | @staticmethod +170 | @abstractmethod +171 | def method(): + | ^^^^^^ ANN205 +172 | pass + | + = help: Add return type annotation + +auto_return_type.py:176:9: ANN206 Missing return type annotation for classmethod `method` + | +174 | @classmethod +175 | @abstractmethod +176 | def method(cls): + | ^^^^^^ ANN206 +177 | pass + | + = help: Add return type annotation + +auto_return_type.py:180:9: ANN201 [*] Missing return type annotation for public function `method` + | +179 | @abstractmethod +180 | def method(self): + | ^^^^^^ ANN201 +181 | if self.x > 0: +182 | return 1 + | + = help: Add return type annotation: `float` + +ℹ Unsafe fix +177 177 | pass +178 178 | +179 179 | @abstractmethod +180 |- def method(self): + 180 |+ def method(self) -> float: +181 181 | if self.x > 0: +182 182 | return 1 +183 183 | else: + +auto_return_type.py:187:5: ANN201 [*] Missing return type annotation for public function `func` + | +187 | def func(x: int): + | ^^^^ ANN201 +188 | try: +189 | pass + | + = help: Add return type annotation: `Optional[int]` + +ℹ Unsafe fix +151 151 | +152 152 | import abc +153 153 | from abc import abstractmethod + 154 |+from typing import Optional +154 155 | +155 156 | +156 157 | class Foo(abc.ABC): +-------------------------------------------------------------------------------- +184 185 | return 1.5 +185 186 | +186 187 | +187 |-def func(x: int): + 188 |+def func(x: int) -> Optional[int]: +188 189 | try: +189 190 | pass +190 191 | except: + +auto_return_type.py:194:5: ANN201 [*] Missing return type annotation for public function `func` + | +194 | def func(x: int): + | ^^^^ ANN201 +195 | try: +196 | pass + | + = help: Add return type annotation: `int` + +ℹ Unsafe fix +191 191 | return 2 +192 192 | +193 193 | +194 |-def func(x: int): + 194 |+def func(x: int) -> int: +195 195 | try: +196 196 | pass +197 197 | except: + +auto_return_type.py:203:5: ANN201 [*] Missing return type annotation for public function `func` + | +203 | def func(x: int): + | ^^^^ ANN201 +204 | if not x: +205 | raise ValueError + | + = help: Add return type annotation: `NoReturn` + +ℹ Unsafe fix +151 151 | +152 152 | import abc +153 153 | from abc import abstractmethod + 154 |+from typing import NoReturn +154 155 | +155 156 | +156 157 | class Foo(abc.ABC): +-------------------------------------------------------------------------------- +200 201 | return 3 +201 202 | +202 203 | +203 |-def func(x: int): + 204 |+def func(x: int) -> NoReturn: +204 205 | if not x: +205 206 | raise ValueError +206 207 | else: + +auto_return_type.py:210:5: ANN201 [*] Missing return type annotation for public function `func` + | +210 | def func(x: int): + | ^^^^ ANN201 +211 | if not x: +212 | raise ValueError + | + = help: Add return 
type annotation: `int` + +ℹ Unsafe fix +207 207 | raise TypeError +208 208 | +209 209 | +210 |-def func(x: int): + 210 |+def func(x: int) -> int: +211 211 | if not x: +212 212 | raise ValueError +213 213 | else: + diff --git a/crates/ruff_linter/src/rules/flake8_bandit/rules/hardcoded_bind_all_interfaces.rs b/crates/ruff_linter/src/rules/flake8_bandit/rules/hardcoded_bind_all_interfaces.rs index 49b16b66cd..38295b7131 100644 --- a/crates/ruff_linter/src/rules/flake8_bandit/rules/hardcoded_bind_all_interfaces.rs +++ b/crates/ruff_linter/src/rules/flake8_bandit/rules/hardcoded_bind_all_interfaces.rs @@ -1,6 +1,9 @@ use ruff_diagnostics::{Diagnostic, Violation}; use ruff_macros::{derive_message_formats, violation}; -use ruff_python_ast::ExprStringLiteral; +use ruff_python_ast::{self as ast, StringLike}; +use ruff_text_size::Ranged; + +use crate::checkers::ast::Checker; /// ## What it does /// Checks for hardcoded bindings to all network interfaces (`0.0.0.0`). @@ -34,10 +37,16 @@ impl Violation for HardcodedBindAllInterfaces { } /// S104 -pub(crate) fn hardcoded_bind_all_interfaces(string: &ExprStringLiteral) -> Option { - if string.value.to_str() == "0.0.0.0" { - Some(Diagnostic::new(HardcodedBindAllInterfaces, string.range)) - } else { - None +pub(crate) fn hardcoded_bind_all_interfaces(checker: &mut Checker, string: StringLike) { + let is_bind_all_interface = match string { + StringLike::StringLiteral(ast::ExprStringLiteral { value, .. }) => value == "0.0.0.0", + StringLike::FStringLiteral(ast::FStringLiteralElement { value, .. }) => value == "0.0.0.0", + StringLike::BytesLiteral(_) => return, + }; + + if is_bind_all_interface { + checker + .diagnostics + .push(Diagnostic::new(HardcodedBindAllInterfaces, string.range())); } } diff --git a/crates/ruff_linter/src/rules/flake8_bandit/rules/hardcoded_sql_expression.rs b/crates/ruff_linter/src/rules/flake8_bandit/rules/hardcoded_sql_expression.rs index 25d0f3e710..ff892e6b3f 100644 --- a/crates/ruff_linter/src/rules/flake8_bandit/rules/hardcoded_sql_expression.rs +++ b/crates/ruff_linter/src/rules/flake8_bandit/rules/hardcoded_sql_expression.rs @@ -57,7 +57,7 @@ impl Violation for HardcodedSQLExpression { /// becomes `foobar {x}baz`. fn concatenated_f_string(expr: &ast::ExprFString, locator: &Locator) -> String { expr.value - .parts() + .iter() .filter_map(|part| { raw_contents(locator.slice(part)).map(|s| s.escape_default().to_string()) }) diff --git a/crates/ruff_linter/src/rules/flake8_bandit/rules/hardcoded_tmp_directory.rs b/crates/ruff_linter/src/rules/flake8_bandit/rules/hardcoded_tmp_directory.rs index 09de15f20b..e0a66fec19 100644 --- a/crates/ruff_linter/src/rules/flake8_bandit/rules/hardcoded_tmp_directory.rs +++ b/crates/ruff_linter/src/rules/flake8_bandit/rules/hardcoded_tmp_directory.rs @@ -1,4 +1,5 @@ -use ruff_python_ast::{self as ast, Expr}; +use ruff_python_ast::{self as ast, Expr, StringLike}; +use ruff_text_size::Ranged; use ruff_diagnostics::{Diagnostic, Violation}; use ruff_macros::{derive_message_formats, violation}; @@ -51,13 +52,19 @@ impl Violation for HardcodedTempFile { } /// S108 -pub(crate) fn hardcoded_tmp_directory(checker: &mut Checker, string: &ast::ExprStringLiteral) { +pub(crate) fn hardcoded_tmp_directory(checker: &mut Checker, string: StringLike) { + let value = match string { + StringLike::StringLiteral(ast::ExprStringLiteral { value, .. }) => value.to_str(), + StringLike::FStringLiteral(ast::FStringLiteralElement { value, .. 
}) => value, + StringLike::BytesLiteral(_) => return, + }; + if !checker .settings .flake8_bandit .hardcoded_tmp_directory .iter() - .any(|prefix| string.value.to_str().starts_with(prefix)) + .any(|prefix| value.starts_with(prefix)) { return; } @@ -76,8 +83,8 @@ pub(crate) fn hardcoded_tmp_directory(checker: &mut Checker, string: &ast::ExprS checker.diagnostics.push(Diagnostic::new( HardcodedTempFile { - string: string.value.to_string(), + string: value.to_string(), }, - string.range, + string.range(), )); } diff --git a/crates/ruff_linter/src/rules/flake8_bandit/snapshots/ruff_linter__rules__flake8_bandit__tests__S104_S104.py.snap b/crates/ruff_linter/src/rules/flake8_bandit/snapshots/ruff_linter__rules__flake8_bandit__tests__S104_S104.py.snap index 1927319794..b3b9ad07d3 100644 --- a/crates/ruff_linter/src/rules/flake8_bandit/snapshots/ruff_linter__rules__flake8_bandit__tests__S104_S104.py.snap +++ b/crates/ruff_linter/src/rules/flake8_bandit/snapshots/ruff_linter__rules__flake8_bandit__tests__S104_S104.py.snap @@ -7,6 +7,7 @@ S104.py:9:1: S104 Possible binding to all interfaces 9 | "0.0.0.0" | ^^^^^^^^^ S104 10 | '0.0.0.0' +11 | f"0.0.0.0" | S104.py:10:1: S104 Possible binding to all interfaces @@ -15,21 +16,30 @@ S104.py:10:1: S104 Possible binding to all interfaces 9 | "0.0.0.0" 10 | '0.0.0.0' | ^^^^^^^^^ S104 +11 | f"0.0.0.0" | -S104.py:14:6: S104 Possible binding to all interfaces +S104.py:11:3: S104 Possible binding to all interfaces | -13 | # Error -14 | func("0.0.0.0") + 9 | "0.0.0.0" +10 | '0.0.0.0' +11 | f"0.0.0.0" + | ^^^^^^^ S104 + | + +S104.py:15:6: S104 Possible binding to all interfaces + | +14 | # Error +15 | func("0.0.0.0") | ^^^^^^^^^ S104 | -S104.py:18:9: S104 Possible binding to all interfaces +S104.py:19:9: S104 Possible binding to all interfaces | -17 | def my_func(): -18 | x = "0.0.0.0" +18 | def my_func(): +19 | x = "0.0.0.0" | ^^^^^^^^^ S104 -19 | print(x) +20 | print(x) | diff --git a/crates/ruff_linter/src/rules/flake8_bandit/snapshots/ruff_linter__rules__flake8_bandit__tests__S108_S108.py.snap b/crates/ruff_linter/src/rules/flake8_bandit/snapshots/ruff_linter__rules__flake8_bandit__tests__S108_S108.py.snap index 9ecf1141d9..7336a5015a 100644 --- a/crates/ruff_linter/src/rules/flake8_bandit/snapshots/ruff_linter__rules__flake8_bandit__tests__S108_S108.py.snap +++ b/crates/ruff_linter/src/rules/flake8_bandit/snapshots/ruff_linter__rules__flake8_bandit__tests__S108_S108.py.snap @@ -10,22 +10,31 @@ S108.py:5:11: S108 Probable insecure usage of temporary file or directory: "/tmp 6 | f.write("def") | -S108.py:8:11: S108 Probable insecure usage of temporary file or directory: "/var/tmp/123" +S108.py:8:13: S108 Probable insecure usage of temporary file or directory: "/tmp/abc" | 6 | f.write("def") 7 | -8 | with open("/var/tmp/123", "w") as f: - | ^^^^^^^^^^^^^^ S108 +8 | with open(f"/tmp/abc", "w") as f: + | ^^^^^^^^ S108 9 | f.write("def") | -S108.py:11:11: S108 Probable insecure usage of temporary file or directory: "/dev/shm/unit/test" +S108.py:11:11: S108 Probable insecure usage of temporary file or directory: "/var/tmp/123" | 9 | f.write("def") 10 | -11 | with open("/dev/shm/unit/test", "w") as f: - | ^^^^^^^^^^^^^^^^^^^^ S108 +11 | with open("/var/tmp/123", "w") as f: + | ^^^^^^^^^^^^^^ S108 12 | f.write("def") | +S108.py:14:11: S108 Probable insecure usage of temporary file or directory: "/dev/shm/unit/test" + | +12 | f.write("def") +13 | +14 | with open("/dev/shm/unit/test", "w") as f: + | ^^^^^^^^^^^^^^^^^^^^ S108 +15 | f.write("def") + | + diff --git 
a/crates/ruff_linter/src/rules/flake8_bandit/snapshots/ruff_linter__rules__flake8_bandit__tests__S108_extend.snap b/crates/ruff_linter/src/rules/flake8_bandit/snapshots/ruff_linter__rules__flake8_bandit__tests__S108_extend.snap index 998bc90059..b562794a05 100644 --- a/crates/ruff_linter/src/rules/flake8_bandit/snapshots/ruff_linter__rules__flake8_bandit__tests__S108_extend.snap +++ b/crates/ruff_linter/src/rules/flake8_bandit/snapshots/ruff_linter__rules__flake8_bandit__tests__S108_extend.snap @@ -10,30 +10,39 @@ S108.py:5:11: S108 Probable insecure usage of temporary file or directory: "/tmp 6 | f.write("def") | -S108.py:8:11: S108 Probable insecure usage of temporary file or directory: "/var/tmp/123" +S108.py:8:13: S108 Probable insecure usage of temporary file or directory: "/tmp/abc" | 6 | f.write("def") 7 | -8 | with open("/var/tmp/123", "w") as f: - | ^^^^^^^^^^^^^^ S108 +8 | with open(f"/tmp/abc", "w") as f: + | ^^^^^^^^ S108 9 | f.write("def") | -S108.py:11:11: S108 Probable insecure usage of temporary file or directory: "/dev/shm/unit/test" +S108.py:11:11: S108 Probable insecure usage of temporary file or directory: "/var/tmp/123" | 9 | f.write("def") 10 | -11 | with open("/dev/shm/unit/test", "w") as f: - | ^^^^^^^^^^^^^^^^^^^^ S108 +11 | with open("/var/tmp/123", "w") as f: + | ^^^^^^^^^^^^^^ S108 12 | f.write("def") | -S108.py:15:11: S108 Probable insecure usage of temporary file or directory: "/foo/bar" +S108.py:14:11: S108 Probable insecure usage of temporary file or directory: "/dev/shm/unit/test" | -14 | # not ok by config -15 | with open("/foo/bar", "w") as f: +12 | f.write("def") +13 | +14 | with open("/dev/shm/unit/test", "w") as f: + | ^^^^^^^^^^^^^^^^^^^^ S108 +15 | f.write("def") + | + +S108.py:18:11: S108 Probable insecure usage of temporary file or directory: "/foo/bar" + | +17 | # not ok by config +18 | with open("/foo/bar", "w") as f: | ^^^^^^^^^^ S108 -16 | f.write("def") +19 | f.write("def") | diff --git a/crates/ruff_linter/src/rules/flake8_bugbear/rules/zip_without_explicit_strict.rs b/crates/ruff_linter/src/rules/flake8_bugbear/rules/zip_without_explicit_strict.rs index b92168b348..61b3fe246d 100644 --- a/crates/ruff_linter/src/rules/flake8_bugbear/rules/zip_without_explicit_strict.rs +++ b/crates/ruff_linter/src/rules/flake8_bugbear/rules/zip_without_explicit_strict.rs @@ -1,4 +1,4 @@ -use ruff_diagnostics::{Diagnostic, Violation}; +use ruff_diagnostics::{AlwaysFixableViolation, Applicability, Diagnostic, Fix}; use ruff_macros::{derive_message_formats, violation}; use ruff_python_ast::{self as ast, Arguments, Expr}; @@ -6,6 +6,7 @@ use ruff_python_semantic::SemanticModel; use ruff_text_size::Ranged; use crate::checkers::ast::Checker; +use crate::fix::edits::add_argument; /// ## What it does /// Checks for `zip` calls without an explicit `strict` parameter. @@ -28,16 +29,25 @@ use crate::checkers::ast::Checker; /// zip(a, b, strict=True) /// ``` /// +/// ## Fix safety +/// This rule's fix is marked as unsafe for `zip` calls that contain +/// `**kwargs`, as adding a `check` keyword argument to such a call may lead +/// to a duplicate keyword argument error. 
+/// /// ## References /// - [Python documentation: `zip`](https://docs.python.org/3/library/functions.html#zip) #[violation] pub struct ZipWithoutExplicitStrict; -impl Violation for ZipWithoutExplicitStrict { +impl AlwaysFixableViolation for ZipWithoutExplicitStrict { #[derive_message_formats] fn message(&self) -> String { format!("`zip()` without an explicit `strict=` parameter") } + + fn fix_title(&self) -> String { + "Add explicit `strict=False`".to_string() + } } /// B905 @@ -52,9 +62,27 @@ pub(crate) fn zip_without_explicit_strict(checker: &mut Checker, call: &ast::Exp .iter() .any(|arg| is_infinite_iterator(arg, checker.semantic())) { - checker - .diagnostics - .push(Diagnostic::new(ZipWithoutExplicitStrict, call.range())); + let mut diagnostic = Diagnostic::new(ZipWithoutExplicitStrict, call.range()); + diagnostic.set_fix(Fix::applicable_edit( + add_argument( + "strict=False", + &call.arguments, + checker.indexer().comment_ranges(), + checker.locator().contents(), + ), + // If the function call contains `**kwargs`, mark the fix as unsafe. + if call + .arguments + .keywords + .iter() + .any(|keyword| keyword.arg.is_none()) + { + Applicability::Unsafe + } else { + Applicability::Safe + }, + )); + checker.diagnostics.push(diagnostic); } } } diff --git a/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B905.py.snap b/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B905.py.snap index f4b41f2239..6e51121edc 100644 --- a/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B905.py.snap +++ b/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B905.py.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/flake8_bugbear/mod.rs --- -B905.py:4:1: B905 `zip()` without an explicit `strict=` parameter +B905.py:4:1: B905 [*] `zip()` without an explicit `strict=` parameter | 3 | # Errors 4 | zip() @@ -9,8 +9,19 @@ B905.py:4:1: B905 `zip()` without an explicit `strict=` parameter 5 | zip(range(3)) 6 | zip("a", "b") | + = help: Add explicit `strict=False` -B905.py:5:1: B905 `zip()` without an explicit `strict=` parameter +ℹ Safe fix +1 1 | from itertools import count, cycle, repeat +2 2 | +3 3 | # Errors +4 |-zip() + 4 |+zip(strict=False) +5 5 | zip(range(3)) +6 6 | zip("a", "b") +7 7 | zip("a", "b", *zip("c")) + +B905.py:5:1: B905 [*] `zip()` without an explicit `strict=` parameter | 3 | # Errors 4 | zip() @@ -19,8 +30,19 @@ B905.py:5:1: B905 `zip()` without an explicit `strict=` parameter 6 | zip("a", "b") 7 | zip("a", "b", *zip("c")) | + = help: Add explicit `strict=False` -B905.py:6:1: B905 `zip()` without an explicit `strict=` parameter +ℹ Safe fix +2 2 | +3 3 | # Errors +4 4 | zip() +5 |-zip(range(3)) + 5 |+zip(range(3), strict=False) +6 6 | zip("a", "b") +7 7 | zip("a", "b", *zip("c")) +8 8 | zip(zip("a"), strict=False) + +B905.py:6:1: B905 [*] `zip()` without an explicit `strict=` parameter | 4 | zip() 5 | zip(range(3)) @@ -29,8 +51,19 @@ B905.py:6:1: B905 `zip()` without an explicit `strict=` parameter 7 | zip("a", "b", *zip("c")) 8 | zip(zip("a"), strict=False) | + = help: Add explicit `strict=False` -B905.py:7:1: B905 `zip()` without an explicit `strict=` parameter +ℹ Safe fix +3 3 | # Errors +4 4 | zip() +5 5 | zip(range(3)) +6 |-zip("a", "b") + 6 |+zip("a", "b", strict=False) +7 7 | zip("a", "b", *zip("c")) +8 8 | zip(zip("a"), strict=False) +9 9 | zip(zip("a", strict=True)) + +B905.py:7:1: B905 
[*] `zip()` without an explicit `strict=` parameter | 5 | zip(range(3)) 6 | zip("a", "b") @@ -39,8 +72,19 @@ B905.py:7:1: B905 `zip()` without an explicit `strict=` parameter 8 | zip(zip("a"), strict=False) 9 | zip(zip("a", strict=True)) | + = help: Add explicit `strict=False` -B905.py:7:16: B905 `zip()` without an explicit `strict=` parameter +ℹ Safe fix +4 4 | zip() +5 5 | zip(range(3)) +6 6 | zip("a", "b") +7 |-zip("a", "b", *zip("c")) + 7 |+zip("a", "b", *zip("c"), strict=False) +8 8 | zip(zip("a"), strict=False) +9 9 | zip(zip("a", strict=True)) +10 10 | + +B905.py:7:16: B905 [*] `zip()` without an explicit `strict=` parameter | 5 | zip(range(3)) 6 | zip("a", "b") @@ -49,8 +93,19 @@ B905.py:7:16: B905 `zip()` without an explicit `strict=` parameter 8 | zip(zip("a"), strict=False) 9 | zip(zip("a", strict=True)) | + = help: Add explicit `strict=False` -B905.py:8:5: B905 `zip()` without an explicit `strict=` parameter +ℹ Safe fix +4 4 | zip() +5 5 | zip(range(3)) +6 6 | zip("a", "b") +7 |-zip("a", "b", *zip("c")) + 7 |+zip("a", "b", *zip("c", strict=False)) +8 8 | zip(zip("a"), strict=False) +9 9 | zip(zip("a", strict=True)) +10 10 | + +B905.py:8:5: B905 [*] `zip()` without an explicit `strict=` parameter | 6 | zip("a", "b") 7 | zip("a", "b", *zip("c")) @@ -58,8 +113,19 @@ B905.py:8:5: B905 `zip()` without an explicit `strict=` parameter | ^^^^^^^^ B905 9 | zip(zip("a", strict=True)) | + = help: Add explicit `strict=False` -B905.py:9:1: B905 `zip()` without an explicit `strict=` parameter +ℹ Safe fix +5 5 | zip(range(3)) +6 6 | zip("a", "b") +7 7 | zip("a", "b", *zip("c")) +8 |-zip(zip("a"), strict=False) + 8 |+zip(zip("a", strict=False), strict=False) +9 9 | zip(zip("a", strict=True)) +10 10 | +11 11 | # OK + +B905.py:9:1: B905 [*] `zip()` without an explicit `strict=` parameter | 7 | zip("a", "b", *zip("c")) 8 | zip(zip("a"), strict=False) @@ -68,21 +134,49 @@ B905.py:9:1: B905 `zip()` without an explicit `strict=` parameter 10 | 11 | # OK | + = help: Add explicit `strict=False` -B905.py:24:1: B905 `zip()` without an explicit `strict=` parameter +ℹ Safe fix +6 6 | zip("a", "b") +7 7 | zip("a", "b", *zip("c")) +8 8 | zip(zip("a"), strict=False) +9 |-zip(zip("a", strict=True)) + 9 |+zip(zip("a", strict=True), strict=False) +10 10 | +11 11 | # OK +12 12 | zip(range(3), strict=True) + +B905.py:24:1: B905 [*] `zip()` without an explicit `strict=` parameter | 23 | # Errors (limited iterators). 24 | zip([1, 2, 3], repeat(1, 1)) | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B905 25 | zip([1, 2, 3], repeat(1, times=4)) | + = help: Add explicit `strict=False` -B905.py:25:1: B905 `zip()` without an explicit `strict=` parameter +ℹ Safe fix +21 21 | zip([1, 2, 3], repeat(1, times=None)) +22 22 | +23 23 | # Errors (limited iterators). +24 |-zip([1, 2, 3], repeat(1, 1)) + 24 |+zip([1, 2, 3], repeat(1, 1), strict=False) +25 25 | zip([1, 2, 3], repeat(1, times=4)) + +B905.py:25:1: B905 [*] `zip()` without an explicit `strict=` parameter | 23 | # Errors (limited iterators). 24 | zip([1, 2, 3], repeat(1, 1)) 25 | zip([1, 2, 3], repeat(1, times=4)) | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ B905 | + = help: Add explicit `strict=False` + +ℹ Safe fix +22 22 | +23 23 | # Errors (limited iterators). 
+24 24 | zip([1, 2, 3], repeat(1, 1)) +25 |-zip([1, 2, 3], repeat(1, times=4)) + 25 |+zip([1, 2, 3], repeat(1, times=4), strict=False) diff --git a/crates/ruff_linter/src/rules/flake8_comprehensions/fixes.rs b/crates/ruff_linter/src/rules/flake8_comprehensions/fixes.rs index 7772b430dc..6c08ddf4a3 100644 --- a/crates/ruff_linter/src/rules/flake8_comprehensions/fixes.rs +++ b/crates/ruff_linter/src/rules/flake8_comprehensions/fixes.rs @@ -1083,7 +1083,7 @@ pub(crate) fn fix_unnecessary_map( // If the expression is embedded in an f-string, surround it with spaces to avoid // syntax errors. if matches!(object_type, ObjectType::Set | ObjectType::Dict) { - if parent.is_some_and(Expr::is_formatted_value_expr) { + if parent.is_some_and(Expr::is_f_string_expr) { content = format!(" {content} "); } } diff --git a/crates/ruff_linter/src/rules/flake8_django/rules/all_with_model_form.rs b/crates/ruff_linter/src/rules/flake8_django/rules/all_with_model_form.rs index 8e97c68c2c..8083575e2a 100644 --- a/crates/ruff_linter/src/rules/flake8_django/rules/all_with_model_form.rs +++ b/crates/ruff_linter/src/rules/flake8_django/rules/all_with_model_form.rs @@ -1,7 +1,6 @@ -use ruff_python_ast::{self as ast, Arguments, Expr, Stmt}; - use ruff_diagnostics::{Diagnostic, Violation}; use ruff_macros::{derive_message_formats, violation}; +use ruff_python_ast::{self as ast, Expr, Stmt}; use ruff_text_size::Ranged; use crate::checkers::ast::Checker; @@ -48,21 +47,12 @@ impl Violation for DjangoAllWithModelForm { } /// DJ007 -pub(crate) fn all_with_model_form( - checker: &Checker, - arguments: Option<&Arguments>, - body: &[Stmt], -) -> Option { - if !arguments.is_some_and(|arguments| { - arguments - .args - .iter() - .any(|base| is_model_form(base, checker.semantic())) - }) { - return None; +pub(crate) fn all_with_model_form(checker: &mut Checker, class_def: &ast::StmtClassDef) { + if !is_model_form(class_def, checker.semantic()) { + return; } - for element in body { + for element in &class_def.body { let Stmt::ClassDef(ast::StmtClassDef { name, body, .. }) = element else { continue; }; @@ -83,12 +73,18 @@ pub(crate) fn all_with_model_form( match value.as_ref() { Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => { if value == "__all__" { - return Some(Diagnostic::new(DjangoAllWithModelForm, element.range())); + checker + .diagnostics + .push(Diagnostic::new(DjangoAllWithModelForm, element.range())); + return; } } Expr::BytesLiteral(ast::ExprBytesLiteral { value, .. 
}) => { if value == "__all__".as_bytes() { - return Some(Diagnostic::new(DjangoAllWithModelForm, element.range())); + checker + .diagnostics + .push(Diagnostic::new(DjangoAllWithModelForm, element.range())); + return; } } _ => (), @@ -96,5 +92,4 @@ pub(crate) fn all_with_model_form( } } } - None } diff --git a/crates/ruff_linter/src/rules/flake8_django/rules/exclude_with_model_form.rs b/crates/ruff_linter/src/rules/flake8_django/rules/exclude_with_model_form.rs index 41661892bb..d1211c5662 100644 --- a/crates/ruff_linter/src/rules/flake8_django/rules/exclude_with_model_form.rs +++ b/crates/ruff_linter/src/rules/flake8_django/rules/exclude_with_model_form.rs @@ -1,7 +1,6 @@ -use ruff_python_ast::{self as ast, Arguments, Expr, Stmt}; - use ruff_diagnostics::{Diagnostic, Violation}; use ruff_macros::{derive_message_formats, violation}; +use ruff_python_ast::{self as ast, Expr, Stmt}; use ruff_text_size::Ranged; use crate::checkers::ast::Checker; @@ -46,21 +45,12 @@ impl Violation for DjangoExcludeWithModelForm { } /// DJ006 -pub(crate) fn exclude_with_model_form( - checker: &Checker, - arguments: Option<&Arguments>, - body: &[Stmt], -) -> Option { - if !arguments.is_some_and(|arguments| { - arguments - .args - .iter() - .any(|base| is_model_form(base, checker.semantic())) - }) { - return None; +pub(crate) fn exclude_with_model_form(checker: &mut Checker, class_def: &ast::StmtClassDef) { + if !is_model_form(class_def, checker.semantic()) { + return; } - for element in body { + for element in &class_def.body { let Stmt::ClassDef(ast::StmtClassDef { name, body, .. }) = element else { continue; }; @@ -76,10 +66,12 @@ pub(crate) fn exclude_with_model_form( continue; }; if id == "exclude" { - return Some(Diagnostic::new(DjangoExcludeWithModelForm, target.range())); + checker + .diagnostics + .push(Diagnostic::new(DjangoExcludeWithModelForm, target.range())); + return; } } } } - None } diff --git a/crates/ruff_linter/src/rules/flake8_django/rules/helpers.rs b/crates/ruff_linter/src/rules/flake8_django/rules/helpers.rs index c857bec150..0318de1839 100644 --- a/crates/ruff_linter/src/rules/flake8_django/rules/helpers.rs +++ b/crates/ruff_linter/src/rules/flake8_django/rules/helpers.rs @@ -1,17 +1,17 @@ -use ruff_python_ast::Expr; +use ruff_python_ast::{self as ast, Expr}; -use ruff_python_semantic::SemanticModel; +use ruff_python_semantic::{analyze, SemanticModel}; /// Return `true` if a Python class appears to be a Django model, based on its base classes. -pub(super) fn is_model(base: &Expr, semantic: &SemanticModel) -> bool { - semantic.resolve_call_path(base).is_some_and(|call_path| { +pub(super) fn is_model(class_def: &ast::StmtClassDef, semantic: &SemanticModel) -> bool { + analyze::class::any_over_body(class_def, semantic, &|call_path| { matches!(call_path.as_slice(), ["django", "db", "models", "Model"]) }) } /// Return `true` if a Python class appears to be a Django model form, based on its base classes. 
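For orientation on the DJ006/DJ007 refactors above, a minimal Python sketch of the patterns those rules flag; the class names are invented and the snippet assumes Django is installed. If the reworked helper also resolves inherited bases (which the switch to a semantic-model-based class analysis suggests, though that is an inference), a subclass of an intermediate form class would be treated the same way.

from django import forms


class ArticleForm(forms.ModelForm):
    class Meta:
        fields = "__all__"    # DJ007: prefer an explicit list of fields
        # exclude = ["body"]  # DJ006: the same advice applies to `exclude`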
-pub(super) fn is_model_form(base: &Expr, semantic: &SemanticModel) -> bool { - semantic.resolve_call_path(base).is_some_and(|call_path| { +pub(super) fn is_model_form(class_def: &ast::StmtClassDef, semantic: &SemanticModel) -> bool { + analyze::class::any_over_body(class_def, semantic, &|call_path| { matches!( call_path.as_slice(), ["django", "forms", "ModelForm"] | ["django", "forms", "models", "ModelForm"] diff --git a/crates/ruff_linter/src/rules/flake8_django/rules/model_without_dunder_str.rs b/crates/ruff_linter/src/rules/flake8_django/rules/model_without_dunder_str.rs index 9228d04753..0baaeafb34 100644 --- a/crates/ruff_linter/src/rules/flake8_django/rules/model_without_dunder_str.rs +++ b/crates/ruff_linter/src/rules/flake8_django/rules/model_without_dunder_str.rs @@ -1,10 +1,9 @@ -use ruff_python_ast::{self as ast, Arguments, Expr, Stmt}; - use ruff_diagnostics::{Diagnostic, Violation}; use ruff_macros::{derive_message_formats, violation}; use ruff_python_ast::helpers::is_const_true; +use ruff_python_ast::identifier::Identifier; +use ruff_python_ast::{self as ast, Expr, Stmt}; use ruff_python_semantic::SemanticModel; -use ruff_text_size::Ranged; use crate::checkers::ast::Checker; @@ -52,57 +51,39 @@ impl Violation for DjangoModelWithoutDunderStr { } /// DJ008 -pub(crate) fn model_without_dunder_str( - checker: &mut Checker, - ast::StmtClassDef { - name, - arguments, - body, - .. - }: &ast::StmtClassDef, -) { - if !is_non_abstract_model(arguments.as_deref(), body, checker.semantic()) { +pub(crate) fn model_without_dunder_str(checker: &mut Checker, class_def: &ast::StmtClassDef) { + if !is_non_abstract_model(class_def, checker.semantic()) { return; } - if has_dunder_method(body) { + if has_dunder_method(class_def) { return; } - checker - .diagnostics - .push(Diagnostic::new(DjangoModelWithoutDunderStr, name.range())); + checker.diagnostics.push(Diagnostic::new( + DjangoModelWithoutDunderStr, + class_def.identifier(), + )); } -fn has_dunder_method(body: &[Stmt]) -> bool { - body.iter().any(|val| match val { - Stmt::FunctionDef(ast::StmtFunctionDef { name, .. }) => { - if name == "__str__" { - return true; - } - false - } +/// Returns `true` if the class has `__str__` method. +fn has_dunder_method(class_def: &ast::StmtClassDef) -> bool { + class_def.body.iter().any(|val| match val { + Stmt::FunctionDef(ast::StmtFunctionDef { name, .. }) => name == "__str__", _ => false, }) } -fn is_non_abstract_model( - arguments: Option<&Arguments>, - body: &[Stmt], - semantic: &SemanticModel, -) -> bool { - let Some(Arguments { args: bases, .. }) = arguments else { - return false; - }; - - if is_model_abstract(body) { - return false; +/// Returns `true` if the class is a non-abstract Django model. +fn is_non_abstract_model(class_def: &ast::StmtClassDef, semantic: &SemanticModel) -> bool { + if class_def.bases().is_empty() || is_model_abstract(class_def) { + false + } else { + helpers::is_model(class_def, semantic) } - - bases.iter().any(|base| helpers::is_model(base, semantic)) } /// Check if class is abstract, in terms of Django model inheritance. -fn is_model_abstract(body: &[Stmt]) -> bool { - for element in body { +fn is_model_abstract(class_def: &ast::StmtClassDef) -> bool { + for element in &class_def.body { let Stmt::ClassDef(ast::StmtClassDef { name, body, .. 
}) = element else { continue; }; diff --git a/crates/ruff_linter/src/rules/flake8_django/rules/unordered_body_content_in_model.rs b/crates/ruff_linter/src/rules/flake8_django/rules/unordered_body_content_in_model.rs index 01a63d4e34..635527dcaf 100644 --- a/crates/ruff_linter/src/rules/flake8_django/rules/unordered_body_content_in_model.rs +++ b/crates/ruff_linter/src/rules/flake8_django/rules/unordered_body_content_in_model.rs @@ -1,9 +1,8 @@ use std::fmt; -use ruff_python_ast::{self as ast, Arguments, Expr, Stmt}; - use ruff_diagnostics::{Diagnostic, Violation}; use ruff_macros::{derive_message_formats, violation}; +use ruff_python_ast::{self as ast, Expr, Stmt}; use ruff_python_semantic::SemanticModel; use ruff_text_size::Ranged; @@ -79,6 +78,50 @@ impl Violation for DjangoUnorderedBodyContentInModel { } } +/// DJ012 +pub(crate) fn unordered_body_content_in_model( + checker: &mut Checker, + class_def: &ast::StmtClassDef, +) { + if !helpers::is_model(class_def, checker.semantic()) { + return; + } + + // Track all the element types we've seen so far. + let mut element_types = Vec::new(); + let mut prev_element_type = None; + for element in &class_def.body { + let Some(element_type) = get_element_type(element, checker.semantic()) else { + continue; + }; + + // Skip consecutive elements of the same type. It's less noisy to only report + // violations at type boundaries (e.g., avoid raising a violation for _every_ + // field declaration that's out of order). + if prev_element_type == Some(element_type) { + continue; + } + + prev_element_type = Some(element_type); + + if let Some(&prev_element_type) = element_types + .iter() + .find(|&&prev_element_type| prev_element_type > element_type) + { + let diagnostic = Diagnostic::new( + DjangoUnorderedBodyContentInModel { + element_type, + prev_element_type, + }, + element.range(), + ); + checker.diagnostics.push(diagnostic); + } else { + element_types.push(element_type); + } + } +} + #[derive(Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq)] enum ContentType { FieldDeclaration, @@ -140,53 +183,3 @@ fn get_element_type(element: &Stmt, semantic: &SemanticModel) -> Option None, } } - -/// DJ012 -pub(crate) fn unordered_body_content_in_model( - checker: &mut Checker, - arguments: Option<&Arguments>, - body: &[Stmt], -) { - if !arguments.is_some_and(|arguments| { - arguments - .args - .iter() - .any(|base| helpers::is_model(base, checker.semantic())) - }) { - return; - } - - // Track all the element types we've seen so far. - let mut element_types = Vec::new(); - let mut prev_element_type = None; - for element in body { - let Some(element_type) = get_element_type(element, checker.semantic()) else { - continue; - }; - - // Skip consecutive elements of the same type. It's less noisy to only report - // violations at type boundaries (e.g., avoid raising a violation for _every_ - // field declaration that's out of order). 
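The DJ008 and DJ012 logic above can be summarized with one illustrative, hypothetical model. The snippet is meant to be read (or linted) rather than executed outside a configured Django project.

from django.db import models


class Post(models.Model):
    class Meta:
        ordering = ["created"]

    def get_absolute_url(self):
        return f"/posts/{self.pk}/"

    # DJ012: a field declaration placed after the `Meta` class and methods is
    # reported as out of order.
    created = models.DateTimeField(auto_now_add=True)

    # DJ008 also applies here: the model is non-abstract and defines no __str__.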
- if prev_element_type == Some(element_type) { - continue; - } - - prev_element_type = Some(element_type); - - if let Some(&prev_element_type) = element_types - .iter() - .find(|&&prev_element_type| prev_element_type > element_type) - { - let diagnostic = Diagnostic::new( - DjangoUnorderedBodyContentInModel { - element_type, - prev_element_type, - }, - element.range(), - ); - checker.diagnostics.push(diagnostic); - } else { - element_types.push(element_type); - } - } -} diff --git a/crates/ruff_linter/src/rules/flake8_django/snapshots/ruff_linter__rules__flake8_django__tests__DJ012_DJ012.py.snap b/crates/ruff_linter/src/rules/flake8_django/snapshots/ruff_linter__rules__flake8_django__tests__DJ012_DJ012.py.snap index 5f15655232..79e5d7e395 100644 --- a/crates/ruff_linter/src/rules/flake8_django/snapshots/ruff_linter__rules__flake8_django__tests__DJ012_DJ012.py.snap +++ b/crates/ruff_linter/src/rules/flake8_django/snapshots/ruff_linter__rules__flake8_django__tests__DJ012_DJ012.py.snap @@ -54,4 +54,12 @@ DJ012.py:129:5: DJ012 Order of model's inner classes, methods, and fields does n | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ DJ012 | +DJ012.py:146:5: DJ012 Order of model's inner classes, methods, and fields does not follow the Django Style Guide: field declaration should come before `Meta` class + | +144 | return "foobar" +145 | +146 | first_name = models.CharField(max_length=32) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ DJ012 + | + diff --git a/crates/ruff_linter/src/rules/flake8_errmsg/rules/string_in_exception.rs b/crates/ruff_linter/src/rules/flake8_errmsg/rules/string_in_exception.rs index 173e600789..a7298ea73c 100644 --- a/crates/ruff_linter/src/rules/flake8_errmsg/rules/string_in_exception.rs +++ b/crates/ruff_linter/src/rules/flake8_errmsg/rules/string_in_exception.rs @@ -191,15 +191,13 @@ pub(crate) fn string_in_exception(checker: &mut Checker, stmt: &Stmt, exc: &Expr if let Some(indentation) = whitespace::indentation(checker.locator(), stmt) { - if checker.semantic().is_available("msg") { - diagnostic.set_fix(generate_fix( - stmt, - first, - indentation, - checker.stylist(), - checker.locator(), - )); - } + diagnostic.set_fix(generate_fix( + stmt, + first, + indentation, + checker.stylist(), + checker.locator(), + )); } checker.diagnostics.push(diagnostic); } @@ -211,15 +209,13 @@ pub(crate) fn string_in_exception(checker: &mut Checker, stmt: &Stmt, exc: &Expr let mut diagnostic = Diagnostic::new(FStringInException, first.range()); if let Some(indentation) = whitespace::indentation(checker.locator(), stmt) { - if checker.semantic().is_available("msg") { - diagnostic.set_fix(generate_fix( - stmt, - first, - indentation, - checker.stylist(), - checker.locator(), - )); - } + diagnostic.set_fix(generate_fix( + stmt, + first, + indentation, + checker.stylist(), + checker.locator(), + )); } checker.diagnostics.push(diagnostic); } @@ -236,15 +232,13 @@ pub(crate) fn string_in_exception(checker: &mut Checker, stmt: &Stmt, exc: &Expr if let Some(indentation) = whitespace::indentation(checker.locator(), stmt) { - if checker.semantic().is_available("msg") { - diagnostic.set_fix(generate_fix( - stmt, - first, - indentation, - checker.stylist(), - checker.locator(), - )); - } + diagnostic.set_fix(generate_fix( + stmt, + first, + indentation, + checker.stylist(), + checker.locator(), + )); } checker.diagnostics.push(diagnostic); } diff --git a/crates/ruff_linter/src/rules/flake8_errmsg/snapshots/ruff_linter__rules__flake8_errmsg__tests__custom.snap 
b/crates/ruff_linter/src/rules/flake8_errmsg/snapshots/ruff_linter__rules__flake8_errmsg__tests__custom.snap index 92912b35e2..d29e987d49 100644 --- a/crates/ruff_linter/src/rules/flake8_errmsg/snapshots/ruff_linter__rules__flake8_errmsg__tests__custom.snap +++ b/crates/ruff_linter/src/rules/flake8_errmsg/snapshots/ruff_linter__rules__flake8_errmsg__tests__custom.snap @@ -59,15 +59,26 @@ EM.py:22:24: EM103 [*] Exception must not use a `.format()` string directly, ass 24 25 | 25 26 | def f_ok(): -EM.py:32:24: EM101 Exception must not use a string literal, assign to variable first +EM.py:32:24: EM101 [*] Exception must not use a string literal, assign to variable first | -30 | def f_unfixable(): +30 | def f_msg_defined(): 31 | msg = "hello" 32 | raise RuntimeError("This is an example exception") | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ EM101 | = help: Assign to variable; remove string literal +ℹ Unsafe fix +29 29 | +30 30 | def f_msg_defined(): +31 31 | msg = "hello" +32 |- raise RuntimeError("This is an example exception") + 32 |+ msg = "This is an example exception" + 33 |+ raise RuntimeError(msg) +33 34 | +34 35 | +35 36 | def f_msg_in_nested_scope(): + EM.py:39:24: EM101 [*] Exception must not use a string literal, assign to variable first | 37 | msg = "hello" @@ -88,7 +99,7 @@ EM.py:39:24: EM101 [*] Exception must not use a string literal, assign to variab 41 42 | 42 43 | def f_msg_in_parent_scope(): -EM.py:46:28: EM101 Exception must not use a string literal, assign to variable first +EM.py:46:28: EM101 [*] Exception must not use a string literal, assign to variable first | 45 | def nested(): 46 | raise RuntimeError("This is an example exception") @@ -96,6 +107,17 @@ EM.py:46:28: EM101 Exception must not use a string literal, assign to variable f | = help: Assign to variable; remove string literal +ℹ Unsafe fix +43 43 | msg = "hello" +44 44 | +45 45 | def nested(): +46 |- raise RuntimeError("This is an example exception") + 46 |+ msg = "This is an example exception" + 47 |+ raise RuntimeError(msg) +47 48 | +48 49 | +49 50 | def f_fix_indentation_check(foo): + EM.py:51:28: EM101 [*] Exception must not use a string literal, assign to variable first | 49 | def f_fix_indentation_check(foo): diff --git a/crates/ruff_linter/src/rules/flake8_errmsg/snapshots/ruff_linter__rules__flake8_errmsg__tests__defaults.snap b/crates/ruff_linter/src/rules/flake8_errmsg/snapshots/ruff_linter__rules__flake8_errmsg__tests__defaults.snap index a50458f892..593d6b30dc 100644 --- a/crates/ruff_linter/src/rules/flake8_errmsg/snapshots/ruff_linter__rules__flake8_errmsg__tests__defaults.snap +++ b/crates/ruff_linter/src/rules/flake8_errmsg/snapshots/ruff_linter__rules__flake8_errmsg__tests__defaults.snap @@ -97,15 +97,26 @@ EM.py:22:24: EM103 [*] Exception must not use a `.format()` string directly, ass 24 25 | 25 26 | def f_ok(): -EM.py:32:24: EM101 Exception must not use a string literal, assign to variable first +EM.py:32:24: EM101 [*] Exception must not use a string literal, assign to variable first | -30 | def f_unfixable(): +30 | def f_msg_defined(): 31 | msg = "hello" 32 | raise RuntimeError("This is an example exception") | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ EM101 | = help: Assign to variable; remove string literal +ℹ Unsafe fix +29 29 | +30 30 | def f_msg_defined(): +31 31 | msg = "hello" +32 |- raise RuntimeError("This is an example exception") + 32 |+ msg = "This is an example exception" + 33 |+ raise RuntimeError(msg) +33 34 | +34 35 | +35 36 | def f_msg_in_nested_scope(): + EM.py:39:24: EM101 [*] Exception must 
not use a string literal, assign to variable first | 37 | msg = "hello" @@ -126,7 +137,7 @@ EM.py:39:24: EM101 [*] Exception must not use a string literal, assign to variab 41 42 | 42 43 | def f_msg_in_parent_scope(): -EM.py:46:28: EM101 Exception must not use a string literal, assign to variable first +EM.py:46:28: EM101 [*] Exception must not use a string literal, assign to variable first | 45 | def nested(): 46 | raise RuntimeError("This is an example exception") @@ -134,6 +145,17 @@ EM.py:46:28: EM101 Exception must not use a string literal, assign to variable f | = help: Assign to variable; remove string literal +ℹ Unsafe fix +43 43 | msg = "hello" +44 44 | +45 45 | def nested(): +46 |- raise RuntimeError("This is an example exception") + 46 |+ msg = "This is an example exception" + 47 |+ raise RuntimeError(msg) +47 48 | +48 49 | +49 50 | def f_fix_indentation_check(foo): + EM.py:51:28: EM101 [*] Exception must not use a string literal, assign to variable first | 49 | def f_fix_indentation_check(foo): diff --git a/crates/ruff_linter/src/rules/flake8_pie/rules/unnecessary_dict_kwargs.rs b/crates/ruff_linter/src/rules/flake8_pie/rules/unnecessary_dict_kwargs.rs index c4625444f7..5f0bf0abb4 100644 --- a/crates/ruff_linter/src/rules/flake8_pie/rules/unnecessary_dict_kwargs.rs +++ b/crates/ruff_linter/src/rules/flake8_pie/rules/unnecessary_dict_kwargs.rs @@ -1,13 +1,16 @@ -use itertools::Itertools; -use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix}; -use ruff_python_ast::{self as ast, Expr, Keyword}; +use std::hash::BuildHasherDefault; +use itertools::Itertools; +use rustc_hash::FxHashSet; + +use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation}; use ruff_macros::{derive_message_formats, violation}; +use ruff_python_ast::{self as ast, Expr}; +use ruff_python_stdlib::identifiers::is_identifier; use ruff_text_size::Ranged; -use ruff_python_stdlib::identifiers::is_identifier; - use crate::checkers::ast::Checker; +use crate::fix::edits::{remove_argument, Parentheses}; /// ## What it does /// Checks for unnecessary `dict` kwargs. @@ -40,36 +43,39 @@ use crate::checkers::ast::Checker; #[violation] pub struct UnnecessaryDictKwargs; -impl AlwaysFixableViolation for UnnecessaryDictKwargs { +impl Violation for UnnecessaryDictKwargs { + const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes; + #[derive_message_formats] fn message(&self) -> String { format!("Unnecessary `dict` kwargs") } - fn fix_title(&self) -> String { - format!("Remove unnecessary kwargs") + fn fix_title(&self) -> Option { + Some(format!("Remove unnecessary kwargs")) } } /// PIE804 -pub(crate) fn unnecessary_dict_kwargs(checker: &mut Checker, expr: &Expr, kwargs: &[Keyword]) { - for kw in kwargs { - // keyword is a spread operator (indicated by None) - if kw.arg.is_some() { +pub(crate) fn unnecessary_dict_kwargs(checker: &mut Checker, call: &ast::ExprCall) { + let mut duplicate_keywords = None; + for keyword in &call.arguments.keywords { + // keyword is a spread operator (indicated by None). + if keyword.arg.is_some() { continue; } - let Expr::Dict(ast::ExprDict { keys, values, .. }) = &kw.value else { + let Expr::Dict(ast::ExprDict { keys, values, .. 
}) = &keyword.value else { continue; }; // Ex) `foo(**{**bar})` if matches!(keys.as_slice(), [None]) { - let mut diagnostic = Diagnostic::new(UnnecessaryDictKwargs, expr.range()); + let mut diagnostic = Diagnostic::new(UnnecessaryDictKwargs, keyword.range()); diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( format!("**{}", checker.locator().slice(values[0].range())), - kw.range(), + keyword.range(), ))); checker.diagnostics.push(diagnostic); @@ -86,27 +92,77 @@ pub(crate) fn unnecessary_dict_kwargs(checker: &mut Checker, expr: &Expr, kwargs continue; } - let mut diagnostic = Diagnostic::new(UnnecessaryDictKwargs, expr.range()); + let mut diagnostic = Diagnostic::new(UnnecessaryDictKwargs, keyword.range()); if values.is_empty() { - diagnostic.set_fix(Fix::safe_edit(Edit::deletion(kw.start(), kw.end()))); + diagnostic.try_set_fix(|| { + remove_argument( + keyword, + &call.arguments, + Parentheses::Preserve, + checker.locator().contents(), + ) + .map(Fix::safe_edit) + }); } else { - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( - kwargs + // Compute the set of duplicate keywords (lazily). + if duplicate_keywords.is_none() { + duplicate_keywords = Some(duplicates(call)); + } + + // Avoid fixing if doing so could introduce a duplicate keyword argument. + if let Some(duplicate_keywords) = duplicate_keywords.as_ref() { + if kwargs .iter() - .zip(values.iter()) - .map(|(kwarg, value)| { - format!("{}={}", kwarg, checker.locator().slice(value.range())) - }) - .join(", "), - kw.range(), - ))); + .all(|kwarg| !duplicate_keywords.contains(kwarg)) + { + diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( + kwargs + .iter() + .zip(values.iter()) + .map(|(kwarg, value)| { + format!("{}={}", kwarg, checker.locator().slice(value.range())) + }) + .join(", "), + keyword.range(), + ))); + } + } } checker.diagnostics.push(diagnostic); } } +/// Determine the set of keywords that appear in multiple positions (either directly, as in +/// `func(x=1)`, or indirectly, as in `func(**{"x": 1})`). +fn duplicates(call: &ast::ExprCall) -> FxHashSet<&str> { + let mut seen = FxHashSet::with_capacity_and_hasher( + call.arguments.keywords.len(), + BuildHasherDefault::default(), + ); + let mut duplicates = FxHashSet::with_capacity_and_hasher( + call.arguments.keywords.len(), + BuildHasherDefault::default(), + ); + for keyword in &call.arguments.keywords { + if let Some(name) = &keyword.arg { + if !seen.insert(name.as_str()) { + duplicates.insert(name.as_str()); + } + } else if let Expr::Dict(ast::ExprDict { keys, .. }) = &keyword.value { + for key in keys { + if let Some(name) = key.as_ref().and_then(as_kwarg) { + if !seen.insert(name) { + duplicates.insert(name); + } + } + } + } + } + duplicates +} + /// Return `Some` if a key is a valid keyword argument name, or `None` otherwise. fn as_kwarg(key: &Expr) -> Option<&str> { if let Expr::StringLiteral(ast::ExprStringLiteral { value, .. 
}) = key { diff --git a/crates/ruff_linter/src/rules/flake8_pie/snapshots/ruff_linter__rules__flake8_pie__tests__PIE804_PIE804.py.snap b/crates/ruff_linter/src/rules/flake8_pie/snapshots/ruff_linter__rules__flake8_pie__tests__PIE804_PIE804.py.snap index 450ed048e6..15d993a0be 100644 --- a/crates/ruff_linter/src/rules/flake8_pie/snapshots/ruff_linter__rules__flake8_pie__tests__PIE804_PIE804.py.snap +++ b/crates/ruff_linter/src/rules/flake8_pie/snapshots/ruff_linter__rules__flake8_pie__tests__PIE804_PIE804.py.snap @@ -1,10 +1,10 @@ --- source: crates/ruff_linter/src/rules/flake8_pie/mod.rs --- -PIE804.py:1:1: PIE804 [*] Unnecessary `dict` kwargs +PIE804.py:1:5: PIE804 [*] Unnecessary `dict` kwargs | 1 | foo(**{"bar": True}) # PIE804 - | ^^^^^^^^^^^^^^^^^^^^ PIE804 + | ^^^^^^^^^^^^^^^ PIE804 2 | 3 | foo(**{"r2d2": True}) # PIE804 | @@ -17,12 +17,12 @@ PIE804.py:1:1: PIE804 [*] Unnecessary `dict` kwargs 3 3 | foo(**{"r2d2": True}) # PIE804 4 4 | -PIE804.py:3:1: PIE804 [*] Unnecessary `dict` kwargs +PIE804.py:3:5: PIE804 [*] Unnecessary `dict` kwargs | 1 | foo(**{"bar": True}) # PIE804 2 | 3 | foo(**{"r2d2": True}) # PIE804 - | ^^^^^^^^^^^^^^^^^^^^^ PIE804 + | ^^^^^^^^^^^^^^^^ PIE804 4 | 5 | Foo.objects.create(**{"bar": True}) # PIE804 | @@ -37,12 +37,12 @@ PIE804.py:3:1: PIE804 [*] Unnecessary `dict` kwargs 5 5 | Foo.objects.create(**{"bar": True}) # PIE804 6 6 | -PIE804.py:5:1: PIE804 [*] Unnecessary `dict` kwargs +PIE804.py:5:20: PIE804 [*] Unnecessary `dict` kwargs | 3 | foo(**{"r2d2": True}) # PIE804 4 | 5 | Foo.objects.create(**{"bar": True}) # PIE804 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PIE804 + | ^^^^^^^^^^^^^^^ PIE804 6 | 7 | Foo.objects.create(**{"_id": some_id}) # PIE804 | @@ -58,12 +58,12 @@ PIE804.py:5:1: PIE804 [*] Unnecessary `dict` kwargs 7 7 | Foo.objects.create(**{"_id": some_id}) # PIE804 8 8 | -PIE804.py:7:1: PIE804 [*] Unnecessary `dict` kwargs +PIE804.py:7:20: PIE804 [*] Unnecessary `dict` kwargs | 5 | Foo.objects.create(**{"bar": True}) # PIE804 6 | 7 | Foo.objects.create(**{"_id": some_id}) # PIE804 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PIE804 + | ^^^^^^^^^^^^^^^^^^ PIE804 8 | 9 | Foo.objects.create(**{**bar}) # PIE804 | @@ -79,12 +79,12 @@ PIE804.py:7:1: PIE804 [*] Unnecessary `dict` kwargs 9 9 | Foo.objects.create(**{**bar}) # PIE804 10 10 | -PIE804.py:9:1: PIE804 [*] Unnecessary `dict` kwargs +PIE804.py:9:20: PIE804 [*] Unnecessary `dict` kwargs | 7 | Foo.objects.create(**{"_id": some_id}) # PIE804 8 | 9 | Foo.objects.create(**{**bar}) # PIE804 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PIE804 + | ^^^^^^^^^ PIE804 10 | 11 | foo(**{}) | @@ -100,12 +100,14 @@ PIE804.py:9:1: PIE804 [*] Unnecessary `dict` kwargs 11 11 | foo(**{}) 12 12 | -PIE804.py:11:1: PIE804 [*] Unnecessary `dict` kwargs +PIE804.py:11:5: PIE804 [*] Unnecessary `dict` kwargs | 9 | Foo.objects.create(**{**bar}) # PIE804 10 | 11 | foo(**{}) - | ^^^^^^^^^ PIE804 + | ^^^^ PIE804 +12 | +13 | foo(**{**data, "foo": "buzz"}) | = help: Remove unnecessary kwargs @@ -116,7 +118,71 @@ PIE804.py:11:1: PIE804 [*] Unnecessary `dict` kwargs 11 |-foo(**{}) 11 |+foo() 12 12 | -13 13 | -14 14 | foo(**{**data, "foo": "buzz"}) +13 13 | foo(**{**data, "foo": "buzz"}) +14 14 | foo(**buzz) + +PIE804.py:22:5: PIE804 [*] Unnecessary `dict` kwargs + | +20 | foo(**{f"buzz__{bar}": True}) +21 | abc(**{"for": 3}) +22 | foo(**{},) + | ^^^^ PIE804 +23 | +24 | # Duplicated key names won't be fixed, to avoid syntax errors. 
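To restate the PIE804 behavior shown in the code and snapshots above as plain Python (the callable name is arbitrary): straightforward dict kwargs are inlined by a safe fix, while cases that would duplicate a keyword are still reported but left unfixed.

def configure(**kwargs):
    return kwargs


configure(**{"bar": True})  # PIE804; safe fix rewrites to configure(bar=True)
configure(**{})             # PIE804; safe fix rewrites to configure()


def unfixable():
    # PIE804 is still reported, but no fix is offered: inlining either dict
    # would pass the keyword `a` twice, which raises TypeError at runtime.
    configure(**{"a": 1}, **{"a": 2})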
+ | + = help: Remove unnecessary kwargs + +ℹ Safe fix +19 19 | foo(**{"": True}) +20 20 | foo(**{f"buzz__{bar}": True}) +21 21 | abc(**{"for": 3}) +22 |-foo(**{},) + 22 |+foo() +23 23 | +24 24 | # Duplicated key names won't be fixed, to avoid syntax errors. +25 25 | abc(**{'a': b}, **{'a': c}) # PIE804 + +PIE804.py:25:5: PIE804 Unnecessary `dict` kwargs + | +24 | # Duplicated key names won't be fixed, to avoid syntax errors. +25 | abc(**{'a': b}, **{'a': c}) # PIE804 + | ^^^^^^^^^^ PIE804 +26 | abc(a=1, **{'a': c}, **{'b': c}) # PIE804 + | + = help: Remove unnecessary kwargs + +PIE804.py:25:17: PIE804 Unnecessary `dict` kwargs + | +24 | # Duplicated key names won't be fixed, to avoid syntax errors. +25 | abc(**{'a': b}, **{'a': c}) # PIE804 + | ^^^^^^^^^^ PIE804 +26 | abc(a=1, **{'a': c}, **{'b': c}) # PIE804 + | + = help: Remove unnecessary kwargs + +PIE804.py:26:10: PIE804 Unnecessary `dict` kwargs + | +24 | # Duplicated key names won't be fixed, to avoid syntax errors. +25 | abc(**{'a': b}, **{'a': c}) # PIE804 +26 | abc(a=1, **{'a': c}, **{'b': c}) # PIE804 + | ^^^^^^^^^^ PIE804 + | + = help: Remove unnecessary kwargs + +PIE804.py:26:22: PIE804 [*] Unnecessary `dict` kwargs + | +24 | # Duplicated key names won't be fixed, to avoid syntax errors. +25 | abc(**{'a': b}, **{'a': c}) # PIE804 +26 | abc(a=1, **{'a': c}, **{'b': c}) # PIE804 + | ^^^^^^^^^^ PIE804 + | + = help: Remove unnecessary kwargs + +ℹ Safe fix +23 23 | +24 24 | # Duplicated key names won't be fixed, to avoid syntax errors. +25 25 | abc(**{'a': b}, **{'a': c}) # PIE804 +26 |-abc(a=1, **{'a': c}, **{'b': c}) # PIE804 + 26 |+abc(a=1, **{'a': c}, b=c) # PIE804 diff --git a/crates/ruff_linter/src/rules/flake8_print/rules/print_call.rs b/crates/ruff_linter/src/rules/flake8_print/rules/print_call.rs index 8cfa0a6f5c..d8866f8390 100644 --- a/crates/ruff_linter/src/rules/flake8_print/rules/print_call.rs +++ b/crates/ruff_linter/src/rules/flake8_print/rules/print_call.rs @@ -1,10 +1,10 @@ -use ruff_diagnostics::{Diagnostic, Violation}; +use ruff_diagnostics::{Diagnostic, Fix, FixAvailability, Violation}; use ruff_macros::{derive_message_formats, violation}; - -use ruff_python_ast::{self as ast}; +use ruff_python_ast as ast; use ruff_text_size::Ranged; use crate::checkers::ast::Checker; +use crate::fix::edits::delete_stmt; use crate::registry::AsRule; /// ## What it does @@ -28,14 +28,24 @@ use crate::registry::AsRule; /// def add_numbers(a, b): /// return a + b /// ``` +/// +/// ## Fix safety +/// This rule's fix is marked as unsafe, as it may remove `print` statements +/// that are used beyond debugging purposes. #[violation] pub struct Print; impl Violation for Print { + const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes; + #[derive_message_formats] fn message(&self) -> String { format!("`print` found") } + + fn fix_title(&self) -> Option { + Some("Remove `print`".to_string()) + } } /// ## What it does @@ -65,19 +75,29 @@ impl Violation for Print { /// dict_c = {**dict_a, **dict_b} /// return dict_c /// ``` +/// +/// ## Fix safety +/// This rule's fix is marked as unsafe, as it may remove `pprint` statements +/// that are used beyond debugging purposes. 
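The new "Fix safety" notes above can be read as follows; a minimal Python sketch with invented names. The fix only applies when `print`/`pprint` is a standalone statement, and it is unsafe because the output may be intentional rather than leftover debugging.

import sys
from pprint import pprint


def load(data):
    print("loading", file=sys.stderr)  # T201; unsafe fix deletes this statement
    pprint(data)                       # T203; unsafe fix deletes this statement
    return data


# After both fixes, the body reduces to `return data`.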
#[violation] pub struct PPrint; impl Violation for PPrint { + const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes; + #[derive_message_formats] fn message(&self) -> String { format!("`pprint` found") } + + fn fix_title(&self) -> Option { + Some("Remove `pprint`".to_string()) + } } /// T201, T203 pub(crate) fn print_call(checker: &mut Checker, call: &ast::ExprCall) { - let diagnostic = { + let mut diagnostic = { let call_path = checker.semantic().resolve_call_path(&call.func); if call_path .as_ref() @@ -113,5 +133,15 @@ pub(crate) fn print_call(checker: &mut Checker, call: &ast::ExprCall) { return; } + // Remove the `print`, if it's a standalone statement. + if checker.semantic().current_expression_parent().is_none() { + let statement = checker.semantic().current_statement(); + let parent = checker.semantic().current_statement_parent(); + let edit = delete_stmt(statement, parent, checker.locator(), checker.indexer()); + diagnostic.set_fix(Fix::unsafe_edit(edit).isolate(Checker::isolation( + checker.semantic().current_statement_parent_id(), + ))); + } + checker.diagnostics.push(diagnostic); } diff --git a/crates/ruff_linter/src/rules/flake8_print/snapshots/ruff_linter__rules__flake8_print__tests__T201_T201.py.snap b/crates/ruff_linter/src/rules/flake8_print/snapshots/ruff_linter__rules__flake8_print__tests__T201_T201.py.snap index 97fb0c0676..2cbb575361 100644 --- a/crates/ruff_linter/src/rules/flake8_print/snapshots/ruff_linter__rules__flake8_print__tests__T201_T201.py.snap +++ b/crates/ruff_linter/src/rules/flake8_print/snapshots/ruff_linter__rules__flake8_print__tests__T201_T201.py.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/flake8_print/mod.rs --- -T201.py:4:1: T201 `print` found +T201.py:4:1: T201 [*] `print` found | 2 | import tempfile 3 | @@ -10,8 +10,18 @@ T201.py:4:1: T201 `print` found 5 | print("Hello, world!", file=None) # T201 6 | print("Hello, world!", file=sys.stdout) # T201 | + = help: Remove `print` -T201.py:5:1: T201 `print` found +ℹ Unsafe fix +1 1 | import sys +2 2 | import tempfile +3 3 | +4 |-print("Hello, world!") # T201 +5 4 | print("Hello, world!", file=None) # T201 +6 5 | print("Hello, world!", file=sys.stdout) # T201 +7 6 | print("Hello, world!", file=sys.stderr) # T201 + +T201.py:5:1: T201 [*] `print` found | 4 | print("Hello, world!") # T201 5 | print("Hello, world!", file=None) # T201 @@ -19,8 +29,18 @@ T201.py:5:1: T201 `print` found 6 | print("Hello, world!", file=sys.stdout) # T201 7 | print("Hello, world!", file=sys.stderr) # T201 | + = help: Remove `print` -T201.py:6:1: T201 `print` found +ℹ Unsafe fix +2 2 | import tempfile +3 3 | +4 4 | print("Hello, world!") # T201 +5 |-print("Hello, world!", file=None) # T201 +6 5 | print("Hello, world!", file=sys.stdout) # T201 +7 6 | print("Hello, world!", file=sys.stderr) # T201 +8 7 | + +T201.py:6:1: T201 [*] `print` found | 4 | print("Hello, world!") # T201 5 | print("Hello, world!", file=None) # T201 @@ -28,8 +48,18 @@ T201.py:6:1: T201 `print` found | ^^^^^ T201 7 | print("Hello, world!", file=sys.stderr) # T201 | + = help: Remove `print` -T201.py:7:1: T201 `print` found +ℹ Unsafe fix +3 3 | +4 4 | print("Hello, world!") # T201 +5 5 | print("Hello, world!", file=None) # T201 +6 |-print("Hello, world!", file=sys.stdout) # T201 +7 6 | print("Hello, world!", file=sys.stderr) # T201 +8 7 | +9 8 | with tempfile.NamedTemporaryFile() as fp: + +T201.py:7:1: T201 [*] `print` found | 5 | print("Hello, world!", file=None) # T201 6 | print("Hello, world!", file=sys.stdout) # T201 @@ -38,5 
+68,15 @@ T201.py:7:1: T201 `print` found 8 | 9 | with tempfile.NamedTemporaryFile() as fp: | + = help: Remove `print` + +ℹ Unsafe fix +4 4 | print("Hello, world!") # T201 +5 5 | print("Hello, world!", file=None) # T201 +6 6 | print("Hello, world!", file=sys.stdout) # T201 +7 |-print("Hello, world!", file=sys.stderr) # T201 +8 7 | +9 8 | with tempfile.NamedTemporaryFile() as fp: +10 9 | print("Hello, world!", file=fp) # OK diff --git a/crates/ruff_linter/src/rules/flake8_print/snapshots/ruff_linter__rules__flake8_print__tests__T203_T203.py.snap b/crates/ruff_linter/src/rules/flake8_print/snapshots/ruff_linter__rules__flake8_print__tests__T203_T203.py.snap index 1fe8d2ea41..f63e108c86 100644 --- a/crates/ruff_linter/src/rules/flake8_print/snapshots/ruff_linter__rules__flake8_print__tests__T203_T203.py.snap +++ b/crates/ruff_linter/src/rules/flake8_print/snapshots/ruff_linter__rules__flake8_print__tests__T203_T203.py.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/flake8_print/mod.rs --- -T203.py:3:1: T203 `pprint` found +T203.py:3:1: T203 [*] `pprint` found | 1 | from pprint import pprint 2 | @@ -10,8 +10,17 @@ T203.py:3:1: T203 `pprint` found 4 | 5 | import pprint | + = help: Remove `pprint` -T203.py:7:1: T203 `pprint` found +ℹ Unsafe fix +1 1 | from pprint import pprint +2 2 | +3 |-pprint("Hello, world!") # T203 +4 3 | +5 4 | import pprint +6 5 | + +T203.py:7:1: T203 [*] `pprint` found | 5 | import pprint 6 | @@ -20,5 +29,14 @@ T203.py:7:1: T203 `pprint` found 8 | 9 | pprint.pformat("Hello, world!") | + = help: Remove `pprint` + +ℹ Unsafe fix +4 4 | +5 5 | import pprint +6 6 | +7 |-pprint.pprint("Hello, world!") # T203 +8 7 | +9 8 | pprint.pformat("Hello, world!") diff --git a/crates/ruff_linter/src/rules/flake8_pyi/rules/string_or_bytes_too_long.rs b/crates/ruff_linter/src/rules/flake8_pyi/rules/string_or_bytes_too_long.rs index d8fa801c94..df2b034e82 100644 --- a/crates/ruff_linter/src/rules/flake8_pyi/rules/string_or_bytes_too_long.rs +++ b/crates/ruff_linter/src/rules/flake8_pyi/rules/string_or_bytes_too_long.rs @@ -1,8 +1,7 @@ -use ruff_python_ast::{self as ast, Expr}; - use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix}; use ruff_macros::{derive_message_formats, violation}; use ruff_python_ast::helpers::is_docstring_stmt; +use ruff_python_ast::{self as ast, StringLike}; use ruff_text_size::Ranged; use crate::checkers::ast::Checker; @@ -44,25 +43,27 @@ impl AlwaysFixableViolation for StringOrBytesTooLong { } /// PYI053 -pub(crate) fn string_or_bytes_too_long(checker: &mut Checker, expr: &Expr) { +pub(crate) fn string_or_bytes_too_long(checker: &mut Checker, string: StringLike) { // Ignore docstrings. if is_docstring_stmt(checker.semantic().current_statement()) { return; } - let length = match expr { - Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => value.chars().count(), - Expr::BytesLiteral(ast::ExprBytesLiteral { value, .. }) => value.len(), - _ => return, + let length = match string { + StringLike::StringLiteral(ast::ExprStringLiteral { value, .. }) => value.chars().count(), + StringLike::BytesLiteral(ast::ExprBytesLiteral { value, .. }) => value.len(), + StringLike::FStringLiteral(ast::FStringLiteralElement { value, .. 
}) => { + value.chars().count() + } }; if length <= 50 { return; } - let mut diagnostic = Diagnostic::new(StringOrBytesTooLong, expr.range()); + let mut diagnostic = Diagnostic::new(StringOrBytesTooLong, string.range()); diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( "...".to_string(), - expr.range(), + string.range(), ))); checker.diagnostics.push(diagnostic); } diff --git a/crates/ruff_linter/src/rules/flake8_pyi/rules/type_alias_naming.rs b/crates/ruff_linter/src/rules/flake8_pyi/rules/type_alias_naming.rs index e240bd3bd8..4e60317211 100644 --- a/crates/ruff_linter/src/rules/flake8_pyi/rules/type_alias_naming.rs +++ b/crates/ruff_linter/src/rules/flake8_pyi/rules/type_alias_naming.rs @@ -46,12 +46,16 @@ impl Violation for SnakeCaseTypeAlias { /// /// ## Example /// ```python -/// MyTypeT = int +/// from typing import TypeAlias +/// +/// _MyTypeT: TypeAlias = int /// ``` /// /// Use instead: /// ```python -/// MyType = int +/// from typing import TypeAlias +/// +/// _MyType: TypeAlias = int /// ``` #[violation] pub struct TSuffixedTypeAlias { diff --git a/crates/ruff_linter/src/rules/flake8_pyi/rules/unnecessary_type_union.rs b/crates/ruff_linter/src/rules/flake8_pyi/rules/unnecessary_type_union.rs index 75ba6ab9bc..6d74fffd5b 100644 --- a/crates/ruff_linter/src/rules/flake8_pyi/rules/unnecessary_type_union.rs +++ b/crates/ruff_linter/src/rules/flake8_pyi/rules/unnecessary_type_union.rs @@ -80,17 +80,24 @@ pub(crate) fn unnecessary_type_union<'a>(checker: &mut Checker, union: &'a Expr) } let mut type_exprs = Vec::new(); + let mut other_exprs = Vec::new(); let mut collect_type_exprs = |expr: &'a Expr, _| { - let Some(subscript) = expr.as_subscript_expr() else { - return; - }; - if checker - .semantic() - .resolve_call_path(subscript.value.as_ref()) - .is_some_and(|call_path| matches!(call_path.as_slice(), ["" | "builtins", "type"])) - { - type_exprs.push(&subscript.slice); + let subscript = expr.as_subscript_expr(); + + if subscript.is_none() { + other_exprs.push(expr); + } else { + let unwrapped = subscript.unwrap(); + if checker + .semantic() + .resolve_call_path(unwrapped.value.as_ref()) + .is_some_and(|call_path| matches!(call_path.as_slice(), ["" | "builtins", "type"])) + { + type_exprs.push(&unwrapped.slice); + } else { + other_exprs.push(expr); + } } }; @@ -113,55 +120,82 @@ pub(crate) fn unnecessary_type_union<'a>(checker: &mut Checker, union: &'a Expr) if checker.semantic().is_builtin("type") { let content = if let Some(subscript) = subscript { - checker - .generator() - .expr(&Expr::Subscript(ast::ExprSubscript { - value: Box::new(Expr::Name(ast::ExprName { - id: "type".into(), - ctx: ExprContext::Load, - range: TextRange::default(), - })), - slice: Box::new(Expr::Subscript(ast::ExprSubscript { - value: subscript.value.clone(), - slice: Box::new(Expr::Tuple(ast::ExprTuple { - elts: type_members - .into_iter() - .map(|type_member| { - Expr::Name(ast::ExprName { - id: type_member, - ctx: ExprContext::Load, - range: TextRange::default(), - }) - }) - .collect(), - ctx: ExprContext::Load, - range: TextRange::default(), - })), - ctx: ExprContext::Load, - range: TextRange::default(), - })), + let types = &Expr::Subscript(ast::ExprSubscript { + value: Box::new(Expr::Name(ast::ExprName { + id: "type".into(), ctx: ExprContext::Load, range: TextRange::default(), - })) - } else { - checker - .generator() - .expr(&Expr::Subscript(ast::ExprSubscript { - value: Box::new(Expr::Name(ast::ExprName { - id: "type".into(), - ctx: ExprContext::Load, - range: TextRange::default(), - })), 
- slice: Box::new(concatenate_bin_ors( - type_exprs - .clone() + })), + slice: Box::new(Expr::Subscript(ast::ExprSubscript { + value: subscript.value.clone(), + slice: Box::new(Expr::Tuple(ast::ExprTuple { + elts: type_members .into_iter() - .map(std::convert::AsRef::as_ref) + .map(|type_member| { + Expr::Name(ast::ExprName { + id: type_member, + ctx: ExprContext::Load, + range: TextRange::default(), + }) + }) .collect(), - )), + ctx: ExprContext::Load, + range: TextRange::default(), + })), ctx: ExprContext::Load, range: TextRange::default(), - })) + })), + ctx: ExprContext::Load, + range: TextRange::default(), + }); + + if other_exprs.is_empty() { + checker.generator().expr(types) + } else { + let mut exprs = Vec::new(); + exprs.push(types); + exprs.extend(other_exprs); + + let union = Expr::Subscript(ast::ExprSubscript { + value: subscript.value.clone(), + slice: Box::new(Expr::Tuple(ast::ExprTuple { + elts: exprs.into_iter().cloned().collect(), + ctx: ExprContext::Load, + range: TextRange::default(), + })), + ctx: ExprContext::Load, + range: TextRange::default(), + }); + + checker.generator().expr(&union) + } + } else { + let types = &Expr::Subscript(ast::ExprSubscript { + value: Box::new(Expr::Name(ast::ExprName { + id: "type".into(), + ctx: ExprContext::Load, + range: TextRange::default(), + })), + slice: Box::new(concatenate_bin_ors( + type_exprs + .clone() + .into_iter() + .map(std::convert::AsRef::as_ref) + .collect(), + )), + ctx: ExprContext::Load, + range: TextRange::default(), + }); + + if other_exprs.is_empty() { + checker.generator().expr(types) + } else { + let mut exprs = Vec::new(); + exprs.push(types); + exprs.extend(other_exprs); + + checker.generator().expr(&concatenate_bin_ors(exprs)) + } }; diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( diff --git a/crates/ruff_linter/src/rules/flake8_pyi/rules/unused_private_type_definition.rs b/crates/ruff_linter/src/rules/flake8_pyi/rules/unused_private_type_definition.rs index 094d1697c9..5da722a904 100644 --- a/crates/ruff_linter/src/rules/flake8_pyi/rules/unused_private_type_definition.rs +++ b/crates/ruff_linter/src/rules/flake8_pyi/rules/unused_private_type_definition.rs @@ -7,28 +7,35 @@ use ruff_text_size::Ranged; use crate::checkers::ast::Checker; /// ## What it does -/// Checks for the presence of unused private `TypeVar` declarations. +/// Checks for the presence of unused private `TypeVar`, `ParamSpec` or +/// `TypeVarTuple` declarations. /// /// ## Why is this bad? -/// A private `TypeVar` that is defined but not used is likely a mistake, and +/// A private `TypeVar` that is defined but not used is likely a mistake. It /// should either be used, made public, or removed to avoid confusion. /// /// ## Example /// ```python /// import typing +/// import typing_extensions /// /// _T = typing.TypeVar("_T") +/// _Ts = typing_extensions.TypeVarTuple("_Ts") /// ``` #[violation] pub struct UnusedPrivateTypeVar { - name: String, + type_var_like_name: String, + type_var_like_kind: String, } impl Violation for UnusedPrivateTypeVar { #[derive_message_formats] fn message(&self) -> String { - let UnusedPrivateTypeVar { name } = self; - format!("Private TypeVar `{name}` is never used") + let UnusedPrivateTypeVar { + type_var_like_name, + type_var_like_kind, + } = self; + format!("Private {type_var_like_kind} `{type_var_like_name}` is never used") } } @@ -185,13 +192,26 @@ pub(crate) fn unused_private_type_var( let Expr::Call(ast::ExprCall { func, .. 
}) = value.as_ref() else { continue; }; - if !checker.semantic().match_typing_expr(func, "TypeVar") { + + let semantic = checker.semantic(); + let Some(type_var_like_kind) = semantic.resolve_call_path(func).and_then(|call_path| { + if semantic.match_typing_call_path(&call_path, "TypeVar") { + Some("TypeVar") + } else if semantic.match_typing_call_path(&call_path, "ParamSpec") { + Some("ParamSpec") + } else if semantic.match_typing_call_path(&call_path, "TypeVarTuple") { + Some("TypeVarTuple") + } else { + None + } + }) else { continue; - } + }; diagnostics.push(Diagnostic::new( UnusedPrivateTypeVar { - name: id.to_string(), + type_var_like_name: id.to_string(), + type_var_like_kind: type_var_like_kind.to_string(), }, binding.range(), )); diff --git a/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI018_PYI018.py.snap b/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI018_PYI018.py.snap index 878d32f43d..9ca737a920 100644 --- a/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI018_PYI018.py.snap +++ b/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI018_PYI018.py.snap @@ -1,22 +1,52 @@ --- source: crates/ruff_linter/src/rules/flake8_pyi/mod.rs --- -PYI018.py:4:1: PYI018 Private TypeVar `_T` is never used +PYI018.py:6:1: PYI018 Private TypeVar `_T` is never used | -2 | from typing import TypeVar -3 | -4 | _T = typing.TypeVar("_T") +4 | from typing_extensions import ParamSpec, TypeVarTuple +5 | +6 | _T = typing.TypeVar("_T") | ^^ PYI018 -5 | _P = TypeVar("_P") +7 | _Ts = typing_extensions.TypeVarTuple("_Ts") +8 | _P = ParamSpec("_P") | -PYI018.py:5:1: PYI018 Private TypeVar `_P` is never used +PYI018.py:7:1: PYI018 Private TypeVarTuple `_Ts` is never used | -4 | _T = typing.TypeVar("_T") -5 | _P = TypeVar("_P") - | ^^ PYI018 -6 | -7 | # OK +6 | _T = typing.TypeVar("_T") +7 | _Ts = typing_extensions.TypeVarTuple("_Ts") + | ^^^ PYI018 +8 | _P = ParamSpec("_P") +9 | _P2 = typing.ParamSpec("_P2") | +PYI018.py:8:1: PYI018 Private ParamSpec `_P` is never used + | + 6 | _T = typing.TypeVar("_T") + 7 | _Ts = typing_extensions.TypeVarTuple("_Ts") + 8 | _P = ParamSpec("_P") + | ^^ PYI018 + 9 | _P2 = typing.ParamSpec("_P2") +10 | _Ts2 = TypeVarTuple("_Ts2") + | + +PYI018.py:9:1: PYI018 Private ParamSpec `_P2` is never used + | + 7 | _Ts = typing_extensions.TypeVarTuple("_Ts") + 8 | _P = ParamSpec("_P") + 9 | _P2 = typing.ParamSpec("_P2") + | ^^^ PYI018 +10 | _Ts2 = TypeVarTuple("_Ts2") + | + +PYI018.py:10:1: PYI018 Private TypeVarTuple `_Ts2` is never used + | + 8 | _P = ParamSpec("_P") + 9 | _P2 = typing.ParamSpec("_P2") +10 | _Ts2 = TypeVarTuple("_Ts2") + | ^^^^ PYI018 +11 | +12 | # OK + | + diff --git a/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI018_PYI018.pyi.snap b/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI018_PYI018.pyi.snap index d82b93d9b1..4585ae25ba 100644 --- a/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI018_PYI018.pyi.snap +++ b/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI018_PYI018.pyi.snap @@ -1,22 +1,52 @@ --- source: crates/ruff_linter/src/rules/flake8_pyi/mod.rs --- -PYI018.pyi:4:1: PYI018 Private TypeVar `_T` is never used +PYI018.pyi:6:1: PYI018 Private TypeVar `_T` is never used | -2 | from typing import TypeVar -3 | -4 | _T 
= typing.TypeVar("_T") +4 | from typing_extensions import ParamSpec, TypeVarTuple +5 | +6 | _T = typing.TypeVar("_T") | ^^ PYI018 -5 | _P = TypeVar("_P") +7 | _Ts = typing_extensions.TypeVarTuple("_Ts") +8 | _P = ParamSpec("_P") | -PYI018.pyi:5:1: PYI018 Private TypeVar `_P` is never used +PYI018.pyi:7:1: PYI018 Private TypeVarTuple `_Ts` is never used | -4 | _T = typing.TypeVar("_T") -5 | _P = TypeVar("_P") - | ^^ PYI018 -6 | -7 | # OK +6 | _T = typing.TypeVar("_T") +7 | _Ts = typing_extensions.TypeVarTuple("_Ts") + | ^^^ PYI018 +8 | _P = ParamSpec("_P") +9 | _P2 = typing.ParamSpec("_P2") | +PYI018.pyi:8:1: PYI018 Private ParamSpec `_P` is never used + | + 6 | _T = typing.TypeVar("_T") + 7 | _Ts = typing_extensions.TypeVarTuple("_Ts") + 8 | _P = ParamSpec("_P") + | ^^ PYI018 + 9 | _P2 = typing.ParamSpec("_P2") +10 | _Ts2 = TypeVarTuple("_Ts2") + | + +PYI018.pyi:9:1: PYI018 Private ParamSpec `_P2` is never used + | + 7 | _Ts = typing_extensions.TypeVarTuple("_Ts") + 8 | _P = ParamSpec("_P") + 9 | _P2 = typing.ParamSpec("_P2") + | ^^^ PYI018 +10 | _Ts2 = TypeVarTuple("_Ts2") + | + +PYI018.pyi:10:1: PYI018 Private TypeVarTuple `_Ts2` is never used + | + 8 | _P = ParamSpec("_P") + 9 | _P2 = typing.ParamSpec("_P2") +10 | _Ts2 = TypeVarTuple("_Ts2") + | ^^^^ PYI018 +11 | +12 | # OK + | + diff --git a/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI042_PYI042.py.snap b/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI042_PYI042.py.snap index 1f80b9c9af..539eb12d99 100644 --- a/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI042_PYI042.py.snap +++ b/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI042_PYI042.py.snap @@ -29,4 +29,12 @@ PYI042.py:20:1: PYI042 Type alias `_snake_case_alias2` should be CamelCase 21 | Snake_case_alias: TypeAlias = int | float # PYI042, since not camel case | +PYI042.py:27:6: PYI042 Type alias `foo_bar` should be CamelCase + | +26 | # PEP 695 +27 | type foo_bar = int | str + | ^^^^^^^ PYI042 +28 | type FooBar = int | str + | + diff --git a/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI042_PYI042.pyi.snap b/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI042_PYI042.pyi.snap index ab3c2fe98e..9d1d034c00 100644 --- a/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI042_PYI042.pyi.snap +++ b/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI042_PYI042.pyi.snap @@ -29,4 +29,12 @@ PYI042.pyi:20:1: PYI042 Type alias `_snake_case_alias2` should be CamelCase 21 | Snake_case_alias: TypeAlias = int | float # PYI042, since not camel case | +PYI042.pyi:27:6: PYI042 Type alias `foo_bar` should be CamelCase + | +26 | # PEP 695 +27 | type foo_bar = int | str + | ^^^^^^^ PYI042 +28 | type FooBar = int | str + | + diff --git a/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI043_PYI043.py.snap b/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI043_PYI043.py.snap index 3dfc9c819b..550ad122d1 100644 --- a/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI043_PYI043.py.snap +++ b/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI043_PYI043.py.snap @@ -30,4 +30,12 @@ 
PYI043.py:12:1: PYI043 Private type alias `_PrivateAliasT3` should not be suffix 14 | ] # PYI043, since this ends in a T | +PYI043.py:26:6: PYI043 Private type alias `_FooT` should not be suffixed with `T` (the `T` suffix implies that an object is a `TypeVar`) + | +25 | # PEP 695 +26 | type _FooT = str | int + | ^^^^^ PYI043 +27 | type Foo = str | int + | + diff --git a/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI043_PYI043.pyi.snap b/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI043_PYI043.pyi.snap index f856dc86ad..80fb8b4c19 100644 --- a/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI043_PYI043.pyi.snap +++ b/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI043_PYI043.pyi.snap @@ -30,4 +30,12 @@ PYI043.pyi:12:1: PYI043 Private type alias `_PrivateAliasT3` should not be suffi 14 | ] # PYI043, since this ends in a T | +PYI043.pyi:26:6: PYI043 Private type alias `_FooT` should not be suffixed with `T` (the `T` suffix implies that an object is a `TypeVar`) + | +25 | # PEP 695 +26 | type _FooT = str | int + | ^^^^^ PYI043 +27 | type Foo = str | int + | + diff --git a/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI053_PYI053.pyi.snap b/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI053_PYI053.pyi.snap index 2e8fd1a1a3..f0a6ebc905 100644 --- a/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI053_PYI053.pyi.snap +++ b/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI053_PYI053.pyi.snap @@ -90,7 +90,7 @@ PYI053.pyi:30:14: PYI053 [*] String and bytes literals longer than 50 characters 30 | qux: bytes = b"51 character byte stringggggggggggggggggggggggggggg\xff" # Error: PYI053 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PYI053 31 | -32 | class Demo: +32 | ffoo: str = f"50 character stringggggggggggggggggggggggggggggggg" # OK | = help: Replace with `...` @@ -101,7 +101,28 @@ PYI053.pyi:30:14: PYI053 [*] String and bytes literals longer than 50 characters 30 |-qux: bytes = b"51 character byte stringggggggggggggggggggggggggggg\xff" # Error: PYI053 30 |+qux: bytes = ... # Error: PYI053 31 31 | -32 32 | class Demo: -33 33 | """Docstrings are excluded from this rule. Some padding.""" # OK +32 32 | ffoo: str = f"50 character stringggggggggggggggggggggggggggggggg" # OK +33 33 | + +PYI053.pyi:34:15: PYI053 [*] String and bytes literals longer than 50 characters are not permitted + | +32 | ffoo: str = f"50 character stringggggggggggggggggggggggggggggggg" # OK +33 | +34 | fbar: str = f"51 character stringgggggggggggggggggggggggggggggggg" # Error: PYI053 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PYI053 +35 | +36 | class Demo: + | + = help: Replace with `...` + +ℹ Safe fix +31 31 | +32 32 | ffoo: str = f"50 character stringggggggggggggggggggggggggggggggg" # OK +33 33 | +34 |-fbar: str = f"51 character stringgggggggggggggggggggggggggggggggg" # Error: PYI053 + 34 |+fbar: str = f"..." # Error: PYI053 +35 35 | +36 36 | class Demo: +37 37 | """Docstrings are excluded from this rule. 
Some padding.""" # OK diff --git a/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI055_PYI055.py.snap b/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI055_PYI055.py.snap index bdd2f92937..e93e79a12d 100644 --- a/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI055_PYI055.py.snap +++ b/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI055_PYI055.py.snap @@ -54,5 +54,91 @@ PYI055.py:39:8: PYI055 [*] Multiple `type` members in a union. Combine them into 38 38 | # PYI055 39 |- x: Union[type[requests_mock.Mocker], type[httpretty], type[str]] = requests_mock.Mocker 39 |+ x: type[Union[requests_mock.Mocker, httpretty, str]] = requests_mock.Mocker +40 40 | +41 41 | +42 42 | def convert_union(union: UnionType) -> _T | None: + +PYI055.py:44:9: PYI055 [*] Multiple `type` members in a union. Combine them into one, e.g., `type[_T | Converter[_T]]`. + | +42 | def convert_union(union: UnionType) -> _T | None: +43 | converters: tuple[ +44 | type[_T] | type[Converter[_T]] | Converter[_T] | Callable[[str], _T], ... # PYI055 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PYI055 +45 | ] = union.__args__ +46 | ... + | + = help: Combine multiple `type` members + +ℹ Safe fix +41 41 | +42 42 | def convert_union(union: UnionType) -> _T | None: +43 43 | converters: tuple[ +44 |- type[_T] | type[Converter[_T]] | Converter[_T] | Callable[[str], _T], ... # PYI055 + 44 |+ type[_T | Converter[_T]] | Converter[_T] | Callable[[str], _T], ... # PYI055 +45 45 | ] = union.__args__ +46 46 | ... +47 47 | + +PYI055.py:50:15: PYI055 [*] Multiple `type` members in a union. Combine them into one, e.g., `type[_T | Converter[_T]]`. + | +48 | def convert_union(union: UnionType) -> _T | None: +49 | converters: tuple[ +50 | Union[type[_T] | type[Converter[_T]] | Converter[_T] | Callable[[str], _T]], ... # PYI055 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PYI055 +51 | ] = union.__args__ +52 | ... + | + = help: Combine multiple `type` members + +ℹ Safe fix +47 47 | +48 48 | def convert_union(union: UnionType) -> _T | None: +49 49 | converters: tuple[ +50 |- Union[type[_T] | type[Converter[_T]] | Converter[_T] | Callable[[str], _T]], ... # PYI055 + 50 |+ Union[type[_T | Converter[_T]] | Converter[_T] | Callable[[str], _T]], ... # PYI055 +51 51 | ] = union.__args__ +52 52 | ... +53 53 | + +PYI055.py:56:15: PYI055 [*] Multiple `type` members in a union. Combine them into one, e.g., `type[_T | Converter[_T]]`. + | +54 | def convert_union(union: UnionType) -> _T | None: +55 | converters: tuple[ +56 | Union[type[_T] | type[Converter[_T]]] | Converter[_T] | Callable[[str], _T], ... # PYI055 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PYI055 +57 | ] = union.__args__ +58 | ... + | + = help: Combine multiple `type` members + +ℹ Safe fix +53 53 | +54 54 | def convert_union(union: UnionType) -> _T | None: +55 55 | converters: tuple[ +56 |- Union[type[_T] | type[Converter[_T]]] | Converter[_T] | Callable[[str], _T], ... # PYI055 + 56 |+ Union[type[_T | Converter[_T]]] | Converter[_T] | Callable[[str], _T], ... # PYI055 +57 57 | ] = union.__args__ +58 58 | ... +59 59 | + +PYI055.py:62:15: PYI055 [*] Multiple `type` members in a union. Combine them into one, e.g., `type[_T | Converter[_T]]`. 
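A compact Python illustration of the revised PYI055 fix in the hunks around this point (function names are invented): `type[...]` members of a union are still merged, but members that are not `type[...]` are now carried over instead of being dropped.

def register(handler: type[int] | str | type[float]) -> None: ...  # PYI055


# Previous fix output dropped the non-`type[...]` member:
def register_old_fix(handler: type[int | float]) -> None: ...


# Revised fix output keeps it:
def register_new_fix(handler: type[int | float] | str) -> None: ...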
+ | +60 | def convert_union(union: UnionType) -> _T | None: +61 | converters: tuple[ +62 | Union[type[_T] | type[Converter[_T]] | str] | Converter[_T] | Callable[[str], _T], ... # PYI055 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ PYI055 +63 | ] = union.__args__ +64 | ... + | + = help: Combine multiple `type` members + +ℹ Safe fix +59 59 | +60 60 | def convert_union(union: UnionType) -> _T | None: +61 61 | converters: tuple[ +62 |- Union[type[_T] | type[Converter[_T]] | str] | Converter[_T] | Callable[[str], _T], ... # PYI055 + 62 |+ Union[type[_T | Converter[_T]] | str] | Converter[_T] | Callable[[str], _T], ... # PYI055 +63 63 | ] = union.__args__ +64 64 | ... diff --git a/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI055_PYI055.pyi.snap b/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI055_PYI055.pyi.snap index b845e5269a..0e41288be5 100644 --- a/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI055_PYI055.pyi.snap +++ b/crates/ruff_linter/src/rules/flake8_pyi/snapshots/ruff_linter__rules__flake8_pyi__tests__PYI055_PYI055.pyi.snap @@ -120,7 +120,7 @@ PYI055.pyi:10:15: PYI055 [*] Multiple `type` members in a union. Combine them in 8 8 | z: Union[type[float, int], type[complex]] 9 9 | 10 |-def func(arg: type[int] | str | type[float]) -> None: ... - 10 |+def func(arg: type[int | float]) -> None: ... + 10 |+def func(arg: type[int | float] | str) -> None: ... 11 11 | 12 12 | # OK 13 13 | x: type[int, str, float] diff --git a/crates/ruff_linter/src/rules/flake8_pytest_style/rules/assertion.rs b/crates/ruff_linter/src/rules/flake8_pytest_style/rules/assertion.rs index 80a6181832..dde7f26928 100644 --- a/crates/ruff_linter/src/rules/flake8_pytest_style/rules/assertion.rs +++ b/crates/ruff_linter/src/rules/flake8_pytest_style/rules/assertion.rs @@ -33,7 +33,7 @@ use super::unittest_assert::UnittestAssert; /// Checks for assertions that combine multiple independent conditions. /// /// ## Why is this bad? -/// Composite assertion statements are harder debug upon failure, as the +/// Composite assertion statements are harder to debug upon failure, as the /// failure message will not indicate which condition failed. /// /// ## Example diff --git a/crates/ruff_linter/src/rules/flake8_pytest_style/rules/helpers.rs b/crates/ruff_linter/src/rules/flake8_pytest_style/rules/helpers.rs index 34303d204c..9ac1291353 100644 --- a/crates/ruff_linter/src/rules/flake8_pytest_style/rules/helpers.rs +++ b/crates/ruff_linter/src/rules/flake8_pytest_style/rules/helpers.rs @@ -56,17 +56,27 @@ pub(super) fn is_empty_or_null_string(expr: &Expr) -> bool { Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => value.is_empty(), Expr::NoneLiteral(_) => true, Expr::FString(ast::ExprFString { value, .. }) => { - value.parts().all(|f_string_part| match f_string_part { + value.iter().all(|f_string_part| match f_string_part { ast::FStringPart::Literal(literal) => literal.is_empty(), - ast::FStringPart::FString(f_string) => { - f_string.values.iter().all(is_empty_or_null_string) - } + ast::FStringPart::FString(f_string) => f_string + .elements + .iter() + .all(is_empty_or_null_fstring_element), }) } _ => false, } } +fn is_empty_or_null_fstring_element(element: &ast::FStringElement) -> bool { + match element { + ast::FStringElement::Literal(ast::FStringLiteralElement { value, .. }) => value.is_empty(), + ast::FStringElement::Expression(ast::FStringExpressionElement { expression, .. 
}) => { + is_empty_or_null_string(expression) + } + } +} + pub(super) fn split_names(names: &str) -> Vec<&str> { // Match the following pytest code: // [x.strip() for x in argnames.split(",") if x.strip()] diff --git a/crates/ruff_linter/src/rules/flake8_raise/rules/unnecessary_paren_on_raise_exception.rs b/crates/ruff_linter/src/rules/flake8_raise/rules/unnecessary_paren_on_raise_exception.rs index 4a0bffc74b..a056bbc755 100644 --- a/crates/ruff_linter/src/rules/flake8_raise/rules/unnecessary_paren_on_raise_exception.rs +++ b/crates/ruff_linter/src/rules/flake8_raise/rules/unnecessary_paren_on_raise_exception.rs @@ -78,9 +78,7 @@ pub(crate) fn unnecessary_paren_on_raise_exception(checker: &mut Checker, expr: // `ctypes.WinError()` is a function, not a class. It's part of the standard library, so // we might as well get it right. - if exception_type - .as_ref() - .is_some_and(ExceptionType::is_builtin) + if exception_type.is_none() && checker .semantic() .resolve_call_path(func) diff --git a/crates/ruff_linter/src/rules/flake8_raise/snapshots/ruff_linter__rules__flake8_raise__tests__unnecessary-paren-on-raise-exception_RSE102.py.snap b/crates/ruff_linter/src/rules/flake8_raise/snapshots/ruff_linter__rules__flake8_raise__tests__unnecessary-paren-on-raise-exception_RSE102.py.snap index 37b632cc25..d1d89829c6 100644 --- a/crates/ruff_linter/src/rules/flake8_raise/snapshots/ruff_linter__rules__flake8_raise__tests__unnecessary-paren-on-raise-exception_RSE102.py.snap +++ b/crates/ruff_linter/src/rules/flake8_raise/snapshots/ruff_linter__rules__flake8_raise__tests__unnecessary-paren-on-raise-exception_RSE102.py.snap @@ -266,6 +266,8 @@ RSE102.py:84:10: RSE102 [*] Unnecessary parentheses on raised exception 83 | # RSE102 84 | raise Foo() | ^^ RSE102 +85 | +86 | # OK | = help: Remove unnecessary parentheses @@ -275,5 +277,8 @@ RSE102.py:84:10: RSE102 [*] Unnecessary parentheses on raised exception 83 83 | # RSE102 84 |-raise Foo() 84 |+raise Foo +85 85 | +86 86 | # OK +87 87 | raise ctypes.WinError() diff --git a/crates/ruff_linter/src/rules/flake8_self/rules/private_member_access.rs b/crates/ruff_linter/src/rules/flake8_self/rules/private_member_access.rs index 543c4d36d7..fdbf3e8def 100644 --- a/crates/ruff_linter/src/rules/flake8_self/rules/private_member_access.rs +++ b/crates/ruff_linter/src/rules/flake8_self/rules/private_member_access.rs @@ -66,6 +66,10 @@ pub(crate) fn private_member_access(checker: &mut Checker, expr: &Expr) { return; }; + if checker.semantic().in_annotation() { + return; + } + if (attr.starts_with("__") && !attr.ends_with("__")) || (attr.starts_with('_') && !attr.starts_with("__")) { diff --git a/crates/ruff_linter/src/rules/flake8_trio/rules/zero_sleep_call.rs b/crates/ruff_linter/src/rules/flake8_trio/rules/zero_sleep_call.rs index 6b0e57569c..57caec4eec 100644 --- a/crates/ruff_linter/src/rules/flake8_trio/rules/zero_sleep_call.rs +++ b/crates/ruff_linter/src/rules/flake8_trio/rules/zero_sleep_call.rs @@ -1,7 +1,7 @@ use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix}; use ruff_macros::{derive_message_formats, violation}; -use ruff_python_ast::Stmt; use ruff_python_ast::{self as ast, Expr, ExprCall, Int}; +use ruff_python_semantic::analyze::typing::find_assigned_value; use ruff_text_size::Ranged; use crate::checkers::ast::Checker; @@ -71,30 +71,15 @@ pub(crate) fn zero_sleep_call(checker: &mut Checker, call: &ExprCall) { } } Expr::Name(ast::ExprName { id, .. 
}) => { - let scope = checker.semantic().current_scope(); - if let Some(binding_id) = scope.get(id) { - let binding = checker.semantic().binding(binding_id); - if binding.kind.is_assignment() || binding.kind.is_named_expr_assignment() { - if let Some(parent_id) = binding.source { - let parent = checker.semantic().statement(parent_id); - if let Stmt::Assign(ast::StmtAssign { value, .. }) - | Stmt::AnnAssign(ast::StmtAnnAssign { - value: Some(value), .. - }) - | Stmt::AugAssign(ast::StmtAugAssign { value, .. }) = parent - { - let Expr::NumberLiteral(ast::ExprNumberLiteral { value: num, .. }) = - value.as_ref() - else { - return; - }; - let Some(int) = num.as_int() else { return }; - if *int != Int::ZERO { - return; - } - } - } - } + let Some(value) = find_assigned_value(id, checker.semantic()) else { + return; + }; + let Expr::NumberLiteral(ast::ExprNumberLiteral { value: num, .. }) = value else { + return; + }; + let Some(int) = num.as_int() else { return }; + if *int != Int::ZERO { + return; } } _ => return, diff --git a/crates/ruff_linter/src/rules/flake8_trio/snapshots/ruff_linter__rules__flake8_trio__tests__TRIO115_TRIO115.py.snap b/crates/ruff_linter/src/rules/flake8_trio/snapshots/ruff_linter__rules__flake8_trio__tests__TRIO115_TRIO115.py.snap index 0dfeef7c65..7710be9285 100644 --- a/crates/ruff_linter/src/rules/flake8_trio/snapshots/ruff_linter__rules__flake8_trio__tests__TRIO115_TRIO115.py.snap +++ b/crates/ruff_linter/src/rules/flake8_trio/snapshots/ruff_linter__rules__flake8_trio__tests__TRIO115_TRIO115.py.snap @@ -85,51 +85,227 @@ TRIO115.py:17:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.s 19 19 | bar = "bar" 20 20 | trio.sleep(bar) -TRIO115.py:31:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)` +TRIO115.py:23:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)` | -30 | def func(): -31 | sleep(0) # TRIO115 +22 | x, y = 0, 2000 +23 | trio.sleep(x) # TRIO115 + | ^^^^^^^^^^^^^ TRIO115 +24 | trio.sleep(y) # OK + | + = help: Replace with `trio.lowlevel.checkpoint()` + +ℹ Safe fix +20 20 | trio.sleep(bar) +21 21 | +22 22 | x, y = 0, 2000 +23 |- trio.sleep(x) # TRIO115 + 23 |+ trio.lowlevel.checkpoint() # TRIO115 +24 24 | trio.sleep(y) # OK +25 25 | +26 26 | (a, b, [c, (d, e)]) = (1, 2, (0, [4, 0])) + +TRIO115.py:27:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)` + | +26 | (a, b, [c, (d, e)]) = (1, 2, (0, [4, 0])) +27 | trio.sleep(c) # TRIO115 + | ^^^^^^^^^^^^^ TRIO115 +28 | trio.sleep(d) # OK +29 | trio.sleep(e) # TRIO115 + | + = help: Replace with `trio.lowlevel.checkpoint()` + +ℹ Safe fix +24 24 | trio.sleep(y) # OK +25 25 | +26 26 | (a, b, [c, (d, e)]) = (1, 2, (0, [4, 0])) +27 |- trio.sleep(c) # TRIO115 + 27 |+ trio.lowlevel.checkpoint() # TRIO115 +28 28 | trio.sleep(d) # OK +29 29 | trio.sleep(e) # TRIO115 +30 30 | + +TRIO115.py:29:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)` + | +27 | trio.sleep(c) # TRIO115 +28 | trio.sleep(d) # OK +29 | trio.sleep(e) # TRIO115 + | ^^^^^^^^^^^^^ TRIO115 +30 | +31 | m_x, m_y = 0 + | + = help: Replace with `trio.lowlevel.checkpoint()` + +ℹ Safe fix +26 26 | (a, b, [c, (d, e)]) = (1, 2, (0, [4, 0])) +27 27 | trio.sleep(c) # TRIO115 +28 28 | trio.sleep(d) # OK +29 |- trio.sleep(e) # TRIO115 + 29 |+ trio.lowlevel.checkpoint() # TRIO115 +30 30 | +31 31 | m_x, m_y = 0 +32 32 | trio.sleep(m_y) # OK + +TRIO115.py:36:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)` + | +35 | m_a = m_b = 0 +36 | 
trio.sleep(m_a) # TRIO115 + | ^^^^^^^^^^^^^^^ TRIO115 +37 | trio.sleep(m_b) # TRIO115 + | + = help: Replace with `trio.lowlevel.checkpoint()` + +ℹ Safe fix +33 33 | trio.sleep(m_x) # OK +34 34 | +35 35 | m_a = m_b = 0 +36 |- trio.sleep(m_a) # TRIO115 + 36 |+ trio.lowlevel.checkpoint() # TRIO115 +37 37 | trio.sleep(m_b) # TRIO115 +38 38 | +39 39 | m_c = (m_d, m_e) = (0, 0) + +TRIO115.py:37:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)` + | +35 | m_a = m_b = 0 +36 | trio.sleep(m_a) # TRIO115 +37 | trio.sleep(m_b) # TRIO115 + | ^^^^^^^^^^^^^^^ TRIO115 +38 | +39 | m_c = (m_d, m_e) = (0, 0) + | + = help: Replace with `trio.lowlevel.checkpoint()` + +ℹ Safe fix +34 34 | +35 35 | m_a = m_b = 0 +36 36 | trio.sleep(m_a) # TRIO115 +37 |- trio.sleep(m_b) # TRIO115 + 37 |+ trio.lowlevel.checkpoint() # TRIO115 +38 38 | +39 39 | m_c = (m_d, m_e) = (0, 0) +40 40 | trio.sleep(m_c) # OK + +TRIO115.py:41:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)` + | +39 | m_c = (m_d, m_e) = (0, 0) +40 | trio.sleep(m_c) # OK +41 | trio.sleep(m_d) # TRIO115 + | ^^^^^^^^^^^^^^^ TRIO115 +42 | trio.sleep(m_e) # TRIO115 + | + = help: Replace with `trio.lowlevel.checkpoint()` + +ℹ Safe fix +38 38 | +39 39 | m_c = (m_d, m_e) = (0, 0) +40 40 | trio.sleep(m_c) # OK +41 |- trio.sleep(m_d) # TRIO115 + 41 |+ trio.lowlevel.checkpoint() # TRIO115 +42 42 | trio.sleep(m_e) # TRIO115 +43 43 | +44 44 | + +TRIO115.py:42:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)` + | +40 | trio.sleep(m_c) # OK +41 | trio.sleep(m_d) # TRIO115 +42 | trio.sleep(m_e) # TRIO115 + | ^^^^^^^^^^^^^^^ TRIO115 + | + = help: Replace with `trio.lowlevel.checkpoint()` + +ℹ Safe fix +39 39 | m_c = (m_d, m_e) = (0, 0) +40 40 | trio.sleep(m_c) # OK +41 41 | trio.sleep(m_d) # TRIO115 +42 |- trio.sleep(m_e) # TRIO115 + 42 |+ trio.lowlevel.checkpoint() # TRIO115 +43 43 | +44 44 | +45 45 | def func(): + +TRIO115.py:48:14: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)` + | +46 | import trio +47 | +48 | trio.run(trio.sleep(0)) # TRIO115 + | ^^^^^^^^^^^^^ TRIO115 + | + = help: Replace with `trio.lowlevel.checkpoint()` + +ℹ Safe fix +45 45 | def func(): +46 46 | import trio +47 47 | +48 |- trio.run(trio.sleep(0)) # TRIO115 + 48 |+ trio.run(trio.lowlevel.checkpoint()) # TRIO115 +49 49 | +50 50 | +51 51 | from trio import Event, sleep + +TRIO115.py:55:5: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)` + | +54 | def func(): +55 | sleep(0) # TRIO115 | ^^^^^^^^ TRIO115 | = help: Replace with `trio.lowlevel.checkpoint()` ℹ Safe fix -24 24 | trio.run(trio.sleep(0)) # TRIO115 -25 25 | -26 26 | -27 |-from trio import Event, sleep - 27 |+from trio import Event, sleep, lowlevel -28 28 | -29 29 | -30 30 | def func(): -31 |- sleep(0) # TRIO115 - 31 |+ lowlevel.checkpoint() # TRIO115 -32 32 | -33 33 | -34 34 | async def func(): +48 48 | trio.run(trio.sleep(0)) # TRIO115 +49 49 | +50 50 | +51 |-from trio import Event, sleep + 51 |+from trio import Event, sleep, lowlevel +52 52 | +53 53 | +54 54 | def func(): +55 |- sleep(0) # TRIO115 + 55 |+ lowlevel.checkpoint() # TRIO115 +56 56 | +57 57 | +58 58 | async def func(): -TRIO115.py:35:11: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)` +TRIO115.py:59:11: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)` | -34 | async def func(): -35 | await sleep(seconds=0) # TRIO115 +58 | async def func(): +59 | await sleep(seconds=0) # TRIO115 | ^^^^^^^^^^^^^^^^ TRIO115 | = help: 
Replace with `trio.lowlevel.checkpoint()` ℹ Safe fix -24 24 | trio.run(trio.sleep(0)) # TRIO115 -25 25 | -26 26 | -27 |-from trio import Event, sleep - 27 |+from trio import Event, sleep, lowlevel -28 28 | -29 29 | -30 30 | def func(): +48 48 | trio.run(trio.sleep(0)) # TRIO115 +49 49 | +50 50 | +51 |-from trio import Event, sleep + 51 |+from trio import Event, sleep, lowlevel +52 52 | +53 53 | +54 54 | def func(): -------------------------------------------------------------------------------- -32 32 | -33 33 | -34 34 | async def func(): -35 |- await sleep(seconds=0) # TRIO115 - 35 |+ await lowlevel.checkpoint() # TRIO115 +56 56 | +57 57 | +58 58 | async def func(): +59 |- await sleep(seconds=0) # TRIO115 + 59 |+ await lowlevel.checkpoint() # TRIO115 +60 60 | +61 61 | +62 62 | def func(): + +TRIO115.py:66:9: TRIO115 [*] Use `trio.lowlevel.checkpoint()` instead of `trio.sleep(0)` + | +65 | if (walrus := 0) == 0: +66 | trio.sleep(walrus) # TRIO115 + | ^^^^^^^^^^^^^^^^^^ TRIO115 + | + = help: Replace with `trio.lowlevel.checkpoint()` + +ℹ Safe fix +63 63 | import trio +64 64 | +65 65 | if (walrus := 0) == 0: +66 |- trio.sleep(walrus) # TRIO115 + 66 |+ trio.lowlevel.checkpoint() # TRIO115 diff --git a/crates/ruff_linter/src/rules/flake8_type_checking/helpers.rs b/crates/ruff_linter/src/rules/flake8_type_checking/helpers.rs index 0a51e151f4..47ce35214b 100644 --- a/crates/ruff_linter/src/rules/flake8_type_checking/helpers.rs +++ b/crates/ruff_linter/src/rules/flake8_type_checking/helpers.rs @@ -1,10 +1,34 @@ -use ruff_python_ast::call_path::from_qualified_name; -use ruff_python_ast::helpers::{map_callable, map_subscript}; -use ruff_python_ast::{self as ast, Expr}; -use ruff_python_semantic::{Binding, BindingId, BindingKind, SemanticModel}; -use rustc_hash::FxHashSet; +use anyhow::Result; -pub(crate) fn is_valid_runtime_import(binding: &Binding, semantic: &SemanticModel) -> bool { +use ruff_diagnostics::Edit; +use ruff_python_ast::call_path::from_qualified_name; +use ruff_python_ast::helpers::map_callable; +use ruff_python_ast::{self as ast, Expr}; +use ruff_python_codegen::{Generator, Stylist}; +use ruff_python_semantic::{ + analyze, Binding, BindingKind, NodeId, ResolvedReference, SemanticModel, +}; +use ruff_source_file::Locator; +use ruff_text_size::Ranged; + +use crate::rules::flake8_type_checking::settings::Settings; + +/// Returns `true` if the [`ResolvedReference`] is in a typing-only context _or_ a runtime-evaluated +/// context (with quoting enabled). +pub(crate) fn is_typing_reference(reference: &ResolvedReference, settings: &Settings) -> bool { + reference.in_type_checking_block() + || reference.in_typing_only_annotation() + || reference.in_complex_string_type_definition() + || reference.in_simple_string_type_definition() + || (settings.quote_annotations && reference.in_runtime_evaluated_annotation()) +} + +/// Returns `true` if the [`Binding`] represents a runtime-required import. +pub(crate) fn is_valid_runtime_import( + binding: &Binding, + semantic: &SemanticModel, + settings: &Settings, +) -> bool { if matches!( binding.kind, BindingKind::Import(..) | BindingKind::FromImport(..) | BindingKind::SubmoduleImport(..) 
@@ -12,81 +36,42 @@ pub(crate) fn is_valid_runtime_import(binding: &Binding, semantic: &SemanticMode binding.context.is_runtime() && binding .references() - .any(|reference_id| semantic.reference(reference_id).context().is_runtime()) + .map(|reference_id| semantic.reference(reference_id)) + .any(|reference| !is_typing_reference(reference, settings)) } else { false } } -pub(crate) fn runtime_evaluated_class( +pub(crate) fn runtime_required_class( class_def: &ast::StmtClassDef, base_classes: &[String], decorators: &[String], semantic: &SemanticModel, ) -> bool { - if runtime_evaluated_base_class(class_def, base_classes, semantic) { + if runtime_required_base_class(class_def, base_classes, semantic) { return true; } - if runtime_evaluated_decorators(class_def, decorators, semantic) { + if runtime_required_decorators(class_def, decorators, semantic) { return true; } false } -fn runtime_evaluated_base_class( +/// Return `true` if a class is a subclass of a runtime-required base class. +fn runtime_required_base_class( class_def: &ast::StmtClassDef, base_classes: &[String], semantic: &SemanticModel, ) -> bool { - fn inner( - class_def: &ast::StmtClassDef, - base_classes: &[String], - semantic: &SemanticModel, - seen: &mut FxHashSet, - ) -> bool { - class_def.bases().iter().any(|expr| { - // If the base class is itself runtime-evaluated, then this is too. - // Ex) `class Foo(BaseModel): ...` - if semantic - .resolve_call_path(map_subscript(expr)) - .is_some_and(|call_path| { - base_classes - .iter() - .any(|base_class| from_qualified_name(base_class) == call_path) - }) - { - return true; - } - - // If the base class extends a runtime-evaluated class, then this does too. - // Ex) `class Bar(BaseModel): ...; class Foo(Bar): ...` - if let Some(id) = semantic.lookup_attribute(map_subscript(expr)) { - if seen.insert(id) { - let binding = semantic.binding(id); - if let Some(base_class) = binding - .kind - .as_class_definition() - .map(|id| &semantic.scopes[*id]) - .and_then(|scope| scope.kind.as_class()) - { - if inner(base_class, base_classes, semantic, seen) { - return true; - } - } - } - } - false - }) - } - - if base_classes.is_empty() { - return false; - } - - inner(class_def, base_classes, semantic, &mut FxHashSet::default()) + analyze::class::any_over_body(class_def, semantic, &|call_path| { + base_classes + .iter() + .any(|base_class| from_qualified_name(base_class) == call_path) + }) } -fn runtime_evaluated_decorators( +fn runtime_required_decorators( class_def: &ast::StmtClassDef, decorators: &[String], semantic: &SemanticModel, @@ -174,3 +159,93 @@ pub(crate) fn is_singledispatch_implementation( is_singledispatch_interface(function_def, semantic) }) } + +/// Wrap a type annotation in quotes. +/// +/// This requires more than just wrapping the reference itself in quotes. For example: +/// - When quoting `Series` in `Series[pd.Timestamp]`, we want `"Series[pd.Timestamp]"`. +/// - When quoting `kubernetes` in `kubernetes.SecurityContext`, we want `"kubernetes.SecurityContext"`. +/// - When quoting `Series` in `Series["pd.Timestamp"]`, we want `"Series[pd.Timestamp]"`. (This is currently unsupported.) +/// - When quoting `Series` in `Series[Literal["pd.Timestamp"]]`, we want `"Series[Literal['pd.Timestamp']]"`. (This is currently unsupported.) +/// +/// In general, when expanding a component of a call chain, we want to quote the entire call chain. 
+pub(crate) fn quote_annotation( + node_id: NodeId, + semantic: &SemanticModel, + locator: &Locator, + stylist: &Stylist, + generator: Generator, +) -> Result<Edit> { + let expr = semantic.expression(node_id).expect("Expression not found"); + if let Some(parent_id) = semantic.parent_expression_id(node_id) { + match semantic.expression(parent_id) { + Some(Expr::Subscript(parent)) => { + if expr == parent.value.as_ref() { + // If we're quoting the value of a subscript, we need to quote the entire + // expression. For example, when quoting `DataFrame` in `DataFrame[int]`, we + // should generate `"DataFrame[int]"`. + return quote_annotation(parent_id, semantic, locator, stylist, generator); + } + } + Some(Expr::Attribute(parent)) => { + if expr == parent.value.as_ref() { + // If we're quoting the value of an attribute, we need to quote the entire + // expression. For example, when quoting `DataFrame` in `pd.DataFrame`, we + // should generate `"pd.DataFrame"`. + return quote_annotation(parent_id, semantic, locator, stylist, generator); + } + } + Some(Expr::Call(parent)) => { + if expr == parent.func.as_ref() { + // If we're quoting the function of a call, we need to quote the entire + // expression. For example, when quoting `DataFrame` in `DataFrame()`, we + // should generate `"DataFrame()"`. + return quote_annotation(parent_id, semantic, locator, stylist, generator); + } + } + Some(Expr::BinOp(parent)) => { + if parent.op.is_bit_or() { + // If we're quoting the left or right side of a binary operation, we need to + // quote the entire expression. For example, when quoting `DataFrame` in + // `DataFrame | Series`, we should generate `"DataFrame | Series"`. + return quote_annotation(parent_id, semantic, locator, stylist, generator); + } + } + _ => {} + } + } + + // If the annotation already contains a quote, avoid attempting to re-quote it. For example: + // ```python + // from typing import Literal + // + // Set[Literal["Foo"]] + // ``` + let text = locator.slice(expr); + if text.contains('\'') || text.contains('"') { + return Err(anyhow::anyhow!("Annotation already contains a quote")); + } + + // Quote the entire expression. + let quote = stylist.quote(); + let annotation = generator.expr(expr); + + Ok(Edit::range_replacement( + format!("{quote}{annotation}{quote}"), + expr.range(), + )) +} + +/// Filter out any [`Edit`]s that are completely contained by any other [`Edit`]. +pub(crate) fn filter_contained(edits: Vec<Edit>) -> Vec<Edit> { + let mut filtered: Vec<Edit> = Vec::with_capacity(edits.len()); + for edit in edits { + if filtered + .iter() + .all(|filtered_edit| !filtered_edit.range().contains_range(edit.range())) + { + filtered.push(edit); + } + } + filtered +} diff --git a/crates/ruff_linter/src/rules/flake8_type_checking/imports.rs b/crates/ruff_linter/src/rules/flake8_type_checking/imports.rs new file mode 100644 index 0000000000..92c65cf275 --- /dev/null +++ b/crates/ruff_linter/src/rules/flake8_type_checking/imports.rs @@ -0,0 +1,22 @@ +use ruff_python_semantic::{AnyImport, Binding, ResolvedReferenceId}; +use ruff_text_size::{Ranged, TextRange}; + +/// An import with its surrounding context. +pub(crate) struct ImportBinding<'a> { + /// The qualified name of the import (e.g., `typing.List` for `from typing import List`). + pub(crate) import: AnyImport<'a>, + /// The binding for the imported symbol. + pub(crate) binding: &'a Binding<'a>, + /// The first reference to the imported symbol.
+ pub(crate) reference_id: ResolvedReferenceId, + /// The trimmed range of the import (e.g., `List` in `from typing import List`). + pub(crate) range: TextRange, + /// The range of the import's parent statement. + pub(crate) parent_range: Option, +} + +impl Ranged for ImportBinding<'_> { + fn range(&self) -> TextRange { + self.range + } +} diff --git a/crates/ruff_linter/src/rules/flake8_type_checking/mod.rs b/crates/ruff_linter/src/rules/flake8_type_checking/mod.rs index 82b24755f4..2390486e4f 100644 --- a/crates/ruff_linter/src/rules/flake8_type_checking/mod.rs +++ b/crates/ruff_linter/src/rules/flake8_type_checking/mod.rs @@ -1,5 +1,6 @@ //! Rules from [flake8-type-checking](https://pypi.org/project/flake8-type-checking/). pub(crate) mod helpers; +mod imports; pub(crate) mod rules; pub mod settings; @@ -33,10 +34,14 @@ mod tests { #[test_case(Rule::RuntimeImportInTypeCheckingBlock, Path::new("TCH004_7.py"))] #[test_case(Rule::RuntimeImportInTypeCheckingBlock, Path::new("TCH004_8.py"))] #[test_case(Rule::RuntimeImportInTypeCheckingBlock, Path::new("TCH004_9.py"))] + #[test_case(Rule::RuntimeImportInTypeCheckingBlock, Path::new("quote.py"))] + #[test_case(Rule::RuntimeStringUnion, Path::new("TCH006_1.py"))] + #[test_case(Rule::RuntimeStringUnion, Path::new("TCH006_2.py"))] #[test_case(Rule::TypingOnlyFirstPartyImport, Path::new("TCH001.py"))] #[test_case(Rule::TypingOnlyStandardLibraryImport, Path::new("TCH003.py"))] #[test_case(Rule::TypingOnlyStandardLibraryImport, Path::new("snapshot.py"))] #[test_case(Rule::TypingOnlyThirdPartyImport, Path::new("TCH002.py"))] + #[test_case(Rule::TypingOnlyThirdPartyImport, Path::new("quote.py"))] #[test_case(Rule::TypingOnlyThirdPartyImport, Path::new("singledispatch.py"))] #[test_case(Rule::TypingOnlyThirdPartyImport, Path::new("strict.py"))] #[test_case(Rule::TypingOnlyThirdPartyImport, Path::new("typing_modules_1.py"))] @@ -51,6 +56,24 @@ mod tests { Ok(()) } + #[test_case(Rule::RuntimeImportInTypeCheckingBlock, Path::new("quote.py"))] + #[test_case(Rule::TypingOnlyThirdPartyImport, Path::new("quote.py"))] + fn quote(rule_code: Rule, path: &Path) -> Result<()> { + let snapshot = format!("quote_{}_{}", rule_code.as_ref(), path.to_string_lossy()); + let diagnostics = test_path( + Path::new("flake8_type_checking").join(path).as_path(), + &settings::LinterSettings { + flake8_type_checking: super::settings::Settings { + quote_annotations: true, + ..Default::default() + }, + ..settings::LinterSettings::for_rule(rule_code) + }, + )?; + assert_messages!(snapshot, diagnostics); + Ok(()) + } + #[test_case(Rule::TypingOnlyThirdPartyImport, Path::new("strict.py"))] fn strict(rule_code: Rule, path: &Path) -> Result<()> { let diagnostics = test_path( @@ -83,6 +106,35 @@ mod tests { Ok(()) } + #[test_case( + Rule::TypingOnlyStandardLibraryImport, + Path::new("exempt_type_checking_1.py") + )] + #[test_case( + Rule::TypingOnlyStandardLibraryImport, + Path::new("exempt_type_checking_2.py") + )] + #[test_case( + Rule::TypingOnlyStandardLibraryImport, + Path::new("exempt_type_checking_3.py") + )] + fn exempt_type_checking(rule_code: Rule, path: &Path) -> Result<()> { + let snapshot = format!("{}_{}", rule_code.as_ref(), path.to_string_lossy()); + let diagnostics = test_path( + Path::new("flake8_type_checking").join(path).as_path(), + &settings::LinterSettings { + flake8_type_checking: super::settings::Settings { + exempt_modules: vec![], + strict: true, + ..Default::default() + }, + ..settings::LinterSettings::for_rule(rule_code) + }, + )?; + 
assert_messages!(snapshot, diagnostics); + Ok(()) + } + #[test_case( Rule::RuntimeImportInTypeCheckingBlock, Path::new("runtime_evaluated_base_classes_1.py") @@ -109,7 +161,7 @@ mod tests { Path::new("flake8_type_checking").join(path).as_path(), &settings::LinterSettings { flake8_type_checking: super::settings::Settings { - runtime_evaluated_base_classes: vec![ + runtime_required_base_classes: vec![ "pydantic.BaseModel".to_string(), "sqlalchemy.orm.DeclarativeBase".to_string(), ], @@ -140,7 +192,7 @@ mod tests { Path::new("flake8_type_checking").join(path).as_path(), &settings::LinterSettings { flake8_type_checking: super::settings::Settings { - runtime_evaluated_decorators: vec![ + runtime_required_decorators: vec![ "attrs.define".to_string(), "attrs.frozen".to_string(), ], @@ -165,7 +217,7 @@ mod tests { Path::new("flake8_type_checking").join(path).as_path(), &settings::LinterSettings { flake8_type_checking: super::settings::Settings { - runtime_evaluated_base_classes: vec!["module.direct.MyBaseClass".to_string()], + runtime_required_base_classes: vec!["module.direct.MyBaseClass".to_string()], ..Default::default() }, ..settings::LinterSettings::for_rule(rule_code) diff --git a/crates/ruff_linter/src/rules/flake8_type_checking/rules/mod.rs b/crates/ruff_linter/src/rules/flake8_type_checking/rules/mod.rs index 15ceb3ddf1..1f94e927c4 100644 --- a/crates/ruff_linter/src/rules/flake8_type_checking/rules/mod.rs +++ b/crates/ruff_linter/src/rules/flake8_type_checking/rules/mod.rs @@ -1,7 +1,9 @@ pub(crate) use empty_type_checking_block::*; pub(crate) use runtime_import_in_type_checking_block::*; +pub(crate) use runtime_string_union::*; pub(crate) use typing_only_runtime_import::*; mod empty_type_checking_block; mod runtime_import_in_type_checking_block; +mod runtime_string_union; mod typing_only_runtime_import; diff --git a/crates/ruff_linter/src/rules/flake8_type_checking/rules/runtime_import_in_type_checking_block.rs b/crates/ruff_linter/src/rules/flake8_type_checking/rules/runtime_import_in_type_checking_block.rs index dea0f4007e..2c15f1ff49 100644 --- a/crates/ruff_linter/src/rules/flake8_type_checking/rules/runtime_import_in_type_checking_block.rs +++ b/crates/ruff_linter/src/rules/flake8_type_checking/rules/runtime_import_in_type_checking_block.rs @@ -5,13 +5,15 @@ use rustc_hash::FxHashMap; use ruff_diagnostics::{Diagnostic, Fix, FixAvailability, Violation}; use ruff_macros::{derive_message_formats, violation}; -use ruff_python_semantic::{AnyImport, Imported, NodeId, ResolvedReferenceId, Scope}; -use ruff_text_size::{Ranged, TextRange}; +use ruff_python_semantic::{Imported, NodeId, Scope}; +use ruff_text_size::Ranged; use crate::checkers::ast::Checker; use crate::codes::Rule; use crate::fix; use crate::importer::ImportedMembers; +use crate::rules::flake8_type_checking::helpers::{filter_contained, quote_annotation}; +use crate::rules::flake8_type_checking::imports::ImportBinding; /// ## What it does /// Checks for runtime imports defined in a type-checking block. @@ -20,6 +22,10 @@ use crate::importer::ImportedMembers; /// The type-checking block is not executed at runtime, so the import will not /// be available at runtime. /// +/// If [`flake8-type-checking.quote-annotations`] is set to `true`, +/// annotations will be wrapped in quotes if doing so would enable the +/// corresponding import to remain in the type-checking block. 
+/// /// ## Example /// ```python /// from typing import TYPE_CHECKING @@ -41,11 +47,15 @@ use crate::importer::ImportedMembers; /// foo.bar() /// ``` /// +/// ## Options +/// - `flake8-type-checking.quote-annotations` +/// /// ## References /// - [PEP 535](https://peps.python.org/pep-0563/#runtime-annotation-resolution-and-type-checking) #[violation] pub struct RuntimeImportInTypeCheckingBlock { qualified_name: String, + strategy: Strategy, } impl Violation for RuntimeImportInTypeCheckingBlock { @@ -53,17 +63,39 @@ impl Violation for RuntimeImportInTypeCheckingBlock { #[derive_message_formats] fn message(&self) -> String { - let RuntimeImportInTypeCheckingBlock { qualified_name } = self; - format!( - "Move import `{qualified_name}` out of type-checking block. Import is used for more than type hinting." - ) + let Self { + qualified_name, + strategy, + } = self; + match strategy { + Strategy::MoveImport => format!( + "Move import `{qualified_name}` out of type-checking block. Import is used for more than type hinting." + ), + Strategy::QuoteUsages => format!( + "Quote references to `{qualified_name}`. Import is in a type-checking block." + ), + } } fn fix_title(&self) -> Option { - Some("Move out of type-checking block".to_string()) + let Self { strategy, .. } = self; + match strategy { + Strategy::MoveImport => Some("Move out of type-checking block".to_string()), + Strategy::QuoteUsages => Some("Quote references".to_string()), + } } } +#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] +enum Action { + /// The import should be moved out of the type-checking block. + Move, + /// All usages of the import should be wrapped in quotes. + Quote, + /// The import should be ignored. + Ignore, +} + /// TCH004 pub(crate) fn runtime_import_in_type_checking_block( checker: &Checker, @@ -71,8 +103,7 @@ pub(crate) fn runtime_import_in_type_checking_block( diagnostics: &mut Vec, ) { // Collect all runtime imports by statement. - let mut errors_by_statement: FxHashMap> = FxHashMap::default(); - let mut ignores_by_statement: FxHashMap> = FxHashMap::default(); + let mut actions: FxHashMap<(NodeId, Action), Vec> = FxHashMap::default(); for binding_id in scope.binding_ids() { let binding = checker.semantic().binding(binding_id); @@ -101,6 +132,7 @@ pub(crate) fn runtime_import_in_type_checking_block( let import = ImportBinding { import, reference_id, + binding, range: binding.range(), parent_range: binding.parent_range(checker.semantic()), }; @@ -113,86 +145,154 @@ pub(crate) fn runtime_import_in_type_checking_block( ) }) { - ignores_by_statement - .entry(node_id) + actions + .entry((node_id, Action::Ignore)) .or_default() .push(import); } else { - errors_by_statement.entry(node_id).or_default().push(import); + // Determine whether the member should be fixed by moving the import out of the + // type-checking block, or by quoting its references. + if checker.settings.flake8_type_checking.quote_annotations + && binding.references().all(|reference_id| { + let reference = checker.semantic().reference(reference_id); + reference.context().is_typing() + || reference.in_runtime_evaluated_annotation() + }) + { + actions + .entry((node_id, Action::Quote)) + .or_default() + .push(import); + } else { + actions + .entry((node_id, Action::Move)) + .or_default() + .push(import); + } } } } - // Generate a diagnostic for every import, but share a fix across all imports within the same - // statement (excluding those that are ignored). 
- for (node_id, imports) in errors_by_statement { - let fix = fix_imports(checker, node_id, &imports).ok(); + for ((node_id, action), imports) in actions { + match action { + // Generate a diagnostic for every import, but share a fix across all imports within the same + // statement (excluding those that are ignored). + Action::Move => { + let fix = move_imports(checker, node_id, &imports).ok(); - for ImportBinding { - import, - range, - parent_range, - .. - } in imports - { - let mut diagnostic = Diagnostic::new( - RuntimeImportInTypeCheckingBlock { - qualified_name: import.qualified_name(), - }, - range, - ); - if let Some(range) = parent_range { - diagnostic.set_parent(range.start()); + for ImportBinding { + import, + range, + parent_range, + .. + } in imports + { + let mut diagnostic = Diagnostic::new( + RuntimeImportInTypeCheckingBlock { + qualified_name: import.qualified_name(), + strategy: Strategy::MoveImport, + }, + range, + ); + if let Some(range) = parent_range { + diagnostic.set_parent(range.start()); + } + if let Some(fix) = fix.as_ref() { + diagnostic.set_fix(fix.clone()); + } + diagnostics.push(diagnostic); + } } - if let Some(fix) = fix.as_ref() { - diagnostic.set_fix(fix.clone()); - } - diagnostics.push(diagnostic); - } - } - // Separately, generate a diagnostic for every _ignored_ import, to ensure that the - // suppression comments aren't marked as unused. - for ImportBinding { - import, - range, - parent_range, - .. - } in ignores_by_statement.into_values().flatten() - { - let mut diagnostic = Diagnostic::new( - RuntimeImportInTypeCheckingBlock { - qualified_name: import.qualified_name(), - }, - range, - ); - if let Some(range) = parent_range { - diagnostic.set_parent(range.start()); + // Generate a diagnostic for every import, but share a fix across all imports within the same + // statement (excluding those that are ignored). + Action::Quote => { + let fix = quote_imports(checker, node_id, &imports).ok(); + + for ImportBinding { + import, + range, + parent_range, + .. + } in imports + { + let mut diagnostic = Diagnostic::new( + RuntimeImportInTypeCheckingBlock { + qualified_name: import.qualified_name(), + strategy: Strategy::QuoteUsages, + }, + range, + ); + if let Some(range) = parent_range { + diagnostic.set_parent(range.start()); + } + if let Some(fix) = fix.as_ref() { + diagnostic.set_fix(fix.clone()); + } + diagnostics.push(diagnostic); + } + } + + // Separately, generate a diagnostic for every _ignored_ import, to ensure that the + // suppression comments aren't marked as unused. + Action::Ignore => { + for ImportBinding { + import, + range, + parent_range, + .. + } in imports + { + let mut diagnostic = Diagnostic::new( + RuntimeImportInTypeCheckingBlock { + qualified_name: import.qualified_name(), + strategy: Strategy::MoveImport, + }, + range, + ); + if let Some(range) = parent_range { + diagnostic.set_parent(range.start()); + } + diagnostics.push(diagnostic); + } + } } - diagnostics.push(diagnostic); } } -/// A runtime-required import with its surrounding context. -struct ImportBinding<'a> { - /// The qualified name of the import (e.g., `typing.List` for `from typing import List`). - import: AnyImport<'a>, - /// The first reference to the imported symbol. - reference_id: ResolvedReferenceId, - /// The trimmed range of the import (e.g., `List` in `from typing import List`). - range: TextRange, - /// The range of the import's parent statement. - parent_range: Option, -} +/// Generate a [`Fix`] to quote runtime usages for imports in a type-checking block. 
+fn quote_imports(checker: &Checker, node_id: NodeId, imports: &[ImportBinding]) -> Result<Fix> { + let quote_reference_edits = filter_contained( + imports + .iter() + .flat_map(|ImportBinding { binding, .. }| { + binding.references.iter().filter_map(|reference_id| { + let reference = checker.semantic().reference(*reference_id); + if reference.context().is_runtime() { + Some(quote_annotation( + reference.expression_id()?, + checker.semantic(), + checker.locator(), + checker.stylist(), + checker.generator(), + )) + } else { + None + } + }) + }) + .collect::<Result<Vec<_>>>()?, + ); -impl Ranged for ImportBinding<'_> { - fn range(&self) -> TextRange { - self.range - } + let mut rest = quote_reference_edits.into_iter(); + let head = rest.next().expect("Expected at least one reference"); + Ok(Fix::unsafe_edits(head, rest).isolate(Checker::isolation( + checker.semantic().parent_statement_id(node_id), + ))) } /// Generate a [`Fix`] to remove runtime imports from a type-checking block. -fn fix_imports(checker: &Checker, node_id: NodeId, imports: &[ImportBinding]) -> Result<Fix> { +fn move_imports(checker: &Checker, node_id: NodeId, imports: &[ImportBinding]) -> Result<Fix> { let statement = checker.semantic().statement(node_id); let parent = checker.semantic().parent_statement(node_id); @@ -236,3 +336,18 @@ fn fix_imports(checker: &Checker, node_id: NodeId, imports: &[ImportBinding]) -> ), ) } + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum Strategy { + /// The import should be moved out of the type-checking block. + /// + /// This is required when at least one reference to the symbol is in a runtime-required context. + /// For example, given `from foo import Bar`, `x = Bar()` would be runtime-required. + MoveImport, + /// All usages of the import should be wrapped in quotes. + /// + /// This is acceptable when all references to the symbol are in a runtime-evaluated, but not + /// runtime-required context. For example, given `from foo import Bar`, `x: Bar` would be + /// runtime-evaluated, but not runtime-required. + QuoteUsages, +} diff --git a/crates/ruff_linter/src/rules/flake8_type_checking/rules/runtime_string_union.rs b/crates/ruff_linter/src/rules/flake8_type_checking/rules/runtime_string_union.rs new file mode 100644 index 0000000000..7d63517a32 --- /dev/null +++ b/crates/ruff_linter/src/rules/flake8_type_checking/rules/runtime_string_union.rs @@ -0,0 +1,95 @@ +use ruff_diagnostics::{Diagnostic, Violation}; +use ruff_macros::{derive_message_formats, violation}; +use ruff_python_ast as ast; +use ruff_python_ast::{Expr, Operator}; +use ruff_text_size::Ranged; + +use crate::checkers::ast::Checker; + +/// ## What it does +/// Checks for the presence of string literals in `X | Y`-style union types. +/// +/// ## Why is this bad? +/// [PEP 604] introduced a new syntax for union type annotations based on the +/// `|` operator. +/// +/// While Python's type annotations can typically be wrapped in strings to +/// avoid runtime evaluation, the use of a string member within an `X | Y`-style +/// union type will cause a runtime error. +/// +/// Instead, remove the quotes, wrap the _entire_ union in quotes, or use +/// `from __future__ import annotations` to disable runtime evaluation of +/// annotations entirely.
+/// +/// ## Example +/// ```python +/// var: str | "int" +/// ``` +/// +/// Use instead: +/// ```python +/// var: str | int +/// ``` +/// +/// Or, extend the quotes to include the entire union: +/// ```python +/// var: "str | int" +/// ``` +/// +/// ## References +/// - [PEP 535](https://peps.python.org/pep-0563/) +/// - [PEP 604](https://peps.python.org/pep-0604/) +/// +/// [PEP 604]: https://peps.python.org/pep-0604/ +#[violation] +pub struct RuntimeStringUnion; + +impl Violation for RuntimeStringUnion { + #[derive_message_formats] + fn message(&self) -> String { + format!("Invalid string member in `X | Y`-style union type") + } +} + +/// TCH006 +pub(crate) fn runtime_string_union(checker: &mut Checker, expr: &Expr) { + if !checker.semantic().in_type_definition() { + return; + } + + if !checker.semantic().execution_context().is_runtime() { + return; + } + + // Search for strings within the binary operator. + let mut strings = Vec::new(); + traverse_op(expr, &mut strings); + + for string in strings { + checker + .diagnostics + .push(Diagnostic::new(RuntimeStringUnion, string.range())); + } +} + +/// Collect all string members in possibly-nested binary `|` expressions. +fn traverse_op<'a>(expr: &'a Expr, strings: &mut Vec<&'a Expr>) { + match expr { + Expr::StringLiteral(_) => { + strings.push(expr); + } + Expr::BytesLiteral(_) => { + strings.push(expr); + } + Expr::BinOp(ast::ExprBinOp { + left, + right, + op: Operator::BitOr, + .. + }) => { + traverse_op(left, strings); + traverse_op(right, strings); + } + _ => {} + } +} diff --git a/crates/ruff_linter/src/rules/flake8_type_checking/rules/typing_only_runtime_import.rs b/crates/ruff_linter/src/rules/flake8_type_checking/rules/typing_only_runtime_import.rs index 1ce6336d3b..638799515c 100644 --- a/crates/ruff_linter/src/rules/flake8_type_checking/rules/typing_only_runtime_import.rs +++ b/crates/ruff_linter/src/rules/flake8_type_checking/rules/typing_only_runtime_import.rs @@ -5,13 +5,17 @@ use rustc_hash::FxHashMap; use ruff_diagnostics::{Diagnostic, DiagnosticKind, Fix, FixAvailability, Violation}; use ruff_macros::{derive_message_formats, violation}; -use ruff_python_semantic::{AnyImport, Binding, Imported, NodeId, ResolvedReferenceId, Scope}; -use ruff_text_size::{Ranged, TextRange}; +use ruff_python_semantic::{Binding, Imported, NodeId, Scope}; +use ruff_text_size::Ranged; use crate::checkers::ast::Checker; use crate::codes::Rule; use crate::fix; use crate::importer::ImportedMembers; +use crate::rules::flake8_type_checking::helpers::{ + filter_contained, is_typing_reference, quote_annotation, +}; +use crate::rules::flake8_type_checking::imports::ImportBinding; use crate::rules::isort::{categorize, ImportSection, ImportType}; /// ## What it does @@ -24,6 +28,10 @@ use crate::rules::isort::{categorize, ImportSection, ImportType}; /// instead be imported conditionally under an `if TYPE_CHECKING:` block to /// minimize runtime overhead. /// +/// If [`flake8-type-checking.quote-annotations`] is set to `true`, +/// annotations will be wrapped in quotes if doing so would enable the +/// corresponding import to be moved into an `if TYPE_CHECKING:` block. 
+/// /// If a class _requires_ that type annotations be available at runtime (as is /// the case for Pydantic, SQLAlchemy, and other libraries), consider using /// the [`flake8-type-checking.runtime-evaluated-base-classes`] and @@ -56,6 +64,7 @@ use crate::rules::isort::{categorize, ImportSection, ImportType}; /// ``` /// /// ## Options +/// - `flake8-type-checking.quote-annotations` /// - `flake8-type-checking.runtime-evaluated-base-classes` /// - `flake8-type-checking.runtime-evaluated-decorators` /// @@ -92,6 +101,10 @@ impl Violation for TypingOnlyFirstPartyImport { /// instead be imported conditionally under an `if TYPE_CHECKING:` block to /// minimize runtime overhead. /// +/// If [`flake8-type-checking.quote-annotations`] is set to `true`, +/// annotations will be wrapped in quotes if doing so would enable the +/// corresponding import to be moved into an `if TYPE_CHECKING:` block. +/// /// If a class _requires_ that type annotations be available at runtime (as is /// the case for Pydantic, SQLAlchemy, and other libraries), consider using /// the [`flake8-type-checking.runtime-evaluated-base-classes`] and @@ -124,6 +137,7 @@ impl Violation for TypingOnlyFirstPartyImport { /// ``` /// /// ## Options +/// - `flake8-type-checking.quote-annotations` /// - `flake8-type-checking.runtime-evaluated-base-classes` /// - `flake8-type-checking.runtime-evaluated-decorators` /// @@ -160,6 +174,10 @@ impl Violation for TypingOnlyThirdPartyImport { /// instead be imported conditionally under an `if TYPE_CHECKING:` block to /// minimize runtime overhead. /// +/// If [`flake8-type-checking.quote-annotations`] is set to `true`, +/// annotations will be wrapped in quotes if doing so would enable the +/// corresponding import to be moved into an `if TYPE_CHECKING:` block. +/// /// If a class _requires_ that type annotations be available at runtime (as is /// the case for Pydantic, SQLAlchemy, and other libraries), consider using /// the [`flake8-type-checking.runtime-evaluated-base-classes`] and @@ -192,6 +210,7 @@ impl Violation for TypingOnlyThirdPartyImport { /// ``` /// /// ## Options +/// - `flake8-type-checking.quote-annotations` /// - `flake8-type-checking.runtime-evaluated-base-classes` /// - `flake8-type-checking.runtime-evaluated-decorators` /// @@ -253,13 +272,12 @@ pub(crate) fn typing_only_runtime_import( }; if binding.context.is_runtime() - && binding.references().all(|reference_id| { - checker - .semantic() - .reference(reference_id) - .context() - .is_typing() - }) + && binding + .references() + .map(|reference_id| checker.semantic().reference(reference_id)) + .all(|reference| { + is_typing_reference(reference, &checker.settings.flake8_type_checking) + }) { let qualified_name = import.qualified_name(); @@ -310,6 +328,7 @@ pub(crate) fn typing_only_runtime_import( let import = ImportBinding { import, reference_id, + binding, range: binding.range(), parent_range: binding.parent_range(checker.semantic()), }; @@ -376,24 +395,6 @@ pub(crate) fn typing_only_runtime_import( } } -/// A runtime-required import with its surrounding context. -struct ImportBinding<'a> { - /// The qualified name of the import (e.g., `typing.List` for `from typing import List`). - import: AnyImport<'a>, - /// The first reference to the imported symbol. - reference_id: ResolvedReferenceId, - /// The trimmed range of the import (e.g., `List` in `from typing import List`). - range: TextRange, - /// The range of the import's parent statement. 
- parent_range: Option<TextRange>, -} - -impl Ranged for ImportBinding<'_> { - fn range(&self) -> TextRange { - self.range - } -} - /// Return the [`Rule`] for the given import type. fn rule_for(import_type: ImportType) -> Rule { match import_type { @@ -472,19 +473,50 @@ fn fix_imports(checker: &Checker, node_id: NodeId, imports: &[ImportBinding]) -> )?; // Step 2) Add the import to a `TYPE_CHECKING` block. - let add_import_edit = checker.importer().typing_import_edit( - &ImportedMembers { - statement, - names: member_names.iter().map(AsRef::as_ref).collect(), - }, - at, - checker.semantic(), - checker.source_type, - )?; + let (type_checking_edit, add_import_edit) = checker + .importer() + .typing_import_edit( + &ImportedMembers { + statement, + names: member_names.iter().map(AsRef::as_ref).collect(), + }, + at, + checker.semantic(), + checker.source_type, + )? + .into_edits(); - Ok( - Fix::unsafe_edits(remove_import_edit, add_import_edit.into_edits()).isolate( - Checker::isolation(checker.semantic().parent_statement_id(node_id)), - ), + // Step 3) Quote any runtime usages of the referenced symbol. + let quote_reference_edits = filter_contained( + imports + .iter() + .flat_map(|ImportBinding { binding, .. }| { + binding.references.iter().filter_map(|reference_id| { + let reference = checker.semantic().reference(*reference_id); + if reference.context().is_runtime() { + Some(quote_annotation( + reference.expression_id()?, + checker.semantic(), + checker.locator(), + checker.stylist(), + checker.generator(), + )) + } else { + None + } + }) + }) + .collect::<Result<Vec<_>>>()?, + ); + + Ok(Fix::unsafe_edits( + type_checking_edit, + add_import_edit + .into_iter() + .chain(std::iter::once(remove_import_edit)) + .chain(quote_reference_edits), ) + .isolate(Checker::isolation( + checker.semantic().parent_statement_id(node_id), + ))) } diff --git a/crates/ruff_linter/src/rules/flake8_type_checking/settings.rs b/crates/ruff_linter/src/rules/flake8_type_checking/settings.rs index 425f02fe55..16baf1b91e 100644 --- a/crates/ruff_linter/src/rules/flake8_type_checking/settings.rs +++ b/crates/ruff_linter/src/rules/flake8_type_checking/settings.rs @@ -6,17 +6,19 @@ use ruff_macros::CacheKey; pub struct Settings { pub strict: bool, pub exempt_modules: Vec<String>, - pub runtime_evaluated_base_classes: Vec<String>, - pub runtime_evaluated_decorators: Vec<String>, + pub runtime_required_base_classes: Vec<String>, + pub runtime_required_decorators: Vec<String>, + pub quote_annotations: bool, } impl Default for Settings { fn default() -> Self { Self { strict: false, - exempt_modules: vec!["typing".to_string()], - runtime_evaluated_base_classes: vec![], - runtime_evaluated_decorators: vec![], + exempt_modules: vec!["typing".to_string(), "typing_extensions".to_string()], + runtime_required_base_classes: vec![], + runtime_required_decorators: vec![], + quote_annotations: false, } } } diff --git a/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__quote_runtime-import-in-type-checking-block_quote.py.snap b/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__quote_runtime-import-in-type-checking-block_quote.py.snap new file mode 100644 index 0000000000..ae71c56c81 --- /dev/null +++ b/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__quote_runtime-import-in-type-checking-block_quote.py.snap @@ -0,0 +1,24 @@ +--- +source: crates/ruff_linter/src/rules/flake8_type_checking/mod.rs +--- +quote.py:64:28: TCH004 [*]
Quote references to `pandas.DataFrame`. Import is in a type-checking block. + | +63 | if TYPE_CHECKING: +64 | from pandas import DataFrame + | ^^^^^^^^^ TCH004 +65 | +66 | def func(value: DataFrame): + | + = help: Quote references + +ℹ Unsafe fix +63 63 | if TYPE_CHECKING: +64 64 | from pandas import DataFrame +65 65 | +66 |- def func(value: DataFrame): + 66 |+ def func(value: "DataFrame"): +67 67 | ... +68 68 | +69 69 | + + diff --git a/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__quote_typing-only-third-party-import_quote.py.snap b/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__quote_typing-only-third-party-import_quote.py.snap new file mode 100644 index 0000000000..9a0e6d8adb --- /dev/null +++ b/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__quote_typing-only-third-party-import_quote.py.snap @@ -0,0 +1,341 @@ +--- +source: crates/ruff_linter/src/rules/flake8_type_checking/mod.rs +--- +quote.py:2:24: TCH002 [*] Move third-party import `pandas.DataFrame` into a type-checking block + | +1 | def f(): +2 | from pandas import DataFrame + | ^^^^^^^^^ TCH002 +3 | +4 | def baz() -> DataFrame: + | + = help: Move into type-checking block + +ℹ Unsafe fix +1 |-def f(): + 1 |+from typing import TYPE_CHECKING + 2 |+ + 3 |+if TYPE_CHECKING: +2 4 | from pandas import DataFrame + 5 |+def f(): +3 6 | +4 |- def baz() -> DataFrame: + 7 |+ def baz() -> "DataFrame": +5 8 | ... +6 9 | +7 10 | + +quote.py:9:24: TCH002 [*] Move third-party import `pandas.DataFrame` into a type-checking block + | + 8 | def f(): + 9 | from pandas import DataFrame + | ^^^^^^^^^ TCH002 +10 | +11 | def baz() -> DataFrame[int]: + | + = help: Move into type-checking block + +ℹ Unsafe fix + 1 |+from typing import TYPE_CHECKING + 2 |+ + 3 |+if TYPE_CHECKING: + 4 |+ from pandas import DataFrame +1 5 | def f(): +2 6 | from pandas import DataFrame +3 7 | +-------------------------------------------------------------------------------- +6 10 | +7 11 | +8 12 | def f(): +9 |- from pandas import DataFrame +10 13 | +11 |- def baz() -> DataFrame[int]: + 14 |+ def baz() -> "DataFrame[int]": +12 15 | ... +13 16 | +14 17 | + +quote.py:16:24: TCH002 Move third-party import `pandas.DataFrame` into a type-checking block + | +15 | def f(): +16 | from pandas import DataFrame + | ^^^^^^^^^ TCH002 +17 | +18 | def baz() -> DataFrame["int"]: + | + = help: Move into type-checking block + +quote.py:23:22: TCH002 [*] Move third-party import `pandas` into a type-checking block + | +22 | def f(): +23 | import pandas as pd + | ^^ TCH002 +24 | +25 | def baz() -> pd.DataFrame: + | + = help: Move into type-checking block + +ℹ Unsafe fix + 1 |+from typing import TYPE_CHECKING + 2 |+ + 3 |+if TYPE_CHECKING: + 4 |+ import pandas as pd +1 5 | def f(): +2 6 | from pandas import DataFrame +3 7 | +-------------------------------------------------------------------------------- +20 24 | +21 25 | +22 26 | def f(): +23 |- import pandas as pd +24 27 | +25 |- def baz() -> pd.DataFrame: + 28 |+ def baz() -> "pd.DataFrame": +26 29 | ... 
+27 30 | +28 31 | + +quote.py:30:22: TCH002 [*] Move third-party import `pandas` into a type-checking block + | +29 | def f(): +30 | import pandas as pd + | ^^ TCH002 +31 | +32 | def baz() -> pd.DataFrame.Extra: + | + = help: Move into type-checking block + +ℹ Unsafe fix + 1 |+from typing import TYPE_CHECKING + 2 |+ + 3 |+if TYPE_CHECKING: + 4 |+ import pandas as pd +1 5 | def f(): +2 6 | from pandas import DataFrame +3 7 | +-------------------------------------------------------------------------------- +27 31 | +28 32 | +29 33 | def f(): +30 |- import pandas as pd +31 34 | +32 |- def baz() -> pd.DataFrame.Extra: + 35 |+ def baz() -> "pd.DataFrame.Extra": +33 36 | ... +34 37 | +35 38 | + +quote.py:37:22: TCH002 [*] Move third-party import `pandas` into a type-checking block + | +36 | def f(): +37 | import pandas as pd + | ^^ TCH002 +38 | +39 | def baz() -> pd.DataFrame | int: + | + = help: Move into type-checking block + +ℹ Unsafe fix + 1 |+from typing import TYPE_CHECKING + 2 |+ + 3 |+if TYPE_CHECKING: + 4 |+ import pandas as pd +1 5 | def f(): +2 6 | from pandas import DataFrame +3 7 | +-------------------------------------------------------------------------------- +34 38 | +35 39 | +36 40 | def f(): +37 |- import pandas as pd +38 41 | +39 |- def baz() -> pd.DataFrame | int: + 42 |+ def baz() -> "pd.DataFrame | int": +40 43 | ... +41 44 | +42 45 | + +quote.py:45:24: TCH002 [*] Move third-party import `pandas.DataFrame` into a type-checking block + | +44 | def f(): +45 | from pandas import DataFrame + | ^^^^^^^^^ TCH002 +46 | +47 | def baz() -> DataFrame(): + | + = help: Move into type-checking block + +ℹ Unsafe fix + 1 |+from typing import TYPE_CHECKING + 2 |+ + 3 |+if TYPE_CHECKING: + 4 |+ from pandas import DataFrame +1 5 | def f(): +2 6 | from pandas import DataFrame +3 7 | +-------------------------------------------------------------------------------- +42 46 | +43 47 | +44 48 | def f(): +45 |- from pandas import DataFrame +46 49 | +47 |- def baz() -> DataFrame(): + 50 |+ def baz() -> "DataFrame()": +48 51 | ... +49 52 | +50 53 | + +quote.py:54:24: TCH002 Move third-party import `pandas.DataFrame` into a type-checking block + | +52 | from typing import Literal +53 | +54 | from pandas import DataFrame + | ^^^^^^^^^ TCH002 +55 | +56 | def baz() -> DataFrame[Literal["int"]]: + | + = help: Move into type-checking block + +quote.py:71:24: TCH002 [*] Move third-party import `pandas.DataFrame` into a type-checking block + | +70 | def f(): +71 | from pandas import DataFrame, Series + | ^^^^^^^^^ TCH002 +72 | +73 | def baz() -> DataFrame | Series: + | + = help: Move into type-checking block + +ℹ Unsafe fix + 1 |+from typing import TYPE_CHECKING + 2 |+ + 3 |+if TYPE_CHECKING: + 4 |+ from pandas import DataFrame, Series +1 5 | def f(): +2 6 | from pandas import DataFrame +3 7 | +-------------------------------------------------------------------------------- +68 72 | +69 73 | +70 74 | def f(): +71 |- from pandas import DataFrame, Series +72 75 | +73 |- def baz() -> DataFrame | Series: + 76 |+ def baz() -> "DataFrame | Series": +74 77 | ... 
+75 78 | +76 79 | + +quote.py:71:35: TCH002 [*] Move third-party import `pandas.Series` into a type-checking block + | +70 | def f(): +71 | from pandas import DataFrame, Series + | ^^^^^^ TCH002 +72 | +73 | def baz() -> DataFrame | Series: + | + = help: Move into type-checking block + +ℹ Unsafe fix + 1 |+from typing import TYPE_CHECKING + 2 |+ + 3 |+if TYPE_CHECKING: + 4 |+ from pandas import DataFrame, Series +1 5 | def f(): +2 6 | from pandas import DataFrame +3 7 | +-------------------------------------------------------------------------------- +68 72 | +69 73 | +70 74 | def f(): +71 |- from pandas import DataFrame, Series +72 75 | +73 |- def baz() -> DataFrame | Series: + 76 |+ def baz() -> "DataFrame | Series": +74 77 | ... +75 78 | +76 79 | + +quote.py:78:24: TCH002 [*] Move third-party import `pandas.DataFrame` into a type-checking block + | +77 | def f(): +78 | from pandas import DataFrame, Series + | ^^^^^^^^^ TCH002 +79 | +80 | def baz() -> ( + | + = help: Move into type-checking block + +ℹ Unsafe fix + 1 |+from typing import TYPE_CHECKING + 2 |+ + 3 |+if TYPE_CHECKING: + 4 |+ from pandas import DataFrame, Series +1 5 | def f(): +2 6 | from pandas import DataFrame +3 7 | +-------------------------------------------------------------------------------- +75 79 | +76 80 | +77 81 | def f(): +78 |- from pandas import DataFrame, Series +79 82 | +80 83 | def baz() -> ( +81 |- DataFrame | +82 |- Series + 84 |+ "DataFrame | Series" +83 85 | ): +84 86 | ... +85 87 | +86 88 | class C: +87 |- x: DataFrame[ +88 |- int +89 |- ] = 1 + 89 |+ x: "DataFrame[int]" = 1 +90 90 | +91 |- def func() -> DataFrame[[DataFrame[_P, _R]], DataFrame[_P, _R]]: + 91 |+ def func() -> "DataFrame[[DataFrame[_P, _R]], DataFrame[_P, _R]]": +92 92 | ... + +quote.py:78:35: TCH002 [*] Move third-party import `pandas.Series` into a type-checking block + | +77 | def f(): +78 | from pandas import DataFrame, Series + | ^^^^^^ TCH002 +79 | +80 | def baz() -> ( + | + = help: Move into type-checking block + +ℹ Unsafe fix + 1 |+from typing import TYPE_CHECKING + 2 |+ + 3 |+if TYPE_CHECKING: + 4 |+ from pandas import DataFrame, Series +1 5 | def f(): +2 6 | from pandas import DataFrame +3 7 | +-------------------------------------------------------------------------------- +75 79 | +76 80 | +77 81 | def f(): +78 |- from pandas import DataFrame, Series +79 82 | +80 83 | def baz() -> ( +81 |- DataFrame | +82 |- Series + 84 |+ "DataFrame | Series" +83 85 | ): +84 86 | ... +85 87 | +86 88 | class C: +87 |- x: DataFrame[ +88 |- int +89 |- ] = 1 + 89 |+ x: "DataFrame[int]" = 1 +90 90 | +91 |- def func() -> DataFrame[[DataFrame[_P, _R]], DataFrame[_P, _R]]: + 91 |+ def func() -> "DataFrame[[DataFrame[_P, _R]], DataFrame[_P, _R]]": +92 92 | ... + + diff --git a/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__runtime-import-in-type-checking-block_quote.py.snap b/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__runtime-import-in-type-checking-block_quote.py.snap new file mode 100644 index 0000000000..c09777853b --- /dev/null +++ b/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__runtime-import-in-type-checking-block_quote.py.snap @@ -0,0 +1,29 @@ +--- +source: crates/ruff_linter/src/rules/flake8_type_checking/mod.rs +--- +quote.py:64:28: TCH004 [*] Move import `pandas.DataFrame` out of type-checking block. Import is used for more than type hinting. 
+ | +63 | if TYPE_CHECKING: +64 | from pandas import DataFrame + | ^^^^^^^^^ TCH004 +65 | +66 | def func(value: DataFrame): + | + = help: Move out of type-checking block + +ℹ Unsafe fix + 1 |+from pandas import DataFrame +1 2 | def f(): +2 3 | from pandas import DataFrame +3 4 | +-------------------------------------------------------------------------------- +61 62 | from typing import TYPE_CHECKING +62 63 | +63 64 | if TYPE_CHECKING: +64 |- from pandas import DataFrame + 65 |+ pass +65 66 | +66 67 | def func(value: DataFrame): +67 68 | ... + + diff --git a/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__runtime-string-union_TCH006_1.py.snap b/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__runtime-string-union_TCH006_1.py.snap new file mode 100644 index 0000000000..4ab8798d51 --- /dev/null +++ b/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__runtime-string-union_TCH006_1.py.snap @@ -0,0 +1,12 @@ +--- +source: crates/ruff_linter/src/rules/flake8_type_checking/mod.rs +--- +TCH006_1.py:18:30: TCH006 Invalid string member in `X | Y`-style union type + | +16 | type A = Value["int" | str] # OK +17 | +18 | OldS = TypeVar('OldS', int | 'str', str) # TCH006 + | ^^^^^ TCH006 + | + + diff --git a/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__runtime-string-union_TCH006_2.py.snap b/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__runtime-string-union_TCH006_2.py.snap new file mode 100644 index 0000000000..8a2023ed44 --- /dev/null +++ b/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__runtime-string-union_TCH006_2.py.snap @@ -0,0 +1,41 @@ +--- +source: crates/ruff_linter/src/rules/flake8_type_checking/mod.rs +--- +TCH006_2.py:4:4: TCH006 Invalid string member in `X | Y`-style union type + | +4 | x: "int" | str # TCH006 + | ^^^^^ TCH006 +5 | x: ("int" | str) | "bool" # TCH006 + | + +TCH006_2.py:5:5: TCH006 Invalid string member in `X | Y`-style union type + | +4 | x: "int" | str # TCH006 +5 | x: ("int" | str) | "bool" # TCH006 + | ^^^^^ TCH006 + | + +TCH006_2.py:5:20: TCH006 Invalid string member in `X | Y`-style union type + | +4 | x: "int" | str # TCH006 +5 | x: ("int" | str) | "bool" # TCH006 + | ^^^^^^ TCH006 + | + +TCH006_2.py:12:20: TCH006 Invalid string member in `X | Y`-style union type + | +12 | z: list[str, str | "int"] = [] # TCH006 + | ^^^^^ TCH006 +13 | +14 | type A = Value["int" | str] # OK + | + +TCH006_2.py:16:30: TCH006 Invalid string member in `X | Y`-style union type + | +14 | type A = Value["int" | str] # OK +15 | +16 | OldS = TypeVar('OldS', int | 'str', str) # TCH006 + | ^^^^^ TCH006 + | + + diff --git a/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__typing-only-standard-library-import_exempt_type_checking_1.py.snap b/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__typing-only-standard-library-import_exempt_type_checking_1.py.snap new file mode 100644 index 0000000000..ca63f50e1f --- /dev/null +++ b/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__typing-only-standard-library-import_exempt_type_checking_1.py.snap @@ -0,0 +1,27 @@ +--- 
+source: crates/ruff_linter/src/rules/flake8_type_checking/mod.rs +--- +exempt_type_checking_1.py:5:20: TCH003 [*] Move standard library import `typing.Final` into a type-checking block + | +3 | from __future__ import annotations +4 | +5 | from typing import Final + | ^^^^^ TCH003 +6 | +7 | Const: Final[dict] = {} + | + = help: Move into type-checking block + +ℹ Unsafe fix +2 2 | +3 3 | from __future__ import annotations +4 4 | +5 |-from typing import Final + 5 |+from typing import TYPE_CHECKING + 6 |+ + 7 |+if TYPE_CHECKING: + 8 |+ from typing import Final +6 9 | +7 10 | Const: Final[dict] = {} + + diff --git a/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__typing-only-standard-library-import_exempt_type_checking_2.py.snap b/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__typing-only-standard-library-import_exempt_type_checking_2.py.snap new file mode 100644 index 0000000000..82d27250c6 --- /dev/null +++ b/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__typing-only-standard-library-import_exempt_type_checking_2.py.snap @@ -0,0 +1,27 @@ +--- +source: crates/ruff_linter/src/rules/flake8_type_checking/mod.rs +--- +exempt_type_checking_2.py:5:20: TCH003 [*] Move standard library import `typing.Final` into a type-checking block + | +3 | from __future__ import annotations +4 | +5 | from typing import Final, TYPE_CHECKING + | ^^^^^ TCH003 +6 | +7 | Const: Final[dict] = {} + | + = help: Move into type-checking block + +ℹ Unsafe fix +2 2 | +3 3 | from __future__ import annotations +4 4 | +5 |-from typing import Final, TYPE_CHECKING + 5 |+from typing import TYPE_CHECKING + 6 |+ + 7 |+if TYPE_CHECKING: + 8 |+ from typing import Final +6 9 | +7 10 | Const: Final[dict] = {} + + diff --git a/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__typing-only-standard-library-import_exempt_type_checking_3.py.snap b/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__typing-only-standard-library-import_exempt_type_checking_3.py.snap new file mode 100644 index 0000000000..18b9b569ec --- /dev/null +++ b/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__typing-only-standard-library-import_exempt_type_checking_3.py.snap @@ -0,0 +1,28 @@ +--- +source: crates/ruff_linter/src/rules/flake8_type_checking/mod.rs +--- +exempt_type_checking_3.py:5:20: TCH003 [*] Move standard library import `typing.Final` into a type-checking block + | +3 | from __future__ import annotations +4 | +5 | from typing import Final, Mapping + | ^^^^^ TCH003 +6 | +7 | Const: Final[dict] = {} + | + = help: Move into type-checking block + +ℹ Unsafe fix +2 2 | +3 3 | from __future__ import annotations +4 4 | +5 |-from typing import Final, Mapping + 5 |+from typing import Mapping + 6 |+from typing import TYPE_CHECKING + 7 |+ + 8 |+if TYPE_CHECKING: + 9 |+ from typing import Final +6 10 | +7 11 | Const: Final[dict] = {} + + diff --git a/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__typing-only-third-party-import_quote.py.snap b/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__typing-only-third-party-import_quote.py.snap new file mode 100644 index 0000000000..6c5ead2742 --- 
/dev/null +++ b/crates/ruff_linter/src/rules/flake8_type_checking/snapshots/ruff_linter__rules__flake8_type_checking__tests__typing-only-third-party-import_quote.py.snap @@ -0,0 +1,4 @@ +--- +source: crates/ruff_linter/src/rules/flake8_type_checking/mod.rs +--- + diff --git a/crates/ruff_linter/src/rules/flynt/helpers.rs b/crates/ruff_linter/src/rules/flynt/helpers.rs index 83c6b13146..7a6af204d1 100644 --- a/crates/ruff_linter/src/rules/flynt/helpers.rs +++ b/crates/ruff_linter/src/rules/flynt/helpers.rs @@ -1,25 +1,23 @@ use ruff_python_ast::{self as ast, Arguments, ConversionFlag, Expr}; use ruff_text_size::TextRange; -/// Wrap an expression in a `FormattedValue` with no special formatting. -fn to_formatted_value_expr(inner: &Expr) -> Expr { - let node = ast::ExprFormattedValue { - value: Box::new(inner.clone()), +/// Wrap an expression in a [`ast::FStringElement::Expression`] with no special formatting. +fn to_f_string_expression_element(inner: &Expr) -> ast::FStringElement { + ast::FStringElement::Expression(ast::FStringExpressionElement { + expression: Box::new(inner.clone()), debug_text: None, conversion: ConversionFlag::None, format_spec: None, range: TextRange::default(), - }; - node.into() + }) } -/// Convert a string to a constant string expression. -pub(super) fn to_constant_string(s: &str) -> Expr { - let node = ast::StringLiteral { - value: s.to_string(), - ..ast::StringLiteral::default() - }; - node.into() +/// Convert a string to a [`ast::FStringElement::Literal`]. +pub(super) fn to_f_string_literal_element(s: &str) -> ast::FStringElement { + ast::FStringElement::Literal(ast::FStringLiteralElement { + value: s.to_owned(), + range: TextRange::default(), + }) } /// Figure out if `expr` represents a "simple" call @@ -51,15 +49,19 @@ fn is_simple_callee(func: &Expr) -> bool { } /// Convert an expression to a f-string element (if it looks like a good idea). -pub(super) fn to_f_string_element(expr: &Expr) -> Option<Expr> { +pub(super) fn to_f_string_element(expr: &Expr) -> Option<ast::FStringElement> { match expr { - // These are directly handled by `unparse_f_string_element`: - Expr::StringLiteral(_) | Expr::FString(_) | Expr::FormattedValue(_) => Some(expr.clone()), + Expr::StringLiteral(ast::ExprStringLiteral { value, range }) => { + Some(ast::FStringElement::Literal(ast::FStringLiteralElement { + value: value.to_string(), + range: *range, + })) + } // These should be pretty safe to wrap in a formatted value.
Expr::NumberLiteral(_) | Expr::BooleanLiteral(_) | Expr::Name(_) | Expr::Attribute(_) => { - Some(to_formatted_value_expr(expr)) + Some(to_f_string_expression_element(expr)) } - Expr::Call(_) if is_simple_call(expr) => Some(to_formatted_value_expr(expr)), + Expr::Call(_) if is_simple_call(expr) => Some(to_f_string_expression_element(expr)), _ => None, } } diff --git a/crates/ruff_linter/src/rules/flynt/rules/static_join_to_fstring.rs b/crates/ruff_linter/src/rules/flynt/rules/static_join_to_fstring.rs index 4f37fafe35..86c77bbb0e 100644 --- a/crates/ruff_linter/src/rules/flynt/rules/static_join_to_fstring.rs +++ b/crates/ruff_linter/src/rules/flynt/rules/static_join_to_fstring.rs @@ -78,7 +78,7 @@ fn build_fstring(joiner: &str, joinees: &[Expr]) -> Option { return Some(node.into()); } - let mut fstring_elems = Vec::with_capacity(joinees.len() * 2); + let mut f_string_elements = Vec::with_capacity(joinees.len() * 2); let mut first = true; for expr in joinees { @@ -88,13 +88,13 @@ fn build_fstring(joiner: &str, joinees: &[Expr]) -> Option { return None; } if !std::mem::take(&mut first) { - fstring_elems.push(helpers::to_constant_string(joiner)); + f_string_elements.push(helpers::to_f_string_literal_element(joiner)); } - fstring_elems.push(helpers::to_f_string_element(expr)?); + f_string_elements.push(helpers::to_f_string_element(expr)?); } let node = ast::FString { - values: fstring_elems, + elements: f_string_elements, range: TextRange::default(), }; Some(node.into()) @@ -127,7 +127,7 @@ pub(crate) fn static_join_to_fstring(checker: &mut Checker, expr: &Expr, joiner: }; // Try to build the fstring (internally checks whether e.g. the elements are - // convertible to f-string parts). + // convertible to f-string elements). let Some(new_expr) = build_fstring(joiner, joinees) else { return; }; diff --git a/crates/ruff_linter/src/rules/isort/mod.rs b/crates/ruff_linter/src/rules/isort/mod.rs index 60b511847c..d77a818aa8 100644 --- a/crates/ruff_linter/src/rules/isort/mod.rs +++ b/crates/ruff_linter/src/rules/isort/mod.rs @@ -180,7 +180,7 @@ fn format_import_block( continue; }; - let imports = order_imports(import_block, settings); + let imports = order_imports(import_block, import_section, settings); // Add a blank line between every section. if is_first_block { @@ -200,6 +200,7 @@ fn format_import_block( // Add a blank lines between direct and from imports. if settings.from_first && lines_between_types > 0 + && !settings.force_sort_within_sections && line_insertion == Some(LineInsertion::Necessary) { for _ in 0..lines_between_types { @@ -225,6 +226,7 @@ fn format_import_block( // Add a blank lines between direct and from imports. 
if !settings.from_first && lines_between_types > 0 + && !settings.force_sort_within_sections && line_insertion == Some(LineInsertion::Necessary) { for _ in 0..lines_between_types { @@ -291,6 +293,7 @@ mod tests { #[test_case(Path::new("force_sort_within_sections.py"))] #[test_case(Path::new("force_to_top.py"))] #[test_case(Path::new("force_wrap_aliases.py"))] + #[test_case(Path::new("future_from.py"))] #[test_case(Path::new("if_elif_else.py"))] #[test_case(Path::new("import_from_after_import.py"))] #[test_case(Path::new("inline_comments.py"))] @@ -701,6 +704,7 @@ mod tests { #[test_case(Path::new("force_sort_within_sections.py"))] #[test_case(Path::new("force_sort_within_sections_with_as_names.py"))] + #[test_case(Path::new("force_sort_within_sections_future.py"))] fn force_sort_within_sections(path: &Path) -> Result<()> { let snapshot = format!("force_sort_within_sections_{}", path.to_string_lossy()); let mut diagnostics = test_path( @@ -720,6 +724,26 @@ mod tests { Ok(()) } + #[test_case(Path::new("force_sort_within_sections_lines_between.py"))] + fn force_sort_within_sections_lines_between(path: &Path) -> Result<()> { + let snapshot = format!("force_sort_within_sections_{}", path.to_string_lossy()); + let mut diagnostics = test_path( + Path::new("isort").join(path).as_path(), + &LinterSettings { + isort: super::settings::Settings { + force_sort_within_sections: true, + lines_between_types: 2, + ..super::settings::Settings::default() + }, + src: vec![test_resource_path("fixtures/isort")], + ..LinterSettings::for_rule(Rule::UnsortedImports) + }, + )?; + diagnostics.sort_by_key(Ranged::start); + assert_messages!(snapshot, diagnostics); + Ok(()) + } + #[test_case(Path::new("comment.py"))] #[test_case(Path::new("comments_and_newlines.py"))] #[test_case(Path::new("docstring.py"))] diff --git a/crates/ruff_linter/src/rules/isort/order.rs b/crates/ruff_linter/src/rules/isort/order.rs index 25430b84ad..7f1f6f97fc 100644 --- a/crates/ruff_linter/src/rules/isort/order.rs +++ b/crates/ruff_linter/src/rules/isort/order.rs @@ -1,4 +1,5 @@ use crate::rules::isort::sorting::ImportStyle; +use crate::rules::isort::{ImportSection, ImportType}; use itertools::Itertools; use super::settings::Settings; @@ -8,6 +9,7 @@ use super::types::{AliasData, CommentSet, ImportBlock, ImportFromStatement}; pub(crate) fn order_imports<'a>( block: ImportBlock<'a>, + section: &ImportSection, settings: &Settings, ) -> Vec<EitherImport<'a>> { let straight_imports = block.import.into_iter(); @@ -52,7 +54,35 @@ pub(crate) fn order_imports<'a>( }, ); - let ordered_imports = if settings.force_sort_within_sections { + let ordered_imports = if matches!(section, ImportSection::Known(ImportType::Future)) { + from_imports + .sorted_by_cached_key(|(import_from, _, _, aliases)| { + ModuleKey::from_module( + import_from.module, + None, + import_from.level, + aliases.first().map(|(alias, _)| (alias.name, alias.asname)), + ImportStyle::From, + settings, + ) + }) + .map(ImportFrom) + .chain( + straight_imports + .sorted_by_cached_key(|(alias, _)| { + ModuleKey::from_module( + Some(alias.name), + alias.asname, + None, + None, + ImportStyle::Straight, + settings, + ) + }) + .map(Import), + ) + .collect() + } else if settings.force_sort_within_sections { straight_imports .map(Import) .chain(from_imports.map(ImportFrom)) diff --git a/crates/ruff_linter/src/rules/isort/snapshots/ruff_linter__rules__isort__tests__force_sort_within_sections_force_sort_within_sections_future.py.snap
b/crates/ruff_linter/src/rules/isort/snapshots/ruff_linter__rules__isort__tests__force_sort_within_sections_force_sort_within_sections_future.py.snap new file mode 100644 index 0000000000..6864d488b7 --- /dev/null +++ b/crates/ruff_linter/src/rules/isort/snapshots/ruff_linter__rules__isort__tests__force_sort_within_sections_force_sort_within_sections_future.py.snap @@ -0,0 +1,16 @@ +--- +source: crates/ruff_linter/src/rules/isort/mod.rs +--- +force_sort_within_sections_future.py:1:1: I001 [*] Import block is un-sorted or un-formatted + | +1 | / import __future__ +2 | | from __future__ import annotations + | + = help: Organize imports + +ℹ Safe fix + 1 |+from __future__ import annotations +1 2 | import __future__ +2 |-from __future__ import annotations + + diff --git a/crates/ruff_linter/src/rules/isort/snapshots/ruff_linter__rules__isort__tests__force_sort_within_sections_force_sort_within_sections_lines_between.py.snap b/crates/ruff_linter/src/rules/isort/snapshots/ruff_linter__rules__isort__tests__force_sort_within_sections_force_sort_within_sections_lines_between.py.snap new file mode 100644 index 0000000000..ed369f0fd6 --- /dev/null +++ b/crates/ruff_linter/src/rules/isort/snapshots/ruff_linter__rules__isort__tests__force_sort_within_sections_force_sort_within_sections_lines_between.py.snap @@ -0,0 +1,4 @@ +--- +source: crates/ruff_linter/src/rules/isort/mod.rs +--- + diff --git a/crates/ruff_linter/src/rules/isort/snapshots/ruff_linter__rules__isort__tests__future_from.py.snap b/crates/ruff_linter/src/rules/isort/snapshots/ruff_linter__rules__isort__tests__future_from.py.snap new file mode 100644 index 0000000000..f3f5cd2a35 --- /dev/null +++ b/crates/ruff_linter/src/rules/isort/snapshots/ruff_linter__rules__isort__tests__future_from.py.snap @@ -0,0 +1,16 @@ +--- +source: crates/ruff_linter/src/rules/isort/mod.rs +--- +future_from.py:1:1: I001 [*] Import block is un-sorted or un-formatted + | +1 | / import __future__ +2 | | from __future__ import annotations + | + = help: Organize imports + +ℹ Safe fix + 1 |+from __future__ import annotations +1 2 | import __future__ +2 |-from __future__ import annotations + + diff --git a/crates/ruff_linter/src/rules/pep8_naming/helpers.rs b/crates/ruff_linter/src/rules/pep8_naming/helpers.rs index fc8f6568af..b48388935c 100644 --- a/crates/ruff_linter/src/rules/pep8_naming/helpers.rs +++ b/crates/ruff_linter/src/rules/pep8_naming/helpers.rs @@ -101,14 +101,22 @@ pub(super) fn is_django_model_import(name: &str, stmt: &Stmt, semantic: &Semanti return false; }; + if arguments.is_empty() { + return false; + } + // Match against, e.g., `apps.get_model("zerver", "Attachment")`. 
if let Some(call_path) = collect_call_path(func.as_ref()) { if matches!(call_path.as_slice(), [.., "get_model"]) { if let Some(argument) = - arguments.find_argument("model_name", arguments.args.len() - 1) + arguments.find_argument("model_name", arguments.args.len().saturating_sub(1)) { if let Some(string_literal) = argument.as_string_literal_expr() { - return string_literal.value.to_str() == name; + if string_literal.value.to_str() == name { + return true; + } + } else { + return true; } } } @@ -123,7 +131,9 @@ pub(super) fn is_django_model_import(name: &str, stmt: &Stmt, semantic: &Semanti if let Some(argument) = arguments.find_argument("dotted_path", 0) { if let Some(string_literal) = argument.as_string_literal_expr() { if let Some((.., model)) = string_literal.value.to_str().rsplit_once('.') { - return model == name; + if model == name { + return true; + } } } } diff --git a/crates/ruff_linter/src/rules/pep8_naming/snapshots/ruff_linter__rules__pep8_naming__tests__N806_N806.py.snap b/crates/ruff_linter/src/rules/pep8_naming/snapshots/ruff_linter__rules__pep8_naming__tests__N806_N806.py.snap index e572eecccb..1baa39c610 100644 --- a/crates/ruff_linter/src/rules/pep8_naming/snapshots/ruff_linter__rules__pep8_naming__tests__N806_N806.py.snap +++ b/crates/ruff_linter/src/rules/pep8_naming/snapshots/ruff_linter__rules__pep8_naming__tests__N806_N806.py.snap @@ -38,4 +38,29 @@ N806.py:53:5: N806 Variable `Bad` in function should be lowercase 54 | ValidationError = import_string("django.core.exceptions.ValidationError") # OK | +N806.py:56:5: N806 Variable `Bad` in function should be lowercase + | +54 | ValidationError = import_string("django.core.exceptions.ValidationError") # OK +55 | +56 | Bad = apps.get_model() # N806 + | ^^^ N806 +57 | Bad = apps.get_model(model_name="Stream") # N806 + | + +N806.py:57:5: N806 Variable `Bad` in function should be lowercase + | +56 | Bad = apps.get_model() # N806 +57 | Bad = apps.get_model(model_name="Stream") # N806 + | ^^^ N806 +58 | +59 | Address: Type = apps.get_model("zerver", variable) # OK + | + +N806.py:60:5: N806 Variable `ValidationError` in function should be lowercase + | +59 | Address: Type = apps.get_model("zerver", variable) # OK +60 | ValidationError = import_string(variable) # N806 + | ^^^^^^^^^^^^^^^ N806 + | + diff --git a/crates/ruff_linter/src/rules/perflint/rules/unnecessary_list_cast.rs b/crates/ruff_linter/src/rules/perflint/rules/unnecessary_list_cast.rs index 93321cf6b0..4c73fd4800 100644 --- a/crates/ruff_linter/src/rules/perflint/rules/unnecessary_list_cast.rs +++ b/crates/ruff_linter/src/rules/perflint/rules/unnecessary_list_cast.rs @@ -1,7 +1,7 @@ use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix}; use ruff_macros::{derive_message_formats, violation}; -use ruff_python_ast::Stmt; -use ruff_python_ast::{self as ast, Arguments, Expr}; +use ruff_python_ast::{self as ast, Arguments, Expr, Stmt}; +use ruff_python_semantic::analyze::typing::find_assigned_value; use ruff_text_size::TextRange; use crate::checkers::ast::Checker; @@ -110,30 +110,13 @@ pub(crate) fn unnecessary_list_cast(checker: &mut Checker, iter: &Expr, body: &[ if body.iter().any(|stmt| match_append(stmt, id)) { return; } - let scope = checker.semantic().current_scope(); - if let Some(binding_id) = scope.get(id) { - let binding = checker.semantic().binding(binding_id); - if binding.kind.is_assignment() || binding.kind.is_named_expr_assignment() { - if let Some(parent_id) = binding.source { - let parent = checker.semantic().statement(parent_id); - if 
let Stmt::Assign(ast::StmtAssign { value, .. }) - | Stmt::AnnAssign(ast::StmtAnnAssign { - value: Some(value), .. - }) - | Stmt::AugAssign(ast::StmtAugAssign { value, .. }) = parent - { - if matches!( - value.as_ref(), - Expr::Tuple(_) | Expr::List(_) | Expr::Set(_) - ) { - let mut diagnostic = - Diagnostic::new(UnnecessaryListCast, *list_range); - diagnostic.set_fix(remove_cast(*list_range, *iterable_range)); - checker.diagnostics.push(diagnostic); - } - } - } - } + let Some(value) = find_assigned_value(id, checker.semantic()) else { + return; + }; + if matches!(value, Expr::Tuple(_) | Expr::List(_) | Expr::Set(_)) { + let mut diagnostic = Diagnostic::new(UnnecessaryListCast, *list_range); + diagnostic.set_fix(remove_cast(*list_range, *iterable_range)); + checker.diagnostics.push(diagnostic); } } _ => {} diff --git a/crates/ruff_linter/src/rules/perflint/snapshots/ruff_linter__rules__perflint__tests__PERF101_PERF101.py.snap b/crates/ruff_linter/src/rules/perflint/snapshots/ruff_linter__rules__perflint__tests__PERF101_PERF101.py.snap index 1b4b456af3..11dafc4dd2 100644 --- a/crates/ruff_linter/src/rules/perflint/snapshots/ruff_linter__rules__perflint__tests__PERF101_PERF101.py.snap +++ b/crates/ruff_linter/src/rules/perflint/snapshots/ruff_linter__rules__perflint__tests__PERF101_PERF101.py.snap @@ -201,4 +201,22 @@ PERF101.py:57:10: PERF101 [*] Do not cast an iterable to `list` before iterating 59 59 | other_list.append(i + 1) 60 60 | +PERF101.py:69:10: PERF101 [*] Do not cast an iterable to `list` before iterating over it + | +67 | x, y, nested_tuple = (1, 2, (3, 4, 5)) +68 | +69 | for i in list(nested_tuple): # PERF101 + | ^^^^^^^^^^^^^^^^^^ PERF101 +70 | pass + | + = help: Remove `list()` cast + +ℹ Safe fix +66 66 | +67 67 | x, y, nested_tuple = (1, 2, (3, 4, 5)) +68 68 | +69 |-for i in list(nested_tuple): # PERF101 + 69 |+for i in nested_tuple: # PERF101 +70 70 | pass + diff --git a/crates/ruff_linter/src/rules/pycodestyle/mod.rs b/crates/ruff_linter/src/rules/pycodestyle/mod.rs index b2a02f5e23..008118ceeb 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/mod.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/mod.rs @@ -31,7 +31,6 @@ mod tests { #[test_case(Rule::BlankLineWithWhitespace, Path::new("W29.py"))] #[test_case(Rule::InvalidEscapeSequence, Path::new("W605_0.py"))] #[test_case(Rule::InvalidEscapeSequence, Path::new("W605_1.py"))] - #[test_case(Rule::InvalidEscapeSequence, Path::new("W605_2.py"))] #[test_case(Rule::LineTooLong, Path::new("E501.py"))] #[test_case(Rule::LineTooLong, Path::new("E501_3.py"))] #[test_case(Rule::MixedSpacesAndTabs, Path::new("E101.py"))] @@ -66,6 +65,7 @@ mod tests { } #[test_case(Rule::IsLiteral, Path::new("constant_literals.py"))] + #[test_case(Rule::ModuleImportNotAtTopOfFile, Path::new("E402.py"))] #[test_case(Rule::TypeComparison, Path::new("E721.py"))] fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> { let snapshot = format!( diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/doc_line_too_long.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/doc_line_too_long.rs index 50a41f4f1b..99c7ba0703 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/doc_line_too_long.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/doc_line_too_long.rs @@ -51,6 +51,19 @@ use crate::settings::LinterSettings; /// """ /// ``` /// +/// ## Error suppression +/// Hint: when suppressing `W505` errors within multi-line strings (like +/// docstrings), the `noqa` directive should come at the end of the string +/// (after the closing 
triple quote), and will apply to the entire string, like +/// so: +/// +/// ```python +/// """Lorem ipsum dolor sit amet. +/// +/// Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor. +/// """ # noqa: W505 +/// ``` +/// /// ## Options /// - `task-tags` /// - `pycodestyle.max-doc-length` diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/imports.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/imports.rs index 443e044498..b85dc9eba8 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/imports.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/imports.rs @@ -41,6 +41,10 @@ impl Violation for MultipleImportsOnOneLine { /// According to [PEP 8], "imports are always put at the top of the file, just after any /// module comments and docstrings, and before module globals and constants." /// +/// In [preview], this rule makes an exception for `sys.path` modifications, +/// allowing for `sys.path.insert`, `sys.path.append`, and similar +/// modifications between import statements. +/// /// ## Example /// ```python /// "One string" diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/invalid_escape_sequence.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/invalid_escape_sequence.rs index b4d100aa9d..77a3d6ba62 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/invalid_escape_sequence.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/invalid_escape_sequence.rs @@ -3,9 +3,9 @@ use memchr::memchr_iter; use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix}; use ruff_macros::{derive_message_formats, violation}; use ruff_python_index::Indexer; -use ruff_python_parser::Tok; +use ruff_python_parser::{StringKind, Tok}; use ruff_source_file::Locator; -use ruff_text_size::{TextLen, TextRange, TextSize}; +use ruff_text_size::{Ranged, TextLen, TextRange, TextSize}; use crate::fix::edits::pad_start; @@ -58,18 +58,6 @@ impl AlwaysFixableViolation for InvalidEscapeSequence { } } -#[derive(Debug, PartialEq, Eq)] -enum FixTitle { - AddBackslash, - UseRawStringLiteral, -} - -#[derive(Debug)] -struct InvalidEscapeChar { - ch: char, - range: TextRange, -} - /// W605 pub(crate) fn invalid_escape_sequence( diagnostics: &mut Vec, @@ -195,41 +183,77 @@ pub(crate) fn invalid_escape_sequence( if contains_valid_escape_sequence { // Escape with backslash. for invalid_escape_char in &invalid_escape_chars { - let diagnostic = Diagnostic::new( + let mut diagnostic = Diagnostic::new( InvalidEscapeSequence { ch: invalid_escape_char.ch, fix_title: FixTitle::AddBackslash, }, - invalid_escape_char.range, - ) - .with_fix(Fix::safe_edit(Edit::insertion( + invalid_escape_char.range(), + ); + diagnostic.set_fix(Fix::safe_edit(Edit::insertion( r"\".to_string(), - invalid_escape_char.range.start() + TextSize::from(1), + invalid_escape_char.start() + TextSize::from(1), ))); invalid_escape_sequence.push(diagnostic); } } else { // Turn into raw string. for invalid_escape_char in &invalid_escape_chars { - let diagnostic = Diagnostic::new( + let mut diagnostic = Diagnostic::new( InvalidEscapeSequence { ch: invalid_escape_char.ch, fix_title: FixTitle::UseRawStringLiteral, }, - invalid_escape_char.range, - ) - .with_fix( - // If necessary, add a space between any leading keyword (`return`, `yield`, - // `assert`, etc.) and the string. For example, `return"foo"` is valid, but - // `returnr"foo"` is not. 
- Fix::safe_edit(Edit::insertion( - pad_start("r".to_string(), string_start_location, locator), - string_start_location, - )), + invalid_escape_char.range(), ); + + if matches!( + token, + Tok::String { + kind: StringKind::Unicode, + .. + } + ) { + // Replace the Unicode prefix with `r`. + diagnostic.set_fix(Fix::safe_edit(Edit::replacement( + "r".to_string(), + string_start_location, + string_start_location + TextSize::from(1), + ))); + } else { + // Insert the `r` prefix. + diagnostic.set_fix( + // If necessary, add a space between any leading keyword (`return`, `yield`, + // `assert`, etc.) and the string. For example, `return"foo"` is valid, but + // `returnr"foo"` is not. + Fix::safe_edit(Edit::insertion( + pad_start("r".to_string(), string_start_location, locator), + string_start_location, + )), + ); + } + invalid_escape_sequence.push(diagnostic); } } diagnostics.extend(invalid_escape_sequence); } + +#[derive(Debug, PartialEq, Eq)] +enum FixTitle { + AddBackslash, + UseRawStringLiteral, +} + +#[derive(Debug)] +struct InvalidEscapeChar { + ch: char, + range: TextRange, +} + +impl Ranged for InvalidEscapeChar { + fn range(&self) -> TextRange { + self.range + } +} diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/line_too_long.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/line_too_long.rs index d803cfa559..8161ebb753 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/line_too_long.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/line_too_long.rs @@ -45,11 +45,24 @@ use crate::settings::LinterSettings; /// ) /// ``` /// +/// ## Error suppression +/// Hint: when suppressing `E501` errors within multi-line strings (like +/// docstrings), the `noqa` directive should come at the end of the string +/// (after the closing triple quote), and will apply to the entire string, like +/// so: +/// +/// ```python +/// """Lorem ipsum dolor sit amet. +/// +/// Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor. 
+/// """ # noqa: E501 +/// ``` +/// /// ## Options /// - `line-length` -/// - `pycodestyle.max-line-length` /// - `task-tags` /// - `pycodestyle.ignore-overlong-task-comments` +/// - `pycodestyle.max-line-length` /// /// [PEP 8]: https://peps.python.org/pep-0008/#maximum-line-length #[violation] diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/mod.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/mod.rs index 8fcfca6d76..329116eca9 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/mod.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/mod.rs @@ -381,20 +381,16 @@ impl Whitespace { } } - if has_tabs { + if len == content.text_len() { + // All whitespace up to the start of the line -> Indent + (Self::None, TextSize::default()) + } else if has_tabs { (Self::Tab, len) } else { match count { 0 => (Self::None, TextSize::default()), 1 => (Self::Single, len), - _ => { - if len == content.text_len() { - // All whitespace up to the start of the line -> Indent - (Self::None, TextSize::default()) - } else { - (Self::Many, len) - } - } + _ => (Self::Many, len), } } } diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_around_named_parameter_equals.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_around_named_parameter_equals.rs index f5baea1508..a0c4f49bf2 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_around_named_parameter_equals.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_around_named_parameter_equals.rs @@ -1,4 +1,4 @@ -use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix, Violation}; +use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix}; use ruff_macros::{derive_message_formats, violation}; use ruff_python_parser::TokenKind; use ruff_text_size::{Ranged, TextRange, TextSize}; @@ -34,11 +34,15 @@ use crate::rules::pycodestyle::rules::logical_lines::{LogicalLine, LogicalLineTo #[violation] pub struct UnexpectedSpacesAroundKeywordParameterEquals; -impl Violation for UnexpectedSpacesAroundKeywordParameterEquals { +impl AlwaysFixableViolation for UnexpectedSpacesAroundKeywordParameterEquals { #[derive_message_formats] fn message(&self) -> String { format!("Unexpected spaces around keyword / parameter equals") } + + fn fix_title(&self) -> String { + format!("Remove whitespace") + } } /// ## What it does @@ -165,22 +169,31 @@ pub(crate) fn whitespace_around_named_parameter_equals( } } } else { + // If there's space between the preceding token and the equals sign, report it. if token.start() != prev_end { - context.push( + let mut diagnostic = Diagnostic::new( UnexpectedSpacesAroundKeywordParameterEquals, TextRange::new(prev_end, token.start()), ); + diagnostic.set_fix(Fix::safe_edit(Edit::deletion(prev_end, token.start()))); + context.push_diagnostic(diagnostic); } + // If there's space between the equals sign and the following token, report it. 
while let Some(next) = iter.peek() { if next.kind() == TokenKind::NonLogicalNewline { iter.next(); } else { if next.start() != token.end() { - context.push( + let mut diagnostic = Diagnostic::new( UnexpectedSpacesAroundKeywordParameterEquals, TextRange::new(token.end(), next.start()), ); + diagnostic.set_fix(Fix::safe_edit(Edit::deletion( + token.end(), + next.start(), + ))); + context.push_diagnostic(diagnostic); } break; } diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_before_comment.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_before_comment.rs index 235c12f7b6..0df27d8fe5 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_before_comment.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_before_comment.rs @@ -1,4 +1,4 @@ -use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix, Violation}; +use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix}; use ruff_macros::{derive_message_formats, violation}; use ruff_python_parser::TokenKind; use ruff_python_trivia::PythonWhitespace; @@ -66,11 +66,15 @@ impl AlwaysFixableViolation for TooFewSpacesBeforeInlineComment { #[violation] pub struct NoSpaceAfterInlineComment; -impl Violation for NoSpaceAfterInlineComment { +impl AlwaysFixableViolation for NoSpaceAfterInlineComment { #[derive_message_formats] fn message(&self) -> String { format!("Inline comment should start with `# `") } + + fn fix_title(&self) -> String { + format!("Format space") + } } /// ## What it does @@ -98,11 +102,15 @@ impl Violation for NoSpaceAfterInlineComment { #[violation] pub struct NoSpaceAfterBlockComment; -impl Violation for NoSpaceAfterBlockComment { +impl AlwaysFixableViolation for NoSpaceAfterBlockComment { #[derive_message_formats] fn message(&self) -> String { format!("Block comment should start with `# `") } + + fn fix_title(&self) -> String { + format!("Format space") + } } /// ## What it does @@ -130,11 +138,15 @@ impl Violation for NoSpaceAfterBlockComment { #[violation] pub struct MultipleLeadingHashesForBlockComment; -impl Violation for MultipleLeadingHashesForBlockComment { +impl AlwaysFixableViolation for MultipleLeadingHashesForBlockComment { #[derive_message_formats] fn message(&self) -> String { format!("Too many leading `#` before block comment") } + + fn fix_title(&self) -> String { + format!("Remove leading `#`") + } } /// E261, E262, E265, E266 @@ -184,14 +196,30 @@ pub(crate) fn whitespace_before_comment( if is_inline_comment { if bad_prefix.is_some() || comment.chars().next().is_some_and(char::is_whitespace) { - context.push(NoSpaceAfterInlineComment, range); + let mut diagnostic = Diagnostic::new(NoSpaceAfterInlineComment, range); + diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( + format_leading_space(token_text), + range, + ))); + context.push_diagnostic(diagnostic); } } else if let Some(bad_prefix) = bad_prefix { if bad_prefix != '!' 
|| !line.is_start_of_file() { if bad_prefix != '#' { - context.push(NoSpaceAfterBlockComment, range); + let mut diagnostic = Diagnostic::new(NoSpaceAfterBlockComment, range); + diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( + format_leading_space(token_text), + range, + ))); + context.push_diagnostic(diagnostic); } else if !comment.is_empty() { - context.push(MultipleLeadingHashesForBlockComment, range); + let mut diagnostic = + Diagnostic::new(MultipleLeadingHashesForBlockComment, range); + diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( + format_leading_hashes(token_text), + range, + ))); + context.push_diagnostic(diagnostic); } } } @@ -200,3 +228,17 @@ pub(crate) fn whitespace_before_comment( } } } + +/// Format a comment to have a single space after the `#`. +fn format_leading_space(comment: &str) -> String { + if let Some(rest) = comment.strip_prefix("#:") { + format!("#: {}", rest.trim_start()) + } else { + format!("# {}", comment.trim_start_matches('#').trim_start()) + } +} + +/// Format a comment to strip multiple leading `#` characters. +fn format_leading_hashes(comment: &str) -> String { + format!("# {}", comment.trim_start_matches('#').trim_start()) +} diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E251_E25.py.snap b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E251_E25.py.snap index 88d211954b..a87bcdff55 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E251_E25.py.snap +++ b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E251_E25.py.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/pycodestyle/mod.rs --- -E25.py:2:12: E251 Unexpected spaces around keyword / parameter equals +E25.py:2:12: E251 [*] Unexpected spaces around keyword / parameter equals | 1 | #: E251 E251 2 | def foo(bar = False): @@ -9,8 +9,17 @@ E25.py:2:12: E251 Unexpected spaces around keyword / parameter equals 3 | '''Test function with an error in declaration''' 4 | pass | + = help: Remove whitespace -E25.py:2:14: E251 Unexpected spaces around keyword / parameter equals +ℹ Safe fix +1 1 | #: E251 E251 +2 |-def foo(bar = False): + 2 |+def foo(bar= False): +3 3 | '''Test function with an error in declaration''' +4 4 | pass +5 5 | #: E251 + +E25.py:2:14: E251 [*] Unexpected spaces around keyword / parameter equals | 1 | #: E251 E251 2 | def foo(bar = False): @@ -18,8 +27,17 @@ E25.py:2:14: E251 Unexpected spaces around keyword / parameter equals 3 | '''Test function with an error in declaration''' 4 | pass | + = help: Remove whitespace -E25.py:6:9: E251 Unexpected spaces around keyword / parameter equals +ℹ Safe fix +1 1 | #: E251 E251 +2 |-def foo(bar = False): + 2 |+def foo(bar =False): +3 3 | '''Test function with an error in declaration''' +4 4 | pass +5 5 | #: E251 + +E25.py:6:9: E251 [*] Unexpected spaces around keyword / parameter equals | 4 | pass 5 | #: E251 @@ -28,8 +46,19 @@ E25.py:6:9: E251 Unexpected spaces around keyword / parameter equals 7 | #: E251 8 | foo(bar =True) | + = help: Remove whitespace -E25.py:8:8: E251 Unexpected spaces around keyword / parameter equals +ℹ Safe fix +3 3 | '''Test function with an error in declaration''' +4 4 | pass +5 5 | #: E251 +6 |-foo(bar= True) + 6 |+foo(bar=True) +7 7 | #: E251 +8 8 | foo(bar =True) +9 9 | #: E251 E251 + +E25.py:8:8: E251 [*] Unexpected spaces around keyword / parameter equals | 6 | foo(bar= True) 7 | #: E251 @@ 
-38,8 +67,19 @@ E25.py:8:8: E251 Unexpected spaces around keyword / parameter equals 9 | #: E251 E251 10 | foo(bar = True) | + = help: Remove whitespace -E25.py:10:8: E251 Unexpected spaces around keyword / parameter equals +ℹ Safe fix +5 5 | #: E251 +6 6 | foo(bar= True) +7 7 | #: E251 +8 |-foo(bar =True) + 8 |+foo(bar=True) +9 9 | #: E251 E251 +10 10 | foo(bar = True) +11 11 | #: E251 + +E25.py:10:8: E251 [*] Unexpected spaces around keyword / parameter equals | 8 | foo(bar =True) 9 | #: E251 E251 @@ -48,8 +88,19 @@ E25.py:10:8: E251 Unexpected spaces around keyword / parameter equals 11 | #: E251 12 | y = bar(root= "sdasd") | + = help: Remove whitespace -E25.py:10:10: E251 Unexpected spaces around keyword / parameter equals +ℹ Safe fix +7 7 | #: E251 +8 8 | foo(bar =True) +9 9 | #: E251 E251 +10 |-foo(bar = True) + 10 |+foo(bar= True) +11 11 | #: E251 +12 12 | y = bar(root= "sdasd") +13 13 | #: E251:2:29 + +E25.py:10:10: E251 [*] Unexpected spaces around keyword / parameter equals | 8 | foo(bar =True) 9 | #: E251 E251 @@ -58,8 +109,19 @@ E25.py:10:10: E251 Unexpected spaces around keyword / parameter equals 11 | #: E251 12 | y = bar(root= "sdasd") | + = help: Remove whitespace -E25.py:12:14: E251 Unexpected spaces around keyword / parameter equals +ℹ Safe fix +7 7 | #: E251 +8 8 | foo(bar =True) +9 9 | #: E251 E251 +10 |-foo(bar = True) + 10 |+foo(bar =True) +11 11 | #: E251 +12 12 | y = bar(root= "sdasd") +13 13 | #: E251:2:29 + +E25.py:12:14: E251 [*] Unexpected spaces around keyword / parameter equals | 10 | foo(bar = True) 11 | #: E251 @@ -68,8 +130,19 @@ E25.py:12:14: E251 Unexpected spaces around keyword / parameter equals 13 | #: E251:2:29 14 | parser.add_argument('--long-option', | + = help: Remove whitespace -E25.py:15:29: E251 Unexpected spaces around keyword / parameter equals +ℹ Safe fix +9 9 | #: E251 E251 +10 10 | foo(bar = True) +11 11 | #: E251 +12 |-y = bar(root= "sdasd") + 12 |+y = bar(root="sdasd") +13 13 | #: E251:2:29 +14 14 | parser.add_argument('--long-option', +15 15 | default= + +E25.py:15:29: E251 [*] Unexpected spaces around keyword / parameter equals | 13 | #: E251:2:29 14 | parser.add_argument('--long-option', @@ -80,8 +153,20 @@ E25.py:15:29: E251 Unexpected spaces around keyword / parameter equals 17 | #: E251:1:45 18 | parser.add_argument('--long-option', default | + = help: Remove whitespace -E25.py:18:45: E251 Unexpected spaces around keyword / parameter equals +ℹ Safe fix +12 12 | y = bar(root= "sdasd") +13 13 | #: E251:2:29 +14 14 | parser.add_argument('--long-option', +15 |- default= +16 |- "/rather/long/filesystem/path/here/blah/blah/blah") + 15 |+ default="/rather/long/filesystem/path/here/blah/blah/blah") +17 16 | #: E251:1:45 +18 17 | parser.add_argument('--long-option', default +19 18 | ="/rather/long/filesystem/path/here/blah/blah/blah") + +E25.py:18:45: E251 [*] Unexpected spaces around keyword / parameter equals | 16 | "/rather/long/filesystem/path/here/blah/blah/blah") 17 | #: E251:1:45 @@ -92,8 +177,20 @@ E25.py:18:45: E251 Unexpected spaces around keyword / parameter equals 20 | #: E251:3:8 E251:3:10 21 | foo(True, | + = help: Remove whitespace -E25.py:23:8: E251 Unexpected spaces around keyword / parameter equals +ℹ Safe fix +15 15 | default= +16 16 | "/rather/long/filesystem/path/here/blah/blah/blah") +17 17 | #: E251:1:45 +18 |-parser.add_argument('--long-option', default +19 |- ="/rather/long/filesystem/path/here/blah/blah/blah") + 18 |+parser.add_argument('--long-option', default="/rather/long/filesystem/path/here/blah/blah/blah") +20 
19 | #: E251:3:8 E251:3:10 +21 20 | foo(True, +22 21 | baz=(1, 2), + +E25.py:23:8: E251 [*] Unexpected spaces around keyword / parameter equals | 21 | foo(True, 22 | baz=(1, 2), @@ -102,8 +199,19 @@ E25.py:23:8: E251 Unexpected spaces around keyword / parameter equals 24 | ) 25 | #: Okay | + = help: Remove whitespace -E25.py:23:10: E251 Unexpected spaces around keyword / parameter equals +ℹ Safe fix +20 20 | #: E251:3:8 E251:3:10 +21 21 | foo(True, +22 22 | baz=(1, 2), +23 |- biz = 'foo' + 23 |+ biz= 'foo' +24 24 | ) +25 25 | #: Okay +26 26 | foo(bar=(1 == 1)) + +E25.py:23:10: E251 [*] Unexpected spaces around keyword / parameter equals | 21 | foo(True, 22 | baz=(1, 2), @@ -112,5 +220,16 @@ E25.py:23:10: E251 Unexpected spaces around keyword / parameter equals 24 | ) 25 | #: Okay | + = help: Remove whitespace + +ℹ Safe fix +20 20 | #: E251:3:8 E251:3:10 +21 21 | foo(True, +22 22 | baz=(1, 2), +23 |- biz = 'foo' + 23 |+ biz ='foo' +24 24 | ) +25 25 | #: Okay +26 26 | foo(bar=(1 == 1)) diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E262_E26.py.snap b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E262_E26.py.snap index a01a1eb2d7..658391acdc 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E262_E26.py.snap +++ b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E262_E26.py.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/pycodestyle/mod.rs --- -E26.py:4:12: E262 Inline comment should start with `# ` +E26.py:4:12: E262 [*] Inline comment should start with `# ` | 2 | pass # an inline comment 3 | #: E262:1:12 @@ -10,8 +10,19 @@ E26.py:4:12: E262 Inline comment should start with `# ` 5 | #: E262:1:12 6 | x = x + 1 # Increment x | + = help: Format space -E26.py:6:12: E262 Inline comment should start with `# ` +ℹ Safe fix +1 1 | #: E261:1:5 +2 2 | pass # an inline comment +3 3 | #: E262:1:12 +4 |-x = x + 1 #Increment x + 4 |+x = x + 1 # Increment x +5 5 | #: E262:1:12 +6 6 | x = x + 1 # Increment x +7 7 | #: E262:1:12 + +E26.py:6:12: E262 [*] Inline comment should start with `# ` | 4 | x = x + 1 #Increment x 5 | #: E262:1:12 @@ -20,8 +31,19 @@ E26.py:6:12: E262 Inline comment should start with `# ` 7 | #: E262:1:12 8 | x = y + 1 #: Increment x | + = help: Format space -E26.py:8:12: E262 Inline comment should start with `# ` +ℹ Safe fix +3 3 | #: E262:1:12 +4 4 | x = x + 1 #Increment x +5 5 | #: E262:1:12 +6 |-x = x + 1 # Increment x + 6 |+x = x + 1 # Increment x +7 7 | #: E262:1:12 +8 8 | x = y + 1 #: Increment x +9 9 | #: E265:1:1 + +E26.py:8:12: E262 [*] Inline comment should start with `# ` | 6 | x = x + 1 # Increment x 7 | #: E262:1:12 @@ -30,8 +52,19 @@ E26.py:8:12: E262 Inline comment should start with `# ` 9 | #: E265:1:1 10 | #Block comment | + = help: Format space -E26.py:63:9: E262 Inline comment should start with `# ` +ℹ Safe fix +5 5 | #: E262:1:12 +6 6 | x = x + 1 # Increment x +7 7 | #: E262:1:12 +8 |-x = y + 1 #: Increment x + 8 |+x = y + 1 #: Increment x +9 9 | #: E265:1:1 +10 10 | #Block comment +11 11 | a = 1 + +E26.py:63:9: E262 [*] Inline comment should start with `# ` | 61 | # -*- coding: utf8 -*- 62 | #  (One space one NBSP) Ok for block comment @@ -40,8 +73,19 @@ E26.py:63:9: E262 Inline comment should start with `# ` 64 | #: E262:2:9 65 | # (Two spaces) Ok for block comment | + = help: Format space -E26.py:66:9: E262 Inline comment should start with `# ` +ℹ Safe fix 
+60 60 | #: E262:3:9 +61 61 | # -*- coding: utf8 -*- +62 62 | #  (One space one NBSP) Ok for block comment +63 |-a = 42 #  (One space one NBSP) + 63 |+a = 42 # (One space one NBSP) +64 64 | #: E262:2:9 +65 65 | # (Two spaces) Ok for block comment +66 66 | a = 42 # (Two spaces) + +E26.py:66:9: E262 [*] Inline comment should start with `# ` | 64 | #: E262:2:9 65 | # (Two spaces) Ok for block comment @@ -50,5 +94,52 @@ E26.py:66:9: E262 Inline comment should start with `# ` 67 | 68 | #: E265:5:1 | + = help: Format space + +ℹ Safe fix +63 63 | a = 42 #  (One space one NBSP) +64 64 | #: E262:2:9 +65 65 | # (Two spaces) Ok for block comment +66 |-a = 42 # (Two spaces) + 66 |+a = 42 # (Two spaces) +67 67 | +68 68 | #: E265:5:1 +69 69 | ### Means test is not done yet + +E26.py:84:8: E262 [*] Inline comment should start with `# ` + | +82 | ## Foo +83 | +84 | a = 1 ## Foo + | ^^^^^^ E262 +85 | +86 | a = 1 #:Foo + | + = help: Format space + +ℹ Safe fix +81 81 | #: E266:1:3 +82 82 | ## Foo +83 83 | +84 |-a = 1 ## Foo + 84 |+a = 1 # Foo +85 85 | +86 86 | a = 1 #:Foo + +E26.py:86:8: E262 [*] Inline comment should start with `# ` + | +84 | a = 1 ## Foo +85 | +86 | a = 1 #:Foo + | ^^^^^ E262 + | + = help: Format space + +ℹ Safe fix +83 83 | +84 84 | a = 1 ## Foo +85 85 | +86 |-a = 1 #:Foo + 86 |+a = 1 #: Foo diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E265_E26.py.snap b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E265_E26.py.snap index 6017827141..92b25ccb21 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E265_E26.py.snap +++ b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E265_E26.py.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/pycodestyle/mod.rs --- -E26.py:10:1: E265 Block comment should start with `# ` +E26.py:10:1: E265 [*] Block comment should start with `# ` | 8 | x = y + 1 #: Increment x 9 | #: E265:1:1 @@ -10,8 +10,19 @@ E26.py:10:1: E265 Block comment should start with `# ` 11 | a = 1 12 | #: E265:2:1 | + = help: Format space -E26.py:14:1: E265 Block comment should start with `# ` +ℹ Safe fix +7 7 | #: E262:1:12 +8 8 | x = y + 1 #: Increment x +9 9 | #: E265:1:1 +10 |-#Block comment + 10 |+# Block comment +11 11 | a = 1 +12 12 | #: E265:2:1 +13 13 | m = 42 + +E26.py:14:1: E265 [*] Block comment should start with `# ` | 12 | #: E265:2:1 13 | m = 42 @@ -20,8 +31,19 @@ E26.py:14:1: E265 Block comment should start with `# ` 15 | mx = 42 - 42 16 | #: E266:3:5 E266:6:5 | + = help: Format space -E26.py:25:1: E265 Block comment should start with `# ` +ℹ Safe fix +11 11 | a = 1 +12 12 | #: E265:2:1 +13 13 | m = 42 +14 |-#! This is important + 14 |+# ! 
This is important +15 15 | mx = 42 - 42 +16 16 | #: E266:3:5 E266:6:5 +17 17 | def how_it_feel(r): + +E26.py:25:1: E265 [*] Block comment should start with `# ` | 23 | return 24 | #: E265:1:1 E266:2:1 @@ -30,8 +52,19 @@ E26.py:25:1: E265 Block comment should start with `# ` 26 | ## logging.error() 27 | #: W291:1:42 | + = help: Format space -E26.py:32:1: E265 Block comment should start with `# ` +ℹ Safe fix +22 22 | ### Of course it is unused +23 23 | return +24 24 | #: E265:1:1 E266:2:1 +25 |-##if DEBUG: + 25 |+# if DEBUG: +26 26 | ## logging.error() +27 27 | #: W291:1:42 +28 28 | ######################################### + +E26.py:32:1: E265 [*] Block comment should start with `# ` | 31 | #: Okay 32 | #!/usr/bin/env python @@ -39,8 +72,19 @@ E26.py:32:1: E265 Block comment should start with `# ` 33 | 34 | pass # an inline comment | + = help: Format space -E26.py:73:1: E265 Block comment should start with `# ` +ℹ Safe fix +29 29 | #: +30 30 | +31 31 | #: Okay +32 |-#!/usr/bin/env python + 32 |+# !/usr/bin/env python +33 33 | +34 34 | pass # an inline comment +35 35 | x = x + 1 # Increment x + +E26.py:73:1: E265 [*] Block comment should start with `# ` | 71 | # F Means test is failing (F) 72 | # EF Means test is giving error and Failing @@ -48,5 +92,37 @@ E26.py:73:1: E265 Block comment should start with `# ` | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ E265 74 | # 8 Means test runs forever | + = help: Format space + +ℹ Safe fix +70 70 | # E Means test is giving error (E) +71 71 | # F Means test is failing (F) +72 72 | # EF Means test is giving error and Failing +73 |-#! Means test is segfaulting + 73 |+# ! Means test is segfaulting +74 74 | # 8 Means test runs forever +75 75 | +76 76 | #: Colon prefix is okay + +E26.py:78:1: E265 [*] Block comment should start with `# ` + | +76 | #: Colon prefix is okay +77 | +78 | ###This is a variable ### + | ^^^^^^^^^^^^^^^^^^^^^^^^^ E265 +79 | +80 | # We should strip the space, but preserve the hashes. + | + = help: Format space + +ℹ Safe fix +75 75 | +76 76 | #: Colon prefix is okay +77 77 | +78 |-###This is a variable ### + 78 |+# This is a variable ### +79 79 | +80 80 | # We should strip the space, but preserve the hashes. 
+81 81 | #: E266:1:3 diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E266_E26.py.snap b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E266_E26.py.snap index 5eb382385c..4469e1b225 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E266_E26.py.snap +++ b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E266_E26.py.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/pycodestyle/mod.rs --- -E26.py:19:5: E266 Too many leading `#` before block comment +E26.py:19:5: E266 [*] Too many leading `#` before block comment | 17 | def how_it_feel(r): 18 | @@ -9,8 +9,19 @@ E26.py:19:5: E266 Too many leading `#` before block comment | ^^^^^^^^^^^^^^^^^^^^^^^^^^ E266 20 | a = 42 | + = help: Remove leading `#` -E26.py:22:5: E266 Too many leading `#` before block comment +ℹ Safe fix +16 16 | #: E266:3:5 E266:6:5 +17 17 | def how_it_feel(r): +18 18 | +19 |- ### This is a variable ### + 19 |+ # This is a variable ### +20 20 | a = 42 +21 21 | +22 22 | ### Of course it is unused + +E26.py:22:5: E266 [*] Too many leading `#` before block comment | 20 | a = 42 21 | @@ -19,8 +30,19 @@ E26.py:22:5: E266 Too many leading `#` before block comment 23 | return 24 | #: E265:1:1 E266:2:1 | + = help: Remove leading `#` -E26.py:26:1: E266 Too many leading `#` before block comment +ℹ Safe fix +19 19 | ### This is a variable ### +20 20 | a = 42 +21 21 | +22 |- ### Of course it is unused + 22 |+ # Of course it is unused +23 23 | return +24 24 | #: E265:1:1 E266:2:1 +25 25 | ##if DEBUG: + +E26.py:26:1: E266 [*] Too many leading `#` before block comment | 24 | #: E265:1:1 E266:2:1 25 | ##if DEBUG: @@ -29,8 +51,19 @@ E26.py:26:1: E266 Too many leading `#` before block comment 27 | #: W291:1:42 28 | ######################################### | + = help: Remove leading `#` -E26.py:69:1: E266 Too many leading `#` before block comment +ℹ Safe fix +23 23 | return +24 24 | #: E265:1:1 E266:2:1 +25 25 | ##if DEBUG: +26 |-## logging.error() + 26 |+# logging.error() +27 27 | #: W291:1:42 +28 28 | ######################################### +29 29 | #: + +E26.py:69:1: E266 [*] Too many leading `#` before block comment | 68 | #: E265:5:1 69 | ### Means test is not done yet @@ -38,5 +71,37 @@ E26.py:69:1: E266 Too many leading `#` before block comment 70 | # E Means test is giving error (E) 71 | # F Means test is failing (F) | + = help: Remove leading `#` + +ℹ Safe fix +66 66 | a = 42 # (Two spaces) +67 67 | +68 68 | #: E265:5:1 +69 |-### Means test is not done yet + 69 |+# Means test is not done yet +70 70 | # E Means test is giving error (E) +71 71 | # F Means test is failing (F) +72 72 | # EF Means test is giving error and Failing + +E26.py:82:1: E266 [*] Too many leading `#` before block comment + | +80 | # We should strip the space, but preserve the hashes. +81 | #: E266:1:3 +82 | ## Foo + | ^^^^^^^ E266 +83 | +84 | a = 1 ## Foo + | + = help: Remove leading `#` + +ℹ Safe fix +79 79 | +80 80 | # We should strip the space, but preserve the hashes. 
+81 81 | #: E266:1:3 +82 |-## Foo + 82 |+# Foo +83 83 | +84 84 | a = 1 ## Foo +85 85 | diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E402_E402.py.snap b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E402_E402.py.snap index 8c15438eb5..072290ae87 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E402_E402.py.snap +++ b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E402_E402.py.snap @@ -1,27 +1,57 @@ --- source: crates/ruff_linter/src/rules/pycodestyle/mod.rs --- -E402.py:24:1: E402 Module level import not at top of file +E402.py:25:1: E402 Module level import not at top of file | -22 | __some__magic = 1 -23 | -24 | import f +23 | sys.path.insert(0, "some/path") +24 | +25 | import f + | ^^^^^^^^ E402 +26 | +27 | import matplotlib + | + +E402.py:27:1: E402 Module level import not at top of file + | +25 | import f +26 | +27 | import matplotlib + | ^^^^^^^^^^^^^^^^^ E402 +28 | +29 | matplotlib.use("Agg") + | + +E402.py:31:1: E402 Module level import not at top of file + | +29 | matplotlib.use("Agg") +30 | +31 | import g + | ^^^^^^^^ E402 +32 | +33 | __some__magic = 1 + | + +E402.py:35:1: E402 Module level import not at top of file + | +33 | __some__magic = 1 +34 | +35 | import h | ^^^^^^^^ E402 | -E402.py:34:1: E402 Module level import not at top of file +E402.py:45:1: E402 Module level import not at top of file | -32 | import g -33 | -34 | import h; import i +43 | import j +44 | +45 | import k; import l | ^^^^^^^^ E402 | -E402.py:34:11: E402 Module level import not at top of file +E402.py:45:11: E402 Module level import not at top of file | -32 | import g -33 | -34 | import h; import i +43 | import j +44 | +45 | import k; import l | ^^^^^^^^ E402 | diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__W605_W605_0.py.snap b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__W605_W605_0.py.snap index 98da5fff80..e4630487b7 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__W605_W605_0.py.snap +++ b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__W605_W605_0.py.snap @@ -125,6 +125,8 @@ W605_0.py:45:12: W605 [*] Invalid escape sequence: `\_` 44 | 45 | regex = '\\\_' | ^^ W605 +46 | +47 | #: W605:1:7 | = help: Add backslash to escape sequence @@ -134,5 +136,23 @@ W605_0.py:45:12: W605 [*] Invalid escape sequence: `\_` 44 44 | 45 |-regex = '\\\_' 45 |+regex = '\\\\_' +46 46 | +47 47 | #: W605:1:7 +48 48 | u'foo\ bar' + +W605_0.py:48:6: W605 [*] Invalid escape sequence: `\ ` + | +47 | #: W605:1:7 +48 | u'foo\ bar' + | ^^ W605 + | + = help: Use a raw string literal + +ℹ Safe fix +45 45 | regex = '\\\_' +46 46 | +47 47 | #: W605:1:7 +48 |-u'foo\ bar' + 48 |+r'foo\ bar' diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__W605_W605_1.py.snap b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__W605_W605_1.py.snap index 2fee83a5fe..c47507e0cc 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__W605_W605_1.py.snap +++ b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__W605_W605_1.py.snap @@ -1,104 +1,227 @@ --- source: 
crates/ruff_linter/src/rules/pycodestyle/mod.rs --- -W605_1.py:2:10: W605 [*] Invalid escape sequence: `\.` +W605_1.py:4:11: W605 [*] Invalid escape sequence: `\.` | -1 | #: W605:1:10 -2 | regex = '\.png$' - | ^^ W605 -3 | -4 | #: W605:2:1 +3 | #: W605:1:10 +4 | regex = f'\.png$' + | ^^ W605 +5 | +6 | #: W605:2:1 | = help: Use a raw string literal ℹ Safe fix -1 1 | #: W605:1:10 -2 |-regex = '\.png$' - 2 |+regex = r'\.png$' -3 3 | -4 4 | #: W605:2:1 -5 5 | regex = ''' +1 1 | # Same as `W605_0.py` but using f-strings instead. +2 2 | +3 3 | #: W605:1:10 +4 |-regex = f'\.png$' + 4 |+regex = rf'\.png$' +5 5 | +6 6 | #: W605:2:1 +7 7 | regex = f''' -W605_1.py:6:1: W605 [*] Invalid escape sequence: `\.` +W605_1.py:8:1: W605 [*] Invalid escape sequence: `\.` | -4 | #: W605:2:1 -5 | regex = ''' -6 | \.png$ +6 | #: W605:2:1 +7 | regex = f''' +8 | \.png$ | ^^ W605 -7 | ''' +9 | ''' | = help: Use a raw string literal ℹ Safe fix -2 2 | regex = '\.png$' -3 3 | -4 4 | #: W605:2:1 -5 |-regex = ''' - 5 |+regex = r''' -6 6 | \.png$ -7 7 | ''' -8 8 | +4 4 | regex = f'\.png$' +5 5 | +6 6 | #: W605:2:1 +7 |-regex = f''' + 7 |+regex = rf''' +8 8 | \.png$ +9 9 | ''' +10 10 | -W605_1.py:11:6: W605 [*] Invalid escape sequence: `\_` +W605_1.py:13:7: W605 [*] Invalid escape sequence: `\_` | - 9 | #: W605:2:6 -10 | f( -11 | '\_' +11 | #: W605:2:6 +12 | f( +13 | f'\_' + | ^^ W605 +14 | ) + | + = help: Use a raw string literal + +ℹ Safe fix +10 10 | +11 11 | #: W605:2:6 +12 12 | f( +13 |- f'\_' + 13 |+ rf'\_' +14 14 | ) +15 15 | +16 16 | #: W605:4:6 + +W605_1.py:20:6: W605 [*] Invalid escape sequence: `\_` + | +18 | multi-line +19 | literal +20 | with \_ somewhere | ^^ W605 -12 | ) +21 | in the middle +22 | """ | = help: Use a raw string literal ℹ Safe fix -8 8 | -9 9 | #: W605:2:6 -10 10 | f( -11 |- '\_' - 11 |+ r'\_' -12 12 | ) -13 13 | -14 14 | #: W605:4:6 +14 14 | ) +15 15 | +16 16 | #: W605:4:6 +17 |-f""" + 17 |+rf""" +18 18 | multi-line +19 19 | literal +20 20 | with \_ somewhere -W605_1.py:18:6: W605 [*] Invalid escape sequence: `\_` +W605_1.py:25:40: W605 [*] Invalid escape sequence: `\_` | -16 | multi-line -17 | literal -18 | with \_ somewhere - | ^^ W605 -19 | in the middle -20 | """ +24 | #: W605:1:38 +25 | value = f'new line\nand invalid escape \_ here' + | ^^ W605 | - = help: Use a raw string literal + = help: Add backslash to escape sequence ℹ Safe fix -12 12 | ) -13 13 | -14 14 | #: W605:4:6 -15 |-""" - 15 |+r""" -16 16 | multi-line -17 17 | literal -18 18 | with \_ somewhere - -W605_1.py:25:12: W605 [*] Invalid escape sequence: `\.` - | -23 | def f(): -24 | #: W605:1:11 -25 | return'\.png$' - | ^^ W605 -26 | -27 | #: Okay - | - = help: Use a raw string literal - -ℹ Safe fix -22 22 | -23 23 | def f(): -24 24 | #: W605:1:11 -25 |- return'\.png$' - 25 |+ return r'\.png$' +22 22 | """ +23 23 | +24 24 | #: W605:1:38 +25 |-value = f'new line\nand invalid escape \_ here' + 25 |+value = f'new line\nand invalid escape \\_ here' 26 26 | -27 27 | #: Okay -28 28 | regex = r'\.png$' +27 27 | +28 28 | #: Okay + +W605_1.py:43:13: W605 [*] Invalid escape sequence: `\_` + | +41 | ''' # noqa +42 | +43 | regex = f'\\\_' + | ^^ W605 +44 | value = f'\{{1}}' +45 | value = f'\{1}' + | + = help: Add backslash to escape sequence + +ℹ Safe fix +40 40 | \w +41 41 | ''' # noqa +42 42 | +43 |-regex = f'\\\_' + 43 |+regex = f'\\\\_' +44 44 | value = f'\{{1}}' +45 45 | value = f'\{1}' +46 46 | value = f'{1:\}' + +W605_1.py:44:11: W605 [*] Invalid escape sequence: `\{` + | +43 | regex = f'\\\_' +44 | value = f'\{{1}}' + | ^^ W605 +45 
| value = f'\{1}' +46 | value = f'{1:\}' + | + = help: Use a raw string literal + +ℹ Safe fix +41 41 | ''' # noqa +42 42 | +43 43 | regex = f'\\\_' +44 |-value = f'\{{1}}' + 44 |+value = rf'\{{1}}' +45 45 | value = f'\{1}' +46 46 | value = f'{1:\}' +47 47 | value = f"{f"\{1}"}" + +W605_1.py:45:11: W605 [*] Invalid escape sequence: `\{` + | +43 | regex = f'\\\_' +44 | value = f'\{{1}}' +45 | value = f'\{1}' + | ^^ W605 +46 | value = f'{1:\}' +47 | value = f"{f"\{1}"}" + | + = help: Use a raw string literal + +ℹ Safe fix +42 42 | +43 43 | regex = f'\\\_' +44 44 | value = f'\{{1}}' +45 |-value = f'\{1}' + 45 |+value = rf'\{1}' +46 46 | value = f'{1:\}' +47 47 | value = f"{f"\{1}"}" +48 48 | value = rf"{f"\{1}"}" + +W605_1.py:46:14: W605 [*] Invalid escape sequence: `\}` + | +44 | value = f'\{{1}}' +45 | value = f'\{1}' +46 | value = f'{1:\}' + | ^^ W605 +47 | value = f"{f"\{1}"}" +48 | value = rf"{f"\{1}"}" + | + = help: Use a raw string literal + +ℹ Safe fix +43 43 | regex = f'\\\_' +44 44 | value = f'\{{1}}' +45 45 | value = f'\{1}' +46 |-value = f'{1:\}' + 46 |+value = rf'{1:\}' +47 47 | value = f"{f"\{1}"}" +48 48 | value = rf"{f"\{1}"}" +49 49 | + +W605_1.py:47:14: W605 [*] Invalid escape sequence: `\{` + | +45 | value = f'\{1}' +46 | value = f'{1:\}' +47 | value = f"{f"\{1}"}" + | ^^ W605 +48 | value = rf"{f"\{1}"}" + | + = help: Use a raw string literal + +ℹ Safe fix +44 44 | value = f'\{{1}}' +45 45 | value = f'\{1}' +46 46 | value = f'{1:\}' +47 |-value = f"{f"\{1}"}" + 47 |+value = f"{rf"\{1}"}" +48 48 | value = rf"{f"\{1}"}" +49 49 | +50 50 | # Okay + +W605_1.py:48:15: W605 [*] Invalid escape sequence: `\{` + | +46 | value = f'{1:\}' +47 | value = f"{f"\{1}"}" +48 | value = rf"{f"\{1}"}" + | ^^ W605 +49 | +50 | # Okay + | + = help: Use a raw string literal + +ℹ Safe fix +45 45 | value = f'\{1}' +46 46 | value = f'{1:\}' +47 47 | value = f"{f"\{1}"}" +48 |-value = rf"{f"\{1}"}" + 48 |+value = rf"{rf"\{1}"}" +49 49 | +50 50 | # Okay +51 51 | value = rf'\{{1}}' diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__W605_W605_2.py.snap b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__W605_W605_2.py.snap deleted file mode 100644 index 9f1016ae83..0000000000 --- a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__W605_W605_2.py.snap +++ /dev/null @@ -1,227 +0,0 @@ ---- -source: crates/ruff_linter/src/rules/pycodestyle/mod.rs ---- -W605_2.py:4:11: W605 [*] Invalid escape sequence: `\.` - | -3 | #: W605:1:10 -4 | regex = f'\.png$' - | ^^ W605 -5 | -6 | #: W605:2:1 - | - = help: Use a raw string literal - -ℹ Safe fix -1 1 | # Same as `W605_0.py` but using f-strings instead. 
-2 2 | -3 3 | #: W605:1:10 -4 |-regex = f'\.png$' - 4 |+regex = rf'\.png$' -5 5 | -6 6 | #: W605:2:1 -7 7 | regex = f''' - -W605_2.py:8:1: W605 [*] Invalid escape sequence: `\.` - | -6 | #: W605:2:1 -7 | regex = f''' -8 | \.png$ - | ^^ W605 -9 | ''' - | - = help: Use a raw string literal - -ℹ Safe fix -4 4 | regex = f'\.png$' -5 5 | -6 6 | #: W605:2:1 -7 |-regex = f''' - 7 |+regex = rf''' -8 8 | \.png$ -9 9 | ''' -10 10 | - -W605_2.py:13:7: W605 [*] Invalid escape sequence: `\_` - | -11 | #: W605:2:6 -12 | f( -13 | f'\_' - | ^^ W605 -14 | ) - | - = help: Use a raw string literal - -ℹ Safe fix -10 10 | -11 11 | #: W605:2:6 -12 12 | f( -13 |- f'\_' - 13 |+ rf'\_' -14 14 | ) -15 15 | -16 16 | #: W605:4:6 - -W605_2.py:20:6: W605 [*] Invalid escape sequence: `\_` - | -18 | multi-line -19 | literal -20 | with \_ somewhere - | ^^ W605 -21 | in the middle -22 | """ - | - = help: Use a raw string literal - -ℹ Safe fix -14 14 | ) -15 15 | -16 16 | #: W605:4:6 -17 |-f""" - 17 |+rf""" -18 18 | multi-line -19 19 | literal -20 20 | with \_ somewhere - -W605_2.py:25:40: W605 [*] Invalid escape sequence: `\_` - | -24 | #: W605:1:38 -25 | value = f'new line\nand invalid escape \_ here' - | ^^ W605 - | - = help: Add backslash to escape sequence - -ℹ Safe fix -22 22 | """ -23 23 | -24 24 | #: W605:1:38 -25 |-value = f'new line\nand invalid escape \_ here' - 25 |+value = f'new line\nand invalid escape \\_ here' -26 26 | -27 27 | -28 28 | #: Okay - -W605_2.py:43:13: W605 [*] Invalid escape sequence: `\_` - | -41 | ''' # noqa -42 | -43 | regex = f'\\\_' - | ^^ W605 -44 | value = f'\{{1}}' -45 | value = f'\{1}' - | - = help: Add backslash to escape sequence - -ℹ Safe fix -40 40 | \w -41 41 | ''' # noqa -42 42 | -43 |-regex = f'\\\_' - 43 |+regex = f'\\\\_' -44 44 | value = f'\{{1}}' -45 45 | value = f'\{1}' -46 46 | value = f'{1:\}' - -W605_2.py:44:11: W605 [*] Invalid escape sequence: `\{` - | -43 | regex = f'\\\_' -44 | value = f'\{{1}}' - | ^^ W605 -45 | value = f'\{1}' -46 | value = f'{1:\}' - | - = help: Use a raw string literal - -ℹ Safe fix -41 41 | ''' # noqa -42 42 | -43 43 | regex = f'\\\_' -44 |-value = f'\{{1}}' - 44 |+value = rf'\{{1}}' -45 45 | value = f'\{1}' -46 46 | value = f'{1:\}' -47 47 | value = f"{f"\{1}"}" - -W605_2.py:45:11: W605 [*] Invalid escape sequence: `\{` - | -43 | regex = f'\\\_' -44 | value = f'\{{1}}' -45 | value = f'\{1}' - | ^^ W605 -46 | value = f'{1:\}' -47 | value = f"{f"\{1}"}" - | - = help: Use a raw string literal - -ℹ Safe fix -42 42 | -43 43 | regex = f'\\\_' -44 44 | value = f'\{{1}}' -45 |-value = f'\{1}' - 45 |+value = rf'\{1}' -46 46 | value = f'{1:\}' -47 47 | value = f"{f"\{1}"}" -48 48 | value = rf"{f"\{1}"}" - -W605_2.py:46:14: W605 [*] Invalid escape sequence: `\}` - | -44 | value = f'\{{1}}' -45 | value = f'\{1}' -46 | value = f'{1:\}' - | ^^ W605 -47 | value = f"{f"\{1}"}" -48 | value = rf"{f"\{1}"}" - | - = help: Use a raw string literal - -ℹ Safe fix -43 43 | regex = f'\\\_' -44 44 | value = f'\{{1}}' -45 45 | value = f'\{1}' -46 |-value = f'{1:\}' - 46 |+value = rf'{1:\}' -47 47 | value = f"{f"\{1}"}" -48 48 | value = rf"{f"\{1}"}" -49 49 | - -W605_2.py:47:14: W605 [*] Invalid escape sequence: `\{` - | -45 | value = f'\{1}' -46 | value = f'{1:\}' -47 | value = f"{f"\{1}"}" - | ^^ W605 -48 | value = rf"{f"\{1}"}" - | - = help: Use a raw string literal - -ℹ Safe fix -44 44 | value = f'\{{1}}' -45 45 | value = f'\{1}' -46 46 | value = f'{1:\}' -47 |-value = f"{f"\{1}"}" - 47 |+value = f"{rf"\{1}"}" -48 48 | value = rf"{f"\{1}"}" -49 49 | -50 50 | # Okay - 
-W605_2.py:48:15: W605 [*] Invalid escape sequence: `\{` - | -46 | value = f'{1:\}' -47 | value = f"{f"\{1}"}" -48 | value = rf"{f"\{1}"}" - | ^^ W605 -49 | -50 | # Okay - | - = help: Use a raw string literal - -ℹ Safe fix -45 45 | value = f'\{1}' -46 46 | value = f'{1:\}' -47 47 | value = f"{f"\{1}"}" -48 |-value = rf"{f"\{1}"}" - 48 |+value = rf"{rf"\{1}"}" -49 49 | -50 50 | # Okay -51 51 | value = rf'\{{1}}' - - diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__preview__E402_E402.py.snap b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__preview__E402_E402.py.snap new file mode 100644 index 0000000000..7ec9e200b3 --- /dev/null +++ b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__preview__E402_E402.py.snap @@ -0,0 +1,28 @@ +--- +source: crates/ruff_linter/src/rules/pycodestyle/mod.rs +--- +E402.py:35:1: E402 Module level import not at top of file + | +33 | __some__magic = 1 +34 | +35 | import h + | ^^^^^^^^ E402 + | + +E402.py:45:1: E402 Module level import not at top of file + | +43 | import j +44 | +45 | import k; import l + | ^^^^^^^^ E402 + | + +E402.py:45:11: E402 Module level import not at top of file + | +43 | import j +44 | +45 | import k; import l + | ^^^^^^^^ E402 + | + + diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__shebang.snap b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__shebang.snap index 63d5b9fefc..fa3897bebc 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__shebang.snap +++ b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__shebang.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/pycodestyle/mod.rs --- -shebang.py:3:1: E265 Block comment should start with `# ` +shebang.py:3:1: E265 [*] Block comment should start with `# ` | 1 | #!/usr/bin/python 2 | # @@ -9,5 +9,13 @@ shebang.py:3:1: E265 Block comment should start with `# ` | ^^ E265 4 | #: | + = help: Format space + +ℹ Safe fix +1 1 | #!/usr/bin/python +2 2 | # +3 |-#! + 3 |+# ! +4 4 | #: diff --git a/crates/ruff_linter/src/rules/pydocstyle/rules/indent.rs b/crates/ruff_linter/src/rules/pydocstyle/rules/indent.rs index f56bc1bed4..c91565837f 100644 --- a/crates/ruff_linter/src/rules/pydocstyle/rules/indent.rs +++ b/crates/ruff_linter/src/rules/pydocstyle/rules/indent.rs @@ -172,8 +172,9 @@ pub(crate) fn indent(checker: &mut Checker, docstring: &Docstring) { let mut has_seen_tab = docstring.indentation.contains('\t'); let mut is_over_indented = true; let mut over_indented_lines = vec![]; - let mut over_indented_offset = usize::MAX; + let mut over_indented_size = usize::MAX; + let docstring_indent_size = docstring.indentation.chars().count(); for i in 0..lines.len() { // First lines and continuations doesn't need any indentation. if i == 0 || lines[i - 1].ends_with('\\') { @@ -189,6 +190,7 @@ pub(crate) fn indent(checker: &mut Checker, docstring: &Docstring) { } let line_indent = leading_space(line); + let line_indent_size = line_indent.chars().count(); // We only report tab indentation once, so only check if we haven't seen a tab // yet. @@ -197,9 +199,7 @@ pub(crate) fn indent(checker: &mut Checker, docstring: &Docstring) { if checker.enabled(Rule::UnderIndentation) { // We report under-indentation on every line. This isn't great, but enables // fix. 
- if (i == lines.len() - 1 || !is_blank) - && line_indent.len() < docstring.indentation.len() - { + if (i == lines.len() - 1 || !is_blank) && line_indent_size < docstring_indent_size { let mut diagnostic = Diagnostic::new(UnderIndentation, TextRange::empty(line.start())); diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( @@ -217,14 +217,12 @@ pub(crate) fn indent(checker: &mut Checker, docstring: &Docstring) { // until we've viewed all the lines, so for now, just track // the over-indentation status of every line. if i < lines.len() - 1 { - if line_indent.len() > docstring.indentation.len() { + if line_indent_size > docstring_indent_size { over_indented_lines.push(line); // Track the _smallest_ offset we see, in terms of characters. - over_indented_offset = std::cmp::min( - line_indent.chars().count() - docstring.indentation.chars().count(), - over_indented_offset, - ); + over_indented_size = + std::cmp::min(line_indent_size - docstring_indent_size, over_indented_size); } else { is_over_indented = false; } @@ -250,21 +248,33 @@ pub(crate) fn indent(checker: &mut Checker, docstring: &Docstring) { // enables the fix capability. let mut diagnostic = Diagnostic::new(OverIndentation, TextRange::empty(line.start())); + let edit = if indent.is_empty() { - Edit::deletion(line.start(), line_indent.text_len()) + // Delete the entire indent. + Edit::range_deletion(TextRange::at(line.start(), line_indent.text_len())) } else { // Convert the character count to an offset within the source. + // Example, where `[]` is a 2 byte non-breaking space: + // ``` + // def f(): + // """ Docstring header + // ^^^^ Real indentation is 4 chars + // docstring body, over-indented + // ^^^^^^ Over-indentation is 6 - 4 = 2 chars due to this line + // [] [] docstring body 2, further indented + // ^^^^^ We take these 4 chars/5 bytes to match the docstring ... + // ^^^ ... and these 2 chars/3 bytes to remove the `over_indented_size` ... + // ^^ ... but preserve this real indent + // ``` let offset = checker .locator() - .after(line.start() + indent.text_len()) + .after(line.start()) .chars() - .take(over_indented_offset) + .take(docstring.indentation.chars().count() + over_indented_size) .map(TextLen::text_len) .sum::<TextSize>(); - Edit::range_replacement( - indent.clone(), - TextRange::at(line.start(), indent.text_len() + offset), - ) + let range = TextRange::at(line.start(), offset); + Edit::range_replacement(indent, range) }; diagnostic.set_fix(Fix::safe_edit(edit)); checker.diagnostics.push(diagnostic); @@ -274,7 +284,8 @@ pub(crate) fn indent(checker: &mut Checker, docstring: &Docstring) { // If the last line is over-indented...
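(Editorial note, not part of the patch: the `indent.rs` hunk above switches the D208 over-indentation math from byte lengths, `len()`, to character counts, `chars().count()`, which matters when the docstring indentation contains multi-byte whitespace such as the no-break space from issue #9080. A minimal, illustrative Python snippet showing why the two measures diverge:)

```python
# A no-break space (U+00A0) is one character but two bytes in UTF-8, so trimming
# over-indentation by byte length removes the wrong amount of leading whitespace.
indent = "   \u00a0 "          # three spaces, a no-break space, one more space
print(len(indent))             # 5 characters
print(len(indent.encode()))    # 6 bytes
```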
if let Some(last) = lines.last() { let line_indent = leading_space(last); - if line_indent.len() > docstring.indentation.len() { + let line_indent_size = line_indent.chars().count(); + if line_indent_size > docstring_indent_size { let mut diagnostic = Diagnostic::new(OverIndentation, TextRange::empty(last.start())); let indent = clean_space(docstring.indentation); diff --git a/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__D208_D.py.snap b/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__D208_D.py.snap index caa57c1faa..5048d030c7 100644 --- a/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__D208_D.py.snap +++ b/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__D208_D.py.snap @@ -411,4 +411,22 @@ D.py:707:1: D208 [*] Docstring is over-indented 709 709 | 710 710 | +D.py:723:1: D208 [*] Docstring is over-indented + | +721 | """There's a non-breaking space (2-bytes) after 3 spaces (https://github.com/astral-sh/ruff/issues/9080). +722 | +723 |     Returns: + | D208 +724 | """ + | + = help: Remove over-indentation + +ℹ Safe fix +720 720 | def inconsistent_indent_byte_size(): +721 721 | """There's a non-breaking space (2-bytes) after 3 spaces (https://github.com/astral-sh/ruff/issues/9080). +722 722 | +723 |-     Returns: + 723 |+ Returns: +724 724 | """ + diff --git a/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__D208_D208.py.snap b/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__D208_D208.py.snap index ca8b68bcbe..517dc3802e 100644 --- a/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__D208_D208.py.snap +++ b/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__D208_D208.py.snap @@ -1,57 +1,81 @@ --- source: crates/ruff_linter/src/rules/pydocstyle/mod.rs --- -D208.py:3:1: D208 [*] Docstring is over-indented +D208.py:2:1: D208 [*] Docstring is over-indented | -1 | class Platform: -2 | """ Remove sampler -3 | Args: +1 | """ +2 | Author | D208 -4 |     Returns: -5 | """ +3 | """ | = help: Remove over-indentation ℹ Safe fix -1 1 | class Platform: -2 2 | """ Remove sampler -3 |- Args: - 3 |+ Args: -4 4 |     Returns: -5 5 | """ +1 1 | """ +2 |- Author + 2 |+Author +3 3 | """ +4 4 | +5 5 | -D208.py:4:1: D208 [*] Docstring is over-indented - | -2 | """ Remove sampler -3 | Args: -4 |     Returns: - | D208 -5 | """ - | - = help: Remove over-indentation +D208.py:8:1: D208 [*] Docstring is over-indented + | + 6 | class Platform: + 7 | """ Remove sampler + 8 | Args: + | D208 + 9 |     Returns: +10 | """ + | + = help: Remove over-indentation ℹ Safe fix -1 1 | class Platform: -2 2 | """ Remove sampler -3 3 | Args: -4 |-     Returns: - 4 |+ Returns: -5 5 | """ +5 5 | +6 6 | class Platform: +7 7 | """ Remove sampler +8 |- Args: + 8 |+ Args: +9 9 |     Returns: +10 10 | """ +11 11 | -D208.py:5:1: D208 [*] Docstring is over-indented - | -3 | Args: -4 |     Returns: -5 | """ - | D208 - | - = help: Remove over-indentation +D208.py:9:1: D208 [*] Docstring is over-indented + | + 7 | """ Remove sampler + 8 | Args: + 9 |     Returns: + | D208 +10 | """ + | + = help: Remove over-indentation ℹ Safe fix -2 2 | """ Remove sampler -3 3 | Args: -4 4 |     Returns: -5 |- """ - 5 |+ """ +6 6 | class Platform: +7 7 | """ Remove sampler +8 8 | Args: +9 |-     Returns: + 9 |+ Returns: +10 10 | """ +11 11 | +12 12 
| + +D208.py:10:1: D208 [*] Docstring is over-indented + | + 8 | Args: + 9 |     Returns: +10 | """ + | D208 + | + = help: Remove over-indentation + +ℹ Safe fix +7 7 | """ Remove sampler +8 8 | Args: +9 9 |     Returns: +10 |- """ + 10 |+ """ +11 11 | +12 12 | +13 13 | def memory_test(): diff --git a/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__D213_D.py.snap b/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__D213_D.py.snap index e0f3516e76..0253bc44cd 100644 --- a/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__D213_D.py.snap +++ b/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__D213_D.py.snap @@ -662,7 +662,7 @@ D.py:712:5: D213 [*] Multi-line docstring summary should start at the second lin 713 | | 714 | | This is not overindented 715 | | This is overindented, but since one line is not overindented this should not raise -716 | | And so is this, but it we should preserve the extra space on this line relative +716 | | And so is this, but it we should preserve the extra space on this line relative 717 | | """ | |_______^ D213 | @@ -679,4 +679,27 @@ D.py:712:5: D213 [*] Multi-line docstring summary should start at the second lin 714 715 | This is not overindented 715 716 | This is overindented, but since one line is not overindented this should not raise +D.py:721:5: D213 [*] Multi-line docstring summary should start at the second line + | +720 | def inconsistent_indent_byte_size(): +721 | """There's a non-breaking space (2-bytes) after 3 spaces (https://github.com/astral-sh/ruff/issues/9080). + | _____^ +722 | | +723 | |     Returns: +724 | | """ + | |_______^ D213 + | + = help: Insert line break and indentation after opening quotes + +ℹ Safe fix +718 718 | +719 719 | +720 720 | def inconsistent_indent_byte_size(): +721 |- """There's a non-breaking space (2-bytes) after 3 spaces (https://github.com/astral-sh/ruff/issues/9080). + 721 |+ """ + 722 |+ There's a non-breaking space (2-bytes) after 3 spaces (https://github.com/astral-sh/ruff/issues/9080). 
+722 723 | +723 724 |     Returns: +724 725 | """ + diff --git a/crates/ruff_linter/src/rules/pyflakes/mod.rs b/crates/ruff_linter/src/rules/pyflakes/mod.rs index 435755ce9f..814dcb3d74 100644 --- a/crates/ruff_linter/src/rules/pyflakes/mod.rs +++ b/crates/ruff_linter/src/rules/pyflakes/mod.rs @@ -52,6 +52,7 @@ mod tests { #[test_case(Rule::UnusedImport, Path::new("F401_17.py"))] #[test_case(Rule::UnusedImport, Path::new("F401_18.py"))] #[test_case(Rule::UnusedImport, Path::new("F401_19.py"))] + #[test_case(Rule::UnusedImport, Path::new("F401_20.py"))] #[test_case(Rule::ImportShadowedByLoopVar, Path::new("F402.py"))] #[test_case(Rule::UndefinedLocalWithImportStar, Path::new("F403.py"))] #[test_case(Rule::LateFutureImport, Path::new("F404.py"))] diff --git a/crates/ruff_linter/src/rules/pyflakes/rules/f_string_missing_placeholders.rs b/crates/ruff_linter/src/rules/pyflakes/rules/f_string_missing_placeholders.rs index f2adeb69d7..36263f3328 100644 --- a/crates/ruff_linter/src/rules/pyflakes/rules/f_string_missing_placeholders.rs +++ b/crates/ruff_linter/src/rules/pyflakes/rules/f_string_missing_placeholders.rs @@ -1,6 +1,6 @@ use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix}; use ruff_macros::{derive_message_formats, violation}; -use ruff_python_ast::{self as ast, Expr}; +use ruff_python_ast as ast; use ruff_source_file::Locator; use ruff_text_size::{Ranged, TextRange, TextSize}; @@ -47,11 +47,12 @@ impl AlwaysFixableViolation for FStringMissingPlaceholders { /// F541 pub(crate) fn f_string_missing_placeholders(checker: &mut Checker, expr: &ast::ExprFString) { - if expr - .value - .f_strings() - .any(|f_string| f_string.values.iter().any(Expr::is_formatted_value_expr)) - { + if expr.value.f_strings().any(|f_string| { + f_string + .elements + .iter() + .any(ast::FStringElement::is_expression) + }) { return; } diff --git a/crates/ruff_linter/src/rules/pyflakes/rules/unused_variable.rs b/crates/ruff_linter/src/rules/pyflakes/rules/unused_variable.rs index f5eb694483..e099c15f31 100644 --- a/crates/ruff_linter/src/rules/pyflakes/rules/unused_variable.rs +++ b/crates/ruff_linter/src/rules/pyflakes/rules/unused_variable.rs @@ -247,6 +247,14 @@ fn remove_unused_variable(binding: &Binding, checker: &Checker) -> Option { Some(Fix::unsafe_edit(edit).isolate(isolation)) }; } + } else { + let name = binding.name(checker.locator()); + let renamed = format!("_{name}"); + if checker.settings.dummy_variable_rgx.is_match(&renamed) { + let edit = Edit::range_replacement(renamed, binding.range()); + + return Some(Fix::unsafe_edit(edit).isolate(isolation)); + } } } diff --git a/crates/ruff_linter/src/rules/pyflakes/snapshots/ruff_linter__rules__pyflakes__tests__F401_F401_20.py.snap b/crates/ruff_linter/src/rules/pyflakes/snapshots/ruff_linter__rules__pyflakes__tests__F401_F401_20.py.snap new file mode 100644 index 0000000000..d0b409f39e --- /dev/null +++ b/crates/ruff_linter/src/rules/pyflakes/snapshots/ruff_linter__rules__pyflakes__tests__F401_F401_20.py.snap @@ -0,0 +1,4 @@ +--- +source: crates/ruff_linter/src/rules/pyflakes/mod.rs +--- + diff --git a/crates/ruff_linter/src/rules/pyflakes/snapshots/ruff_linter__rules__pyflakes__tests__F841_F841_0.py.snap b/crates/ruff_linter/src/rules/pyflakes/snapshots/ruff_linter__rules__pyflakes__tests__F841_F841_0.py.snap index 3fc06720a7..7e1b2458bf 100644 --- a/crates/ruff_linter/src/rules/pyflakes/snapshots/ruff_linter__rules__pyflakes__tests__F841_F841_0.py.snap +++ 
b/crates/ruff_linter/src/rules/pyflakes/snapshots/ruff_linter__rules__pyflakes__tests__F841_F841_0.py.snap @@ -57,7 +57,7 @@ F841_0.py:20:5: F841 [*] Local variable `foo` is assigned to but never used 22 21 | 23 22 | bar = (1, 2) -F841_0.py:21:6: F841 Local variable `a` is assigned to but never used +F841_0.py:21:6: F841 [*] Local variable `a` is assigned to but never used | 19 | def f(): 20 | foo = (1, 2) @@ -68,7 +68,17 @@ F841_0.py:21:6: F841 Local variable `a` is assigned to but never used | = help: Remove assignment to unused variable `a` -F841_0.py:21:9: F841 Local variable `b` is assigned to but never used +ℹ Unsafe fix +18 18 | +19 19 | def f(): +20 20 | foo = (1, 2) +21 |- (a, b) = (1, 2) + 21 |+ (_a, b) = (1, 2) +22 22 | +23 23 | bar = (1, 2) +24 24 | (c, d) = bar + +F841_0.py:21:9: F841 [*] Local variable `b` is assigned to but never used | 19 | def f(): 20 | foo = (1, 2) @@ -79,6 +89,16 @@ F841_0.py:21:9: F841 Local variable `b` is assigned to but never used | = help: Remove assignment to unused variable `b` +ℹ Unsafe fix +18 18 | +19 19 | def f(): +20 20 | foo = (1, 2) +21 |- (a, b) = (1, 2) + 21 |+ (a, _b) = (1, 2) +22 22 | +23 23 | bar = (1, 2) +24 24 | (c, d) = bar + F841_0.py:26:14: F841 [*] Local variable `baz` is assigned to but never used | 24 | (c, d) = bar diff --git a/crates/ruff_linter/src/rules/pyflakes/snapshots/ruff_linter__rules__pyflakes__tests__F841_F841_1.py.snap b/crates/ruff_linter/src/rules/pyflakes/snapshots/ruff_linter__rules__pyflakes__tests__F841_F841_1.py.snap index 7985de9913..9cb0b013fd 100644 --- a/crates/ruff_linter/src/rules/pyflakes/snapshots/ruff_linter__rules__pyflakes__tests__F841_F841_1.py.snap +++ b/crates/ruff_linter/src/rules/pyflakes/snapshots/ruff_linter__rules__pyflakes__tests__F841_F841_1.py.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/pyflakes/mod.rs --- -F841_1.py:6:5: F841 Local variable `x` is assigned to but never used +F841_1.py:6:5: F841 [*] Local variable `x` is assigned to but never used | 5 | def f(): 6 | x, y = 1, 2 # this triggers F841 as it's just a simple assignment where unpacking isn't needed @@ -9,7 +9,17 @@ F841_1.py:6:5: F841 Local variable `x` is assigned to but never used | = help: Remove assignment to unused variable `x` -F841_1.py:6:8: F841 Local variable `y` is assigned to but never used +ℹ Unsafe fix +3 3 | +4 4 | +5 5 | def f(): +6 |- x, y = 1, 2 # this triggers F841 as it's just a simple assignment where unpacking isn't needed + 6 |+ _x, y = 1, 2 # this triggers F841 as it's just a simple assignment where unpacking isn't needed +7 7 | +8 8 | +9 9 | def f(): + +F841_1.py:6:8: F841 [*] Local variable `y` is assigned to but never used | 5 | def f(): 6 | x, y = 1, 2 # this triggers F841 as it's just a simple assignment where unpacking isn't needed @@ -17,6 +27,16 @@ F841_1.py:6:8: F841 Local variable `y` is assigned to but never used | = help: Remove assignment to unused variable `y` +ℹ Unsafe fix +3 3 | +4 4 | +5 5 | def f(): +6 |- x, y = 1, 2 # this triggers F841 as it's just a simple assignment where unpacking isn't needed + 6 |+ x, _y = 1, 2 # this triggers F841 as it's just a simple assignment where unpacking isn't needed +7 7 | +8 8 | +9 9 | def f(): + F841_1.py:16:14: F841 [*] Local variable `coords` is assigned to but never used | 15 | def f(): @@ -53,7 +73,7 @@ F841_1.py:20:5: F841 [*] Local variable `coords` is assigned to but never used 22 22 | 23 23 | def f(): -F841_1.py:24:6: F841 Local variable `a` is assigned to but never used +F841_1.py:24:6: F841 [*] Local variable `a` is 
assigned to but never used | 23 | def f(): 24 | (a, b) = (x, y) = 1, 2 # this triggers F841 on everything @@ -61,7 +81,14 @@ F841_1.py:24:6: F841 Local variable `a` is assigned to but never used | = help: Remove assignment to unused variable `a` -F841_1.py:24:9: F841 Local variable `b` is assigned to but never used +ℹ Unsafe fix +21 21 | +22 22 | +23 23 | def f(): +24 |- (a, b) = (x, y) = 1, 2 # this triggers F841 on everything + 24 |+ (_a, b) = (x, y) = 1, 2 # this triggers F841 on everything + +F841_1.py:24:9: F841 [*] Local variable `b` is assigned to but never used | 23 | def f(): 24 | (a, b) = (x, y) = 1, 2 # this triggers F841 on everything @@ -69,7 +96,14 @@ F841_1.py:24:9: F841 Local variable `b` is assigned to but never used | = help: Remove assignment to unused variable `b` -F841_1.py:24:15: F841 Local variable `x` is assigned to but never used +ℹ Unsafe fix +21 21 | +22 22 | +23 23 | def f(): +24 |- (a, b) = (x, y) = 1, 2 # this triggers F841 on everything + 24 |+ (a, _b) = (x, y) = 1, 2 # this triggers F841 on everything + +F841_1.py:24:15: F841 [*] Local variable `x` is assigned to but never used | 23 | def f(): 24 | (a, b) = (x, y) = 1, 2 # this triggers F841 on everything @@ -77,7 +111,14 @@ F841_1.py:24:15: F841 Local variable `x` is assigned to but never used | = help: Remove assignment to unused variable `x` -F841_1.py:24:18: F841 Local variable `y` is assigned to but never used +ℹ Unsafe fix +21 21 | +22 22 | +23 23 | def f(): +24 |- (a, b) = (x, y) = 1, 2 # this triggers F841 on everything + 24 |+ (a, b) = (_x, y) = 1, 2 # this triggers F841 on everything + +F841_1.py:24:18: F841 [*] Local variable `y` is assigned to but never used | 23 | def f(): 24 | (a, b) = (x, y) = 1, 2 # this triggers F841 on everything @@ -85,4 +126,11 @@ F841_1.py:24:18: F841 Local variable `y` is assigned to but never used | = help: Remove assignment to unused variable `y` +ℹ Unsafe fix +21 21 | +22 22 | +23 23 | def f(): +24 |- (a, b) = (x, y) = 1, 2 # this triggers F841 on everything + 24 |+ (a, b) = (x, _y) = 1, 2 # this triggers F841 on everything + diff --git a/crates/ruff_linter/src/rules/pyflakes/snapshots/ruff_linter__rules__pyflakes__tests__F841_F841_3.py.snap b/crates/ruff_linter/src/rules/pyflakes/snapshots/ruff_linter__rules__pyflakes__tests__F841_F841_3.py.snap index 9a29ab4098..8b60f4da8a 100644 --- a/crates/ruff_linter/src/rules/pyflakes/snapshots/ruff_linter__rules__pyflakes__tests__F841_F841_3.py.snap +++ b/crates/ruff_linter/src/rules/pyflakes/snapshots/ruff_linter__rules__pyflakes__tests__F841_F841_3.py.snap @@ -156,7 +156,7 @@ F841_3.py:27:46: F841 [*] Local variable `z3` is assigned to but never used 29 29 | 30 30 | -F841_3.py:32:6: F841 Local variable `x1` is assigned to but never used +F841_3.py:32:6: F841 [*] Local variable `x1` is assigned to but never used | 31 | def f(): 32 | (x1, y1) = (1, 2) @@ -166,7 +166,17 @@ F841_3.py:32:6: F841 Local variable `x1` is assigned to but never used | = help: Remove assignment to unused variable `x1` -F841_3.py:32:10: F841 Local variable `y1` is assigned to but never used +ℹ Unsafe fix +29 29 | +30 30 | +31 31 | def f(): +32 |- (x1, y1) = (1, 2) + 32 |+ (_x1, y1) = (1, 2) +33 33 | (x2, y2) = coords2 = (1, 2) +34 34 | coords3 = (x3, y3) = (1, 2) +35 35 | + +F841_3.py:32:10: F841 [*] Local variable `y1` is assigned to but never used | 31 | def f(): 32 | (x1, y1) = (1, 2) @@ -176,6 +186,16 @@ F841_3.py:32:10: F841 Local variable `y1` is assigned to but never used | = help: Remove assignment to unused variable `y1` +ℹ Unsafe fix +29 29 | 
+30 30 | +31 31 | def f(): +32 |- (x1, y1) = (1, 2) + 32 |+ (x1, _y1) = (1, 2) +33 33 | (x2, y2) = coords2 = (1, 2) +34 34 | coords3 = (x3, y3) = (1, 2) +35 35 | + F841_3.py:33:16: F841 [*] Local variable `coords2` is assigned to but never used | 31 | def f(): diff --git a/crates/ruff_linter/src/rules/pyflakes/snapshots/ruff_linter__rules__pyflakes__tests__preview__F841_F841_4.py.snap b/crates/ruff_linter/src/rules/pyflakes/snapshots/ruff_linter__rules__pyflakes__tests__preview__F841_F841_4.py.snap index f781c31d82..661343dd14 100644 --- a/crates/ruff_linter/src/rules/pyflakes/snapshots/ruff_linter__rules__pyflakes__tests__preview__F841_F841_4.py.snap +++ b/crates/ruff_linter/src/rules/pyflakes/snapshots/ruff_linter__rules__pyflakes__tests__preview__F841_F841_4.py.snap @@ -20,7 +20,7 @@ F841_4.py:12:5: F841 [*] Local variable `a` is assigned to but never used 14 14 | 15 15 | -F841_4.py:13:5: F841 Local variable `b` is assigned to but never used +F841_4.py:13:5: F841 [*] Local variable `b` is assigned to but never used | 11 | def bar(): 12 | a = foo() @@ -29,7 +29,17 @@ F841_4.py:13:5: F841 Local variable `b` is assigned to but never used | = help: Remove assignment to unused variable `b` -F841_4.py:13:8: F841 Local variable `c` is assigned to but never used +ℹ Unsafe fix +10 10 | +11 11 | def bar(): +12 12 | a = foo() +13 |- b, c = foo() + 13 |+ _b, c = foo() +14 14 | +15 15 | +16 16 | def baz(): + +F841_4.py:13:8: F841 [*] Local variable `c` is assigned to but never used | 11 | def bar(): 12 | a = foo() @@ -38,4 +48,14 @@ F841_4.py:13:8: F841 Local variable `c` is assigned to but never used | = help: Remove assignment to unused variable `c` +ℹ Unsafe fix +10 10 | +11 11 | def bar(): +12 12 | a = foo() +13 |- b, c = foo() + 13 |+ b, _c = foo() +14 14 | +15 15 | +16 16 | def baz(): + diff --git a/crates/ruff_linter/src/rules/pylint/helpers.rs b/crates/ruff_linter/src/rules/pylint/helpers.rs index 027272d03c..d68e639b59 100644 --- a/crates/ruff_linter/src/rules/pylint/helpers.rs +++ b/crates/ruff_linter/src/rules/pylint/helpers.rs @@ -1,9 +1,11 @@ use std::fmt; use ruff_python_ast as ast; -use ruff_python_ast::{Arguments, CmpOp, Expr}; +use ruff_python_ast::visitor::Visitor; +use ruff_python_ast::{visitor, Arguments, CmpOp, Expr, Stmt}; use ruff_python_semantic::analyze::function_type; use ruff_python_semantic::{ScopeKind, SemanticModel}; +use ruff_text_size::TextRange; use crate::settings::LinterSettings; @@ -82,3 +84,116 @@ impl fmt::Display for CmpOpExt { write!(f, "{representation}") } } + +/// Visitor to track reads from an iterable in a loop. +#[derive(Debug)] +pub(crate) struct SequenceIndexVisitor<'a> { + /// `letters`, given `for index, letter in enumerate(letters)`. + sequence_name: &'a str, + /// `index`, given `for index, letter in enumerate(letters)`. + index_name: &'a str, + /// `letter`, given `for index, letter in enumerate(letters)`. + value_name: &'a str, + /// The ranges of any `letters[index]` accesses. + accesses: Vec<TextRange>, + /// Whether any of the variables have been modified.
+ modified: bool, +} + +impl<'a> SequenceIndexVisitor<'a> { + pub(crate) fn new(sequence_name: &'a str, index_name: &'a str, value_name: &'a str) -> Self { + Self { + sequence_name, + index_name, + value_name, + accesses: Vec::new(), + modified: false, + } + } + + pub(crate) fn into_accesses(self) -> Vec<TextRange> { + self.accesses + } +} + +impl SequenceIndexVisitor<'_> { + fn is_assignment(&self, expr: &Expr) -> bool { + // If we see the sequence, a subscript, or the index being modified, we'll stop emitting + // diagnostics. + match expr { + Expr::Name(ast::ExprName { id, .. }) => { + id == self.sequence_name || id == self.index_name || id == self.value_name + } + Expr::Subscript(ast::ExprSubscript { value, slice, .. }) => { + let Expr::Name(ast::ExprName { id, .. }) = value.as_ref() else { + return false; + }; + if id == self.sequence_name { + let Expr::Name(ast::ExprName { id, .. }) = slice.as_ref() else { + return false; + }; + if id == self.index_name { + return true; + } + } + false + } + _ => false, + } + } +} + +impl<'a> Visitor<'_> for SequenceIndexVisitor<'a> { + fn visit_stmt(&mut self, stmt: &Stmt) { + if self.modified { + return; + } + match stmt { + Stmt::Assign(ast::StmtAssign { targets, value, .. }) => { + self.modified = targets.iter().any(|target| self.is_assignment(target)); + self.visit_expr(value); + } + Stmt::AnnAssign(ast::StmtAnnAssign { target, value, .. }) => { + if let Some(value) = value { + self.modified = self.is_assignment(target); + self.visit_expr(value); + } + } + Stmt::AugAssign(ast::StmtAugAssign { target, value, .. }) => { + self.modified = self.is_assignment(target); + self.visit_expr(value); + } + Stmt::Delete(ast::StmtDelete { targets, .. }) => { + self.modified = targets.iter().any(|target| self.is_assignment(target)); + } + _ => visitor::walk_stmt(self, stmt), + } + } + + fn visit_expr(&mut self, expr: &Expr) { + if self.modified { + return; + } + match expr { + Expr::Subscript(ast::ExprSubscript { + value, + slice, + range, + .. + }) => { + let Expr::Name(ast::ExprName { id, .. }) = value.as_ref() else { + return; + }; + if id == self.sequence_name { + let Expr::Name(ast::ExprName { id, ..
}) = slice.as_ref() else { + return; + }; + if id == self.index_name { + self.accesses.push(*range); + } + } + } + _ => visitor::walk_expr(self, expr), + } + } +} diff --git a/crates/ruff_linter/src/rules/pylint/mod.rs b/crates/ruff_linter/src/rules/pylint/mod.rs index 20920043f9..303395de50 100644 --- a/crates/ruff_linter/src/rules/pylint/mod.rs +++ b/crates/ruff_linter/src/rules/pylint/mod.rs @@ -94,6 +94,7 @@ mod tests { #[test_case(Rule::RedefinedLoopName, Path::new("redefined_loop_name.py"))] #[test_case(Rule::ReturnInInit, Path::new("return_in_init.py"))] #[test_case(Rule::TooManyArguments, Path::new("too_many_arguments.py"))] + #[test_case(Rule::TooManyPositional, Path::new("too_many_positional.py"))] #[test_case(Rule::TooManyBranches, Path::new("too_many_branches.py"))] #[test_case( Rule::TooManyReturnStatements, @@ -249,6 +250,22 @@ mod tests { Ok(()) } + #[test] + fn max_positional_args() -> Result<()> { + let diagnostics = test_path( + Path::new("pylint/too_many_positional_params.py"), + &LinterSettings { + pylint: pylint::settings::Settings { + max_positional_args: 4, + ..pylint::settings::Settings::default() + }, + ..LinterSettings::for_rule(Rule::TooManyPositional) + }, + )?; + assert_messages!(diagnostics); + Ok(()) + } + #[test] fn max_branches() -> Result<()> { let diagnostics = test_path( @@ -329,6 +346,22 @@ mod tests { Ok(()) } + #[test] + fn too_many_locals() -> Result<()> { + let diagnostics = test_path( + Path::new("pylint/too_many_locals.py"), + &LinterSettings { + pylint: pylint::settings::Settings { + max_locals: 15, + ..pylint::settings::Settings::default() + }, + ..LinterSettings::for_rules(vec![Rule::TooManyLocals]) + }, + )?; + assert_messages!(diagnostics); + Ok(()) + } + #[test] fn unspecified_encoding_python39_or_lower() -> Result<()> { let diagnostics = test_path( diff --git a/crates/ruff_linter/src/rules/pylint/rules/assert_on_string_literal.rs b/crates/ruff_linter/src/rules/pylint/rules/assert_on_string_literal.rs index fee8f01a5c..5f0801731f 100644 --- a/crates/ruff_linter/src/rules/pylint/rules/assert_on_string_literal.rs +++ b/crates/ruff_linter/src/rules/pylint/rules/assert_on_string_literal.rs @@ -70,26 +70,28 @@ pub(crate) fn assert_on_string_literal(checker: &mut Checker, test: &Expr) { )); } Expr::FString(ast::ExprFString { value, .. }) => { - let kind = if value.parts().all(|f_string_part| match f_string_part { + let kind = if value.iter().all(|f_string_part| match f_string_part { ast::FStringPart::Literal(literal) => literal.is_empty(), - ast::FStringPart::FString(f_string) => f_string.values.iter().all(|value| { - if let Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) = value { - value.is_empty() - } else { - false - } - }), + ast::FStringPart::FString(f_string) => { + f_string.elements.iter().all(|element| match element { + ast::FStringElement::Literal(ast::FStringLiteralElement { + value, .. + }) => value.is_empty(), + ast::FStringElement::Expression(_) => false, + }) + } }) { Kind::Empty - } else if value.parts().any(|f_string_part| match f_string_part { + } else if value.iter().any(|f_string_part| match f_string_part { ast::FStringPart::Literal(literal) => !literal.is_empty(), - ast::FStringPart::FString(f_string) => f_string.values.iter().any(|value| { - if let Expr::StringLiteral(ast::ExprStringLiteral { value, .. 
}) = value { - !value.is_empty() - } else { - false - } - }), + ast::FStringPart::FString(f_string) => { + f_string.elements.iter().any(|element| match element { + ast::FStringElement::Literal(ast::FStringLiteralElement { + value, .. + }) => !value.is_empty(), + ast::FStringElement::Expression(_) => false, + }) + } }) { Kind::NonEmpty } else { diff --git a/crates/ruff_linter/src/rules/pylint/rules/comparison_with_itself.rs b/crates/ruff_linter/src/rules/pylint/rules/comparison_with_itself.rs index ce02b12fff..8aa58e70e8 100644 --- a/crates/ruff_linter/src/rules/pylint/rules/comparison_with_itself.rs +++ b/crates/ruff_linter/src/rules/pylint/rules/comparison_with_itself.rs @@ -21,6 +21,14 @@ use crate::rules::pylint::helpers::CmpOpExt; /// foo == foo /// ``` /// +/// In some cases, self-comparisons are used to determine whether a float is +/// NaN. Instead, prefer `math.isnan`: +/// ```python +/// import math +/// +/// math.isnan(foo) +/// ``` +/// /// ## References /// - [Python documentation: Comparisons](https://docs.python.org/3/reference/expressions.html#comparisons) #[violation] diff --git a/crates/ruff_linter/src/rules/pylint/rules/mod.rs b/crates/ruff_linter/src/rules/pylint/rules/mod.rs index 02ae6796a5..85699ab30e 100644 --- a/crates/ruff_linter/src/rules/pylint/rules/mod.rs +++ b/crates/ruff_linter/src/rules/pylint/rules/mod.rs @@ -55,6 +55,8 @@ pub(crate) use sys_exit_alias::*; pub(crate) use too_many_arguments::*; pub(crate) use too_many_boolean_expressions::*; pub(crate) use too_many_branches::*; +pub(crate) use too_many_locals::*; +pub(crate) use too_many_positional::*; pub(crate) use too_many_public_methods::*; pub(crate) use too_many_return_statements::*; pub(crate) use too_many_statements::*; @@ -131,6 +133,8 @@ mod sys_exit_alias; mod too_many_arguments; mod too_many_boolean_expressions; mod too_many_branches; +mod too_many_locals; +mod too_many_positional; mod too_many_public_methods; mod too_many_return_statements; mod too_many_statements; diff --git a/crates/ruff_linter/src/rules/pylint/rules/no_method_decorator.rs b/crates/ruff_linter/src/rules/pylint/rules/no_method_decorator.rs index d2dc1ea329..60439119fd 100644 --- a/crates/ruff_linter/src/rules/pylint/rules/no_method_decorator.rs +++ b/crates/ruff_linter/src/rules/pylint/rules/no_method_decorator.rs @@ -53,7 +53,7 @@ impl AlwaysFixableViolation for NoClassmethodDecorator { /// ## Example /// ```python /// class Foo: -/// def bar(cls): +/// def bar(arg1, arg2): /// ... /// /// bar = staticmethod(bar) @@ -63,7 +63,7 @@ impl AlwaysFixableViolation for NoClassmethodDecorator { /// ```python /// class Foo: /// @staticmethod -/// def bar(cls): +/// def bar(arg1, arg2): /// ... /// ``` #[violation] diff --git a/crates/ruff_linter/src/rules/pylint/rules/subprocess_run_without_check.rs b/crates/ruff_linter/src/rules/pylint/rules/subprocess_run_without_check.rs index 4758de8d01..67b494d6bb 100644 --- a/crates/ruff_linter/src/rules/pylint/rules/subprocess_run_without_check.rs +++ b/crates/ruff_linter/src/rules/pylint/rules/subprocess_run_without_check.rs @@ -1,9 +1,10 @@ -use ruff_diagnostics::{Diagnostic, Violation}; +use ruff_diagnostics::{AlwaysFixableViolation, Applicability, Diagnostic, Fix}; use ruff_macros::{derive_message_formats, violation}; use ruff_python_ast as ast; use ruff_text_size::Ranged; use crate::checkers::ast::Checker; +use crate::fix::edits::add_argument; /// ## What it does /// Checks for uses of `subprocess.run` without an explicit `check` argument. 
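(Editorial note, not part of the patch: the hunks below make PLW1510 always fixable. The fix appends `check=False` through `add_argument`, and is downgraded to an unsafe fix when the call spreads `**kwargs`, since the expansion could already supply `check`. A sketch of the before/after, reusing the `["ls", "nonexistent"]` example from the rule documentation; the variable names are illustrative:)

```python
import subprocess

subprocess.run(["ls", "nonexistent"])               # PLW1510: no explicit `check`
subprocess.run(["ls", "nonexistent"], check=False)  # after the safe fix

extra = {"capture_output": True}
# With `**extra` in the call, the same edit is only offered as an unsafe fix:
# if `extra` already contained a "check" key, adding `check=False` would raise
# a duplicate keyword argument error at runtime.
subprocess.run(["ls", "nonexistent"], check=False, **extra)
```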
@@ -36,16 +37,25 @@ use crate::checkers::ast::Checker; /// subprocess.run(["ls", "nonexistent"], check=False) # Explicitly no check. /// ``` /// +/// ## Fix safety +/// This rule's fix is marked as unsafe for function calls that contain +/// `**kwargs`, as adding a `check` keyword argument to such a call may lead +/// to a duplicate keyword argument error. +/// /// ## References /// - [Python documentation: `subprocess.run`](https://docs.python.org/3/library/subprocess.html#subprocess.run) #[violation] pub struct SubprocessRunWithoutCheck; -impl Violation for SubprocessRunWithoutCheck { +impl AlwaysFixableViolation for SubprocessRunWithoutCheck { #[derive_message_formats] fn message(&self) -> String { format!("`subprocess.run` without explicit `check` argument") } + + fn fix_title(&self) -> String { + "Add explicit `check=False`".to_string() + } } /// PLW1510 @@ -56,10 +66,27 @@ pub(crate) fn subprocess_run_without_check(checker: &mut Checker, call: &ast::Ex .is_some_and(|call_path| matches!(call_path.as_slice(), ["subprocess", "run"])) { if call.arguments.find_keyword("check").is_none() { - checker.diagnostics.push(Diagnostic::new( - SubprocessRunWithoutCheck, - call.func.range(), + let mut diagnostic = Diagnostic::new(SubprocessRunWithoutCheck, call.func.range()); + diagnostic.set_fix(Fix::applicable_edit( + add_argument( + "check=False", + &call.arguments, + checker.indexer().comment_ranges(), + checker.locator().contents(), + ), + // If the function call contains `**kwargs`, mark the fix as unsafe. + if call + .arguments + .keywords + .iter() + .any(|keyword| keyword.arg.is_none()) + { + Applicability::Unsafe + } else { + Applicability::Safe + }, )); + checker.diagnostics.push(diagnostic); } } } diff --git a/crates/ruff_linter/src/rules/pylint/rules/too_many_locals.rs b/crates/ruff_linter/src/rules/pylint/rules/too_many_locals.rs new file mode 100644 index 0000000000..e2137b637b --- /dev/null +++ b/crates/ruff_linter/src/rules/pylint/rules/too_many_locals.rs @@ -0,0 +1,59 @@ +use ruff_diagnostics::{Diagnostic, Violation}; +use ruff_macros::{derive_message_formats, violation}; +use ruff_python_ast::identifier::Identifier; +use ruff_python_semantic::{Scope, ScopeKind}; + +use crate::checkers::ast::Checker; + +/// ## What it does +/// Checks for functions that include too many local variables. +/// +/// By default, this rule allows up to fifteen locals, as configured by the +/// [`pylint.max-locals`] option. +/// +/// ## Why is this bad? +/// Functions with many local variables are harder to understand and maintain. +/// +/// Consider refactoring functions with many local variables into smaller +/// functions with fewer assignments. 
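(Editorial note, not part of the patch: the new PLR0914 rule introduced in this hunk counts the assignment bindings in a function scope against `pylint.max-locals`, which the documentation above says defaults to fifteen. A toy illustration, assuming the limit is lowered to 5 purely for the example:)

```python
def busy():
    a = 1
    b = 2
    c = 3
    d = 4
    e = 5
    f = a + b + c + d + e  # six local bindings: with max-locals = 5, PLR0914 flags `busy`
    return f
```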
+/// +/// ## Options +/// - `pylint.max-locals` +#[violation] +pub struct TooManyLocals { + current_amount: usize, + max_amount: usize, +} + +impl Violation for TooManyLocals { + #[derive_message_formats] + fn message(&self) -> String { + let TooManyLocals { + current_amount, + max_amount, + } = self; + format!("Too many local variables: ({current_amount}/{max_amount})") + } +} + +/// PLR0914 +pub(crate) fn too_many_locals(checker: &Checker, scope: &Scope, diagnostics: &mut Vec<Diagnostic>) { + let num_locals = scope + .binding_ids() + .filter(|id| { + let binding = checker.semantic().binding(*id); + binding.kind.is_assignment() + }) + .count(); + if num_locals > checker.settings.pylint.max_locals { + if let ScopeKind::Function(func) = scope.kind { + diagnostics.push(Diagnostic::new( + TooManyLocals { + current_amount: num_locals, + max_amount: checker.settings.pylint.max_locals, + }, + func.identifier(), + )); + }; + } +} diff --git a/crates/ruff_linter/src/rules/pylint/rules/too_many_positional.rs b/crates/ruff_linter/src/rules/pylint/rules/too_many_positional.rs new file mode 100644 index 0000000000..bc424441c0 --- /dev/null +++ b/crates/ruff_linter/src/rules/pylint/rules/too_many_positional.rs @@ -0,0 +1,90 @@ +use ruff_diagnostics::{Diagnostic, Violation}; +use ruff_macros::{derive_message_formats, violation}; +use ruff_python_ast::{self as ast, identifier::Identifier}; +use ruff_python_semantic::analyze::visibility; + +use crate::checkers::ast::Checker; + +/// ## What it does +/// Checks for function definitions that include too many positional arguments. +/// +/// By default, this rule allows up to five arguments, as configured by the +/// [`pylint.max-positional-args`] option. +/// +/// ## Why is this bad? +/// Functions with many arguments are harder to understand, maintain, and call. +/// This is especially true for functions with many positional arguments, as +/// providing arguments positionally is more error-prone and less clear to +/// readers than providing arguments by name. +/// +/// Consider refactoring functions with many arguments into smaller functions +/// with fewer arguments, using objects to group related arguments, or +/// migrating to keyword-only arguments. +/// +/// ## Example +/// ```python +/// def plot(x, y, z, color, mark, add_trendline): +/// ... +/// +/// +/// plot(1, 2, 3, "r", "*", True) +/// ``` +/// +/// Use instead: +/// ```python +/// def plot(x, y, z, *, color, mark, add_trendline): +/// ... +/// +/// +/// plot(1, 2, 3, color="r", mark="*", add_trendline=True) +/// ``` +/// +/// ## Options +/// - `pylint.max-positional-args` +#[violation] +pub struct TooManyPositional { + c_pos: usize, + max_pos: usize, +} + +impl Violation for TooManyPositional { + #[derive_message_formats] + fn message(&self) -> String { + let TooManyPositional { c_pos, max_pos } = self; + format!("Too many positional arguments: ({c_pos}/{max_pos})") + } +} + +/// PLR0917 +pub(crate) fn too_many_positional(checker: &mut Checker, function_def: &ast::StmtFunctionDef) { + let num_positional_args = function_def + .parameters + .args + .iter() + .chain(&function_def.parameters.posonlyargs) + .filter(|arg| { + !checker + .settings + .dummy_variable_rgx + .is_match(&arg.parameter.name) + }) + .count(); + + if num_positional_args > checker.settings.pylint.max_positional_args { + // Allow excessive arguments in `@override` or `@overload` methods, since they're required + // to adhere to the parent signature.
+ if visibility::is_override(&function_def.decorator_list, checker.semantic()) + || visibility::is_overload(&function_def.decorator_list, checker.semantic()) + { + return; + } + + checker.diagnostics.push(Diagnostic::new( + TooManyPositional { + c_pos: num_positional_args, + max_pos: checker.settings.pylint.max_positional_args, + }, + function_def.identifier(), + )); + } +} diff --git a/crates/ruff_linter/src/rules/pylint/rules/unnecessary_dict_index_lookup.rs b/crates/ruff_linter/src/rules/pylint/rules/unnecessary_dict_index_lookup.rs index 16dc733d61..913279081f 100644 --- a/crates/ruff_linter/src/rules/pylint/rules/unnecessary_dict_index_lookup.rs +++ b/crates/ruff_linter/src/rules/pylint/rules/unnecessary_dict_index_lookup.rs @@ -1,13 +1,11 @@ -use ast::Stmt; -use ruff_python_ast::{self as ast, Expr, StmtFor}; - use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix}; use ruff_macros::{derive_message_formats, violation}; -use ruff_python_ast::visitor; use ruff_python_ast::visitor::Visitor; -use ruff_text_size::TextRange; +use ruff_python_ast::{self as ast, Expr, StmtFor}; +use ruff_text_size::Ranged; use crate::checkers::ast::Checker; +use crate::rules::pylint::helpers::SequenceIndexVisitor; /// ## What it does /// Checks for key-based dict accesses during `.items()` iterations. @@ -54,18 +52,18 @@ pub(crate) fn unnecessary_dict_index_lookup(checker: &mut Checker, stmt_for: &St }; let ranges = { - let mut visitor = SubscriptVisitor::new(dict_name, index_name); + let mut visitor = SequenceIndexVisitor::new(&dict_name.id, &index_name.id, &value_name.id); visitor.visit_body(&stmt_for.body); visitor.visit_body(&stmt_for.orelse); - visitor.diagnostic_ranges + visitor.into_accesses() }; for range in ranges { let mut diagnostic = Diagnostic::new(UnnecessaryDictIndexLookup, range); - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( - value_name.to_string(), - range, - ))); + diagnostic.set_fix(Fix::safe_edits( + Edit::range_replacement(value_name.id.to_string(), range), + [noop(index_name), noop(value_name)], + )); checker.diagnostics.push(diagnostic); } } @@ -96,20 +94,21 @@ pub(crate) fn unnecessary_dict_index_lookup_comprehension(checker: &mut Checker, }; let ranges = { - let mut visitor = SubscriptVisitor::new(dict_name, index_name); + let mut visitor = + SequenceIndexVisitor::new(&dict_name.id, &index_name.id, &value_name.id); visitor.visit_expr(elt.as_ref()); for expr in &comp.ifs { visitor.visit_expr(expr); } - visitor.diagnostic_ranges + visitor.into_accesses() }; for range in ranges { let mut diagnostic = Diagnostic::new(UnnecessaryDictIndexLookup, range); - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( - value_name.to_string(), - range, - ))); + diagnostic.set_fix(Fix::safe_edits( + Edit::range_replacement(value_name.id.to_string(), range), + [noop(index_name), noop(value_name)], + )); checker.diagnostics.push(diagnostic); } } @@ -118,7 +117,7 @@ pub(crate) fn unnecessary_dict_index_lookup_comprehension(checker: &mut Checker, fn dict_items<'a>( call_expr: &'a Expr, tuple_expr: &'a Expr, -) -> Option<(&'a str, &'a str, &'a str)> { +) -> Option<(&'a ast::ExprName, &'a ast::ExprName, &'a ast::ExprName)> { let ast::ExprCall { func, arguments, .. } = call_expr.as_call_expr()?; @@ -133,7 +132,7 @@ fn dict_items<'a>( return None; } - let Expr::Name(ast::ExprName { id: dict_name, .. }) = value.as_ref() else { + let Expr::Name(dict_name) = value.as_ref() else { return None; }; @@ -145,110 +144,24 @@ fn dict_items<'a>( }; // Grab the variable names. 
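(Editorial note, not part of the patch: the `UnnecessaryDictIndexLookup` and `UnnecessaryListIndexLookup` rules now share the `SequenceIndexVisitor` added to `pylint/helpers.rs` above, and their fixes pair the replacement with no-op edits over the index and value names, presumably so the fix cannot be combined with other fixes that rewrite those same bindings. The patterns the two rules rewrite, as a minimal Python sketch with illustrative variable names:)

```python
letters = ["a", "b", "c"]
for index, letter in enumerate(letters):
    print(letters[index])  # list-index lookup: the fix replaces `letters[index]` with `letter`

prices = {"apple": 1, "pear": 2}
for fruit, price in prices.items():
    print(prices[fruit])   # dict-index lookup: the fix replaces `prices[fruit]` with `price`
```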
- let Expr::Name(ast::ExprName { id: index_name, .. }) = index else { + let Expr::Name(index_name) = index else { return None; }; - let Expr::Name(ast::ExprName { id: value_name, .. }) = value else { + let Expr::Name(value_name) = value else { return None; }; // If either of the variable names are intentionally ignored by naming them `_`, then don't // emit. - if index_name == "_" || value_name == "_" { + if index_name.id == "_" || value_name.id == "_" { return None; } Some((dict_name, index_name, value_name)) } -#[derive(Debug)] -struct SubscriptVisitor<'a> { - dict_name: &'a str, - index_name: &'a str, - diagnostic_ranges: Vec, - modified: bool, -} - -impl<'a> SubscriptVisitor<'a> { - fn new(dict_name: &'a str, index_name: &'a str) -> Self { - Self { - dict_name, - index_name, - diagnostic_ranges: Vec::new(), - modified: false, - } - } -} - -impl SubscriptVisitor<'_> { - fn is_assignment(&self, expr: &Expr) -> bool { - let Expr::Subscript(ast::ExprSubscript { value, slice, .. }) = expr else { - return false; - }; - let Expr::Name(ast::ExprName { id, .. }) = value.as_ref() else { - return false; - }; - if id == self.dict_name { - let Expr::Name(ast::ExprName { id, .. }) = slice.as_ref() else { - return false; - }; - if id == self.index_name { - return true; - } - } - false - } -} - -impl<'a> Visitor<'_> for SubscriptVisitor<'a> { - fn visit_stmt(&mut self, stmt: &Stmt) { - if self.modified { - return; - } - match stmt { - Stmt::Assign(ast::StmtAssign { targets, value, .. }) => { - self.modified = targets.iter().any(|target| self.is_assignment(target)); - self.visit_expr(value); - } - Stmt::AnnAssign(ast::StmtAnnAssign { target, value, .. }) => { - if let Some(value) = value { - self.modified = self.is_assignment(target); - self.visit_expr(value); - } - } - Stmt::AugAssign(ast::StmtAugAssign { target, value, .. }) => { - self.modified = self.is_assignment(target); - self.visit_expr(value); - } - _ => visitor::walk_stmt(self, stmt), - } - } - - fn visit_expr(&mut self, expr: &Expr) { - if self.modified { - return; - } - match expr { - Expr::Subscript(ast::ExprSubscript { - value, - slice, - range, - .. - }) => { - let Expr::Name(ast::ExprName { id, .. }) = value.as_ref() else { - return; - }; - if id == self.dict_name { - let Expr::Name(ast::ExprName { id, .. }) = slice.as_ref() else { - return; - }; - if id == self.index_name { - self.diagnostic_ranges.push(*range); - } - } - } - _ => visitor::walk_expr(self, expr), - } - } +/// Return a no-op edit for the given name. 
+fn noop(name: &ast::ExprName) -> Edit { + Edit::range_replacement(name.id.to_string(), name.range()) } diff --git a/crates/ruff_linter/src/rules/pylint/rules/unnecessary_list_index_lookup.rs b/crates/ruff_linter/src/rules/pylint/rules/unnecessary_list_index_lookup.rs index 17a42b29e2..461a8d56c9 100644 --- a/crates/ruff_linter/src/rules/pylint/rules/unnecessary_list_index_lookup.rs +++ b/crates/ruff_linter/src/rules/pylint/rules/unnecessary_list_index_lookup.rs @@ -1,13 +1,12 @@ -use ruff_python_ast::{self as ast, Expr, Stmt, StmtFor}; - use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix}; use ruff_macros::{derive_message_formats, violation}; -use ruff_python_ast::visitor; use ruff_python_ast::visitor::Visitor; +use ruff_python_ast::{self as ast, Expr, StmtFor}; use ruff_python_semantic::SemanticModel; -use ruff_text_size::TextRange; +use ruff_text_size::Ranged; use crate::checkers::ast::Checker; +use crate::rules::pylint::helpers::SequenceIndexVisitor; /// ## What it does /// Checks for index-based list accesses during `enumerate` iterations. @@ -55,18 +54,18 @@ pub(crate) fn unnecessary_list_index_lookup(checker: &mut Checker, stmt_for: &St }; let ranges = { - let mut visitor = SubscriptVisitor::new(sequence, index_name); + let mut visitor = SequenceIndexVisitor::new(&sequence.id, &index_name.id, &value_name.id); visitor.visit_body(&stmt_for.body); visitor.visit_body(&stmt_for.orelse); - visitor.diagnostic_ranges + visitor.into_accesses() }; for range in ranges { let mut diagnostic = Diagnostic::new(UnnecessaryListIndexLookup, range); - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( - value_name.to_string(), - range, - ))); + diagnostic.set_fix(Fix::safe_edits( + Edit::range_replacement(value_name.id.to_string(), range), + [noop(index_name), noop(value_name)], + )); checker.diagnostics.push(diagnostic); } } @@ -99,17 +98,18 @@ pub(crate) fn unnecessary_list_index_lookup_comprehension(checker: &mut Checker, }; let ranges = { - let mut visitor = SubscriptVisitor::new(sequence, index_name); + let mut visitor = + SequenceIndexVisitor::new(&sequence.id, &index_name.id, &value_name.id); visitor.visit_expr(elt.as_ref()); - visitor.diagnostic_ranges + visitor.into_accesses() }; for range in ranges { let mut diagnostic = Diagnostic::new(UnnecessaryListIndexLookup, range); - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( - value_name.to_string(), - range, - ))); + diagnostic.set_fix(Fix::safe_edits( + Edit::range_replacement(value_name.id.to_string(), range), + [noop(index_name), noop(value_name)], + )); checker.diagnostics.push(diagnostic); } } @@ -119,7 +119,7 @@ fn enumerate_items<'a>( call_expr: &'a Expr, tuple_expr: &'a Expr, semantic: &SemanticModel, -) -> Option<(&'a str, &'a str, &'a str)> { +) -> Option<(&'a ast::ExprName, &'a ast::ExprName, &'a ast::ExprName)> { let ast::ExprCall { func, arguments, .. } = call_expr.as_call_expr()?; @@ -140,125 +140,29 @@ fn enumerate_items<'a>( }; // Grab the variable names. - let Expr::Name(ast::ExprName { id: index_name, .. }) = index else { + let Expr::Name(index_name) = index else { return None; }; - let Expr::Name(ast::ExprName { id: value_name, .. }) = value else { + let Expr::Name(value_name) = value else { return None; }; // If either of the variable names are intentionally ignored by naming them `_`, then don't // emit. - if index_name == "_" || value_name == "_" { + if index_name.id == "_" || value_name.id == "_" { return None; } // Get the first argument of the enumerate call. 
- let Some(Expr::Name(ast::ExprName { id: sequence, .. })) = arguments.args.first() else { + let Some(Expr::Name(sequence)) = arguments.args.first() else { return None; }; Some((sequence, index_name, value_name)) } -#[derive(Debug)] -struct SubscriptVisitor<'a> { - sequence_name: &'a str, - index_name: &'a str, - diagnostic_ranges: Vec, - modified: bool, -} - -impl<'a> SubscriptVisitor<'a> { - fn new(sequence_name: &'a str, index_name: &'a str) -> Self { - Self { - sequence_name, - index_name, - diagnostic_ranges: Vec::new(), - modified: false, - } - } -} - -impl SubscriptVisitor<'_> { - fn is_assignment(&self, expr: &Expr) -> bool { - // If we see the sequence, a subscript, or the index being modified, we'll stop emitting - // diagnostics. - match expr { - Expr::Name(ast::ExprName { id, .. }) => { - id == self.sequence_name || id == self.index_name - } - Expr::Subscript(ast::ExprSubscript { value, slice, .. }) => { - let Expr::Name(ast::ExprName { id, .. }) = value.as_ref() else { - return false; - }; - if id == self.sequence_name { - let Expr::Name(ast::ExprName { id, .. }) = slice.as_ref() else { - return false; - }; - if id == self.index_name { - return true; - } - } - false - } - _ => false, - } - } -} - -impl<'a> Visitor<'_> for SubscriptVisitor<'a> { - fn visit_stmt(&mut self, stmt: &Stmt) { - if self.modified { - return; - } - match stmt { - Stmt::Assign(ast::StmtAssign { targets, value, .. }) => { - self.modified = targets.iter().any(|target| self.is_assignment(target)); - self.visit_expr(value); - } - Stmt::AnnAssign(ast::StmtAnnAssign { target, value, .. }) => { - if let Some(value) = value { - self.modified = self.is_assignment(target); - self.visit_expr(value); - } - } - Stmt::AugAssign(ast::StmtAugAssign { target, value, .. }) => { - self.modified = self.is_assignment(target); - self.visit_expr(value); - } - Stmt::Delete(ast::StmtDelete { targets, .. }) => { - self.modified = targets.iter().any(|target| self.is_assignment(target)); - } - _ => visitor::walk_stmt(self, stmt), - } - } - - fn visit_expr(&mut self, expr: &Expr) { - if self.modified { - return; - } - match expr { - Expr::Subscript(ast::ExprSubscript { - value, - slice, - range, - .. - }) => { - let Expr::Name(ast::ExprName { id, .. }) = value.as_ref() else { - return; - }; - if id == self.sequence_name { - let Expr::Name(ast::ExprName { id, .. }) = slice.as_ref() else { - return; - }; - if id == self.index_name { - self.diagnostic_ranges.push(*range); - } - } - } - _ => visitor::walk_expr(self, expr), - } - } +/// Return a no-op edit for the given name. 
+fn noop(name: &ast::ExprName) -> Edit { + Edit::range_replacement(name.id.to_string(), name.range()) } diff --git a/crates/ruff_linter/src/rules/pylint/settings.rs b/crates/ruff_linter/src/rules/pylint/settings.rs index cb9846b11e..8ea19cdfaf 100644 --- a/crates/ruff_linter/src/rules/pylint/settings.rs +++ b/crates/ruff_linter/src/rules/pylint/settings.rs @@ -39,11 +39,13 @@ pub struct Settings { pub allow_magic_value_types: Vec, pub allow_dunder_method_names: FxHashSet, pub max_args: usize, + pub max_positional_args: usize, pub max_returns: usize, pub max_bool_expr: usize, pub max_branches: usize, pub max_statements: usize, pub max_public_methods: usize, + pub max_locals: usize, } impl Default for Settings { @@ -52,11 +54,13 @@ impl Default for Settings { allow_magic_value_types: vec![ConstantType::Str, ConstantType::Bytes], allow_dunder_method_names: FxHashSet::default(), max_args: 5, + max_positional_args: 5, max_returns: 6, max_bool_expr: 5, max_branches: 12, max_statements: 50, max_public_methods: 20, + max_locals: 15, } } } diff --git a/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__PLR0917_too_many_positional.py.snap b/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__PLR0917_too_many_positional.py.snap new file mode 100644 index 0000000000..4578419f43 --- /dev/null +++ b/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__PLR0917_too_many_positional.py.snap @@ -0,0 +1,25 @@ +--- +source: crates/ruff_linter/src/rules/pylint/mod.rs +--- +too_many_positional.py:1:5: PLR0917 Too many positional arguments: (8/5) + | +1 | def f(x, y, z, t, u, v, w, r): # Too many positional arguments (8/3) + | ^ PLR0917 +2 | pass + | + +too_many_positional.py:21:5: PLR0917 Too many positional arguments: (6/5) + | +21 | def f(x, y, z, /, u, v, w): # Too many positional arguments (6/3) + | ^ PLR0917 +22 | pass + | + +too_many_positional.py:29:5: PLR0917 Too many positional arguments: (6/5) + | +29 | def f(x, y, z, a, b, c, *, u, v, w): # Too many positional arguments (6/3) + | ^ PLR0917 +30 | pass + | + + diff --git a/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__PLR1736_unnecessary_list_index_lookup.py.snap b/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__PLR1736_unnecessary_list_index_lookup.py.snap index 8e4d22472d..880422eab6 100644 --- a/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__PLR1736_unnecessary_list_index_lookup.py.snap +++ b/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__PLR1736_unnecessary_list_index_lookup.py.snap @@ -80,7 +80,7 @@ unnecessary_list_index_lookup.py:12:15: PLR1736 [*] Unnecessary lookup of list i 12 |+ print(letter) # PLR1736 13 13 | blah = letters[index] # PLR1736 14 14 | assert letters[index] == "d" # PLR1736 -15 15 | +15 15 | unnecessary_list_index_lookup.py:13:16: PLR1736 [*] Unnecessary lookup of list item by index | @@ -99,7 +99,7 @@ unnecessary_list_index_lookup.py:13:16: PLR1736 [*] Unnecessary lookup of list i 13 |- blah = letters[index] # PLR1736 13 |+ blah = letter # PLR1736 14 14 | assert letters[index] == "d" # PLR1736 -15 15 | +15 15 | 16 16 | for index, letter in builtins.enumerate(letters): unnecessary_list_index_lookup.py:14:16: PLR1736 [*] Unnecessary lookup of list item by index @@ -108,7 +108,7 @@ unnecessary_list_index_lookup.py:14:16: PLR1736 [*] Unnecessary lookup of list i 13 | blah = letters[index] # PLR1736 14 | assert 
letters[index] == "d" # PLR1736 | ^^^^^^^^^^^^^^ PLR1736 -15 | +15 | 16 | for index, letter in builtins.enumerate(letters): | = help: Use existing variable @@ -119,7 +119,7 @@ unnecessary_list_index_lookup.py:14:16: PLR1736 [*] Unnecessary lookup of list i 13 13 | blah = letters[index] # PLR1736 14 |- assert letters[index] == "d" # PLR1736 14 |+ assert letter == "d" # PLR1736 -15 15 | +15 15 | 16 16 | for index, letter in builtins.enumerate(letters): 17 17 | print(letters[index]) # PLR1736 @@ -135,7 +135,7 @@ unnecessary_list_index_lookup.py:17:15: PLR1736 [*] Unnecessary lookup of list i ℹ Safe fix 14 14 | assert letters[index] == "d" # PLR1736 -15 15 | +15 15 | 16 16 | for index, letter in builtins.enumerate(letters): 17 |- print(letters[index]) # PLR1736 17 |+ print(letter) # PLR1736 @@ -154,7 +154,7 @@ unnecessary_list_index_lookup.py:18:16: PLR1736 [*] Unnecessary lookup of list i = help: Use existing variable ℹ Safe fix -15 15 | +15 15 | 16 16 | for index, letter in builtins.enumerate(letters): 17 17 | print(letters[index]) # PLR1736 18 |- blah = letters[index] # PLR1736 diff --git a/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__PLW1510_subprocess_run_without_check.py.snap b/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__PLW1510_subprocess_run_without_check.py.snap index b0306f17a2..7419c16569 100644 --- a/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__PLW1510_subprocess_run_without_check.py.snap +++ b/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__PLW1510_subprocess_run_without_check.py.snap @@ -1,22 +1,87 @@ --- source: crates/ruff_linter/src/rules/pylint/mod.rs --- -subprocess_run_without_check.py:4:1: PLW1510 `subprocess.run` without explicit `check` argument +subprocess_run_without_check.py:4:1: PLW1510 [*] `subprocess.run` without explicit `check` argument | 3 | # Errors. 4 | subprocess.run("ls") | ^^^^^^^^^^^^^^ PLW1510 5 | subprocess.run("ls", shell=True) +6 | subprocess.run( | + = help: Add explicit `check=False` -subprocess_run_without_check.py:5:1: PLW1510 `subprocess.run` without explicit `check` argument +ℹ Safe fix +1 1 | import subprocess +2 2 | +3 3 | # Errors. +4 |-subprocess.run("ls") + 4 |+subprocess.run("ls", check=False) +5 5 | subprocess.run("ls", shell=True) +6 6 | subprocess.run( +7 7 | ["ls"], + +subprocess_run_without_check.py:5:1: PLW1510 [*] `subprocess.run` without explicit `check` argument | 3 | # Errors. 4 | subprocess.run("ls") 5 | subprocess.run("ls", shell=True) | ^^^^^^^^^^^^^^ PLW1510 -6 | -7 | # Non-errors. +6 | subprocess.run( +7 | ["ls"], | + = help: Add explicit `check=False` + +ℹ Safe fix +2 2 | +3 3 | # Errors. 
+4 4 | subprocess.run("ls") +5 |-subprocess.run("ls", shell=True) + 5 |+subprocess.run("ls", shell=True, check=False) +6 6 | subprocess.run( +7 7 | ["ls"], +8 8 | shell=False, + +subprocess_run_without_check.py:6:1: PLW1510 [*] `subprocess.run` without explicit `check` argument + | +4 | subprocess.run("ls") +5 | subprocess.run("ls", shell=True) +6 | subprocess.run( + | ^^^^^^^^^^^^^^ PLW1510 +7 | ["ls"], +8 | shell=False, + | + = help: Add explicit `check=False` + +ℹ Safe fix +5 5 | subprocess.run("ls", shell=True) +6 6 | subprocess.run( +7 7 | ["ls"], +8 |- shell=False, + 8 |+ shell=False, check=False, +9 9 | ) +10 10 | subprocess.run(["ls"], **kwargs) +11 11 | + +subprocess_run_without_check.py:10:1: PLW1510 [*] `subprocess.run` without explicit `check` argument + | + 8 | shell=False, + 9 | ) +10 | subprocess.run(["ls"], **kwargs) + | ^^^^^^^^^^^^^^ PLW1510 +11 | +12 | # Non-errors. + | + = help: Add explicit `check=False` + +ℹ Unsafe fix +7 7 | ["ls"], +8 8 | shell=False, +9 9 | ) +10 |-subprocess.run(["ls"], **kwargs) + 10 |+subprocess.run(["ls"], **kwargs, check=False) +11 11 | +12 12 | # Non-errors. +13 13 | subprocess.run("ls", check=True) diff --git a/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__max_positional_args.snap b/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__max_positional_args.snap new file mode 100644 index 0000000000..98c9648207 --- /dev/null +++ b/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__max_positional_args.snap @@ -0,0 +1,22 @@ +--- +source: crates/ruff_linter/src/rules/pylint/mod.rs +--- +too_many_positional_params.py:3:5: PLR0917 Too many positional arguments: (7/4) + | +1 | # Too many positional arguments (7/4) for max_positional=4 +2 | # OK for dummy_variable_rgx ~ "skip_.*" +3 | def f(w, x, y, z, skip_t, skip_u, skip_v): + | ^ PLR0917 +4 | pass + | + +too_many_positional_params.py:9:5: PLR0917 Too many positional arguments: (7/4) + | + 7 | # Too many positional arguments (7/4) for max_args=4 + 8 | # Too many positional arguments (7/3) for dummy_variable_rgx ~ "skip_.*" + 9 | def f(w, x, y, z, t, u, v): + | ^ PLR0917 +10 | pass + | + + diff --git a/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__too_many_locals.snap b/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__too_many_locals.snap new file mode 100644 index 0000000000..5150eac445 --- /dev/null +++ b/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__too_many_locals.snap @@ -0,0 +1,12 @@ +--- +source: crates/ruff_linter/src/rules/pylint/mod.rs +--- +too_many_locals.py:20:5: PLR0914 Too many local variables: (16/15) + | +20 | def func() -> None: # PLR0914 + | ^^^^ PLR0914 +21 | first = 1 +22 | second = 2 + | + + diff --git a/crates/ruff_linter/src/rules/pyupgrade/rules/printf_string_formatting.rs b/crates/ruff_linter/src/rules/pyupgrade/rules/printf_string_formatting.rs index 576b0dacbc..4a340e3369 100644 --- a/crates/ruff_linter/src/rules/pyupgrade/rules/printf_string_formatting.rs +++ b/crates/ruff_linter/src/rules/pyupgrade/rules/printf_string_formatting.rs @@ -490,18 +490,10 @@ pub(crate) fn printf_string_formatting(checker: &mut Checker, expr: &Expr, right contents.push_str(&format!(".format{params_string}")); let mut diagnostic = Diagnostic::new(PrintfStringFormatting, expr.range()); - // Avoid fix if there are comments within the right-hand side: - // ``` - // "%s" % ( - // 0, # 0 - // ) - // ``` - if 
!checker.indexer().comment_ranges().intersects(right.range()) { - diagnostic.set_fix(Fix::unsafe_edit(Edit::range_replacement( - contents, - expr.range(), - ))); - } + diagnostic.set_fix(Fix::unsafe_edit(Edit::range_replacement( + contents, + expr.range(), + ))); checker.diagnostics.push(diagnostic); } diff --git a/crates/ruff_linter/src/rules/pyupgrade/rules/use_pep604_annotation.rs b/crates/ruff_linter/src/rules/pyupgrade/rules/use_pep604_annotation.rs index 70aa2cec0e..5332be054f 100644 --- a/crates/ruff_linter/src/rules/pyupgrade/rules/use_pep604_annotation.rs +++ b/crates/ruff_linter/src/rules/pyupgrade/rules/use_pep604_annotation.rs @@ -180,7 +180,6 @@ fn is_allowed_value(expr: &Expr) -> bool { | Expr::GeneratorExp(_) | Expr::Compare(_) | Expr::Call(_) - | Expr::FormattedValue(_) | Expr::FString(_) | Expr::StringLiteral(_) | Expr::BytesLiteral(_) diff --git a/crates/ruff_linter/src/rules/pyupgrade/snapshots/ruff_linter__rules__pyupgrade__tests__UP031_0.py.snap b/crates/ruff_linter/src/rules/pyupgrade/snapshots/ruff_linter__rules__pyupgrade__tests__UP031_0.py.snap index a06ca5060c..fd5827fcfa 100644 --- a/crates/ruff_linter/src/rules/pyupgrade/snapshots/ruff_linter__rules__pyupgrade__tests__UP031_0.py.snap +++ b/crates/ruff_linter/src/rules/pyupgrade/snapshots/ruff_linter__rules__pyupgrade__tests__UP031_0.py.snap @@ -896,7 +896,7 @@ UP031_0.py:104:5: UP031 [*] Use format specifiers instead of percent format 109 108 | 110 109 | "%s" % ( -UP031_0.py:110:1: UP031 Use format specifiers instead of percent format +UP031_0.py:110:1: UP031 [*] Use format specifiers instead of percent format | 108 | ) 109 | @@ -907,4 +907,36 @@ UP031_0.py:110:1: UP031 Use format specifiers instead of percent format | = help: Replace with format specifiers +ℹ Unsafe fix +107 107 | % (x,) +108 108 | ) +109 109 | +110 |-"%s" % ( + 110 |+"{}".format( +111 111 | x, # comment +112 112 | ) +113 113 | + +UP031_0.py:115:8: UP031 [*] Use format specifiers instead of percent format + | +115 | path = "%s-%s-%s.pem" % ( + | ________^ +116 | | safe_domain_name(cn), # common name, which should be filename safe because it is IDNA-encoded, but in case of a malformed cert make sure it's ok to use as a filename +117 | | cert.not_valid_after.date().isoformat().replace("-", ""), # expiration date +118 | | hexlify(cert.fingerprint(hashes.SHA256())).decode("ascii")[0:8], # fingerprint prefix +119 | | ) + | |_^ UP031 + | + = help: Replace with format specifiers + +ℹ Unsafe fix +112 112 | ) +113 113 | +114 114 | +115 |-path = "%s-%s-%s.pem" % ( + 115 |+path = "{}-{}-{}.pem".format( +116 116 | safe_domain_name(cn), # common name, which should be filename safe because it is IDNA-encoded, but in case of a malformed cert make sure it's ok to use as a filename +117 117 | cert.not_valid_after.date().isoformat().replace("-", ""), # expiration date +118 118 | hexlify(cert.fingerprint(hashes.SHA256())).decode("ascii")[0:8], # fingerprint prefix + diff --git a/crates/ruff_linter/src/rules/refurb/mod.rs b/crates/ruff_linter/src/rules/refurb/mod.rs index a7640bcd4c..5ce76cd342 100644 --- a/crates/ruff_linter/src/rules/refurb/mod.rs +++ b/crates/ruff_linter/src/rules/refurb/mod.rs @@ -16,6 +16,7 @@ mod tests { #[test_case(Rule::ReadWholeFile, Path::new("FURB101.py"))] #[test_case(Rule::RepeatedAppend, Path::new("FURB113.py"))] + #[test_case(Rule::ReimplementedOperator, Path::new("FURB118.py"))] #[test_case(Rule::DeleteFullSlice, Path::new("FURB131.py"))] #[test_case(Rule::CheckAndRemoveFromSet, Path::new("FURB132.py"))] 
#[test_case(Rule::IfExprMinMax, Path::new("FURB136.py"))] @@ -29,6 +30,7 @@ mod tests { #[test_case(Rule::IsinstanceTypeNone, Path::new("FURB168.py"))] #[test_case(Rule::TypeNoneComparison, Path::new("FURB169.py"))] #[test_case(Rule::RedundantLogBase, Path::new("FURB163.py"))] + #[test_case(Rule::HashlibDigestHex, Path::new("FURB181.py"))] fn rules(rule_code: Rule, path: &Path) -> Result<()> { let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy()); let diagnostics = test_path( diff --git a/crates/ruff_linter/src/rules/refurb/rules/delete_full_slice.rs b/crates/ruff_linter/src/rules/refurb/rules/delete_full_slice.rs index a6584201fc..1b0e610bdb 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/delete_full_slice.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/delete_full_slice.rs @@ -14,7 +14,7 @@ use crate::rules::refurb::helpers::generate_method_call; /// dictionary. /// /// ## Why is this bad? -/// It's is faster and more succinct to remove all items via the `clear()` +/// It is faster and more succinct to remove all items via the `clear()` /// method. /// /// ## Known problems diff --git a/crates/ruff_linter/src/rules/refurb/rules/hashlib_digest_hex.rs b/crates/ruff_linter/src/rules/refurb/rules/hashlib_digest_hex.rs new file mode 100644 index 0000000000..b8c7adf7cc --- /dev/null +++ b/crates/ruff_linter/src/rules/refurb/rules/hashlib_digest_hex.rs @@ -0,0 +1,120 @@ +use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation}; +use ruff_macros::{derive_message_formats, violation}; +use ruff_python_ast::{Expr, ExprAttribute, ExprCall}; +use ruff_text_size::{Ranged, TextRange}; + +use crate::checkers::ast::Checker; + +/// ## What it does +/// Checks for the use of `.digest().hex()` on a hashlib hash, like `sha512`. +/// +/// ## Why is this bad? +/// When generating a hex digest from a hash, it's preferable to use the +/// `.hexdigest()` method, rather than calling `.digest()` and then `.hex()`, +/// as the former is more concise and readable. +/// +/// ## Example +/// ```python +/// from hashlib import sha512 +/// +/// hashed = sha512(b"some data").digest().hex() +/// ``` +/// +/// Use instead: +/// ```python +/// from hashlib import sha512 +/// +/// hashed = sha512(b"some data").hexdigest() +/// ``` +/// +/// ## Fix safety +/// This rule's fix is marked as unsafe, as the target of the `.digest()` call +/// could be a user-defined class that implements a `.hex()` method, rather +/// than a hashlib hash object. +/// +/// ## References +/// - [Python documentation: `hashlib`](https://docs.python.org/3/library/hashlib.html) +#[violation] +pub struct HashlibDigestHex; + +impl Violation for HashlibDigestHex { + const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes; + + #[derive_message_formats] + fn message(&self) -> String { + format!("Use of hashlib's `.digest().hex()`") + } + + fn fix_title(&self) -> Option { + Some("Replace with `.hexdigest()`".to_string()) + } +} + +/// FURB181 +pub(crate) fn hashlib_digest_hex(checker: &mut Checker, call: &ExprCall) { + if !call.arguments.is_empty() { + return; + } + + let Expr::Attribute(ExprAttribute { attr, value, .. }) = call.func.as_ref() else { + return; + }; + + if attr.as_str() != "hex" { + return; + } + + let Expr::Call(ExprCall { + func, arguments, .. + }) = value.as_ref() + else { + return; + }; + + let Expr::Attribute(ExprAttribute { attr, value, .. }) = func.as_ref() else { + return; + }; + + if attr.as_str() != "digest" { + return; + } + + let Expr::Call(ExprCall { func, .. 
}) = value.as_ref() else { + return; + }; + + if checker.semantic().resolve_call_path(func).is_some_and( + |call_path: smallvec::SmallVec<[&str; 8]>| { + matches!( + call_path.as_slice(), + [ + "hashlib", + "md5" + | "sha1" + | "sha224" + | "sha256" + | "sha384" + | "sha512" + | "blake2b" + | "blake2s" + | "sha3_224" + | "sha3_256" + | "sha3_384" + | "sha3_512" + | "shake_128" + | "shake_256" + | "_Hash" + ] + ) + }, + ) { + let mut diagnostic = Diagnostic::new(HashlibDigestHex, call.range()); + if arguments.is_empty() { + diagnostic.set_fix(Fix::unsafe_edit(Edit::range_replacement( + ".hexdigest".to_string(), + TextRange::new(value.end(), call.func.end()), + ))); + } + checker.diagnostics.push(diagnostic); + } +} diff --git a/crates/ruff_linter/src/rules/refurb/rules/math_constant.rs b/crates/ruff_linter/src/rules/refurb/rules/math_constant.rs index 6b590275a3..23ef1aa8c1 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/math_constant.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/math_constant.rs @@ -53,23 +53,17 @@ pub(crate) fn math_constant(checker: &mut Checker, literal: &ast::ExprNumberLite let Number::Float(value) = literal.value else { return; }; - for (real_value, constant) in [ - (std::f64::consts::PI, "pi"), - (std::f64::consts::E, "e"), - (std::f64::consts::TAU, "tau"), - ] { - if (value - real_value).abs() < 1e-2 { - let mut diagnostic = Diagnostic::new( - MathConstant { - literal: checker.locator().slice(literal).into(), - constant, - }, - literal.range(), - ); - diagnostic.try_set_fix(|| convert_to_constant(literal, constant, checker)); - checker.diagnostics.push(diagnostic); - return; - } + + if let Some(constant) = Constant::from_value(value) { + let mut diagnostic = Diagnostic::new( + MathConstant { + literal: checker.locator().slice(literal).into(), + constant: constant.name(), + }, + literal.range(), + ); + diagnostic.try_set_fix(|| convert_to_constant(literal, constant.name(), checker)); + checker.diagnostics.push(diagnostic); } } @@ -88,3 +82,33 @@ fn convert_to_constant( [edit], )) } + +#[derive(Debug, Clone, Copy)] +enum Constant { + Pi, + E, + Tau, +} + +impl Constant { + #[allow(clippy::approx_constant)] + fn from_value(value: f64) -> Option { + if (3.14..3.15).contains(&value) { + Some(Self::Pi) + } else if (2.71..2.72).contains(&value) { + Some(Self::E) + } else if (6.28..6.29).contains(&value) { + Some(Self::Tau) + } else { + None + } + } + + fn name(self) -> &'static str { + match self { + Constant::Pi => "pi", + Constant::E => "e", + Constant::Tau => "tau", + } + } +} diff --git a/crates/ruff_linter/src/rules/refurb/rules/mod.rs b/crates/ruff_linter/src/rules/refurb/rules/mod.rs index d43f87bced..c5536a029b 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/mod.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/mod.rs @@ -1,5 +1,6 @@ pub(crate) use check_and_remove_from_set::*; pub(crate) use delete_full_slice::*; +pub(crate) use hashlib_digest_hex::*; pub(crate) use if_expr_min_max::*; pub(crate) use implicit_cwd::*; pub(crate) use isinstance_type_none::*; @@ -7,6 +8,7 @@ pub(crate) use math_constant::*; pub(crate) use print_empty_string::*; pub(crate) use read_whole_file::*; pub(crate) use redundant_log_base::*; +pub(crate) use reimplemented_operator::*; pub(crate) use reimplemented_starmap::*; pub(crate) use repeated_append::*; pub(crate) use single_item_membership_test::*; @@ -16,6 +18,7 @@ pub(crate) use unnecessary_enumerate::*; mod check_and_remove_from_set; mod delete_full_slice; +mod hashlib_digest_hex; mod if_expr_min_max; mod 
implicit_cwd; mod isinstance_type_none; @@ -23,6 +26,7 @@ mod math_constant; mod print_empty_string; mod read_whole_file; mod redundant_log_base; +mod reimplemented_operator; mod reimplemented_starmap; mod repeated_append; mod single_item_membership_test; diff --git a/crates/ruff_linter/src/rules/refurb/rules/redundant_log_base.rs b/crates/ruff_linter/src/rules/refurb/rules/redundant_log_base.rs index a4a6e58959..c7d6837d75 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/redundant_log_base.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/redundant_log_base.rs @@ -130,6 +130,9 @@ fn is_number_literal(expr: &Expr, value: i8) -> bool { if let Expr::NumberLiteral(number_literal) = expr { if let Number::Int(number) = &number_literal.value { return number.as_i8().is_some_and(|number| number == value); + } else if let Number::Float(number) = number_literal.value { + #[allow(clippy::float_cmp)] + return number == f64::from(value); } } false diff --git a/crates/ruff_linter/src/rules/refurb/rules/reimplemented_operator.rs b/crates/ruff_linter/src/rules/refurb/rules/reimplemented_operator.rs new file mode 100644 index 0000000000..0ee71d6f96 --- /dev/null +++ b/crates/ruff_linter/src/rules/refurb/rules/reimplemented_operator.rs @@ -0,0 +1,319 @@ +use anyhow::{bail, Result}; +use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation}; +use ruff_macros::{derive_message_formats, violation}; +use ruff_python_ast::{self as ast, Expr, Stmt}; +use ruff_python_semantic::SemanticModel; +use ruff_text_size::{Ranged, TextRange}; + +use crate::checkers::ast::Checker; +use crate::importer::{ImportRequest, Importer}; + +/// ## What it does +/// Checks for lambda expressions and function definitions that can be replaced +/// with a function from the `operator` module. +/// +/// ## Why is this bad? +/// The `operator` module provides functions that implement the same functionality +/// as the corresponding operators. For example, `operator.add` is equivalent to +/// `lambda x, y: x + y`. Using the functions from the `operator` module is more +/// concise and communicates the intent of the code more clearly. +/// +/// ## Example +/// ```python +/// import functools +/// +/// nums = [1, 2, 3] +/// sum = functools.reduce(lambda x, y: x + y, nums) +/// ``` +/// +/// Use instead: +/// ```python +/// import functools +/// import operator +/// +/// nums = [1, 2, 3] +/// sum = functools.reduce(operator.add, nums) +/// ``` +/// +/// ## References +#[violation] +pub struct ReimplementedOperator { + target: &'static str, + operator: &'static str, +} + +impl Violation for ReimplementedOperator { + const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes; + + #[derive_message_formats] + fn message(&self) -> String { + let ReimplementedOperator { operator, target } = self; + format!("Use `operator.{operator}` instead of defining a {target}") + } + + fn fix_title(&self) -> Option { + let ReimplementedOperator { operator, .. 
} = self;
+        Some(format!("Replace with `operator.{operator}`"))
+    }
+}
+
+/// FURB118
+pub(crate) fn reimplemented_operator(checker: &mut Checker, target: &FunctionLike) {
+    let Some(params) = target.parameters() else {
+        return;
+    };
+    let Some(body) = target.body() else { return };
+    let Some(operator) = get_operator(body, params) else {
+        return;
+    };
+    let mut diagnostic = Diagnostic::new(
+        ReimplementedOperator {
+            operator,
+            target: target.kind(),
+        },
+        target.range(),
+    );
+    diagnostic.try_set_fix(|| target.try_fix(operator, checker.importer(), checker.semantic()));
+    checker.diagnostics.push(diagnostic);
+}
+
+/// Candidate for a lambda expression or a function definition consisting of a return statement.
+#[derive(Debug)]
+pub(crate) enum FunctionLike<'a> {
+    Lambda(&'a ast::ExprLambda),
+    Function(&'a ast::StmtFunctionDef),
+}
+
+impl<'a> From<&'a ast::ExprLambda> for FunctionLike<'a> {
+    fn from(lambda: &'a ast::ExprLambda) -> Self {
+        Self::Lambda(lambda)
+    }
+}
+
+impl<'a> From<&'a ast::StmtFunctionDef> for FunctionLike<'a> {
+    fn from(function: &'a ast::StmtFunctionDef) -> Self {
+        Self::Function(function)
+    }
+}
+
+impl Ranged for FunctionLike<'_> {
+    fn range(&self) -> TextRange {
+        match self {
+            Self::Lambda(expr) => expr.range(),
+            Self::Function(stmt) => stmt.range(),
+        }
+    }
+}
+
+impl FunctionLike<'_> {
+    /// Return the [`ast::Parameters`] of the function-like node.
+    fn parameters(&self) -> Option<&ast::Parameters> {
+        match self {
+            Self::Lambda(expr) => expr.parameters.as_deref(),
+            Self::Function(stmt) => Some(&stmt.parameters),
+        }
+    }
+
+    /// Return the body of the function-like node.
+    ///
+    /// If the node is a function definition that consists of more than a single return statement,
+    /// returns `None`.
+    fn body(&self) -> Option<&Expr> {
+        match self {
+            Self::Lambda(expr) => Some(&expr.body),
+            Self::Function(stmt) => match stmt.body.as_slice() {
+                [Stmt::Return(ast::StmtReturn { value, .. })] => value.as_deref(),
+                _ => None,
+            },
+        }
+    }
+
+    /// Return the display kind of the function-like node.
+    fn kind(&self) -> &'static str {
+        match self {
+            Self::Lambda(_) => "lambda",
+            Self::Function(_) => "function",
+        }
+    }
+
+    /// Attempt to fix the function-like node by replacing it with a call to the corresponding
+    /// function from the `operator` module.
+    fn try_fix(
+        &self,
+        operator: &'static str,
+        importer: &Importer,
+        semantic: &SemanticModel,
+    ) -> Result<Fix> {
+        match self {
+            Self::Lambda(_) => {
+                let (edit, binding) = importer.get_or_import_symbol(
+                    &ImportRequest::import("operator", operator),
+                    self.start(),
+                    semantic,
+                )?;
+                Ok(Fix::safe_edits(
+                    Edit::range_replacement(binding, self.range()),
+                    [edit],
+                ))
+            }
+            Self::Function(_) => bail!("No fix available"),
+        }
+    }
+}
+
+/// Return the name of the `operator` implemented by the given expression.
+fn get_operator(expr: &Expr, params: &ast::Parameters) -> Option<&'static str> {
+    match expr {
+        Expr::UnaryOp(expr) => unary_op(expr, params),
+        Expr::BinOp(expr) => bin_op(expr, params),
+        Expr::Compare(expr) => cmp_op(expr, params),
+        _ => None,
+    }
+}
+
+/// Return the name of the `operator` implemented by the given unary expression.
+fn unary_op(expr: &ast::ExprUnaryOp, params: &ast::Parameters) -> Option<&'static str> { + let [arg] = params.args.as_slice() else { + return None; + }; + if !is_same_expression(arg, &expr.operand) { + return None; + } + Some(match expr.op { + ast::UnaryOp::Invert => "invert", + ast::UnaryOp::Not => "not_", + ast::UnaryOp::UAdd => "pos", + ast::UnaryOp::USub => "neg", + }) +} + +/// Return the name of the `operator` implemented by the given binary expression. +fn bin_op(expr: &ast::ExprBinOp, params: &ast::Parameters) -> Option<&'static str> { + let [arg1, arg2] = params.args.as_slice() else { + return None; + }; + if !is_same_expression(arg1, &expr.left) || !is_same_expression(arg2, &expr.right) { + return None; + } + Some(match expr.op { + ast::Operator::Add => "add", + ast::Operator::Sub => "sub", + ast::Operator::Mult => "mul", + ast::Operator::MatMult => "matmul", + ast::Operator::Div => "truediv", + ast::Operator::Mod => "mod", + ast::Operator::Pow => "pow", + ast::Operator::LShift => "lshift", + ast::Operator::RShift => "rshift", + ast::Operator::BitOr => "or_", + ast::Operator::BitXor => "xor", + ast::Operator::BitAnd => "and_", + ast::Operator::FloorDiv => "floordiv", + }) +} + +/// Return the name of the `operator` implemented by the given comparison expression. +fn cmp_op(expr: &ast::ExprCompare, params: &ast::Parameters) -> Option<&'static str> { + let [arg1, arg2] = params.args.as_slice() else { + return None; + }; + let [op] = expr.ops.as_slice() else { + return None; + }; + let [right] = expr.comparators.as_slice() else { + return None; + }; + + match op { + ast::CmpOp::Eq => { + if match_arguments(arg1, arg2, &expr.left, right) { + Some("eq") + } else { + None + } + } + ast::CmpOp::NotEq => { + if match_arguments(arg1, arg2, &expr.left, right) { + Some("ne") + } else { + None + } + } + ast::CmpOp::Lt => { + if match_arguments(arg1, arg2, &expr.left, right) { + Some("lt") + } else { + None + } + } + ast::CmpOp::LtE => { + if match_arguments(arg1, arg2, &expr.left, right) { + Some("le") + } else { + None + } + } + ast::CmpOp::Gt => { + if match_arguments(arg1, arg2, &expr.left, right) { + Some("gt") + } else { + None + } + } + ast::CmpOp::GtE => { + if match_arguments(arg1, arg2, &expr.left, right) { + Some("ge") + } else { + None + } + } + ast::CmpOp::Is => { + if match_arguments(arg1, arg2, &expr.left, right) { + Some("is_") + } else { + None + } + } + ast::CmpOp::IsNot => { + if match_arguments(arg1, arg2, &expr.left, right) { + Some("is_not") + } else { + None + } + } + ast::CmpOp::In => { + // Note: `operator.contains` reverses the order of arguments. That is: + // `operator.contains` is equivalent to `lambda x, y: y in x`, rather than + // `lambda x, y: x in y`. + if match_arguments(arg1, arg2, right, &expr.left) { + Some("contains") + } else { + None + } + } + ast::CmpOp::NotIn => None, + } +} + +/// Returns `true` if the given arguments match the expected operands. +fn match_arguments( + arg1: &ast::ParameterWithDefault, + arg2: &ast::ParameterWithDefault, + operand1: &Expr, + operand2: &Expr, +) -> bool { + is_same_expression(arg1, operand1) && is_same_expression(arg2, operand2) +} + +/// Returns `true` if the given argument is the "same" as the given expression. For example, if +/// the argument has a default, it is not considered the same as any expression; if both match the +/// same name, they are considered the same. 
+fn is_same_expression(arg: &ast::ParameterWithDefault, expr: &Expr) -> bool { + if arg.default.is_some() { + false + } else if let Expr::Name(name) = expr { + name.id == arg.parameter.name.as_str() + } else { + false + } +} diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB118_FURB118.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB118_FURB118.py.snap new file mode 100644 index 0000000000..828a95ee3b --- /dev/null +++ b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB118_FURB118.py.snap @@ -0,0 +1,701 @@ +--- +source: crates/ruff_linter/src/rules/refurb/mod.rs +--- +FURB118.py:2:13: FURB118 [*] Use `operator.invert` instead of defining a lambda + | +1 | # Errors. +2 | op_bitnot = lambda x: ~x + | ^^^^^^^^^^^^ FURB118 +3 | op_not = lambda x: not x +4 | op_pos = lambda x: +x + | + = help: Replace with `operator.invert` + +ℹ Safe fix +1 1 | # Errors. +2 |-op_bitnot = lambda x: ~x + 2 |+import operator + 3 |+op_bitnot = operator.invert +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +5 6 | op_neg = lambda x: -x + +FURB118.py:3:10: FURB118 [*] Use `operator.not_` instead of defining a lambda + | +1 | # Errors. +2 | op_bitnot = lambda x: ~x +3 | op_not = lambda x: not x + | ^^^^^^^^^^^^^^^ FURB118 +4 | op_pos = lambda x: +x +5 | op_neg = lambda x: -x + | + = help: Replace with `operator.not_` + +ℹ Safe fix +1 1 | # Errors. + 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 |-op_not = lambda x: not x + 4 |+op_not = operator.not_ +4 5 | op_pos = lambda x: +x +5 6 | op_neg = lambda x: -x +6 7 | + +FURB118.py:4:10: FURB118 [*] Use `operator.pos` instead of defining a lambda + | +2 | op_bitnot = lambda x: ~x +3 | op_not = lambda x: not x +4 | op_pos = lambda x: +x + | ^^^^^^^^^^^^ FURB118 +5 | op_neg = lambda x: -x + | + = help: Replace with `operator.pos` + +ℹ Safe fix +1 1 | # Errors. + 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 |-op_pos = lambda x: +x + 5 |+op_pos = operator.pos +5 6 | op_neg = lambda x: -x +6 7 | +7 8 | op_add = lambda x, y: x + y + +FURB118.py:5:10: FURB118 [*] Use `operator.neg` instead of defining a lambda + | +3 | op_not = lambda x: not x +4 | op_pos = lambda x: +x +5 | op_neg = lambda x: -x + | ^^^^^^^^^^^^ FURB118 +6 | +7 | op_add = lambda x, y: x + y + | + = help: Replace with `operator.neg` + +ℹ Safe fix +1 1 | # Errors. + 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +5 |-op_neg = lambda x: -x + 6 |+op_neg = operator.neg +6 7 | +7 8 | op_add = lambda x, y: x + y +8 9 | op_sub = lambda x, y: x - y + +FURB118.py:7:10: FURB118 [*] Use `operator.add` instead of defining a lambda + | +5 | op_neg = lambda x: -x +6 | +7 | op_add = lambda x, y: x + y + | ^^^^^^^^^^^^^^^^^^ FURB118 +8 | op_sub = lambda x, y: x - y +9 | op_mult = lambda x, y: x * y + | + = help: Replace with `operator.add` + +ℹ Safe fix +1 1 | # Errors. 
+ 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +5 6 | op_neg = lambda x: -x +6 7 | +7 |-op_add = lambda x, y: x + y + 8 |+op_add = operator.add +8 9 | op_sub = lambda x, y: x - y +9 10 | op_mult = lambda x, y: x * y +10 11 | op_matmutl = lambda x, y: x @ y + +FURB118.py:8:10: FURB118 [*] Use `operator.sub` instead of defining a lambda + | + 7 | op_add = lambda x, y: x + y + 8 | op_sub = lambda x, y: x - y + | ^^^^^^^^^^^^^^^^^^ FURB118 + 9 | op_mult = lambda x, y: x * y +10 | op_matmutl = lambda x, y: x @ y + | + = help: Replace with `operator.sub` + +ℹ Safe fix +1 1 | # Errors. + 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +5 6 | op_neg = lambda x: -x +6 7 | +7 8 | op_add = lambda x, y: x + y +8 |-op_sub = lambda x, y: x - y + 9 |+op_sub = operator.sub +9 10 | op_mult = lambda x, y: x * y +10 11 | op_matmutl = lambda x, y: x @ y +11 12 | op_truediv = lambda x, y: x / y + +FURB118.py:9:11: FURB118 [*] Use `operator.mul` instead of defining a lambda + | + 7 | op_add = lambda x, y: x + y + 8 | op_sub = lambda x, y: x - y + 9 | op_mult = lambda x, y: x * y + | ^^^^^^^^^^^^^^^^^^ FURB118 +10 | op_matmutl = lambda x, y: x @ y +11 | op_truediv = lambda x, y: x / y + | + = help: Replace with `operator.mul` + +ℹ Safe fix +1 1 | # Errors. + 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +-------------------------------------------------------------------------------- +6 7 | +7 8 | op_add = lambda x, y: x + y +8 9 | op_sub = lambda x, y: x - y +9 |-op_mult = lambda x, y: x * y + 10 |+op_mult = operator.mul +10 11 | op_matmutl = lambda x, y: x @ y +11 12 | op_truediv = lambda x, y: x / y +12 13 | op_mod = lambda x, y: x % y + +FURB118.py:10:14: FURB118 [*] Use `operator.matmul` instead of defining a lambda + | + 8 | op_sub = lambda x, y: x - y + 9 | op_mult = lambda x, y: x * y +10 | op_matmutl = lambda x, y: x @ y + | ^^^^^^^^^^^^^^^^^^ FURB118 +11 | op_truediv = lambda x, y: x / y +12 | op_mod = lambda x, y: x % y + | + = help: Replace with `operator.matmul` + +ℹ Safe fix +1 1 | # Errors. + 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +-------------------------------------------------------------------------------- +7 8 | op_add = lambda x, y: x + y +8 9 | op_sub = lambda x, y: x - y +9 10 | op_mult = lambda x, y: x * y +10 |-op_matmutl = lambda x, y: x @ y + 11 |+op_matmutl = operator.matmul +11 12 | op_truediv = lambda x, y: x / y +12 13 | op_mod = lambda x, y: x % y +13 14 | op_pow = lambda x, y: x ** y + +FURB118.py:11:14: FURB118 [*] Use `operator.truediv` instead of defining a lambda + | + 9 | op_mult = lambda x, y: x * y +10 | op_matmutl = lambda x, y: x @ y +11 | op_truediv = lambda x, y: x / y + | ^^^^^^^^^^^^^^^^^^ FURB118 +12 | op_mod = lambda x, y: x % y +13 | op_pow = lambda x, y: x ** y + | + = help: Replace with `operator.truediv` + +ℹ Safe fix +1 1 | # Errors. 
+ 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +-------------------------------------------------------------------------------- +8 9 | op_sub = lambda x, y: x - y +9 10 | op_mult = lambda x, y: x * y +10 11 | op_matmutl = lambda x, y: x @ y +11 |-op_truediv = lambda x, y: x / y + 12 |+op_truediv = operator.truediv +12 13 | op_mod = lambda x, y: x % y +13 14 | op_pow = lambda x, y: x ** y +14 15 | op_lshift = lambda x, y: x << y + +FURB118.py:12:10: FURB118 [*] Use `operator.mod` instead of defining a lambda + | +10 | op_matmutl = lambda x, y: x @ y +11 | op_truediv = lambda x, y: x / y +12 | op_mod = lambda x, y: x % y + | ^^^^^^^^^^^^^^^^^^ FURB118 +13 | op_pow = lambda x, y: x ** y +14 | op_lshift = lambda x, y: x << y + | + = help: Replace with `operator.mod` + +ℹ Safe fix +1 1 | # Errors. + 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +-------------------------------------------------------------------------------- +9 10 | op_mult = lambda x, y: x * y +10 11 | op_matmutl = lambda x, y: x @ y +11 12 | op_truediv = lambda x, y: x / y +12 |-op_mod = lambda x, y: x % y + 13 |+op_mod = operator.mod +13 14 | op_pow = lambda x, y: x ** y +14 15 | op_lshift = lambda x, y: x << y +15 16 | op_rshift = lambda x, y: x >> y + +FURB118.py:13:10: FURB118 [*] Use `operator.pow` instead of defining a lambda + | +11 | op_truediv = lambda x, y: x / y +12 | op_mod = lambda x, y: x % y +13 | op_pow = lambda x, y: x ** y + | ^^^^^^^^^^^^^^^^^^^ FURB118 +14 | op_lshift = lambda x, y: x << y +15 | op_rshift = lambda x, y: x >> y + | + = help: Replace with `operator.pow` + +ℹ Safe fix +1 1 | # Errors. + 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +-------------------------------------------------------------------------------- +10 11 | op_matmutl = lambda x, y: x @ y +11 12 | op_truediv = lambda x, y: x / y +12 13 | op_mod = lambda x, y: x % y +13 |-op_pow = lambda x, y: x ** y + 14 |+op_pow = operator.pow +14 15 | op_lshift = lambda x, y: x << y +15 16 | op_rshift = lambda x, y: x >> y +16 17 | op_bitor = lambda x, y: x | y + +FURB118.py:14:13: FURB118 [*] Use `operator.lshift` instead of defining a lambda + | +12 | op_mod = lambda x, y: x % y +13 | op_pow = lambda x, y: x ** y +14 | op_lshift = lambda x, y: x << y + | ^^^^^^^^^^^^^^^^^^^ FURB118 +15 | op_rshift = lambda x, y: x >> y +16 | op_bitor = lambda x, y: x | y + | + = help: Replace with `operator.lshift` + +ℹ Safe fix +1 1 | # Errors. + 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +-------------------------------------------------------------------------------- +11 12 | op_truediv = lambda x, y: x / y +12 13 | op_mod = lambda x, y: x % y +13 14 | op_pow = lambda x, y: x ** y +14 |-op_lshift = lambda x, y: x << y + 15 |+op_lshift = operator.lshift +15 16 | op_rshift = lambda x, y: x >> y +16 17 | op_bitor = lambda x, y: x | y +17 18 | op_xor = lambda x, y: x ^ y + +FURB118.py:15:13: FURB118 [*] Use `operator.rshift` instead of defining a lambda + | +13 | op_pow = lambda x, y: x ** y +14 | op_lshift = lambda x, y: x << y +15 | op_rshift = lambda x, y: x >> y + | ^^^^^^^^^^^^^^^^^^^ FURB118 +16 | op_bitor = lambda x, y: x | y +17 | op_xor = lambda x, y: x ^ y + | + = help: Replace with `operator.rshift` + +ℹ Safe fix +1 1 | # Errors. 
+ 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +-------------------------------------------------------------------------------- +12 13 | op_mod = lambda x, y: x % y +13 14 | op_pow = lambda x, y: x ** y +14 15 | op_lshift = lambda x, y: x << y +15 |-op_rshift = lambda x, y: x >> y + 16 |+op_rshift = operator.rshift +16 17 | op_bitor = lambda x, y: x | y +17 18 | op_xor = lambda x, y: x ^ y +18 19 | op_bitand = lambda x, y: x & y + +FURB118.py:16:12: FURB118 [*] Use `operator.or_` instead of defining a lambda + | +14 | op_lshift = lambda x, y: x << y +15 | op_rshift = lambda x, y: x >> y +16 | op_bitor = lambda x, y: x | y + | ^^^^^^^^^^^^^^^^^^ FURB118 +17 | op_xor = lambda x, y: x ^ y +18 | op_bitand = lambda x, y: x & y + | + = help: Replace with `operator.or_` + +ℹ Safe fix +1 1 | # Errors. + 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +-------------------------------------------------------------------------------- +13 14 | op_pow = lambda x, y: x ** y +14 15 | op_lshift = lambda x, y: x << y +15 16 | op_rshift = lambda x, y: x >> y +16 |-op_bitor = lambda x, y: x | y + 17 |+op_bitor = operator.or_ +17 18 | op_xor = lambda x, y: x ^ y +18 19 | op_bitand = lambda x, y: x & y +19 20 | op_floordiv = lambda x, y: x // y + +FURB118.py:17:10: FURB118 [*] Use `operator.xor` instead of defining a lambda + | +15 | op_rshift = lambda x, y: x >> y +16 | op_bitor = lambda x, y: x | y +17 | op_xor = lambda x, y: x ^ y + | ^^^^^^^^^^^^^^^^^^ FURB118 +18 | op_bitand = lambda x, y: x & y +19 | op_floordiv = lambda x, y: x // y + | + = help: Replace with `operator.xor` + +ℹ Safe fix +1 1 | # Errors. + 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +-------------------------------------------------------------------------------- +14 15 | op_lshift = lambda x, y: x << y +15 16 | op_rshift = lambda x, y: x >> y +16 17 | op_bitor = lambda x, y: x | y +17 |-op_xor = lambda x, y: x ^ y + 18 |+op_xor = operator.xor +18 19 | op_bitand = lambda x, y: x & y +19 20 | op_floordiv = lambda x, y: x // y +20 21 | + +FURB118.py:18:13: FURB118 [*] Use `operator.and_` instead of defining a lambda + | +16 | op_bitor = lambda x, y: x | y +17 | op_xor = lambda x, y: x ^ y +18 | op_bitand = lambda x, y: x & y + | ^^^^^^^^^^^^^^^^^^ FURB118 +19 | op_floordiv = lambda x, y: x // y + | + = help: Replace with `operator.and_` + +ℹ Safe fix +1 1 | # Errors. + 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +-------------------------------------------------------------------------------- +15 16 | op_rshift = lambda x, y: x >> y +16 17 | op_bitor = lambda x, y: x | y +17 18 | op_xor = lambda x, y: x ^ y +18 |-op_bitand = lambda x, y: x & y + 19 |+op_bitand = operator.and_ +19 20 | op_floordiv = lambda x, y: x // y +20 21 | +21 22 | op_eq = lambda x, y: x == y + +FURB118.py:19:15: FURB118 [*] Use `operator.floordiv` instead of defining a lambda + | +17 | op_xor = lambda x, y: x ^ y +18 | op_bitand = lambda x, y: x & y +19 | op_floordiv = lambda x, y: x // y + | ^^^^^^^^^^^^^^^^^^^ FURB118 +20 | +21 | op_eq = lambda x, y: x == y + | + = help: Replace with `operator.floordiv` + +ℹ Safe fix +1 1 | # Errors. 
+ 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +-------------------------------------------------------------------------------- +16 17 | op_bitor = lambda x, y: x | y +17 18 | op_xor = lambda x, y: x ^ y +18 19 | op_bitand = lambda x, y: x & y +19 |-op_floordiv = lambda x, y: x // y + 20 |+op_floordiv = operator.floordiv +20 21 | +21 22 | op_eq = lambda x, y: x == y +22 23 | op_ne = lambda x, y: x != y + +FURB118.py:21:9: FURB118 [*] Use `operator.eq` instead of defining a lambda + | +19 | op_floordiv = lambda x, y: x // y +20 | +21 | op_eq = lambda x, y: x == y + | ^^^^^^^^^^^^^^^^^^^ FURB118 +22 | op_ne = lambda x, y: x != y +23 | op_lt = lambda x, y: x < y + | + = help: Replace with `operator.eq` + +ℹ Safe fix +1 1 | # Errors. + 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +-------------------------------------------------------------------------------- +18 19 | op_bitand = lambda x, y: x & y +19 20 | op_floordiv = lambda x, y: x // y +20 21 | +21 |-op_eq = lambda x, y: x == y + 22 |+op_eq = operator.eq +22 23 | op_ne = lambda x, y: x != y +23 24 | op_lt = lambda x, y: x < y +24 25 | op_lte = lambda x, y: x <= y + +FURB118.py:22:9: FURB118 [*] Use `operator.ne` instead of defining a lambda + | +21 | op_eq = lambda x, y: x == y +22 | op_ne = lambda x, y: x != y + | ^^^^^^^^^^^^^^^^^^^ FURB118 +23 | op_lt = lambda x, y: x < y +24 | op_lte = lambda x, y: x <= y + | + = help: Replace with `operator.ne` + +ℹ Safe fix +1 1 | # Errors. + 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +-------------------------------------------------------------------------------- +19 20 | op_floordiv = lambda x, y: x // y +20 21 | +21 22 | op_eq = lambda x, y: x == y +22 |-op_ne = lambda x, y: x != y + 23 |+op_ne = operator.ne +23 24 | op_lt = lambda x, y: x < y +24 25 | op_lte = lambda x, y: x <= y +25 26 | op_gt = lambda x, y: x > y + +FURB118.py:23:9: FURB118 [*] Use `operator.lt` instead of defining a lambda + | +21 | op_eq = lambda x, y: x == y +22 | op_ne = lambda x, y: x != y +23 | op_lt = lambda x, y: x < y + | ^^^^^^^^^^^^^^^^^^ FURB118 +24 | op_lte = lambda x, y: x <= y +25 | op_gt = lambda x, y: x > y + | + = help: Replace with `operator.lt` + +ℹ Safe fix +1 1 | # Errors. + 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +-------------------------------------------------------------------------------- +20 21 | +21 22 | op_eq = lambda x, y: x == y +22 23 | op_ne = lambda x, y: x != y +23 |-op_lt = lambda x, y: x < y + 24 |+op_lt = operator.lt +24 25 | op_lte = lambda x, y: x <= y +25 26 | op_gt = lambda x, y: x > y +26 27 | op_gte = lambda x, y: x >= y + +FURB118.py:24:10: FURB118 [*] Use `operator.le` instead of defining a lambda + | +22 | op_ne = lambda x, y: x != y +23 | op_lt = lambda x, y: x < y +24 | op_lte = lambda x, y: x <= y + | ^^^^^^^^^^^^^^^^^^^ FURB118 +25 | op_gt = lambda x, y: x > y +26 | op_gte = lambda x, y: x >= y + | + = help: Replace with `operator.le` + +ℹ Safe fix +1 1 | # Errors. 
+ 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +-------------------------------------------------------------------------------- +21 22 | op_eq = lambda x, y: x == y +22 23 | op_ne = lambda x, y: x != y +23 24 | op_lt = lambda x, y: x < y +24 |-op_lte = lambda x, y: x <= y + 25 |+op_lte = operator.le +25 26 | op_gt = lambda x, y: x > y +26 27 | op_gte = lambda x, y: x >= y +27 28 | op_is = lambda x, y: x is y + +FURB118.py:25:9: FURB118 [*] Use `operator.gt` instead of defining a lambda + | +23 | op_lt = lambda x, y: x < y +24 | op_lte = lambda x, y: x <= y +25 | op_gt = lambda x, y: x > y + | ^^^^^^^^^^^^^^^^^^ FURB118 +26 | op_gte = lambda x, y: x >= y +27 | op_is = lambda x, y: x is y + | + = help: Replace with `operator.gt` + +ℹ Safe fix +1 1 | # Errors. + 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +-------------------------------------------------------------------------------- +22 23 | op_ne = lambda x, y: x != y +23 24 | op_lt = lambda x, y: x < y +24 25 | op_lte = lambda x, y: x <= y +25 |-op_gt = lambda x, y: x > y + 26 |+op_gt = operator.gt +26 27 | op_gte = lambda x, y: x >= y +27 28 | op_is = lambda x, y: x is y +28 29 | op_isnot = lambda x, y: x is not y + +FURB118.py:26:10: FURB118 [*] Use `operator.ge` instead of defining a lambda + | +24 | op_lte = lambda x, y: x <= y +25 | op_gt = lambda x, y: x > y +26 | op_gte = lambda x, y: x >= y + | ^^^^^^^^^^^^^^^^^^^ FURB118 +27 | op_is = lambda x, y: x is y +28 | op_isnot = lambda x, y: x is not y + | + = help: Replace with `operator.ge` + +ℹ Safe fix +1 1 | # Errors. + 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +-------------------------------------------------------------------------------- +23 24 | op_lt = lambda x, y: x < y +24 25 | op_lte = lambda x, y: x <= y +25 26 | op_gt = lambda x, y: x > y +26 |-op_gte = lambda x, y: x >= y + 27 |+op_gte = operator.ge +27 28 | op_is = lambda x, y: x is y +28 29 | op_isnot = lambda x, y: x is not y +29 30 | op_in = lambda x, y: y in x + +FURB118.py:27:9: FURB118 [*] Use `operator.is_` instead of defining a lambda + | +25 | op_gt = lambda x, y: x > y +26 | op_gte = lambda x, y: x >= y +27 | op_is = lambda x, y: x is y + | ^^^^^^^^^^^^^^^^^^^ FURB118 +28 | op_isnot = lambda x, y: x is not y +29 | op_in = lambda x, y: y in x + | + = help: Replace with `operator.is_` + +ℹ Safe fix +1 1 | # Errors. + 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +-------------------------------------------------------------------------------- +24 25 | op_lte = lambda x, y: x <= y +25 26 | op_gt = lambda x, y: x > y +26 27 | op_gte = lambda x, y: x >= y +27 |-op_is = lambda x, y: x is y + 28 |+op_is = operator.is_ +28 29 | op_isnot = lambda x, y: x is not y +29 30 | op_in = lambda x, y: y in x +30 31 | + +FURB118.py:28:12: FURB118 [*] Use `operator.is_not` instead of defining a lambda + | +26 | op_gte = lambda x, y: x >= y +27 | op_is = lambda x, y: x is y +28 | op_isnot = lambda x, y: x is not y + | ^^^^^^^^^^^^^^^^^^^^^^^ FURB118 +29 | op_in = lambda x, y: y in x + | + = help: Replace with `operator.is_not` + +ℹ Safe fix +1 1 | # Errors. 
+ 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +-------------------------------------------------------------------------------- +25 26 | op_gt = lambda x, y: x > y +26 27 | op_gte = lambda x, y: x >= y +27 28 | op_is = lambda x, y: x is y +28 |-op_isnot = lambda x, y: x is not y + 29 |+op_isnot = operator.is_not +29 30 | op_in = lambda x, y: y in x +30 31 | +31 32 | + +FURB118.py:29:9: FURB118 [*] Use `operator.contains` instead of defining a lambda + | +27 | op_is = lambda x, y: x is y +28 | op_isnot = lambda x, y: x is not y +29 | op_in = lambda x, y: y in x + | ^^^^^^^^^^^^^^^^^^^ FURB118 + | + = help: Replace with `operator.contains` + +ℹ Safe fix +1 1 | # Errors. + 2 |+import operator +2 3 | op_bitnot = lambda x: ~x +3 4 | op_not = lambda x: not x +4 5 | op_pos = lambda x: +x +-------------------------------------------------------------------------------- +26 27 | op_gte = lambda x, y: x >= y +27 28 | op_is = lambda x, y: x is y +28 29 | op_isnot = lambda x, y: x is not y +29 |-op_in = lambda x, y: y in x + 30 |+op_in = operator.contains +30 31 | +31 32 | +32 33 | def op_not2(x): + +FURB118.py:32:1: FURB118 Use `operator.not_` instead of defining a function + | +32 | / def op_not2(x): +33 | | return not x + | |________________^ FURB118 + | + = help: Replace with `operator.not_` + +FURB118.py:36:1: FURB118 Use `operator.add` instead of defining a function + | +36 | / def op_add2(x, y): +37 | | return x + y + | |________________^ FURB118 + | + = help: Replace with `operator.add` + +FURB118.py:41:5: FURB118 Use `operator.add` instead of defining a function + | +40 | class Adder: +41 | def add(x, y): + | _____^ +42 | | return x + y + | |____________________^ FURB118 +43 | +44 | # OK. 
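As a quick aside on the FURB118 snapshots above, a minimal runnable sketch (illustrative only, not part of the diff) of the rewrites they record; note in particular that the `y in x` lambda maps onto `operator.contains`, whose argument order is `(container, item)`:

```python
import operator

# Lambda spellings flagged by FURB118 and their `operator` equivalents.
op_eq = operator.eq              # was: lambda x, y: x == y
op_floordiv = operator.floordiv  # was: lambda x, y: x // y
op_in = operator.contains       # was: lambda x, y: y in x (operands already reversed)

assert op_eq(2, 2) and not op_eq(2, 3)
assert op_floordiv(7, 2) == 3
assert op_in([1, 2, 3], 2)       # operator.contains(container, item)
```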
+ | + = help: Replace with `operator.add` + + diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB152_FURB152.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB152_FURB152.py.snap index aa97aead86..094f30df73 100644 --- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB152_FURB152.py.snap +++ b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB152_FURB152.py.snap @@ -43,6 +43,7 @@ FURB152.py:5:5: FURB152 [*] Replace `6.28` with `math.tau` 6 |+C = math.tau * r # FURB152 6 7 | 7 8 | e = 2.71 # FURB152 +8 9 | FURB152.py:7:5: FURB152 [*] Replace `2.71` with `math.e` | @@ -50,6 +51,8 @@ FURB152.py:7:5: FURB152 [*] Replace `2.71` with `math.e` 6 | 7 | e = 2.71 # FURB152 | ^^^^ FURB152 +8 | +9 | r = 3.15 # OK | = help: Use `math.e` @@ -63,5 +66,59 @@ FURB152.py:7:5: FURB152 [*] Replace `2.71` with `math.e` 6 7 | 7 |-e = 2.71 # FURB152 8 |+e = math.e # FURB152 +8 9 | +9 10 | r = 3.15 # OK +10 11 | + +FURB152.py:11:5: FURB152 [*] Replace `3.141` with `math.pi` + | + 9 | r = 3.15 # OK +10 | +11 | r = 3.141 # FURB152 + | ^^^^^ FURB152 +12 | +13 | r = 3.1415 # FURB152 + | + = help: Use `math.pi` + +ℹ Safe fix + 1 |+import math +1 2 | r = 3.1 # OK +2 3 | +3 4 | A = 3.14 * r ** 2 # FURB152 +-------------------------------------------------------------------------------- +8 9 | +9 10 | r = 3.15 # OK +10 11 | +11 |-r = 3.141 # FURB152 + 12 |+r = math.pi # FURB152 +12 13 | +13 14 | r = 3.1415 # FURB152 +14 15 | + +FURB152.py:13:5: FURB152 [*] Replace `3.1415` with `math.pi` + | +11 | r = 3.141 # FURB152 +12 | +13 | r = 3.1415 # FURB152 + | ^^^^^^ FURB152 +14 | +15 | e = 2.7 # OK + | + = help: Use `math.pi` + +ℹ Safe fix + 1 |+import math +1 2 | r = 3.1 # OK +2 3 | +3 4 | A = 3.14 * r ** 2 # FURB152 +-------------------------------------------------------------------------------- +10 11 | +11 12 | r = 3.141 # FURB152 +12 13 | +13 |-r = 3.1415 # FURB152 + 14 |+r = math.pi # FURB152 +14 15 | +15 16 | e = 2.7 # OK diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB163_FURB163.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB163_FURB163.py.snap index e6441a4c1e..aeb83174da 100644 --- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB163_FURB163.py.snap +++ b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB163_FURB163.py.snap @@ -187,7 +187,7 @@ FURB163.py:16:1: FURB163 [*] Prefer `math.log10(1)` over `math.log` with a redun 16 |+math.log10(1) 17 17 | special_log(1, math.e) 18 18 | special_log(1, special_e) -19 19 | +19 19 | math.log(1, 2.0) FURB163.py:17:1: FURB163 [*] Prefer `math.log(1)` over `math.log` with a redundant base | @@ -196,6 +196,7 @@ FURB163.py:17:1: FURB163 [*] Prefer `math.log(1)` over `math.log` with a redunda 17 | special_log(1, math.e) | ^^^^^^^^^^^^^^^^^^^^^^ FURB163 18 | special_log(1, special_e) +19 | math.log(1, 2.0) | = help: Replace with `math.log(1)` @@ -206,8 +207,8 @@ FURB163.py:17:1: FURB163 [*] Prefer `math.log(1)` over `math.log` with a redunda 17 |-special_log(1, math.e) 17 |+math.log(1) 18 18 | special_log(1, special_e) -19 19 | -20 20 | # Ok. 
+19 19 | math.log(1, 2.0) +20 20 | math.log(1, 10.0) FURB163.py:18:1: FURB163 [*] Prefer `math.log(1)` over `math.log` with a redundant base | @@ -215,8 +216,8 @@ FURB163.py:18:1: FURB163 [*] Prefer `math.log(1)` over `math.log` with a redunda 17 | special_log(1, math.e) 18 | special_log(1, special_e) | ^^^^^^^^^^^^^^^^^^^^^^^^^ FURB163 -19 | -20 | # Ok. +19 | math.log(1, 2.0) +20 | math.log(1, 10.0) | = help: Replace with `math.log(1)` @@ -226,8 +227,49 @@ FURB163.py:18:1: FURB163 [*] Prefer `math.log(1)` over `math.log` with a redunda 17 17 | special_log(1, math.e) 18 |-special_log(1, special_e) 18 |+math.log(1) -19 19 | -20 20 | # Ok. -21 21 | math.log2(1) +19 19 | math.log(1, 2.0) +20 20 | math.log(1, 10.0) +21 21 | + +FURB163.py:19:1: FURB163 [*] Prefer `math.log2(1)` over `math.log` with a redundant base + | +17 | special_log(1, math.e) +18 | special_log(1, special_e) +19 | math.log(1, 2.0) + | ^^^^^^^^^^^^^^^^ FURB163 +20 | math.log(1, 10.0) + | + = help: Replace with `math.log2(1)` + +ℹ Safe fix +16 16 | special_log(1, 10) +17 17 | special_log(1, math.e) +18 18 | special_log(1, special_e) +19 |-math.log(1, 2.0) + 19 |+math.log2(1) +20 20 | math.log(1, 10.0) +21 21 | +22 22 | # Ok. + +FURB163.py:20:1: FURB163 [*] Prefer `math.log10(1)` over `math.log` with a redundant base + | +18 | special_log(1, special_e) +19 | math.log(1, 2.0) +20 | math.log(1, 10.0) + | ^^^^^^^^^^^^^^^^^ FURB163 +21 | +22 | # Ok. + | + = help: Replace with `math.log10(1)` + +ℹ Safe fix +17 17 | special_log(1, math.e) +18 18 | special_log(1, special_e) +19 19 | math.log(1, 2.0) +20 |-math.log(1, 10.0) + 20 |+math.log10(1) +21 21 | +22 22 | # Ok. +23 23 | math.log2(1) diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB181_FURB181.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB181_FURB181.py.snap new file mode 100644 index 0000000000..cca7645c90 --- /dev/null +++ b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB181_FURB181.py.snap @@ -0,0 +1,339 @@ +--- +source: crates/ruff_linter/src/rules/refurb/mod.rs +--- +FURB181.py:19:1: FURB181 [*] Use of hashlib's `.digest().hex()` + | +17 | # these will match +18 | +19 | blake2b().digest().hex() + | ^^^^^^^^^^^^^^^^^^^^^^^^ FURB181 +20 | blake2s().digest().hex() +21 | md5().digest().hex() + | + = help: Replace with `.hexdigest()` + +ℹ Unsafe fix +16 16 | +17 17 | # these will match +18 18 | +19 |-blake2b().digest().hex() + 19 |+blake2b().hexdigest() +20 20 | blake2s().digest().hex() +21 21 | md5().digest().hex() +22 22 | sha1().digest().hex() + +FURB181.py:20:1: FURB181 [*] Use of hashlib's `.digest().hex()` + | +19 | blake2b().digest().hex() +20 | blake2s().digest().hex() + | ^^^^^^^^^^^^^^^^^^^^^^^^ FURB181 +21 | md5().digest().hex() +22 | sha1().digest().hex() + | + = help: Replace with `.hexdigest()` + +ℹ Unsafe fix +17 17 | # these will match +18 18 | +19 19 | blake2b().digest().hex() +20 |-blake2s().digest().hex() + 20 |+blake2s().hexdigest() +21 21 | md5().digest().hex() +22 22 | sha1().digest().hex() +23 23 | sha224().digest().hex() + +FURB181.py:21:1: FURB181 [*] Use of hashlib's `.digest().hex()` + | +19 | blake2b().digest().hex() +20 | blake2s().digest().hex() +21 | md5().digest().hex() + | ^^^^^^^^^^^^^^^^^^^^ FURB181 +22 | sha1().digest().hex() +23 | sha224().digest().hex() + | + = help: Replace with `.hexdigest()` + +ℹ Unsafe fix +18 18 | +19 19 | blake2b().digest().hex() +20 20 | blake2s().digest().hex() +21 
|-md5().digest().hex() + 21 |+md5().hexdigest() +22 22 | sha1().digest().hex() +23 23 | sha224().digest().hex() +24 24 | sha256().digest().hex() + +FURB181.py:22:1: FURB181 [*] Use of hashlib's `.digest().hex()` + | +20 | blake2s().digest().hex() +21 | md5().digest().hex() +22 | sha1().digest().hex() + | ^^^^^^^^^^^^^^^^^^^^^ FURB181 +23 | sha224().digest().hex() +24 | sha256().digest().hex() + | + = help: Replace with `.hexdigest()` + +ℹ Unsafe fix +19 19 | blake2b().digest().hex() +20 20 | blake2s().digest().hex() +21 21 | md5().digest().hex() +22 |-sha1().digest().hex() + 22 |+sha1().hexdigest() +23 23 | sha224().digest().hex() +24 24 | sha256().digest().hex() +25 25 | sha384().digest().hex() + +FURB181.py:23:1: FURB181 [*] Use of hashlib's `.digest().hex()` + | +21 | md5().digest().hex() +22 | sha1().digest().hex() +23 | sha224().digest().hex() + | ^^^^^^^^^^^^^^^^^^^^^^^ FURB181 +24 | sha256().digest().hex() +25 | sha384().digest().hex() + | + = help: Replace with `.hexdigest()` + +ℹ Unsafe fix +20 20 | blake2s().digest().hex() +21 21 | md5().digest().hex() +22 22 | sha1().digest().hex() +23 |-sha224().digest().hex() + 23 |+sha224().hexdigest() +24 24 | sha256().digest().hex() +25 25 | sha384().digest().hex() +26 26 | sha3_224().digest().hex() + +FURB181.py:24:1: FURB181 [*] Use of hashlib's `.digest().hex()` + | +22 | sha1().digest().hex() +23 | sha224().digest().hex() +24 | sha256().digest().hex() + | ^^^^^^^^^^^^^^^^^^^^^^^ FURB181 +25 | sha384().digest().hex() +26 | sha3_224().digest().hex() + | + = help: Replace with `.hexdigest()` + +ℹ Unsafe fix +21 21 | md5().digest().hex() +22 22 | sha1().digest().hex() +23 23 | sha224().digest().hex() +24 |-sha256().digest().hex() + 24 |+sha256().hexdigest() +25 25 | sha384().digest().hex() +26 26 | sha3_224().digest().hex() +27 27 | sha3_256().digest().hex() + +FURB181.py:25:1: FURB181 [*] Use of hashlib's `.digest().hex()` + | +23 | sha224().digest().hex() +24 | sha256().digest().hex() +25 | sha384().digest().hex() + | ^^^^^^^^^^^^^^^^^^^^^^^ FURB181 +26 | sha3_224().digest().hex() +27 | sha3_256().digest().hex() + | + = help: Replace with `.hexdigest()` + +ℹ Unsafe fix +22 22 | sha1().digest().hex() +23 23 | sha224().digest().hex() +24 24 | sha256().digest().hex() +25 |-sha384().digest().hex() + 25 |+sha384().hexdigest() +26 26 | sha3_224().digest().hex() +27 27 | sha3_256().digest().hex() +28 28 | sha3_384().digest().hex() + +FURB181.py:26:1: FURB181 [*] Use of hashlib's `.digest().hex()` + | +24 | sha256().digest().hex() +25 | sha384().digest().hex() +26 | sha3_224().digest().hex() + | ^^^^^^^^^^^^^^^^^^^^^^^^^ FURB181 +27 | sha3_256().digest().hex() +28 | sha3_384().digest().hex() + | + = help: Replace with `.hexdigest()` + +ℹ Unsafe fix +23 23 | sha224().digest().hex() +24 24 | sha256().digest().hex() +25 25 | sha384().digest().hex() +26 |-sha3_224().digest().hex() + 26 |+sha3_224().hexdigest() +27 27 | sha3_256().digest().hex() +28 28 | sha3_384().digest().hex() +29 29 | sha3_512().digest().hex() + +FURB181.py:27:1: FURB181 [*] Use of hashlib's `.digest().hex()` + | +25 | sha384().digest().hex() +26 | sha3_224().digest().hex() +27 | sha3_256().digest().hex() + | ^^^^^^^^^^^^^^^^^^^^^^^^^ FURB181 +28 | sha3_384().digest().hex() +29 | sha3_512().digest().hex() + | + = help: Replace with `.hexdigest()` + +ℹ Unsafe fix +24 24 | sha256().digest().hex() +25 25 | sha384().digest().hex() +26 26 | sha3_224().digest().hex() +27 |-sha3_256().digest().hex() + 27 |+sha3_256().hexdigest() +28 28 | sha3_384().digest().hex() +29 29 | 
sha3_512().digest().hex() +30 30 | sha512().digest().hex() + +FURB181.py:28:1: FURB181 [*] Use of hashlib's `.digest().hex()` + | +26 | sha3_224().digest().hex() +27 | sha3_256().digest().hex() +28 | sha3_384().digest().hex() + | ^^^^^^^^^^^^^^^^^^^^^^^^^ FURB181 +29 | sha3_512().digest().hex() +30 | sha512().digest().hex() + | + = help: Replace with `.hexdigest()` + +ℹ Unsafe fix +25 25 | sha384().digest().hex() +26 26 | sha3_224().digest().hex() +27 27 | sha3_256().digest().hex() +28 |-sha3_384().digest().hex() + 28 |+sha3_384().hexdigest() +29 29 | sha3_512().digest().hex() +30 30 | sha512().digest().hex() +31 31 | shake_128().digest(10).hex() + +FURB181.py:29:1: FURB181 [*] Use of hashlib's `.digest().hex()` + | +27 | sha3_256().digest().hex() +28 | sha3_384().digest().hex() +29 | sha3_512().digest().hex() + | ^^^^^^^^^^^^^^^^^^^^^^^^^ FURB181 +30 | sha512().digest().hex() +31 | shake_128().digest(10).hex() + | + = help: Replace with `.hexdigest()` + +ℹ Unsafe fix +26 26 | sha3_224().digest().hex() +27 27 | sha3_256().digest().hex() +28 28 | sha3_384().digest().hex() +29 |-sha3_512().digest().hex() + 29 |+sha3_512().hexdigest() +30 30 | sha512().digest().hex() +31 31 | shake_128().digest(10).hex() +32 32 | shake_256().digest(10).hex() + +FURB181.py:30:1: FURB181 [*] Use of hashlib's `.digest().hex()` + | +28 | sha3_384().digest().hex() +29 | sha3_512().digest().hex() +30 | sha512().digest().hex() + | ^^^^^^^^^^^^^^^^^^^^^^^ FURB181 +31 | shake_128().digest(10).hex() +32 | shake_256().digest(10).hex() + | + = help: Replace with `.hexdigest()` + +ℹ Unsafe fix +27 27 | sha3_256().digest().hex() +28 28 | sha3_384().digest().hex() +29 29 | sha3_512().digest().hex() +30 |-sha512().digest().hex() + 30 |+sha512().hexdigest() +31 31 | shake_128().digest(10).hex() +32 32 | shake_256().digest(10).hex() +33 33 | + +FURB181.py:31:1: FURB181 Use of hashlib's `.digest().hex()` + | +29 | sha3_512().digest().hex() +30 | sha512().digest().hex() +31 | shake_128().digest(10).hex() + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ FURB181 +32 | shake_256().digest(10).hex() + | + = help: Replace with `.hexdigest()` + +FURB181.py:32:1: FURB181 Use of hashlib's `.digest().hex()` + | +30 | sha512().digest().hex() +31 | shake_128().digest(10).hex() +32 | shake_256().digest(10).hex() + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ FURB181 +33 | +34 | hashlib.sha256().digest().hex() + | + = help: Replace with `.hexdigest()` + +FURB181.py:34:1: FURB181 [*] Use of hashlib's `.digest().hex()` + | +32 | shake_256().digest(10).hex() +33 | +34 | hashlib.sha256().digest().hex() + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ FURB181 +35 | +36 | sha256(b"text").digest().hex() + | + = help: Replace with `.hexdigest()` + +ℹ Unsafe fix +31 31 | shake_128().digest(10).hex() +32 32 | shake_256().digest(10).hex() +33 33 | +34 |-hashlib.sha256().digest().hex() + 34 |+hashlib.sha256().hexdigest() +35 35 | +36 36 | sha256(b"text").digest().hex() +37 37 | + +FURB181.py:36:1: FURB181 [*] Use of hashlib's `.digest().hex()` + | +34 | hashlib.sha256().digest().hex() +35 | +36 | sha256(b"text").digest().hex() + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ FURB181 +37 | +38 | hash_algo().digest().hex() + | + = help: Replace with `.hexdigest()` + +ℹ Unsafe fix +33 33 | +34 34 | hashlib.sha256().digest().hex() +35 35 | +36 |-sha256(b"text").digest().hex() + 36 |+sha256(b"text").hexdigest() +37 37 | +38 38 | hash_algo().digest().hex() +39 39 | + +FURB181.py:38:1: FURB181 [*] Use of hashlib's `.digest().hex()` + | +36 | sha256(b"text").digest().hex() +37 | +38 | hash_algo().digest().hex() + | 
^^^^^^^^^^^^^^^^^^^^^^^^^^ FURB181 +39 | +40 | # not yet supported + | + = help: Replace with `.hexdigest()` + +ℹ Unsafe fix +35 35 | +36 36 | sha256(b"text").digest().hex() +37 37 | +38 |-hash_algo().digest().hex() + 38 |+hash_algo().hexdigest() +39 39 | +40 40 | # not yet supported +41 41 | h = sha256() + + diff --git a/crates/ruff_linter/src/rules/ruff/rules/asyncio_dangling_task.rs b/crates/ruff_linter/src/rules/ruff/rules/asyncio_dangling_task.rs index f6f03fc8d9..15188b5880 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/asyncio_dangling_task.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/asyncio_dangling_task.rs @@ -1,14 +1,12 @@ use std::fmt; -use ruff_python_ast::{self as ast, Expr}; - +use ast::Stmt; use ruff_diagnostics::{Diagnostic, Violation}; use ruff_macros::{derive_message_formats, violation}; -use ruff_python_semantic::analyze::typing; +use ruff_python_ast::{self as ast, Expr}; +use ruff_python_semantic::{analyze::typing, Scope, SemanticModel}; use ruff_text_size::Ranged; -use crate::checkers::ast::Checker; - /// ## What it does /// Checks for `asyncio.create_task` and `asyncio.ensure_future` calls /// that do not store a reference to the returned result. @@ -66,35 +64,34 @@ impl Violation for AsyncioDanglingTask { } /// RUF006 -pub(crate) fn asyncio_dangling_task(checker: &mut Checker, expr: &Expr) { +pub(crate) fn asyncio_dangling_task(expr: &Expr, semantic: &SemanticModel) -> Option { let Expr::Call(ast::ExprCall { func, .. }) = expr else { - return; + return None; }; // Ex) `asyncio.create_task(...)` - if let Some(method) = checker - .semantic() - .resolve_call_path(func) - .and_then(|call_path| match call_path.as_slice() { - ["asyncio", "create_task"] => Some(Method::CreateTask), - ["asyncio", "ensure_future"] => Some(Method::EnsureFuture), - _ => None, - }) + if let Some(method) = + semantic + .resolve_call_path(func) + .and_then(|call_path| match call_path.as_slice() { + ["asyncio", "create_task"] => Some(Method::CreateTask), + ["asyncio", "ensure_future"] => Some(Method::EnsureFuture), + _ => None, + }) { - checker.diagnostics.push(Diagnostic::new( + return Some(Diagnostic::new( AsyncioDanglingTask { method }, expr.range(), )); - return; } // Ex) `loop = asyncio.get_running_loop(); loop.create_task(...)` if let Expr::Attribute(ast::ExprAttribute { attr, value, .. }) = func.as_ref() { if attr == "create_task" { - if typing::resolve_assignment(value, checker.semantic()).is_some_and(|call_path| { + if typing::resolve_assignment(value, semantic).is_some_and(|call_path| { matches!(call_path.as_slice(), ["asyncio", "get_running_loop"]) }) { - checker.diagnostics.push(Diagnostic::new( + return Some(Diagnostic::new( AsyncioDanglingTask { method: Method::CreateTask, }, @@ -103,6 +100,56 @@ pub(crate) fn asyncio_dangling_task(checker: &mut Checker, expr: &Expr) { } } } + None +} + +/// RUF006 +pub(crate) fn asyncio_dangling_binding( + scope: &Scope, + semantic: &SemanticModel, + diagnostics: &mut Vec, +) { + for binding_id in scope.binding_ids() { + // If the binding itself is used, or it's not an assignment, skip it. 
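Alongside the refactor of `asyncio_dangling_task` into a reusable helper and the new binding-based `asyncio_dangling_binding` pass, a small runnable sketch (illustrative only, not taken from the diff) of the pattern the updated RUF006 snapshots flag, together with the usual way to keep a strong reference to the task:

```python
import asyncio

background_tasks = set()

async def main() -> None:
    # Assigned but never used: the kind of binding the new check reports, since the
    # event loop holds only a weak reference and the task may be garbage-collected.
    task = asyncio.create_task(asyncio.sleep(1))  # RUF006

    # Keeping a strong reference until the task completes avoids the warning.
    task = asyncio.create_task(asyncio.sleep(1))
    background_tasks.add(task)
    task.add_done_callback(background_tasks.discard)
    await asyncio.sleep(2)

asyncio.run(main())
```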
+ let binding = semantic.binding(binding_id); + if binding.is_used() || !binding.kind.is_assignment() { + continue; + } + + // Otherwise, any dangling tasks, including those that are shadowed, as in: + // ```python + // if x > 0: + // task = asyncio.create_task(make_request()) + // else: + // task = asyncio.create_task(make_request()) + // ``` + for binding_id in + std::iter::successors(Some(binding_id), |id| semantic.shadowed_binding(*id)) + { + let binding = semantic.binding(binding_id); + if binding.is_used() || !binding.kind.is_assignment() { + continue; + } + + let Some(source) = binding.source else { + continue; + }; + + let diagnostic = match semantic.statement(source) { + Stmt::Assign(ast::StmtAssign { value, targets, .. }) if targets.len() == 1 => { + asyncio_dangling_task(value, semantic) + } + Stmt::AnnAssign(ast::StmtAnnAssign { + value: Some(value), .. + }) => asyncio_dangling_task(value, semantic), + _ => None, + }; + + if let Some(diagnostic) = diagnostic { + diagnostics.push(diagnostic); + } + } + } } #[derive(Debug, PartialEq, Eq, Copy, Clone)] diff --git a/crates/ruff_linter/src/rules/ruff/rules/explicit_f_string_type_conversion.rs b/crates/ruff_linter/src/rules/ruff/rules/explicit_f_string_type_conversion.rs index d30a04705e..7fe11923d2 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/explicit_f_string_type_conversion.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/explicit_f_string_type_conversion.rs @@ -53,10 +53,12 @@ impl AlwaysFixableViolation for ExplicitFStringTypeConversion { /// RUF010 pub(crate) fn explicit_f_string_type_conversion(checker: &mut Checker, f_string: &ast::FString) { - for (index, expr) in f_string.values.iter().enumerate() { - let Some(ast::ExprFormattedValue { - value, conversion, .. - }) = expr.as_formatted_value_expr() + for (index, element) in f_string.elements.iter().enumerate() { + let Some(ast::FStringExpressionElement { + expression, + conversion, + .. + }) = element.as_expression() else { continue; }; @@ -75,7 +77,7 @@ pub(crate) fn explicit_f_string_type_conversion(checker: &mut Checker, f_string: range: _, }, .. - }) = value.as_ref() + }) = expression.as_ref() else { continue; }; @@ -110,7 +112,7 @@ pub(crate) fn explicit_f_string_type_conversion(checker: &mut Checker, f_string: continue; } - let mut diagnostic = Diagnostic::new(ExplicitFStringTypeConversion, value.range()); + let mut diagnostic = Diagnostic::new(ExplicitFStringTypeConversion, expression.range()); diagnostic.try_set_fix(|| { convert_call_to_conversion_flag(f_string, index, checker.locator(), checker.stylist()) }); diff --git a/crates/ruff_linter/src/rules/ruff/rules/helpers.rs b/crates/ruff_linter/src/rules/ruff/rules/helpers.rs index 6cf01db9db..e7b4bfabc5 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/helpers.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/helpers.rs @@ -1,7 +1,6 @@ -use ruff_python_ast::{self as ast, Arguments, Expr}; - use ruff_python_ast::helpers::{map_callable, map_subscript}; -use ruff_python_semantic::{BindingKind, SemanticModel}; +use ruff_python_ast::{self as ast, Expr}; +use ruff_python_semantic::{analyze, BindingKind, SemanticModel}; /// Return `true` if the given [`Expr`] is a special class attribute, like `__slots__`. /// @@ -57,19 +56,13 @@ pub(super) fn has_default_copy_semantics( class_def: &ast::StmtClassDef, semantic: &SemanticModel, ) -> bool { - let Some(Arguments { args: bases, .. 
}) = class_def.arguments.as_deref() else { - return false; - }; - - bases.iter().any(|expr| { - semantic.resolve_call_path(expr).is_some_and(|call_path| { - matches!( - call_path.as_slice(), - ["pydantic", "BaseModel" | "BaseSettings"] - | ["pydantic_settings", "BaseSettings"] - | ["msgspec", "Struct"] - ) - }) + analyze::class::any_over_body(class_def, semantic, &|call_path| { + matches!( + call_path.as_slice(), + ["pydantic", "BaseModel" | "BaseSettings"] + | ["pydantic_settings", "BaseSettings"] + | ["msgspec", "Struct"] + ) }) } diff --git a/crates/ruff_linter/src/rules/ruff/rules/unreachable.rs b/crates/ruff_linter/src/rules/ruff/rules/unreachable.rs index e94cb4179d..5445aabd3e 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/unreachable.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/unreachable.rs @@ -635,7 +635,6 @@ impl<'stmt> BasicBlocksBuilder<'stmt> { | Expr::Set(_) | Expr::Compare(_) | Expr::Call(_) - | Expr::FormattedValue(_) | Expr::FString(_) | Expr::StringLiteral(_) | Expr::BytesLiteral(_) diff --git a/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF006_RUF006.py.snap b/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF006_RUF006.py.snap index 11a0bababe..def73e5a2e 100644 --- a/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF006_RUF006.py.snap +++ b/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF006_RUF006.py.snap @@ -17,11 +17,27 @@ RUF006.py:11:5: RUF006 Store a reference to the return value of `asyncio.ensure_ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ RUF006 | -RUF006.py:79:5: RUF006 Store a reference to the return value of `asyncio.create_task` +RUF006.py:68:12: RUF006 Store a reference to the return value of `asyncio.create_task` | -77 | def f(): -78 | loop = asyncio.get_running_loop() -79 | loop.create_task(coordinator.ws_connect()) # Error +66 | # Error +67 | def f(): +68 | task = asyncio.create_task(coordinator.ws_connect()) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ RUF006 + | + +RUF006.py:74:26: RUF006 Store a reference to the return value of `asyncio.create_task` + | +72 | def f(): +73 | loop = asyncio.get_running_loop() +74 | task: asyncio.Task = loop.create_task(coordinator.ws_connect()) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ RUF006 + | + +RUF006.py:97:5: RUF006 Store a reference to the return value of `asyncio.create_task` + | +95 | def f(): +96 | loop = asyncio.get_running_loop() +97 | loop.create_task(coordinator.ws_connect()) # Error | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ RUF006 | diff --git a/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__ruf100_3.snap b/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__ruf100_3.snap index 019c140aec..5f38b489e2 100644 --- a/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__ruf100_3.snap +++ b/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__ruf100_3.snap @@ -344,6 +344,7 @@ RUF100_3.py:23:11: RUF100 [*] Unused `noqa` directive (unused: `E501`) 23 |+print(a) # noqa: F821 # comment 24 24 | print(a) # noqa: E501, F821 comment 25 25 | print(a) # noqa: E501, F821 comment +26 26 | RUF100_3.py:24:11: RUF100 [*] Unused `noqa` directive (unused: `E501`) | @@ -362,6 +363,8 @@ RUF100_3.py:24:11: RUF100 [*] Unused `noqa` directive (unused: `E501`) 24 |-print(a) # noqa: E501, F821 comment 24 |+print(a) # noqa: F821 comment 25 25 | print(a) # noqa: E501, F821 
comment +26 26 | +27 27 | print(a) # comment with unicode µ # noqa: E501 RUF100_3.py:25:11: RUF100 [*] Unused `noqa` directive (unused: `E501`) | @@ -369,6 +372,8 @@ RUF100_3.py:25:11: RUF100 [*] Unused `noqa` directive (unused: `E501`) 24 | print(a) # noqa: E501, F821 comment 25 | print(a) # noqa: E501, F821 comment | ^^^^^^^^^^^^^^^^^^ RUF100 +26 | +27 | print(a) # comment with unicode µ # noqa: E501 | = help: Remove unused `noqa` directive @@ -378,5 +383,50 @@ RUF100_3.py:25:11: RUF100 [*] Unused `noqa` directive (unused: `E501`) 24 24 | print(a) # noqa: E501, F821 comment 25 |-print(a) # noqa: E501, F821 comment 25 |+print(a) # noqa: F821 comment +26 26 | +27 27 | print(a) # comment with unicode µ # noqa: E501 +28 28 | print(a) # comment with unicode µ # noqa: E501, F821 + +RUF100_3.py:27:7: F821 Undefined name `a` + | +25 | print(a) # noqa: E501, F821 comment +26 | +27 | print(a) # comment with unicode µ # noqa: E501 + | ^ F821 +28 | print(a) # comment with unicode µ # noqa: E501, F821 + | + +RUF100_3.py:27:39: RUF100 [*] Unused `noqa` directive (unused: `E501`) + | +25 | print(a) # noqa: E501, F821 comment +26 | +27 | print(a) # comment with unicode µ # noqa: E501 + | ^^^^^^^^^^^^ RUF100 +28 | print(a) # comment with unicode µ # noqa: E501, F821 + | + = help: Remove unused `noqa` directive + +ℹ Safe fix +24 24 | print(a) # noqa: E501, F821 comment +25 25 | print(a) # noqa: E501, F821 comment +26 26 | +27 |-print(a) # comment with unicode µ # noqa: E501 + 27 |+print(a) # comment with unicode µ +28 28 | print(a) # comment with unicode µ # noqa: E501, F821 + +RUF100_3.py:28:39: RUF100 [*] Unused `noqa` directive (unused: `E501`) + | +27 | print(a) # comment with unicode µ # noqa: E501 +28 | print(a) # comment with unicode µ # noqa: E501, F821 + | ^^^^^^^^^^^^^^^^^^ RUF100 + | + = help: Remove unused `noqa` directive + +ℹ Safe fix +25 25 | print(a) # noqa: E501, F821 comment +26 26 | +27 27 | print(a) # comment with unicode µ # noqa: E501 +28 |-print(a) # comment with unicode µ # noqa: E501, F821 + 28 |+print(a) # comment with unicode µ # noqa: F821 diff --git a/crates/ruff_linter/src/rules/tryceratops/rules/raise_vanilla_args.rs b/crates/ruff_linter/src/rules/tryceratops/rules/raise_vanilla_args.rs index acb2577050..b50d6fd7e7 100644 --- a/crates/ruff_linter/src/rules/tryceratops/rules/raise_vanilla_args.rs +++ b/crates/ruff_linter/src/rules/tryceratops/rules/raise_vanilla_args.rs @@ -90,7 +90,7 @@ pub(crate) fn raise_vanilla_args(checker: &mut Checker, expr: &Expr) { fn contains_message(expr: &Expr) -> bool { match expr { Expr::FString(ast::ExprFString { value, .. }) => { - for f_string_part in value.parts() { + for f_string_part in value { match f_string_part { ast::FStringPart::Literal(literal) => { if literal.chars().any(char::is_whitespace) { @@ -98,8 +98,12 @@ fn contains_message(expr: &Expr) -> bool { } } ast::FStringPart::FString(f_string) => { - for value in &f_string.values { - if contains_message(value) { + for literal in f_string + .elements + .iter() + .filter_map(|element| element.as_literal()) + { + if literal.chars().any(char::is_whitespace) { return true; } } diff --git a/crates/ruff_linter/src/settings/types.rs b/crates/ruff_linter/src/settings/types.rs index 8c8d84efb7..10bfb5189e 100644 --- a/crates/ruff_linter/src/settings/types.rs +++ b/crates/ruff_linter/src/settings/types.rs @@ -119,16 +119,21 @@ impl From for PreviewMode { } } +/// Toggle for unsafe fixes. +/// `Hint` will not apply unsafe fixes but a message will be shown when they are available. 
+/// `Disabled` will not apply unsafe fixes or show a message. +/// `Enabled` will apply unsafe fixes. #[derive(Debug, Copy, Clone, CacheKey, Default, PartialEq, Eq, is_macro::Is)] pub enum UnsafeFixes { #[default] + Hint, Disabled, Enabled, } impl From for UnsafeFixes { - fn from(version: bool) -> Self { - if version { + fn from(value: bool) -> Self { + if value { UnsafeFixes::Enabled } else { UnsafeFixes::Disabled @@ -140,7 +145,7 @@ impl UnsafeFixes { pub fn required_applicability(&self) -> Applicability { match self { Self::Enabled => Applicability::Unsafe, - Self::Disabled => Applicability::Safe, + Self::Disabled | Self::Hint => Applicability::Safe, } } } @@ -418,6 +423,7 @@ pub enum SerializationFormat { Gitlab, Pylint, Azure, + Sarif, } impl Default for SerializationFormat { diff --git a/crates/ruff_linter/src/snapshots/ruff_linter__linter__tests__ipy_escape_command.snap b/crates/ruff_linter/src/snapshots/ruff_linter__linter__tests__ipy_escape_command.snap index 58aa72d7da..8c1a547e85 100644 --- a/crates/ruff_linter/src/snapshots/ruff_linter__linter__tests__ipy_escape_command.snap +++ b/crates/ruff_linter/src/snapshots/ruff_linter__linter__tests__ipy_escape_command.snap @@ -19,5 +19,20 @@ ipy_escape_command.ipynb:cell 1:5:8: F401 [*] `os` imported but unused 5 |-import os 6 5 | 7 6 | _ = math.pi +8 7 | %%timeit + +ipy_escape_command.ipynb:cell 2:2:8: F401 [*] `sys` imported but unused + | +1 | %%timeit +2 | import sys + | ^^^ F401 + | + = help: Remove unused import: `sys` + +ℹ Safe fix +6 6 | +7 7 | _ = math.pi +8 8 | %%timeit +9 |-import sys diff --git a/crates/ruff_notebook/resources/test/fixtures/jupyter/cell/cell_magic.json b/crates/ruff_notebook/resources/test/fixtures/jupyter/cell/cell_magic.json index ef68b202e6..e0de8c0241 100644 --- a/crates/ruff_notebook/resources/test/fixtures/jupyter/cell/cell_magic.json +++ b/crates/ruff_notebook/resources/test/fixtures/jupyter/cell/cell_magic.json @@ -4,5 +4,10 @@ "id": "1", "metadata": {}, "outputs": [], - "source": ["%%timeit\n", "print('hello world')"] + "source": [ + "%%script bash\n", + "for i in 1 2 3; do\n", + " echo $i\n", + "done" + ] } diff --git a/crates/ruff_notebook/resources/test/fixtures/jupyter/cell/unicode_magic_gh9145.json b/crates/ruff_notebook/resources/test/fixtures/jupyter/cell/unicode_magic_gh9145.json new file mode 100644 index 0000000000..38455b3c3d --- /dev/null +++ b/crates/ruff_notebook/resources/test/fixtures/jupyter/cell/unicode_magic_gh9145.json @@ -0,0 +1,19 @@ +{ + "execution_count": null, + "cell_type": "code", + "id": "1", + "metadata": {}, + "outputs": [], + "source": [ + "def sample_func(xx):\n", + " \"\"\"\n", + " 转置 (transpose)\n", + " \"\"\"\n", + " return xx.T", + "# https://github.com/astral-sh/ruff-vscode/issues/362", + "DEFAULT_SYSTEM_PROMPT = (", + " \"Ты — Сайга, русскоязычный автоматический ассистент. 
\"", + " \"Ты разговариваешь с людьми и помогаешь им.\"", + ")" + ] +} diff --git a/crates/ruff_notebook/resources/test/fixtures/jupyter/cell/valid_cell_magic.json b/crates/ruff_notebook/resources/test/fixtures/jupyter/cell/valid_cell_magic.json new file mode 100644 index 0000000000..2cb89fa63b --- /dev/null +++ b/crates/ruff_notebook/resources/test/fixtures/jupyter/cell/valid_cell_magic.json @@ -0,0 +1,11 @@ +{ + "execution_count": null, + "cell_type": "code", + "id": "1", + "metadata": {}, + "outputs": [], + "source": [ + "%%timeit\n", + "print('hello world')" + ] +} diff --git a/crates/ruff_notebook/resources/test/fixtures/jupyter/ipy_escape_command.ipynb b/crates/ruff_notebook/resources/test/fixtures/jupyter/ipy_escape_command.ipynb index 5e9b10bb7b..6937096cc0 100644 --- a/crates/ruff_notebook/resources/test/fixtures/jupyter/ipy_escape_command.ipynb +++ b/crates/ruff_notebook/resources/test/fixtures/jupyter/ipy_escape_command.ipynb @@ -26,6 +26,18 @@ "%%timeit\n", "import sys" ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "36dedfd1-6c03-4894-bea6-6c1687b82b3c", + "metadata": {}, + "outputs": [], + "source": [ + "%%random\n", + "# This cell is ignored\n", + "import pathlib" + ] } ], "metadata": { diff --git a/crates/ruff_notebook/resources/test/fixtures/jupyter/ipy_escape_command_expected.ipynb b/crates/ruff_notebook/resources/test/fixtures/jupyter/ipy_escape_command_expected.ipynb index 8419f031e7..6a5eebc05f 100644 --- a/crates/ruff_notebook/resources/test/fixtures/jupyter/ipy_escape_command_expected.ipynb +++ b/crates/ruff_notebook/resources/test/fixtures/jupyter/ipy_escape_command_expected.ipynb @@ -22,8 +22,19 @@ "metadata": {}, "outputs": [], "source": [ - "%%timeit\n", - "import sys" + "%%timeit" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4b6d7faa-72b3-4087-8670-fe6d35e41fb6", + "metadata": {}, + "outputs": [], + "source": [ + "%%random\n", + "# This cell is ignored\n", + "import pathlib" ] } ], diff --git a/crates/ruff_notebook/src/cell.rs b/crates/ruff_notebook/src/cell.rs index d80ef336de..eefc18918b 100644 --- a/crates/ruff_notebook/src/cell.rs +++ b/crates/ruff_notebook/src/cell.rs @@ -170,7 +170,45 @@ impl Cell { } // Detect cell magics (which operate on multiple lines). - lines.any(|line| line.trim_start().starts_with("%%")) + lines.any(|line| { + let Some(first) = line.split_whitespace().next() else { + return false; + }; + if first.len() < 2 { + return false; + } + let Some(command) = first.strip_prefix("%%") else { + return false; + }; + // These cell magics are special in that the lines following them are valid + // Python code and the variables defined in that scope are available to the + // rest of the notebook. + // + // For example: + // + // Cell 1: + // ```python + // x = 1 + // ``` + // + // Cell 2: + // ```python + // %%time + // y = x + // ``` + // + // Cell 3: + // ```python + // print(y) # Here, `y` is available. + // ``` + // + // This is to avoid false positives when these variables are referenced + // elsewhere in the notebook. 
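A rough Python rendering of the cell-magic check described in the comment above (the function name and constant are illustrative only, not Ruff API); the Rust allowlist itself follows just below:

```python
# Cell magics whose body stays in the notebook's Python scope are still analyzed;
# any other %%-magic makes the cell body non-Python and the cell is skipped.
PYTHON_SCOPE_MAGICS = {"capture", "debug", "prun", "pypy", "python", "python3", "time", "timeit"}

def is_ignored_cell_magic(line: str) -> bool:
    parts = line.split()
    if not parts:
        return False
    first = parts[0]
    if len(first) < 2 or not first.startswith("%%"):
        return False
    return first[2:] not in PYTHON_SCOPE_MAGICS

assert is_ignored_cell_magic("%%bash")        # body is not Python; skip the cell
assert not is_ignored_cell_magic("%%timeit")  # body remains in the Python scope
assert not is_ignored_cell_magic("import sys")
```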
+ !matches!( + command, + "capture" | "debug" | "prun" | "pypy" | "python" | "python3" | "time" | "timeit" + ) + }) } } diff --git a/crates/ruff_notebook/src/notebook.rs b/crates/ruff_notebook/src/notebook.rs index a6714fa12b..ddc558ba21 100644 --- a/crates/ruff_notebook/src/notebook.rs +++ b/crates/ruff_notebook/src/notebook.rs @@ -421,16 +421,18 @@ mod tests { )); } - #[test_case(Path::new("markdown.json"), false; "markdown")] - #[test_case(Path::new("only_magic.json"), true; "only_magic")] - #[test_case(Path::new("code_and_magic.json"), true; "code_and_magic")] - #[test_case(Path::new("only_code.json"), true; "only_code")] - #[test_case(Path::new("cell_magic.json"), false; "cell_magic")] - #[test_case(Path::new("automagic.json"), false; "automagic")] - #[test_case(Path::new("automagics.json"), false; "automagics")] - #[test_case(Path::new("automagic_before_code.json"), false; "automagic_before_code")] - #[test_case(Path::new("automagic_after_code.json"), true; "automagic_after_code")] - fn test_is_valid_code_cell(path: &Path, expected: bool) -> Result<()> { + #[test_case("markdown", false)] + #[test_case("only_magic", true)] + #[test_case("code_and_magic", true)] + #[test_case("only_code", true)] + #[test_case("cell_magic", false)] + #[test_case("valid_cell_magic", true)] + #[test_case("automagic", false)] + #[test_case("automagics", false)] + #[test_case("automagic_before_code", false)] + #[test_case("automagic_after_code", true)] + #[test_case("unicode_magic_gh9145", true)] + fn test_is_valid_code_cell(cell: &str, expected: bool) -> Result<()> { /// Read a Jupyter cell from the `resources/test/fixtures/jupyter/cell` directory. fn read_jupyter_cell(path: impl AsRef) -> Result { let path = notebook_path("cell").join(path); @@ -438,7 +440,10 @@ mod tests { Ok(serde_json::from_str(&source_code)?) } - assert_eq!(read_jupyter_cell(path)?.is_valid_code_cell(), expected); + assert_eq!( + read_jupyter_cell(format!("{cell}.json"))?.is_valid_code_cell(), + expected + ); Ok(()) } diff --git a/crates/ruff_python_ast/src/comparable.rs b/crates/ruff_python_ast/src/comparable.rs index 0fe4a17f8e..b3c7faf116 100644 --- a/crates/ruff_python_ast/src/comparable.rs +++ b/crates/ruff_python_ast/src/comparable.rs @@ -509,6 +509,41 @@ impl<'a> From<&'a ast::ExceptHandler> for ComparableExceptHandler<'a> { } } +#[derive(Debug, PartialEq, Eq, Hash)] +pub enum ComparableFStringElement<'a> { + Literal(&'a str), + FStringExpressionElement(FStringExpressionElement<'a>), +} + +#[derive(Debug, PartialEq, Eq, Hash)] +pub struct FStringExpressionElement<'a> { + expression: ComparableExpr<'a>, + debug_text: Option<&'a ast::DebugText>, + conversion: ast::ConversionFlag, + format_spec: Option>>, +} + +impl<'a> From<&'a ast::FStringElement> for ComparableFStringElement<'a> { + fn from(fstring_element: &'a ast::FStringElement) -> Self { + match fstring_element { + ast::FStringElement::Literal(ast::FStringLiteralElement { value, .. 
}) => { + Self::Literal(value) + } + ast::FStringElement::Expression(formatted_value) => { + Self::FStringExpressionElement(FStringExpressionElement { + expression: (&formatted_value.expression).into(), + debug_text: formatted_value.debug_text.as_ref(), + conversion: formatted_value.conversion, + format_spec: formatted_value + .format_spec + .as_ref() + .map(|spec| spec.elements.iter().map(Into::into).collect()), + }) + } + } + } +} + #[derive(Debug, PartialEq, Eq, Hash)] pub struct ComparableElifElseClause<'a> { test: Option>, @@ -548,10 +583,10 @@ impl<'a> From> for ComparableLiteral<'a> { value, .. }) => Self::Bool(value), ast::LiteralExpressionRef::StringLiteral(ast::ExprStringLiteral { value, .. }) => { - Self::Str(value.parts().map(Into::into).collect()) + Self::Str(value.iter().map(Into::into).collect()) } ast::LiteralExpressionRef::BytesLiteral(ast::ExprBytesLiteral { value, .. }) => { - Self::Bytes(value.parts().map(Into::into).collect()) + Self::Bytes(value.iter().map(Into::into).collect()) } ast::LiteralExpressionRef::NumberLiteral(ast::ExprNumberLiteral { value, .. }) => { Self::Number(value.into()) @@ -562,13 +597,13 @@ impl<'a> From> for ComparableLiteral<'a> { #[derive(Debug, PartialEq, Eq, Hash)] pub struct ComparableFString<'a> { - values: Vec>, + elements: Vec>, } impl<'a> From<&'a ast::FString> for ComparableFString<'a> { fn from(fstring: &'a ast::FString) -> Self { Self { - values: fstring.values.iter().map(Into::into).collect(), + elements: fstring.elements.iter().map(Into::into).collect(), } } } @@ -717,11 +752,11 @@ pub struct ExprCall<'a> { } #[derive(Debug, PartialEq, Eq, Hash)] -pub struct ExprFormattedValue<'a> { +pub struct ExprFStringExpressionElement<'a> { value: Box>, debug_text: Option<&'a ast::DebugText>, conversion: ast::ConversionFlag, - format_spec: Option>>, + format_spec: Vec>, } #[derive(Debug, PartialEq, Eq, Hash)] @@ -813,7 +848,7 @@ pub enum ComparableExpr<'a> { YieldFrom(ExprYieldFrom<'a>), Compare(ExprCompare<'a>), Call(ExprCall<'a>), - FormattedValue(ExprFormattedValue<'a>), + FStringExpressionElement(ExprFStringExpressionElement<'a>), FString(ExprFString<'a>), StringLiteral(ExprStringLiteral<'a>), BytesLiteral(ExprBytesLiteral<'a>), @@ -975,31 +1010,19 @@ impl<'a> From<&'a ast::Expr> for ComparableExpr<'a> { func: func.into(), arguments: arguments.into(), }), - ast::Expr::FormattedValue(ast::ExprFormattedValue { - value, - conversion, - debug_text, - format_spec, - range: _, - }) => Self::FormattedValue(ExprFormattedValue { - value: value.into(), - conversion: *conversion, - debug_text: debug_text.as_ref(), - format_spec: format_spec.as_ref().map(Into::into), - }), ast::Expr::FString(ast::ExprFString { value, range: _ }) => { Self::FString(ExprFString { - parts: value.parts().map(Into::into).collect(), + parts: value.iter().map(Into::into).collect(), }) } ast::Expr::StringLiteral(ast::ExprStringLiteral { value, range: _ }) => { Self::StringLiteral(ExprStringLiteral { - parts: value.parts().map(Into::into).collect(), + parts: value.iter().map(Into::into).collect(), }) } ast::Expr::BytesLiteral(ast::ExprBytesLiteral { value, range: _ }) => { Self::BytesLiteral(ExprBytesLiteral { - parts: value.parts().map(Into::into).collect(), + parts: value.iter().map(Into::into).collect(), }) } ast::Expr::NumberLiteral(ast::ExprNumberLiteral { value, range: _ }) => { diff --git a/crates/ruff_python_ast/src/expression.rs b/crates/ruff_python_ast/src/expression.rs index 4bdc46f7db..56fecca6fc 100644 --- a/crates/ruff_python_ast/src/expression.rs +++ 
b/crates/ruff_python_ast/src/expression.rs @@ -23,7 +23,6 @@ pub enum ExpressionRef<'a> { YieldFrom(&'a ast::ExprYieldFrom), Compare(&'a ast::ExprCompare), Call(&'a ast::ExprCall), - FormattedValue(&'a ast::ExprFormattedValue), FString(&'a ast::ExprFString), StringLiteral(&'a ast::ExprStringLiteral), BytesLiteral(&'a ast::ExprBytesLiteral), @@ -67,7 +66,6 @@ impl<'a> From<&'a Expr> for ExpressionRef<'a> { Expr::YieldFrom(value) => ExpressionRef::YieldFrom(value), Expr::Compare(value) => ExpressionRef::Compare(value), Expr::Call(value) => ExpressionRef::Call(value), - Expr::FormattedValue(value) => ExpressionRef::FormattedValue(value), Expr::FString(value) => ExpressionRef::FString(value), Expr::StringLiteral(value) => ExpressionRef::StringLiteral(value), Expr::BytesLiteral(value) => ExpressionRef::BytesLiteral(value), @@ -172,11 +170,6 @@ impl<'a> From<&'a ast::ExprCall> for ExpressionRef<'a> { Self::Call(value) } } -impl<'a> From<&'a ast::ExprFormattedValue> for ExpressionRef<'a> { - fn from(value: &'a ast::ExprFormattedValue) -> Self { - Self::FormattedValue(value) - } -} impl<'a> From<&'a ast::ExprFString> for ExpressionRef<'a> { fn from(value: &'a ast::ExprFString) -> Self { Self::FString(value) @@ -273,7 +266,6 @@ impl<'a> From> for AnyNodeRef<'a> { ExpressionRef::YieldFrom(expression) => AnyNodeRef::ExprYieldFrom(expression), ExpressionRef::Compare(expression) => AnyNodeRef::ExprCompare(expression), ExpressionRef::Call(expression) => AnyNodeRef::ExprCall(expression), - ExpressionRef::FormattedValue(expression) => AnyNodeRef::ExprFormattedValue(expression), ExpressionRef::FString(expression) => AnyNodeRef::ExprFString(expression), ExpressionRef::StringLiteral(expression) => AnyNodeRef::ExprStringLiteral(expression), ExpressionRef::BytesLiteral(expression) => AnyNodeRef::ExprBytesLiteral(expression), @@ -317,7 +309,6 @@ impl Ranged for ExpressionRef<'_> { ExpressionRef::YieldFrom(expression) => expression.range(), ExpressionRef::Compare(expression) => expression.range(), ExpressionRef::Call(expression) => expression.range(), - ExpressionRef::FormattedValue(expression) => expression.range(), ExpressionRef::FString(expression) => expression.range(), ExpressionRef::StringLiteral(expression) => expression.range(), ExpressionRef::BytesLiteral(expression) => expression.range(), @@ -402,3 +393,41 @@ impl LiteralExpressionRef<'_> { } } } + +/// An enum that holds a reference to a string-like literal from the AST. +/// This includes string literals, bytes literals, and the literal parts of +/// f-strings. 
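For context on the new `StringLike` reference (its Rust definition follows immediately below), a tiny Python snippet showing the three source constructs it can point at; the variable names are arbitrary:

```python
s = "a plain string literal"   # -> StringLike::StringLiteral
b = b"a bytes literal"         # -> StringLike::BytesLiteral
f = f"prefix {s!r} suffix"     # "prefix " and " suffix" -> StringLike::FStringLiteral
```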
+#[derive(Copy, Clone, Debug, PartialEq)] +pub enum StringLike<'a> { + StringLiteral(&'a ast::ExprStringLiteral), + BytesLiteral(&'a ast::ExprBytesLiteral), + FStringLiteral(&'a ast::FStringLiteralElement), +} + +impl<'a> From<&'a ast::ExprStringLiteral> for StringLike<'a> { + fn from(value: &'a ast::ExprStringLiteral) -> Self { + StringLike::StringLiteral(value) + } +} + +impl<'a> From<&'a ast::ExprBytesLiteral> for StringLike<'a> { + fn from(value: &'a ast::ExprBytesLiteral) -> Self { + StringLike::BytesLiteral(value) + } +} + +impl<'a> From<&'a ast::FStringLiteralElement> for StringLike<'a> { + fn from(value: &'a ast::FStringLiteralElement) -> Self { + StringLike::FStringLiteral(value) + } +} + +impl Ranged for StringLike<'_> { + fn range(&self) -> TextRange { + match self { + StringLike::StringLiteral(literal) => literal.range(), + StringLike::BytesLiteral(literal) => literal.range(), + StringLike::FStringLiteral(literal) => literal.range(), + } + } +} diff --git a/crates/ruff_python_ast/src/helpers.rs b/crates/ruff_python_ast/src/helpers.rs index cca5909032..61e80657b7 100644 --- a/crates/ruff_python_ast/src/helpers.rs +++ b/crates/ruff_python_ast/src/helpers.rs @@ -12,8 +12,8 @@ use crate::parenthesize::parenthesized_range; use crate::statement_visitor::StatementVisitor; use crate::visitor::Visitor; use crate::{ - self as ast, Arguments, CmpOp, ExceptHandler, Expr, MatchCase, Operator, Pattern, Stmt, - TypeParam, + self as ast, Arguments, CmpOp, ExceptHandler, Expr, FStringElement, MatchCase, Operator, + Pattern, Stmt, TypeParam, }; use crate::{AnyNodeRef, ExprContext}; @@ -136,9 +136,9 @@ pub fn any_over_expr(expr: &Expr, func: &dyn Fn(&Expr) -> bool) -> bool { Expr::BoolOp(ast::ExprBoolOp { values, .. }) => { values.iter().any(|expr| any_over_expr(expr, func)) } - Expr::FString(ast::ExprFString { value, .. }) => { - value.elements().any(|expr| any_over_expr(expr, func)) - } + Expr::FString(ast::ExprFString { value, .. }) => value + .elements() + .any(|expr| any_over_f_string_element(expr, func)), Expr::NamedExpr(ast::ExprNamedExpr { target, value, @@ -231,14 +231,6 @@ pub fn any_over_expr(expr: &Expr, func: &dyn Fn(&Expr) -> bool) -> bool { .iter() .any(|keyword| any_over_expr(&keyword.value, func)) } - Expr::FormattedValue(ast::ExprFormattedValue { - value, format_spec, .. - }) => { - any_over_expr(value, func) - || format_spec - .as_ref() - .is_some_and(|value| any_over_expr(value, func)) - } Expr::Subscript(ast::ExprSubscript { value, slice, .. }) => { any_over_expr(value, func) || any_over_expr(slice, func) } @@ -315,6 +307,24 @@ pub fn any_over_pattern(pattern: &Pattern, func: &dyn Fn(&Expr) -> bool) -> bool } } +pub fn any_over_f_string_element(element: &FStringElement, func: &dyn Fn(&Expr) -> bool) -> bool { + match element { + FStringElement::Literal(_) => false, + FStringElement::Expression(ast::FStringExpressionElement { + expression, + format_spec, + .. + }) => { + any_over_expr(expression, func) + || format_spec.as_ref().is_some_and(|spec| { + spec.elements + .iter() + .any(|spec_element| any_over_f_string_element(spec_element, func)) + }) + } + } +} + pub fn any_over_stmt(stmt: &Stmt, func: &dyn Fn(&Expr) -> bool) -> bool { match stmt { Stmt::FunctionDef(ast::StmtFunctionDef { @@ -911,178 +921,204 @@ where } } -/// Returns `true` if the function has an implicit return. -pub fn implicit_return(function: &ast::StmtFunctionDef) -> bool { - /// Returns `true` if the body may break via a `break` statement. 
- fn sometimes_breaks(stmts: &[Stmt]) -> bool { - for stmt in stmts { - match stmt { - Stmt::For(ast::StmtFor { body, orelse, .. }) => { - if returns(body) { - return false; - } - if sometimes_breaks(orelse) { - return true; - } - } - Stmt::While(ast::StmtWhile { body, orelse, .. }) => { - if returns(body) { - return false; - } - if sometimes_breaks(orelse) { - return true; - } - } - Stmt::If(ast::StmtIf { - body, - elif_else_clauses, - .. - }) => { - if std::iter::once(body) - .chain(elif_else_clauses.iter().map(|clause| &clause.body)) - .any(|body| sometimes_breaks(body)) - { - return true; - } - } - Stmt::Match(ast::StmtMatch { cases, .. }) => { - if cases.iter().any(|case| sometimes_breaks(&case.body)) { - return true; - } - } - Stmt::Try(ast::StmtTry { - body, - handlers, - orelse, - finalbody, - .. - }) => { - if sometimes_breaks(body) - || handlers.iter().any(|handler| { - let ExceptHandler::ExceptHandler(ast::ExceptHandlerExceptHandler { - body, - .. - }) = handler; - sometimes_breaks(body) - }) - || sometimes_breaks(orelse) - || sometimes_breaks(finalbody) - { - return true; - } - } - Stmt::With(ast::StmtWith { body, .. }) => { - if sometimes_breaks(body) { - return true; - } - } - Stmt::Break(_) => return true, - Stmt::Return(_) => return false, - Stmt::Raise(_) => return false, - _ => {} - } - } - false - } +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Terminal { + /// Every path through the function ends with a `raise` statement. + Raise, + /// Every path through the function ends with a `return` (or `raise`) statement. + Return, +} - /// Returns `true` if the body may break via a `break` statement. - fn always_breaks(stmts: &[Stmt]) -> bool { - for stmt in stmts { - match stmt { - Stmt::Break(_) => return true, - Stmt::Return(_) => return false, - Stmt::Raise(_) => return false, - _ => {} - } - } - false - } - - /// Returns `true` if the body contains a branch that ends without an explicit `return` or - /// `raise` statement. - fn returns(stmts: &[Stmt]) -> bool { - for stmt in stmts.iter().rev() { - match stmt { - Stmt::For(ast::StmtFor { body, orelse, .. }) => { - if always_breaks(body) { - return false; +impl Terminal { + /// Returns the [`Terminal`] behavior of the function, if it can be determined, or `None` if the + /// function contains at least one control flow path that does not end with a `return` or `raise` + /// statement. + pub fn from_function(function: &ast::StmtFunctionDef) -> Option { + /// Returns `true` if the body may break via a `break` statement. + fn sometimes_breaks(stmts: &[Stmt]) -> bool { + for stmt in stmts { + match stmt { + Stmt::For(ast::StmtFor { body, orelse, .. }) => { + if returns(body).is_some() { + return false; + } + if sometimes_breaks(orelse) { + return true; + } } - if returns(body) { - return true; + Stmt::While(ast::StmtWhile { body, orelse, .. }) => { + if returns(body).is_some() { + return false; + } + if sometimes_breaks(orelse) { + return true; + } } - if returns(orelse) && !sometimes_breaks(body) { - return true; - } - } - Stmt::While(ast::StmtWhile { body, orelse, .. }) => { - if always_breaks(body) { - return false; - } - if returns(body) { - return true; - } - if returns(orelse) && !sometimes_breaks(body) { - return true; - } - } - Stmt::If(ast::StmtIf { - body, - elif_else_clauses, - .. - }) => { - if elif_else_clauses.iter().any(|clause| clause.test.is_none()) - && std::iter::once(body) + Stmt::If(ast::StmtIf { + body, + elif_else_clauses, + .. 
+ }) => { + if std::iter::once(body) .chain(elif_else_clauses.iter().map(|clause| &clause.body)) - .all(|body| returns(body)) - { - return true; + .any(|body| sometimes_breaks(body)) + { + return true; + } } + Stmt::Match(ast::StmtMatch { cases, .. }) => { + if cases.iter().any(|case| sometimes_breaks(&case.body)) { + return true; + } + } + Stmt::Try(ast::StmtTry { + body, + handlers, + orelse, + finalbody, + .. + }) => { + if sometimes_breaks(body) + || handlers.iter().any(|handler| { + let ExceptHandler::ExceptHandler(ast::ExceptHandlerExceptHandler { + body, + .. + }) = handler; + sometimes_breaks(body) + }) + || sometimes_breaks(orelse) + || sometimes_breaks(finalbody) + { + return true; + } + } + Stmt::With(ast::StmtWith { body, .. }) => { + if sometimes_breaks(body) { + return true; + } + } + Stmt::Break(_) => return true, + Stmt::Return(_) => return false, + Stmt::Raise(_) => return false, + _ => {} } - Stmt::Match(ast::StmtMatch { cases, .. }) => { - // Note: we assume the `match` is exhaustive. - if cases.iter().all(|case| returns(&case.body)) { - return true; - } - } - Stmt::Try(ast::StmtTry { - body, - handlers, - orelse, - finalbody, - .. - }) => { - // If the `finally` block returns, the `try` block must also return. - if returns(finalbody) { - return true; - } - - // If the `body` or the `else` block returns, the `try` block must also return. - if (returns(body) || returns(orelse)) - && handlers.iter().all(|handler| { - let ExceptHandler::ExceptHandler(ast::ExceptHandlerExceptHandler { - body, - .. - }) = handler; - returns(body) - }) - { - return true; - } - } - Stmt::With(ast::StmtWith { body, .. }) => { - if returns(body) { - return true; - } - } - Stmt::Return(_) => return true, - Stmt::Raise(_) => return true, - _ => {} } + false } - false + + /// Returns `true` if the body may break via a `break` statement. + fn always_breaks(stmts: &[Stmt]) -> bool { + for stmt in stmts { + match stmt { + Stmt::Break(_) => return true, + Stmt::Return(_) => return false, + Stmt::Raise(_) => return false, + _ => {} + } + } + false + } + + /// Returns `true` if the body contains a branch that ends without an explicit `return` or + /// `raise` statement. + fn returns(stmts: &[Stmt]) -> Option { + for stmt in stmts.iter().rev() { + match stmt { + Stmt::For(ast::StmtFor { body, orelse, .. }) + | Stmt::While(ast::StmtWhile { body, orelse, .. }) => { + if always_breaks(body) { + return None; + } + if let Some(terminal) = returns(body) { + return Some(terminal); + } + if !sometimes_breaks(body) { + if let Some(terminal) = returns(orelse) { + return Some(terminal); + } + } + } + Stmt::If(ast::StmtIf { + body, + elif_else_clauses, + .. + }) => { + if elif_else_clauses.iter().any(|clause| clause.test.is_none()) { + match Terminal::combine(std::iter::once(returns(body)).chain( + elif_else_clauses.iter().map(|clause| returns(&clause.body)), + )) { + Some(Terminal::Raise) => return Some(Terminal::Raise), + Some(Terminal::Return) => return Some(Terminal::Return), + _ => {} + } + } + } + Stmt::Match(ast::StmtMatch { cases, .. }) => { + // Note: we assume the `match` is exhaustive. + match Terminal::combine(cases.iter().map(|case| returns(&case.body))) { + Some(Terminal::Raise) => return Some(Terminal::Raise), + Some(Terminal::Return) => return Some(Terminal::Return), + _ => {} + } + } + Stmt::Try(ast::StmtTry { + body, + handlers, + orelse, + finalbody, + .. + }) => { + // If the `finally` block returns, the `try` block must also return. 
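To make the new `Terminal` classification concrete, a short Python illustration (an editor's sketch, not from the diff) of function bodies that would classify as `Raise`, as `Return`, or as neither:

```python
def always_raises(x):
    # Every path ends with `raise` -> Terminal::Raise.
    if x:
        raise ValueError(x)
    raise TypeError(x)

def always_returns(x):
    # Every path ends with `return` (or `raise`) -> Terminal::Return.
    if x > 0:
        return x
    raise ValueError(x)

def may_fall_through(x):
    # Falls off the end when x <= 0 (implicit `return None`) -> no classification.
    if x > 0:
        return x
```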
+ if let Some(terminal) = returns(finalbody) { + return Some(terminal); + } + + // If the body returns, the `try` block must also return. + if returns(body) == Some(Terminal::Return) { + return Some(Terminal::Return); + } + + // If the else block and all the handlers return, the `try` block must also + // return. + if let Some(terminal) = + Terminal::combine(std::iter::once(returns(orelse)).chain( + handlers.iter().map(|handler| { + let ExceptHandler::ExceptHandler( + ast::ExceptHandlerExceptHandler { body, .. }, + ) = handler; + returns(body) + }), + )) + { + return Some(terminal); + } + } + Stmt::With(ast::StmtWith { body, .. }) => { + if let Some(terminal) = returns(body) { + return Some(terminal); + } + } + Stmt::Return(_) => return Some(Terminal::Return), + Stmt::Raise(_) => return Some(Terminal::Raise), + _ => {} + } + } + None + } + + returns(&function.body) } - !returns(&function.body) + /// Combine a series of [`Terminal`] operators. + fn combine(iter: impl Iterator>) -> Option { + iter.reduce(|acc, terminal| match (acc, terminal) { + (Some(Self::Raise), Some(Self::Raise)) => Some(Self::Raise), + (Some(_), Some(Self::Return)) => Some(Self::Return), + (Some(Self::Return), Some(_)) => Some(Self::Return), + _ => None, + }) + .flatten() + } } /// A [`StatementVisitor`] that collects all `raise` statements in a function or method. @@ -1316,18 +1352,20 @@ impl Truthiness { Expr::NoneLiteral(_) => Self::Falsey, Expr::EllipsisLiteral(_) => Self::Truthy, Expr::FString(ast::ExprFString { value, .. }) => { - if value.parts().all(|part| match part { + if value.iter().all(|part| match part { ast::FStringPart::Literal(string_literal) => string_literal.is_empty(), - ast::FStringPart::FString(f_string) => f_string.values.is_empty(), + ast::FStringPart::FString(f_string) => f_string.elements.is_empty(), }) { Self::Falsey - } else if value.elements().any(|expr| { - if let Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) = &expr { - !value.is_empty() - } else { - false - } - }) { + } else if value + .elements() + .any(|f_string_element| match f_string_element { + ast::FStringElement::Literal(ast::FStringLiteralElement { + value, .. 
+ }) => !value.is_empty(), + ast::FStringElement::Expression(_) => true, + }) + { Self::Truthy } else { Self::Unknown diff --git a/crates/ruff_python_ast/src/node.rs b/crates/ruff_python_ast/src/node.rs index e21a000d79..35a536004c 100644 --- a/crates/ruff_python_ast/src/node.rs +++ b/crates/ruff_python_ast/src/node.rs @@ -1,7 +1,7 @@ use crate::visitor::preorder::PreorderVisitor; use crate::{ self as ast, Alias, ArgOrKeyword, Arguments, Comprehension, Decorator, ExceptHandler, Expr, - Keyword, MatchCase, Mod, Parameter, ParameterWithDefault, Parameters, Pattern, + FStringElement, Keyword, MatchCase, Mod, Parameter, ParameterWithDefault, Parameters, Pattern, PatternArguments, PatternKeyword, Stmt, TypeParam, TypeParamParamSpec, TypeParamTypeVar, TypeParamTypeVarTuple, TypeParams, WithItem, }; @@ -71,7 +71,6 @@ pub enum AnyNode { ExprYieldFrom(ast::ExprYieldFrom), ExprCompare(ast::ExprCompare), ExprCall(ast::ExprCall), - ExprFormattedValue(ast::ExprFormattedValue), ExprFString(ast::ExprFString), ExprStringLiteral(ast::ExprStringLiteral), ExprBytesLiteral(ast::ExprBytesLiteral), @@ -88,6 +87,8 @@ pub enum AnyNode { ExprSlice(ast::ExprSlice), ExprIpyEscapeCommand(ast::ExprIpyEscapeCommand), ExceptHandlerExceptHandler(ast::ExceptHandlerExceptHandler), + FStringExpressionElement(ast::FStringExpressionElement), + FStringLiteralElement(ast::FStringLiteralElement), PatternMatchValue(ast::PatternMatchValue), PatternMatchSingleton(ast::PatternMatchSingleton), PatternMatchSequence(ast::PatternMatchSequence), @@ -166,7 +167,8 @@ impl AnyNode { | AnyNode::ExprYieldFrom(_) | AnyNode::ExprCompare(_) | AnyNode::ExprCall(_) - | AnyNode::ExprFormattedValue(_) + | AnyNode::FStringExpressionElement(_) + | AnyNode::FStringLiteralElement(_) | AnyNode::ExprFString(_) | AnyNode::ExprStringLiteral(_) | AnyNode::ExprBytesLiteral(_) @@ -233,7 +235,6 @@ impl AnyNode { AnyNode::ExprYieldFrom(node) => Some(Expr::YieldFrom(node)), AnyNode::ExprCompare(node) => Some(Expr::Compare(node)), AnyNode::ExprCall(node) => Some(Expr::Call(node)), - AnyNode::ExprFormattedValue(node) => Some(Expr::FormattedValue(node)), AnyNode::ExprFString(node) => Some(Expr::FString(node)), AnyNode::ExprStringLiteral(node) => Some(Expr::StringLiteral(node)), AnyNode::ExprBytesLiteral(node) => Some(Expr::BytesLiteral(node)), @@ -278,6 +279,8 @@ impl AnyNode { | AnyNode::StmtContinue(_) | AnyNode::StmtIpyEscapeCommand(_) | AnyNode::ExceptHandlerExceptHandler(_) + | AnyNode::FStringExpressionElement(_) + | AnyNode::FStringLiteralElement(_) | AnyNode::PatternMatchValue(_) | AnyNode::PatternMatchSingleton(_) | AnyNode::PatternMatchSequence(_) @@ -356,7 +359,8 @@ impl AnyNode { | AnyNode::ExprYieldFrom(_) | AnyNode::ExprCompare(_) | AnyNode::ExprCall(_) - | AnyNode::ExprFormattedValue(_) + | AnyNode::FStringExpressionElement(_) + | AnyNode::FStringLiteralElement(_) | AnyNode::ExprFString(_) | AnyNode::ExprStringLiteral(_) | AnyNode::ExprBytesLiteral(_) @@ -459,7 +463,8 @@ impl AnyNode { | AnyNode::ExprYieldFrom(_) | AnyNode::ExprCompare(_) | AnyNode::ExprCall(_) - | AnyNode::ExprFormattedValue(_) + | AnyNode::FStringExpressionElement(_) + | AnyNode::FStringLiteralElement(_) | AnyNode::ExprFString(_) | AnyNode::ExprStringLiteral(_) | AnyNode::ExprBytesLiteral(_) @@ -547,7 +552,8 @@ impl AnyNode { | AnyNode::ExprYieldFrom(_) | AnyNode::ExprCompare(_) | AnyNode::ExprCall(_) - | AnyNode::ExprFormattedValue(_) + | AnyNode::FStringExpressionElement(_) + | AnyNode::FStringLiteralElement(_) | AnyNode::ExprFString(_) | AnyNode::ExprStringLiteral(_) | 
AnyNode::ExprBytesLiteral(_) @@ -660,7 +666,8 @@ impl AnyNode { Self::ExprYieldFrom(node) => AnyNodeRef::ExprYieldFrom(node), Self::ExprCompare(node) => AnyNodeRef::ExprCompare(node), Self::ExprCall(node) => AnyNodeRef::ExprCall(node), - Self::ExprFormattedValue(node) => AnyNodeRef::ExprFormattedValue(node), + Self::FStringExpressionElement(node) => AnyNodeRef::FStringExpressionElement(node), + Self::FStringLiteralElement(node) => AnyNodeRef::FStringLiteralElement(node), Self::ExprFString(node) => AnyNodeRef::ExprFString(node), Self::ExprStringLiteral(node) => AnyNodeRef::ExprStringLiteral(node), Self::ExprBytesLiteral(node) => AnyNodeRef::ExprBytesLiteral(node), @@ -2621,12 +2628,12 @@ impl AstNode for ast::ExprCall { visitor.visit_arguments(arguments); } } -impl AstNode for ast::ExprFormattedValue { +impl AstNode for ast::FStringExpressionElement { fn cast(kind: AnyNode) -> Option where Self: Sized, { - if let AnyNode::ExprFormattedValue(node) = kind { + if let AnyNode::FStringExpressionElement(node) = kind { Some(node) } else { None @@ -2634,7 +2641,7 @@ impl AstNode for ast::ExprFormattedValue { } fn cast_ref(kind: AnyNodeRef) -> Option<&Self> { - if let AnyNodeRef::ExprFormattedValue(node) = kind { + if let AnyNodeRef::FStringExpressionElement(node) = kind { Some(node) } else { None @@ -2653,16 +2660,54 @@ impl AstNode for ast::ExprFormattedValue { where V: PreorderVisitor<'a> + ?Sized, { - let ast::ExprFormattedValue { - value, format_spec, .. + let ast::FStringExpressionElement { + expression, + format_spec, + .. } = self; - visitor.visit_expr(value); + visitor.visit_expr(expression); - if let Some(expr) = format_spec { - visitor.visit_format_spec(expr); + if let Some(format_spec) = format_spec { + for spec_part in &format_spec.elements { + visitor.visit_f_string_element(spec_part); + } } } } +impl AstNode for ast::FStringLiteralElement { + fn cast(kind: AnyNode) -> Option + where + Self: Sized, + { + if let AnyNode::FStringLiteralElement(node) = kind { + Some(node) + } else { + None + } + } + + fn cast_ref(kind: AnyNodeRef) -> Option<&Self> { + if let AnyNodeRef::FStringLiteralElement(node) = kind { + Some(node) + } else { + None + } + } + + fn as_any_node_ref(&self) -> AnyNodeRef { + AnyNodeRef::from(self) + } + + fn into_any_node(self) -> AnyNode { + AnyNode::from(self) + } + + fn visit_preorder<'a, V>(&'a self, _visitor: &mut V) + where + V: PreorderVisitor<'a> + ?Sized, + { + } +} impl AstNode for ast::ExprFString { fn cast(kind: AnyNode) -> Option where @@ -2697,7 +2742,7 @@ impl AstNode for ast::ExprFString { { let ast::ExprFString { value, range: _ } = self; - for f_string_part in value.parts() { + for f_string_part in value { match f_string_part { ast::FStringPart::Literal(string_literal) => { visitor.visit_string_literal(string_literal); @@ -2743,7 +2788,7 @@ impl AstNode for ast::ExprStringLiteral { { let ast::ExprStringLiteral { value, range: _ } = self; - for string_literal in value.parts() { + for string_literal in value { visitor.visit_string_literal(string_literal); } } @@ -2782,7 +2827,7 @@ impl AstNode for ast::ExprBytesLiteral { { let ast::ExprBytesLiteral { value, range: _ } = self; - for bytes_literal in value.parts() { + for bytes_literal in value { visitor.visit_bytes_literal(bytes_literal); } } @@ -4339,10 +4384,10 @@ impl AstNode for ast::FString { where V: PreorderVisitor<'a> + ?Sized, { - let ast::FString { values, range: _ } = self; + let ast::FString { elements, range: _ } = self; - for expr in values { - visitor.visit_expr(expr); + for fstring_element 
in elements { + visitor.visit_f_string_element(fstring_element); } } } @@ -4467,7 +4512,6 @@ impl From for AnyNode { Expr::YieldFrom(node) => AnyNode::ExprYieldFrom(node), Expr::Compare(node) => AnyNode::ExprCompare(node), Expr::Call(node) => AnyNode::ExprCall(node), - Expr::FormattedValue(node) => AnyNode::ExprFormattedValue(node), Expr::FString(node) => AnyNode::ExprFString(node), Expr::StringLiteral(node) => AnyNode::ExprStringLiteral(node), Expr::BytesLiteral(node) => AnyNode::ExprBytesLiteral(node), @@ -4496,6 +4540,15 @@ impl From for AnyNode { } } +impl From for AnyNode { + fn from(element: FStringElement) -> Self { + match element { + FStringElement::Literal(node) => AnyNode::FStringLiteralElement(node), + FStringElement::Expression(node) => AnyNode::FStringExpressionElement(node), + } + } +} + impl From for AnyNode { fn from(pattern: Pattern) -> Self { match pattern { @@ -4789,9 +4842,15 @@ impl From for AnyNode { } } -impl From for AnyNode { - fn from(node: ast::ExprFormattedValue) -> Self { - AnyNode::ExprFormattedValue(node) +impl From for AnyNode { + fn from(node: ast::FStringExpressionElement) -> Self { + AnyNode::FStringExpressionElement(node) + } +} + +impl From for AnyNode { + fn from(node: ast::FStringLiteralElement) -> Self { + AnyNode::FStringLiteralElement(node) } } @@ -5089,7 +5148,8 @@ impl Ranged for AnyNode { AnyNode::ExprYieldFrom(node) => node.range(), AnyNode::ExprCompare(node) => node.range(), AnyNode::ExprCall(node) => node.range(), - AnyNode::ExprFormattedValue(node) => node.range(), + AnyNode::FStringExpressionElement(node) => node.range(), + AnyNode::FStringLiteralElement(node) => node.range(), AnyNode::ExprFString(node) => node.range(), AnyNode::ExprStringLiteral(node) => node.range(), AnyNode::ExprBytesLiteral(node) => node.range(), @@ -5184,7 +5244,8 @@ pub enum AnyNodeRef<'a> { ExprYieldFrom(&'a ast::ExprYieldFrom), ExprCompare(&'a ast::ExprCompare), ExprCall(&'a ast::ExprCall), - ExprFormattedValue(&'a ast::ExprFormattedValue), + FStringExpressionElement(&'a ast::FStringExpressionElement), + FStringLiteralElement(&'a ast::FStringLiteralElement), ExprFString(&'a ast::ExprFString), ExprStringLiteral(&'a ast::ExprStringLiteral), ExprBytesLiteral(&'a ast::ExprBytesLiteral), @@ -5278,7 +5339,8 @@ impl<'a> AnyNodeRef<'a> { AnyNodeRef::ExprYieldFrom(node) => NonNull::from(*node).cast(), AnyNodeRef::ExprCompare(node) => NonNull::from(*node).cast(), AnyNodeRef::ExprCall(node) => NonNull::from(*node).cast(), - AnyNodeRef::ExprFormattedValue(node) => NonNull::from(*node).cast(), + AnyNodeRef::FStringExpressionElement(node) => NonNull::from(*node).cast(), + AnyNodeRef::FStringLiteralElement(node) => NonNull::from(*node).cast(), AnyNodeRef::ExprFString(node) => NonNull::from(*node).cast(), AnyNodeRef::ExprStringLiteral(node) => NonNull::from(*node).cast(), AnyNodeRef::ExprBytesLiteral(node) => NonNull::from(*node).cast(), @@ -5378,7 +5440,8 @@ impl<'a> AnyNodeRef<'a> { AnyNodeRef::ExprYieldFrom(_) => NodeKind::ExprYieldFrom, AnyNodeRef::ExprCompare(_) => NodeKind::ExprCompare, AnyNodeRef::ExprCall(_) => NodeKind::ExprCall, - AnyNodeRef::ExprFormattedValue(_) => NodeKind::ExprFormattedValue, + AnyNodeRef::FStringExpressionElement(_) => NodeKind::FStringExpressionElement, + AnyNodeRef::FStringLiteralElement(_) => NodeKind::FStringLiteralElement, AnyNodeRef::ExprFString(_) => NodeKind::ExprFString, AnyNodeRef::ExprStringLiteral(_) => NodeKind::ExprStringLiteral, AnyNodeRef::ExprBytesLiteral(_) => NodeKind::ExprBytesLiteral, @@ -5473,7 +5536,8 @@ impl<'a> 
AnyNodeRef<'a> { | AnyNodeRef::ExprYieldFrom(_) | AnyNodeRef::ExprCompare(_) | AnyNodeRef::ExprCall(_) - | AnyNodeRef::ExprFormattedValue(_) + | AnyNodeRef::FStringExpressionElement(_) + | AnyNodeRef::FStringLiteralElement(_) | AnyNodeRef::ExprFString(_) | AnyNodeRef::ExprStringLiteral(_) | AnyNodeRef::ExprBytesLiteral(_) @@ -5540,7 +5604,6 @@ impl<'a> AnyNodeRef<'a> { | AnyNodeRef::ExprYieldFrom(_) | AnyNodeRef::ExprCompare(_) | AnyNodeRef::ExprCall(_) - | AnyNodeRef::ExprFormattedValue(_) | AnyNodeRef::ExprFString(_) | AnyNodeRef::ExprStringLiteral(_) | AnyNodeRef::ExprBytesLiteral(_) @@ -5585,6 +5648,8 @@ impl<'a> AnyNodeRef<'a> { | AnyNodeRef::StmtContinue(_) | AnyNodeRef::StmtIpyEscapeCommand(_) | AnyNodeRef::ExceptHandlerExceptHandler(_) + | AnyNodeRef::FStringExpressionElement(_) + | AnyNodeRef::FStringLiteralElement(_) | AnyNodeRef::PatternMatchValue(_) | AnyNodeRef::PatternMatchSingleton(_) | AnyNodeRef::PatternMatchSequence(_) @@ -5662,7 +5727,8 @@ impl<'a> AnyNodeRef<'a> { | AnyNodeRef::ExprYieldFrom(_) | AnyNodeRef::ExprCompare(_) | AnyNodeRef::ExprCall(_) - | AnyNodeRef::ExprFormattedValue(_) + | AnyNodeRef::FStringExpressionElement(_) + | AnyNodeRef::FStringLiteralElement(_) | AnyNodeRef::ExprFString(_) | AnyNodeRef::ExprStringLiteral(_) | AnyNodeRef::ExprBytesLiteral(_) @@ -5765,7 +5831,8 @@ impl<'a> AnyNodeRef<'a> { | AnyNodeRef::ExprYieldFrom(_) | AnyNodeRef::ExprCompare(_) | AnyNodeRef::ExprCall(_) - | AnyNodeRef::ExprFormattedValue(_) + | AnyNodeRef::FStringExpressionElement(_) + | AnyNodeRef::FStringLiteralElement(_) | AnyNodeRef::ExprFString(_) | AnyNodeRef::ExprStringLiteral(_) | AnyNodeRef::ExprBytesLiteral(_) @@ -5853,7 +5920,8 @@ impl<'a> AnyNodeRef<'a> { | AnyNodeRef::ExprYieldFrom(_) | AnyNodeRef::ExprCompare(_) | AnyNodeRef::ExprCall(_) - | AnyNodeRef::ExprFormattedValue(_) + | AnyNodeRef::FStringExpressionElement(_) + | AnyNodeRef::FStringLiteralElement(_) | AnyNodeRef::ExprFString(_) | AnyNodeRef::ExprStringLiteral(_) | AnyNodeRef::ExprBytesLiteral(_) @@ -5975,7 +6043,8 @@ impl<'a> AnyNodeRef<'a> { AnyNodeRef::ExprYieldFrom(node) => node.visit_preorder(visitor), AnyNodeRef::ExprCompare(node) => node.visit_preorder(visitor), AnyNodeRef::ExprCall(node) => node.visit_preorder(visitor), - AnyNodeRef::ExprFormattedValue(node) => node.visit_preorder(visitor), + AnyNodeRef::FStringExpressionElement(node) => node.visit_preorder(visitor), + AnyNodeRef::FStringLiteralElement(node) => node.visit_preorder(visitor), AnyNodeRef::ExprFString(node) => node.visit_preorder(visitor), AnyNodeRef::ExprStringLiteral(node) => node.visit_preorder(visitor), AnyNodeRef::ExprBytesLiteral(node) => node.visit_preorder(visitor), @@ -6354,9 +6423,15 @@ impl<'a> From<&'a ast::ExprCall> for AnyNodeRef<'a> { } } -impl<'a> From<&'a ast::ExprFormattedValue> for AnyNodeRef<'a> { - fn from(node: &'a ast::ExprFormattedValue) -> Self { - AnyNodeRef::ExprFormattedValue(node) +impl<'a> From<&'a ast::FStringExpressionElement> for AnyNodeRef<'a> { + fn from(node: &'a ast::FStringExpressionElement) -> Self { + AnyNodeRef::FStringExpressionElement(node) + } +} + +impl<'a> From<&'a ast::FStringLiteralElement> for AnyNodeRef<'a> { + fn from(node: &'a ast::FStringLiteralElement) -> Self { + AnyNodeRef::FStringLiteralElement(node) } } @@ -6615,7 +6690,6 @@ impl<'a> From<&'a Expr> for AnyNodeRef<'a> { Expr::YieldFrom(node) => AnyNodeRef::ExprYieldFrom(node), Expr::Compare(node) => AnyNodeRef::ExprCompare(node), Expr::Call(node) => AnyNodeRef::ExprCall(node), - Expr::FormattedValue(node) => 
AnyNodeRef::ExprFormattedValue(node), Expr::FString(node) => AnyNodeRef::ExprFString(node), Expr::StringLiteral(node) => AnyNodeRef::ExprStringLiteral(node), Expr::BytesLiteral(node) => AnyNodeRef::ExprBytesLiteral(node), @@ -6644,6 +6718,15 @@ impl<'a> From<&'a Mod> for AnyNodeRef<'a> { } } +impl<'a> From<&'a FStringElement> for AnyNodeRef<'a> { + fn from(element: &'a FStringElement) -> Self { + match element { + FStringElement::Expression(node) => AnyNodeRef::FStringExpressionElement(node), + FStringElement::Literal(node) => AnyNodeRef::FStringLiteralElement(node), + } + } +} + impl<'a> From<&'a Pattern> for AnyNodeRef<'a> { fn from(pattern: &'a Pattern) -> Self { match pattern { @@ -6772,7 +6855,8 @@ impl Ranged for AnyNodeRef<'_> { AnyNodeRef::ExprYieldFrom(node) => node.range(), AnyNodeRef::ExprCompare(node) => node.range(), AnyNodeRef::ExprCall(node) => node.range(), - AnyNodeRef::ExprFormattedValue(node) => node.range(), + AnyNodeRef::FStringExpressionElement(node) => node.range(), + AnyNodeRef::FStringLiteralElement(node) => node.range(), AnyNodeRef::ExprFString(node) => node.range(), AnyNodeRef::ExprStringLiteral(node) => node.range(), AnyNodeRef::ExprBytesLiteral(node) => node.range(), @@ -6869,7 +6953,8 @@ pub enum NodeKind { ExprYieldFrom, ExprCompare, ExprCall, - ExprFormattedValue, + FStringExpressionElement, + FStringLiteralElement, ExprFString, ExprStringLiteral, ExprBytesLiteral, diff --git a/crates/ruff_python_ast/src/nodes.rs b/crates/ruff_python_ast/src/nodes.rs index eaf9881a37..f24a8063e0 100644 --- a/crates/ruff_python_ast/src/nodes.rs +++ b/crates/ruff_python_ast/src/nodes.rs @@ -4,8 +4,8 @@ use std::cell::OnceCell; use std::fmt; use std::fmt::Debug; use std::ops::Deref; +use std::slice::{Iter, IterMut}; -use itertools::Either::{Left, Right}; use itertools::Itertools; use ruff_text_size::{Ranged, TextRange, TextSize}; @@ -590,8 +590,6 @@ pub enum Expr { Compare(ExprCompare), #[is(name = "call_expr")] Call(ExprCall), - #[is(name = "formatted_value_expr")] - FormattedValue(ExprFormattedValue), #[is(name = "f_string_expr")] FString(ExprFString), #[is(name = "string_literal_expr")] @@ -919,19 +917,51 @@ impl From for Expr { } } -/// See also [FormattedValue](https://docs.python.org/3/library/ast.html#ast.FormattedValue) #[derive(Clone, Debug, PartialEq)] -pub struct ExprFormattedValue { +pub struct FStringFormatSpec { pub range: TextRange, - pub value: Box, - pub debug_text: Option, - pub conversion: ConversionFlag, - pub format_spec: Option>, + pub elements: Vec, } -impl From for Expr { - fn from(payload: ExprFormattedValue) -> Self { - Expr::FormattedValue(payload) +impl Ranged for FStringFormatSpec { + fn range(&self) -> TextRange { + self.range + } +} + +/// See also [FormattedValue](https://docs.python.org/3/library/ast.html#ast.FormattedValue) +#[derive(Clone, Debug, PartialEq)] +pub struct FStringExpressionElement { + pub range: TextRange, + pub expression: Box, + pub debug_text: Option, + pub conversion: ConversionFlag, + pub format_spec: Option>, +} + +impl Ranged for FStringExpressionElement { + fn range(&self) -> TextRange { + self.range + } +} + +#[derive(Clone, Debug, PartialEq)] +pub struct FStringLiteralElement { + pub range: TextRange, + pub value: String, +} + +impl Ranged for FStringLiteralElement { + fn range(&self) -> TextRange { + self.range + } +} + +impl Deref for FStringLiteralElement { + type Target = str; + + fn deref(&self) -> &Self::Target { + self.value.as_str() } } @@ -1021,21 +1051,31 @@ impl FStringValue { matches!(self.inner, 
FStringValueInner::Concatenated(_)) } - /// Returns an iterator over all the [`FStringPart`]s contained in this value. - pub fn parts(&self) -> impl Iterator { + /// Returns a slice of all the [`FStringPart`]s contained in this value. + pub fn as_slice(&self) -> &[FStringPart] { match &self.inner { - FStringValueInner::Single(part) => Left(std::iter::once(part)), - FStringValueInner::Concatenated(parts) => Right(parts.iter()), + FStringValueInner::Single(part) => std::slice::from_ref(part), + FStringValueInner::Concatenated(parts) => parts, } } + /// Returns a mutable slice of all the [`FStringPart`]s contained in this value. + fn as_mut_slice(&mut self) -> &mut [FStringPart] { + match &mut self.inner { + FStringValueInner::Single(part) => std::slice::from_mut(part), + FStringValueInner::Concatenated(parts) => parts, + } + } + + /// Returns an iterator over all the [`FStringPart`]s contained in this value. + pub fn iter(&self) -> Iter { + self.as_slice().iter() + } + /// Returns an iterator over all the [`FStringPart`]s contained in this value /// that allows modification. - pub(crate) fn parts_mut(&mut self) -> impl Iterator { - match &mut self.inner { - FStringValueInner::Single(part) => Left(std::iter::once(part)), - FStringValueInner::Concatenated(parts) => Right(parts.iter_mut()), - } + pub(crate) fn iter_mut(&mut self) -> IterMut { + self.as_mut_slice().iter_mut() } /// Returns an iterator over the [`StringLiteral`] parts contained in this value. @@ -1048,7 +1088,7 @@ impl FStringValue { /// /// Here, the string literal parts returned would be `"foo"` and `"baz"`. pub fn literals(&self) -> impl Iterator { - self.parts().filter_map(|part| part.as_literal()) + self.iter().filter_map(|part| part.as_literal()) } /// Returns an iterator over the [`FString`] parts contained in this value. @@ -1061,10 +1101,10 @@ impl FStringValue { /// /// Here, the f-string parts returned would be `f"bar {x}"` and `f"qux"`. pub fn f_strings(&self) -> impl Iterator { - self.parts().filter_map(|part| part.as_f_string()) + self.iter().filter_map(|part| part.as_f_string()) } - /// Returns an iterator over all the f-string elements contained in this value. + /// Returns an iterator over all the [`FStringElement`] contained in this value. /// /// An f-string element is what makes up an [`FString`] i.e., it is either a /// string literal or an expression. In the following example, @@ -1075,8 +1115,17 @@ impl FStringValue { /// /// The f-string elements returned would be string literal (`"bar "`), /// expression (`x`) and string literal (`"qux"`). 
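
As a usage sketch of the element-based API (a hypothetical helper, assuming the revised `ruff_python_ast` types; note that `elements()` switches to yielding `&FStringElement` just below), counting every interpolation in an f-string expression, including ones nested inside format specs:

use ruff_python_ast::{self as ast, FStringElement};

/// Counts `{...}` interpolations across all f-string parts of an `ExprFString`,
/// including expressions nested inside format specs such as `{value:.{precision}f}`.
fn count_interpolations(expr: &ast::ExprFString) -> usize {
    fn count(elements: &[FStringElement]) -> usize {
        elements
            .iter()
            .map(|element| match element {
                FStringElement::Literal(_) => 0,
                FStringElement::Expression(ast::FStringExpressionElement {
                    format_spec, ..
                }) => {
                    // One for the interpolation itself, plus any interpolations
                    // nested in its format spec.
                    1 + format_spec
                        .as_deref()
                        .map_or(0, |spec| count(&spec.elements))
                }
            })
            .sum()
    }
    expr.value
        .f_strings()
        .map(|f_string| count(&f_string.elements))
        .sum()
}

For `'pre' f'foo {bar:.{x}f} baz'`, this would report two interpolations (`bar` and `x`), consistent with the traversal recorded in the new `preorder__f_strings` snapshot.
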
- pub fn elements(&self) -> impl Iterator { - self.f_strings().flat_map(|fstring| fstring.values.iter()) + pub fn elements(&self) -> impl Iterator { + self.f_strings().flat_map(|fstring| fstring.elements.iter()) + } +} + +impl<'a> IntoIterator for &'a FStringValue { + type Item = &'a FStringPart; + type IntoIter = Iter<'a, FStringPart>; + + fn into_iter(self) -> Self::IntoIter { + self.iter() } } @@ -1113,7 +1162,7 @@ impl Ranged for FStringPart { #[derive(Clone, Debug, PartialEq)] pub struct FString { pub range: TextRange, - pub values: Vec, + pub elements: Vec, } impl Ranged for FString { @@ -1132,6 +1181,21 @@ impl From for Expr { } } +#[derive(Clone, Debug, PartialEq, is_macro::Is)] +pub enum FStringElement { + Literal(FStringLiteralElement), + Expression(FStringExpressionElement), +} + +impl Ranged for FStringElement { + fn range(&self) -> TextRange { + match self { + FStringElement::Literal(node) => node.range(), + FStringElement::Expression(node) => node.range(), + } + } +} + /// An AST node that represents either a single string literal or an implicitly /// concatenated string literals. #[derive(Clone, Debug, Default, PartialEq)] @@ -1193,24 +1257,34 @@ impl StringLiteralValue { /// For an implicitly concatenated string, it returns `true` only if the first /// string literal is a unicode string. pub fn is_unicode(&self) -> bool { - self.parts().next().map_or(false, |part| part.unicode) + self.iter().next().map_or(false, |part| part.unicode) + } + + /// Returns a slice of all the [`StringLiteral`] parts contained in this value. + pub fn as_slice(&self) -> &[StringLiteral] { + match &self.inner { + StringLiteralValueInner::Single(value) => std::slice::from_ref(value), + StringLiteralValueInner::Concatenated(value) => value.strings.as_slice(), + } + } + + /// Returns a mutable slice of all the [`StringLiteral`] parts contained in this value. + fn as_mut_slice(&mut self) -> &mut [StringLiteral] { + match &mut self.inner { + StringLiteralValueInner::Single(value) => std::slice::from_mut(value), + StringLiteralValueInner::Concatenated(value) => value.strings.as_mut_slice(), + } } /// Returns an iterator over all the [`StringLiteral`] parts contained in this value. - pub fn parts(&self) -> impl Iterator { - match &self.inner { - StringLiteralValueInner::Single(value) => Left(std::iter::once(value)), - StringLiteralValueInner::Concatenated(value) => Right(value.strings.iter()), - } + pub fn iter(&self) -> Iter { + self.as_slice().iter() } /// Returns an iterator over all the [`StringLiteral`] parts contained in this value /// that allows modification. - pub(crate) fn parts_mut(&mut self) -> impl Iterator { - match &mut self.inner { - StringLiteralValueInner::Single(value) => Left(std::iter::once(value)), - StringLiteralValueInner::Concatenated(value) => Right(value.strings.iter_mut()), - } + pub(crate) fn iter_mut(&mut self) -> IterMut { + self.as_mut_slice().iter_mut() } /// Returns `true` if the string literal value is empty. @@ -1221,12 +1295,12 @@ impl StringLiteralValue { /// Returns the total length of the string literal value, in bytes, not /// [`char`]s or graphemes. pub fn len(&self) -> usize { - self.parts().fold(0, |acc, part| acc + part.value.len()) + self.iter().fold(0, |acc, part| acc + part.value.len()) } /// Returns an iterator over the [`char`]s of each string literal part. pub fn chars(&self) -> impl Iterator + '_ { - self.parts().flat_map(|part| part.value.chars()) + self.iter().flat_map(|part| part.value.chars()) } /// Returns the concatenated string value as a [`str`]. 
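
A small consumer-side sketch (hypothetical helpers, assuming this revised API): with `as_slice()`/`iter()` above and the `IntoIterator` impl added just below, callers can iterate the parts of an implicitly concatenated literal with a plain `for` loop instead of the old `parts()` iterator:

use ruff_python_ast as ast;

/// Reports whether any part of the (possibly implicitly concatenated) string
/// literal carries an explicit `u` prefix, e.g. `u"a" "b"`.
fn any_unicode_prefix(value: &ast::StringLiteralValue) -> bool {
    // `&StringLiteralValue` now implements `IntoIterator<Item = &StringLiteral>`.
    for part in value {
        if part.unicode {
            return true;
        }
    }
    false
}

/// Total length in bytes, written against the slice view rather than an iterator.
fn total_len(value: &ast::StringLiteralValue) -> usize {
    value.as_slice().iter().map(|part| part.value.len()).sum()
}
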
@@ -1241,6 +1315,15 @@ impl StringLiteralValue { } } +impl<'a> IntoIterator for &'a StringLiteralValue { + type Item = &'a StringLiteral; + type IntoIter = Iter<'a, StringLiteral>; + + fn into_iter(self) -> Self::IntoIter { + self.iter() + } +} + impl PartialEq for StringLiteralValue { fn eq(&self, other: &str) -> bool { if self.len() != other.len() { @@ -1412,37 +1495,55 @@ impl BytesLiteralValue { matches!(self.inner, BytesLiteralValueInner::Concatenated(_)) } - /// Returns an iterator over all the [`BytesLiteral`] parts contained in this value. - pub fn parts(&self) -> impl Iterator { + /// Returns a slice of all the [`BytesLiteral`] parts contained in this value. + pub fn as_slice(&self) -> &[BytesLiteral] { match &self.inner { - BytesLiteralValueInner::Single(value) => Left(std::iter::once(value)), - BytesLiteralValueInner::Concatenated(values) => Right(values.iter()), + BytesLiteralValueInner::Single(value) => std::slice::from_ref(value), + BytesLiteralValueInner::Concatenated(value) => value.as_slice(), } } + /// Returns a mutable slice of all the [`BytesLiteral`] parts contained in this value. + fn as_mut_slice(&mut self) -> &mut [BytesLiteral] { + match &mut self.inner { + BytesLiteralValueInner::Single(value) => std::slice::from_mut(value), + BytesLiteralValueInner::Concatenated(value) => value.as_mut_slice(), + } + } + + /// Returns an iterator over all the [`BytesLiteral`] parts contained in this value. + pub fn iter(&self) -> Iter { + self.as_slice().iter() + } + /// Returns an iterator over all the [`BytesLiteral`] parts contained in this value /// that allows modification. - pub(crate) fn parts_mut(&mut self) -> impl Iterator { - match &mut self.inner { - BytesLiteralValueInner::Single(value) => Left(std::iter::once(value)), - BytesLiteralValueInner::Concatenated(values) => Right(values.iter_mut()), - } + pub(crate) fn iter_mut(&mut self) -> IterMut { + self.as_mut_slice().iter_mut() } /// Returns `true` if the concatenated bytes has a length of zero. pub fn is_empty(&self) -> bool { - self.parts().all(|part| part.is_empty()) + self.iter().all(|part| part.is_empty()) } /// Returns the length of the concatenated bytes. pub fn len(&self) -> usize { - self.parts().map(|part| part.len()).sum() + self.iter().map(|part| part.len()).sum() } /// Returns an iterator over the bytes of the concatenated bytes. fn bytes(&self) -> impl Iterator + '_ { - self.parts() - .flat_map(|part| part.as_slice().iter().copied()) + self.iter().flat_map(|part| part.as_slice().iter().copied()) + } +} + +impl<'a> IntoIterator for &'a BytesLiteralValue { + type Item = &'a BytesLiteral; + type IntoIter = Iter<'a, BytesLiteral>; + + fn into_iter(self) -> Self::IntoIter { + self.iter() } } @@ -3483,11 +3584,6 @@ impl Ranged for crate::nodes::ExprCall { self.range } } -impl Ranged for crate::nodes::ExprFormattedValue { - fn range(&self) -> TextRange { - self.range - } -} impl Ranged for crate::nodes::ExprFString { fn range(&self) -> TextRange { self.range @@ -3553,7 +3649,6 @@ impl Ranged for crate::Expr { Self::YieldFrom(node) => node.range(), Self::Compare(node) => node.range(), Self::Call(node) => node.range(), - Self::FormattedValue(node) => node.range(), Self::FString(node) => node.range(), Self::StringLiteral(node) => node.range(), Self::BytesLiteral(node) => node.range(), @@ -3729,204 +3824,6 @@ impl Ranged for crate::nodes::ParameterWithDefault { } } -/// An expression that may be parenthesized. 
-#[derive(Clone, Debug)] -pub struct ParenthesizedExpr { - /// The range of the expression, including any parentheses. - pub range: TextRange, - /// The underlying expression. - pub expr: Expr, -} -impl ParenthesizedExpr { - /// Returns `true` if the expression is may be parenthesized. - pub fn is_parenthesized(&self) -> bool { - self.range != self.expr.range() - } -} -impl Ranged for ParenthesizedExpr { - fn range(&self) -> TextRange { - self.range - } -} -impl From for ParenthesizedExpr { - fn from(expr: Expr) -> Self { - ParenthesizedExpr { - range: expr.range(), - expr, - } - } -} -impl From for Expr { - fn from(parenthesized_expr: ParenthesizedExpr) -> Self { - parenthesized_expr.expr - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprIpyEscapeCommand) -> Self { - Expr::IpyEscapeCommand(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprBoolOp) -> Self { - Expr::BoolOp(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprNamedExpr) -> Self { - Expr::NamedExpr(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprBinOp) -> Self { - Expr::BinOp(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprUnaryOp) -> Self { - Expr::UnaryOp(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprLambda) -> Self { - Expr::Lambda(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprIfExp) -> Self { - Expr::IfExp(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprDict) -> Self { - Expr::Dict(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprSet) -> Self { - Expr::Set(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprListComp) -> Self { - Expr::ListComp(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprSetComp) -> Self { - Expr::SetComp(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprDictComp) -> Self { - Expr::DictComp(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprGeneratorExp) -> Self { - Expr::GeneratorExp(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprAwait) -> Self { - Expr::Await(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprYield) -> Self { - Expr::Yield(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprYieldFrom) -> Self { - Expr::YieldFrom(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprCompare) -> Self { - Expr::Compare(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprCall) -> Self { - Expr::Call(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprFormattedValue) -> Self { - Expr::FormattedValue(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprFString) -> Self { - Expr::FString(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprStringLiteral) -> Self { - Expr::StringLiteral(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprBytesLiteral) -> Self { - Expr::BytesLiteral(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprNumberLiteral) -> Self { - Expr::NumberLiteral(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprBooleanLiteral) -> Self { - Expr::BooleanLiteral(payload).into() - } -} 
-impl From for ParenthesizedExpr { - fn from(payload: ExprNoneLiteral) -> Self { - Expr::NoneLiteral(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprEllipsisLiteral) -> Self { - Expr::EllipsisLiteral(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprAttribute) -> Self { - Expr::Attribute(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprSubscript) -> Self { - Expr::Subscript(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprStarred) -> Self { - Expr::Starred(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprName) -> Self { - Expr::Name(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprList) -> Self { - Expr::List(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprTuple) -> Self { - Expr::Tuple(payload).into() - } -} -impl From for ParenthesizedExpr { - fn from(payload: ExprSlice) -> Self { - Expr::Slice(payload).into() - } -} - #[cfg(target_pointer_width = "64")] mod size_assertions { use static_assertions::assert_eq_size; diff --git a/crates/ruff_python_ast/src/relocate.rs b/crates/ruff_python_ast/src/relocate.rs index 19a126b356..5c189fb4c6 100644 --- a/crates/ruff_python_ast/src/relocate.rs +++ b/crates/ruff_python_ast/src/relocate.rs @@ -68,9 +68,6 @@ impl Transformer for Relocator { Expr::Call(nodes::ExprCall { range, .. }) => { *range = self.range; } - Expr::FormattedValue(nodes::ExprFormattedValue { range, .. }) => { - *range = self.range; - } Expr::FString(nodes::ExprFString { range, .. }) => { *range = self.range; } diff --git a/crates/ruff_python_ast/src/visitor.rs b/crates/ruff_python_ast/src/visitor.rs index 3a0781f072..2d8773fcfd 100644 --- a/crates/ruff_python_ast/src/visitor.rs +++ b/crates/ruff_python_ast/src/visitor.rs @@ -5,9 +5,9 @@ pub mod transformer; use crate::{ self as ast, Alias, Arguments, BoolOp, BytesLiteral, CmpOp, Comprehension, Decorator, - ElifElseClause, ExceptHandler, Expr, ExprContext, FString, FStringPart, Keyword, MatchCase, - Operator, Parameter, Parameters, Pattern, PatternArguments, PatternKeyword, Stmt, - StringLiteral, TypeParam, TypeParamTypeVar, TypeParams, UnaryOp, WithItem, + ElifElseClause, ExceptHandler, Expr, ExprContext, FString, FStringElement, FStringPart, + Keyword, MatchCase, Operator, Parameter, Parameters, Pattern, PatternArguments, PatternKeyword, + Stmt, StringLiteral, TypeParam, TypeParamTypeVar, TypeParams, UnaryOp, WithItem, }; /// A trait for AST visitors. Visits all nodes in the AST recursively in evaluation-order. 
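
As a consumer-side sketch of the new hook (a hypothetical visitor, assuming the trait as extended below with `visit_f_string_element` and the accompanying `walk_f_string_element`), collecting every expression interpolated into an f-string, including those nested in format specs:

use ruff_python_ast::visitor::{walk_f_string_element, Visitor};
use ruff_python_ast::{self as ast, Expr, FStringElement};

/// Collects a reference to every expression interpolated into an f-string.
#[derive(Default)]
struct InterpolationCollector<'a> {
    expressions: Vec<&'a Expr>,
}

impl<'a> Visitor<'a> for InterpolationCollector<'a> {
    fn visit_f_string_element(&mut self, element: &'a FStringElement) {
        if let FStringElement::Expression(ast::FStringExpressionElement {
            expression, ..
        }) = element
        {
            self.expressions.push(expression);
        }
        // Delegate to the default walker so that expressions nested inside
        // format specs (e.g. `{value:.{precision}f}`) are visited as well.
        walk_f_string_element(self, element);
    }
}

Feeding a parsed module through `visit_stmt` would record `bar` and `x` for `'pre' f'foo {bar:.{x}f} baz'`, matching the order in the new visitor snapshot.
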
@@ -53,9 +53,6 @@ pub trait Visitor<'a> { fn visit_except_handler(&mut self, except_handler: &'a ExceptHandler) { walk_except_handler(self, except_handler); } - fn visit_format_spec(&mut self, format_spec: &'a Expr) { - walk_format_spec(self, format_spec); - } fn visit_arguments(&mut self, arguments: &'a Arguments) { walk_arguments(self, arguments); } @@ -101,6 +98,9 @@ pub trait Visitor<'a> { fn visit_f_string(&mut self, f_string: &'a FString) { walk_f_string(self, f_string); } + fn visit_f_string_element(&mut self, f_string_element: &'a FStringElement) { + walk_f_string_element(self, f_string_element); + } fn visit_string_literal(&mut self, string_literal: &'a StringLiteral) { walk_string_literal(self, string_literal); } @@ -476,16 +476,8 @@ pub fn walk_expr<'a, V: Visitor<'a> + ?Sized>(visitor: &mut V, expr: &'a Expr) { visitor.visit_expr(func); visitor.visit_arguments(arguments); } - Expr::FormattedValue(ast::ExprFormattedValue { - value, format_spec, .. - }) => { - visitor.visit_expr(value); - if let Some(expr) = format_spec { - visitor.visit_format_spec(expr); - } - } Expr::FString(ast::ExprFString { value, .. }) => { - for part in value.parts() { + for part in value { match part { FStringPart::Literal(string_literal) => { visitor.visit_string_literal(string_literal); @@ -495,12 +487,12 @@ pub fn walk_expr<'a, V: Visitor<'a> + ?Sized>(visitor: &mut V, expr: &'a Expr) { } } Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => { - for string_literal in value.parts() { + for string_literal in value { visitor.visit_string_literal(string_literal); } } Expr::BytesLiteral(ast::ExprBytesLiteral { value, .. }) => { - for bytes_literal in value.parts() { + for bytes_literal in value { visitor.visit_bytes_literal(bytes_literal); } } @@ -598,16 +590,6 @@ pub fn walk_except_handler<'a, V: Visitor<'a> + ?Sized>( } } -pub fn walk_f_string<'a, V: Visitor<'a> + ?Sized>(visitor: &mut V, f_string: &'a FString) { - for expr in &f_string.values { - visitor.visit_expr(expr); - } -} - -pub fn walk_format_spec<'a, V: Visitor<'a> + ?Sized>(visitor: &mut V, format_spec: &'a Expr) { - visitor.visit_expr(format_spec); -} - pub fn walk_arguments<'a, V: Visitor<'a> + ?Sized>(visitor: &mut V, arguments: &'a Arguments) { // Note that the there might be keywords before the last arg, e.g. in // f(*args, a=2, *args2, **kwargs)`, but we follow Python in evaluating first `args` and then @@ -757,6 +739,31 @@ pub fn walk_pattern_keyword<'a, V: Visitor<'a> + ?Sized>( visitor.visit_pattern(&pattern_keyword.pattern); } +pub fn walk_f_string<'a, V: Visitor<'a> + ?Sized>(visitor: &mut V, f_string: &'a FString) { + for f_string_element in &f_string.elements { + visitor.visit_f_string_element(f_string_element); + } +} + +pub fn walk_f_string_element<'a, V: Visitor<'a> + ?Sized>( + visitor: &mut V, + f_string_element: &'a FStringElement, +) { + if let ast::FStringElement::Expression(ast::FStringExpressionElement { + expression, + format_spec, + .. 
+ }) = f_string_element + { + visitor.visit_expr(expression); + if let Some(format_spec) = format_spec { + for spec_element in &format_spec.elements { + visitor.visit_f_string_element(spec_element); + } + } + } +} + pub fn walk_expr_context<'a, V: Visitor<'a> + ?Sized>( _visitor: &V, _expr_context: &'a ExprContext, diff --git a/crates/ruff_python_ast/src/visitor/preorder.rs b/crates/ruff_python_ast/src/visitor/preorder.rs index 5b2e3ad793..d560cb5fb0 100644 --- a/crates/ruff_python_ast/src/visitor/preorder.rs +++ b/crates/ruff_python_ast/src/visitor/preorder.rs @@ -1,6 +1,6 @@ use crate::{ Alias, Arguments, BoolOp, BytesLiteral, CmpOp, Comprehension, Decorator, ElifElseClause, - ExceptHandler, Expr, FString, Keyword, MatchCase, Mod, Operator, Parameter, + ExceptHandler, Expr, FString, FStringElement, Keyword, MatchCase, Mod, Operator, Parameter, ParameterWithDefault, Parameters, Pattern, PatternArguments, PatternKeyword, Singleton, Stmt, StringLiteral, TypeParam, TypeParams, UnaryOp, WithItem, }; @@ -74,11 +74,6 @@ pub trait PreorderVisitor<'a> { walk_except_handler(self, except_handler); } - #[inline] - fn visit_format_spec(&mut self, format_spec: &'a Expr) { - walk_format_spec(self, format_spec); - } - #[inline] fn visit_arguments(&mut self, arguments: &'a Arguments) { walk_arguments(self, arguments); @@ -139,7 +134,6 @@ pub trait PreorderVisitor<'a> { } #[inline] - fn visit_pattern_keyword(&mut self, pattern_keyword: &'a PatternKeyword) { walk_pattern_keyword(self, pattern_keyword); } @@ -159,6 +153,11 @@ pub trait PreorderVisitor<'a> { walk_f_string(self, f_string); } + #[inline] + fn visit_f_string_element(&mut self, f_string_element: &'a FStringElement) { + walk_f_string_element(self, f_string_element); + } + #[inline] fn visit_string_literal(&mut self, string_literal: &'a StringLiteral) { walk_string_literal(self, string_literal); @@ -290,7 +289,6 @@ where Expr::YieldFrom(expr) => expr.visit_preorder(visitor), Expr::Compare(expr) => expr.visit_preorder(visitor), Expr::Call(expr) => expr.visit_preorder(visitor), - Expr::FormattedValue(expr) => expr.visit_preorder(visitor), Expr::FString(expr) => expr.visit_preorder(visitor), Expr::StringLiteral(expr) => expr.visit_preorder(visitor), Expr::BytesLiteral(expr) => expr.visit_preorder(visitor), @@ -519,6 +517,20 @@ where visitor.leave_node(node); } +pub fn walk_f_string_element<'a, V: PreorderVisitor<'a> + ?Sized>( + visitor: &mut V, + f_string_element: &'a FStringElement, +) { + let node = AnyNodeRef::from(f_string_element); + if visitor.enter_node(node).is_traverse() { + match f_string_element { + FStringElement::Expression(element) => element.visit_preorder(visitor), + FStringElement::Literal(element) => element.visit_preorder(visitor), + } + } + visitor.leave_node(node); +} + pub fn walk_bool_op<'a, V>(_visitor: &mut V, _bool_op: &'a BoolOp) where V: PreorderVisitor<'a> + ?Sized, diff --git a/crates/ruff_python_ast/src/visitor/transformer.rs b/crates/ruff_python_ast/src/visitor/transformer.rs index b193aa6c29..caa111c43f 100644 --- a/crates/ruff_python_ast/src/visitor/transformer.rs +++ b/crates/ruff_python_ast/src/visitor/transformer.rs @@ -1,8 +1,8 @@ use crate::{ self as ast, Alias, Arguments, BoolOp, BytesLiteral, CmpOp, Comprehension, Decorator, - ElifElseClause, ExceptHandler, Expr, ExprContext, FString, Keyword, MatchCase, Operator, - Parameter, Parameters, Pattern, PatternArguments, PatternKeyword, Stmt, StringLiteral, - TypeParam, TypeParamTypeVar, TypeParams, UnaryOp, WithItem, + ElifElseClause, ExceptHandler, Expr, 
ExprContext, FString, FStringElement, Keyword, MatchCase, + Operator, Parameter, Parameters, Pattern, PatternArguments, PatternKeyword, Stmt, + StringLiteral, TypeParam, TypeParamTypeVar, TypeParams, UnaryOp, WithItem, }; /// A trait for transforming ASTs. Visits all nodes in the AST recursively in evaluation-order. @@ -40,9 +40,6 @@ pub trait Transformer { fn visit_except_handler(&self, except_handler: &mut ExceptHandler) { walk_except_handler(self, except_handler); } - fn visit_format_spec(&self, format_spec: &mut Expr) { - walk_format_spec(self, format_spec); - } fn visit_arguments(&self, arguments: &mut Arguments) { walk_arguments(self, arguments); } @@ -88,6 +85,9 @@ pub trait Transformer { fn visit_f_string(&self, f_string: &mut FString) { walk_f_string(self, f_string); } + fn visit_f_string_element(&self, f_string_element: &mut FStringElement) { + walk_f_string_element(self, f_string_element); + } fn visit_string_literal(&self, string_literal: &mut StringLiteral) { walk_string_literal(self, string_literal); } @@ -463,16 +463,8 @@ pub fn walk_expr(visitor: &V, expr: &mut Expr) { visitor.visit_expr(func); visitor.visit_arguments(arguments); } - Expr::FormattedValue(ast::ExprFormattedValue { - value, format_spec, .. - }) => { - visitor.visit_expr(value); - if let Some(expr) = format_spec { - visitor.visit_format_spec(expr); - } - } Expr::FString(ast::ExprFString { value, .. }) => { - for f_string_part in value.parts_mut() { + for f_string_part in value.iter_mut() { match f_string_part { ast::FStringPart::Literal(string_literal) => { visitor.visit_string_literal(string_literal); @@ -484,12 +476,12 @@ pub fn walk_expr(visitor: &V, expr: &mut Expr) { } } Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => { - for string_literal in value.parts_mut() { + for string_literal in value.iter_mut() { visitor.visit_string_literal(string_literal); } } Expr::BytesLiteral(ast::ExprBytesLiteral { value, .. }) => { - for bytes_literal in value.parts_mut() { + for bytes_literal in value.iter_mut() { visitor.visit_bytes_literal(bytes_literal); } } @@ -584,16 +576,6 @@ pub fn walk_except_handler( } } -pub fn walk_f_string(visitor: &V, f_string: &mut FString) { - for expr in &mut f_string.values { - visitor.visit_expr(expr); - } -} - -pub fn walk_format_spec(visitor: &V, format_spec: &mut Expr) { - visitor.visit_expr(format_spec); -} - pub fn walk_arguments(visitor: &V, arguments: &mut Arguments) { // Note that the there might be keywords before the last arg, e.g. in // f(*args, a=2, *args2, **kwargs)`, but we follow Python in evaluating first `args` and then @@ -743,6 +725,31 @@ pub fn walk_pattern_keyword( visitor.visit_pattern(&mut pattern_keyword.pattern); } +pub fn walk_f_string(visitor: &V, f_string: &mut FString) { + for element in &mut f_string.elements { + visitor.visit_f_string_element(element); + } +} + +pub fn walk_f_string_element( + visitor: &V, + f_string_element: &mut FStringElement, +) { + if let ast::FStringElement::Expression(ast::FStringExpressionElement { + expression, + format_spec, + .. 
+ }) = f_string_element + { + visitor.visit_expr(expression); + if let Some(format_spec) = format_spec { + for spec_element in &mut format_spec.elements { + visitor.visit_f_string_element(spec_element); + } + } + } +} + pub fn walk_expr_context(_visitor: &V, _expr_context: &mut ExprContext) {} pub fn walk_bool_op(_visitor: &V, _bool_op: &mut BoolOp) {} diff --git a/crates/ruff_python_ast/tests/preorder.rs b/crates/ruff_python_ast/tests/preorder.rs index 106ce91389..ea0542d9f1 100644 --- a/crates/ruff_python_ast/tests/preorder.rs +++ b/crates/ruff_python_ast/tests/preorder.rs @@ -2,16 +2,8 @@ use std::fmt::{Debug, Write}; use insta::assert_snapshot; -use ruff_python_ast::visitor::preorder::{ - walk_alias, walk_comprehension, walk_except_handler, walk_expr, walk_keyword, walk_match_case, - walk_module, walk_parameter, walk_parameters, walk_pattern, walk_stmt, walk_type_param, - walk_with_item, PreorderVisitor, -}; -use ruff_python_ast::AnyNodeRef; -use ruff_python_ast::{ - Alias, BoolOp, CmpOp, Comprehension, ExceptHandler, Expr, Keyword, MatchCase, Mod, Operator, - Parameter, Parameters, Pattern, Singleton, Stmt, TypeParam, UnaryOp, WithItem, -}; +use ruff_python_ast::visitor::preorder::{PreorderVisitor, TraversalSignal}; +use ruff_python_ast::{AnyNodeRef, BoolOp, CmpOp, Operator, Singleton, UnaryOp}; use ruff_python_parser::lexer::lex; use ruff_python_parser::{parse_tokens, Mode}; @@ -128,6 +120,33 @@ fn function_type_parameters() { assert_snapshot!(trace); } +#[test] +fn string_literals() { + let source = r"'a' 'b' 'c'"; + + let trace = trace_preorder_visitation(source); + + assert_snapshot!(trace); +} + +#[test] +fn bytes_literals() { + let source = r"b'a' b'b' b'c'"; + + let trace = trace_preorder_visitation(source); + + assert_snapshot!(trace); +} + +#[test] +fn f_strings() { + let source = r"'pre' f'foo {bar:.{x}f} baz'"; + + let trace = trace_preorder_visitation(source); + + assert_snapshot!(trace); +} + fn trace_preorder_visitation(source: &str) -> String { let tokens = lex(source, Mode::Module); let parsed = parse_tokens(tokens, source, Mode::Module, "test.py").unwrap(); @@ -147,18 +166,6 @@ struct RecordVisitor { } impl RecordVisitor { - fn enter_node<'a, T>(&mut self, node: T) - where - T: Into>, - { - self.emit(&node.into().kind()); - self.depth += 1; - } - - fn exit_node(&mut self) { - self.depth -= 1; - } - fn emit(&mut self, text: &dyn Debug) { for _ in 0..self.depth { self.output.push_str(" "); @@ -168,29 +175,16 @@ impl RecordVisitor { } } -impl PreorderVisitor<'_> for RecordVisitor { - fn visit_mod(&mut self, module: &Mod) { - self.enter_node(module); - walk_module(self, module); - self.exit_node(); +impl<'a> PreorderVisitor<'a> for RecordVisitor { + fn enter_node(&mut self, node: AnyNodeRef<'a>) -> TraversalSignal { + self.emit(&node.kind()); + self.depth += 1; + + TraversalSignal::Traverse } - fn visit_stmt(&mut self, stmt: &Stmt) { - self.enter_node(stmt); - walk_stmt(self, stmt); - self.exit_node(); - } - - fn visit_annotation(&mut self, expr: &Expr) { - self.enter_node(expr); - walk_expr(self, expr); - self.exit_node(); - } - - fn visit_expr(&mut self, expr: &Expr) { - self.enter_node(expr); - walk_expr(self, expr); - self.exit_node(); + fn leave_node(&mut self, _node: AnyNodeRef<'a>) { + self.depth -= 1; } fn visit_singleton(&mut self, singleton: &Singleton) { @@ -212,70 +206,4 @@ impl PreorderVisitor<'_> for RecordVisitor { fn visit_cmp_op(&mut self, cmp_op: &CmpOp) { self.emit(&cmp_op); } - - fn visit_comprehension(&mut self, comprehension: &Comprehension) { - 
self.enter_node(comprehension); - walk_comprehension(self, comprehension); - self.exit_node(); - } - - fn visit_except_handler(&mut self, except_handler: &ExceptHandler) { - self.enter_node(except_handler); - walk_except_handler(self, except_handler); - self.exit_node(); - } - - fn visit_format_spec(&mut self, format_spec: &Expr) { - self.enter_node(format_spec); - walk_expr(self, format_spec); - self.exit_node(); - } - - fn visit_parameters(&mut self, parameters: &Parameters) { - self.enter_node(parameters); - walk_parameters(self, parameters); - self.exit_node(); - } - - fn visit_parameter(&mut self, parameter: &Parameter) { - self.enter_node(parameter); - walk_parameter(self, parameter); - self.exit_node(); - } - - fn visit_keyword(&mut self, keyword: &Keyword) { - self.enter_node(keyword); - walk_keyword(self, keyword); - self.exit_node(); - } - - fn visit_alias(&mut self, alias: &Alias) { - self.enter_node(alias); - walk_alias(self, alias); - self.exit_node(); - } - - fn visit_with_item(&mut self, with_item: &WithItem) { - self.enter_node(with_item); - walk_with_item(self, with_item); - self.exit_node(); - } - - fn visit_match_case(&mut self, match_case: &MatchCase) { - self.enter_node(match_case); - walk_match_case(self, match_case); - self.exit_node(); - } - - fn visit_pattern(&mut self, pattern: &Pattern) { - self.enter_node(pattern); - walk_pattern(self, pattern); - self.exit_node(); - } - - fn visit_type_param(&mut self, type_param: &TypeParam) { - self.enter_node(type_param); - walk_type_param(self, type_param); - self.exit_node(); - } } diff --git a/crates/ruff_python_ast/tests/snapshots/preorder__bytes_literals.snap b/crates/ruff_python_ast/tests/snapshots/preorder__bytes_literals.snap new file mode 100644 index 0000000000..d71ea07e19 --- /dev/null +++ b/crates/ruff_python_ast/tests/snapshots/preorder__bytes_literals.snap @@ -0,0 +1,11 @@ +--- +source: crates/ruff_python_ast/tests/preorder.rs +expression: trace +--- +- ModModule + - StmtExpr + - ExprBytesLiteral + - BytesLiteral + - BytesLiteral + - BytesLiteral + diff --git a/crates/ruff_python_ast/tests/snapshots/preorder__class_type_parameters.snap b/crates/ruff_python_ast/tests/snapshots/preorder__class_type_parameters.snap index bf2a7eccb5..20ef6b71fa 100644 --- a/crates/ruff_python_ast/tests/snapshots/preorder__class_type_parameters.snap +++ b/crates/ruff_python_ast/tests/snapshots/preorder__class_type_parameters.snap @@ -4,11 +4,12 @@ expression: trace --- - ModModule - StmtClassDef - - TypeParamTypeVar - - ExprName - - TypeParamTypeVar - - TypeParamTypeVarTuple - - TypeParamParamSpec + - TypeParams + - TypeParamTypeVar + - ExprName + - TypeParamTypeVar + - TypeParamTypeVarTuple + - TypeParamParamSpec - StmtExpr - ExprEllipsisLiteral diff --git a/crates/ruff_python_ast/tests/snapshots/preorder__decorators.snap b/crates/ruff_python_ast/tests/snapshots/preorder__decorators.snap index ff57b138de..e91bd2e83b 100644 --- a/crates/ruff_python_ast/tests/snapshots/preorder__decorators.snap +++ b/crates/ruff_python_ast/tests/snapshots/preorder__decorators.snap @@ -4,10 +4,12 @@ expression: trace --- - ModModule - StmtFunctionDef - - ExprName + - Decorator + - ExprName - Parameters - StmtPass - StmtClassDef - - ExprName + - Decorator + - ExprName - StmtPass diff --git a/crates/ruff_python_ast/tests/snapshots/preorder__f_strings.snap b/crates/ruff_python_ast/tests/snapshots/preorder__f_strings.snap new file mode 100644 index 0000000000..043c58064b --- /dev/null +++ b/crates/ruff_python_ast/tests/snapshots/preorder__f_strings.snap 
@@ -0,0 +1,18 @@ +--- +source: crates/ruff_python_ast/tests/preorder.rs +expression: trace +--- +- ModModule + - StmtExpr + - ExprFString + - StringLiteral + - FString + - FStringLiteralElement + - FStringExpressionElement + - ExprName + - FStringLiteralElement + - FStringExpressionElement + - ExprName + - FStringLiteralElement + - FStringLiteralElement + diff --git a/crates/ruff_python_ast/tests/snapshots/preorder__function_arguments.snap b/crates/ruff_python_ast/tests/snapshots/preorder__function_arguments.snap index 5ecec46ee0..da83fc10f8 100644 --- a/crates/ruff_python_ast/tests/snapshots/preorder__function_arguments.snap +++ b/crates/ruff_python_ast/tests/snapshots/preorder__function_arguments.snap @@ -5,16 +5,22 @@ expression: trace - ModModule - StmtFunctionDef - Parameters + - ParameterWithDefault + - Parameter + - ParameterWithDefault + - Parameter + - ParameterWithDefault + - Parameter + - ParameterWithDefault + - Parameter + - ExprNumberLiteral - Parameter - - Parameter - - Parameter - - Parameter - - ExprNumberLiteral - - Parameter - - Parameter - - ExprNumberLiteral - - Parameter - - ExprNumberLiteral + - ParameterWithDefault + - Parameter + - ExprNumberLiteral + - ParameterWithDefault + - Parameter + - ExprNumberLiteral - Parameter - StmtPass diff --git a/crates/ruff_python_ast/tests/snapshots/preorder__function_positional_only_with_default.snap b/crates/ruff_python_ast/tests/snapshots/preorder__function_positional_only_with_default.snap index fdc755b1f0..6511aa3468 100644 --- a/crates/ruff_python_ast/tests/snapshots/preorder__function_positional_only_with_default.snap +++ b/crates/ruff_python_ast/tests/snapshots/preorder__function_positional_only_with_default.snap @@ -5,11 +5,14 @@ expression: trace - ModModule - StmtFunctionDef - Parameters - - Parameter - - Parameter - - ExprNumberLiteral - - Parameter - - ExprNumberLiteral + - ParameterWithDefault + - Parameter + - ParameterWithDefault + - Parameter + - ExprNumberLiteral + - ParameterWithDefault + - Parameter + - ExprNumberLiteral - Parameter - StmtPass diff --git a/crates/ruff_python_ast/tests/snapshots/preorder__function_type_parameters.snap b/crates/ruff_python_ast/tests/snapshots/preorder__function_type_parameters.snap index 47aa1ecf6a..11b98d0fa7 100644 --- a/crates/ruff_python_ast/tests/snapshots/preorder__function_type_parameters.snap +++ b/crates/ruff_python_ast/tests/snapshots/preorder__function_type_parameters.snap @@ -4,11 +4,12 @@ expression: trace --- - ModModule - StmtFunctionDef - - TypeParamTypeVar - - ExprName - - TypeParamTypeVar - - TypeParamTypeVarTuple - - TypeParamParamSpec + - TypeParams + - TypeParamTypeVar + - ExprName + - TypeParamTypeVar + - TypeParamTypeVarTuple + - TypeParamParamSpec - Parameters - StmtExpr - ExprEllipsisLiteral diff --git a/crates/ruff_python_ast/tests/snapshots/preorder__match_class_pattern.snap b/crates/ruff_python_ast/tests/snapshots/preorder__match_class_pattern.snap index da4fe2cd97..dbe56f11ff 100644 --- a/crates/ruff_python_ast/tests/snapshots/preorder__match_class_pattern.snap +++ b/crates/ruff_python_ast/tests/snapshots/preorder__match_class_pattern.snap @@ -8,21 +8,26 @@ expression: trace - MatchCase - PatternMatchClass - ExprName - - PatternMatchValue - - ExprNumberLiteral - - PatternMatchValue - - ExprNumberLiteral + - PatternArguments + - PatternMatchValue + - ExprNumberLiteral + - PatternMatchValue + - ExprNumberLiteral - StmtExpr - ExprEllipsisLiteral - MatchCase - PatternMatchClass - ExprName - - PatternMatchValue - - ExprNumberLiteral - - PatternMatchValue - 
- ExprNumberLiteral - - PatternMatchValue - - ExprNumberLiteral + - PatternArguments + - PatternKeyword + - PatternMatchValue + - ExprNumberLiteral + - PatternKeyword + - PatternMatchValue + - ExprNumberLiteral + - PatternKeyword + - PatternMatchValue + - ExprNumberLiteral - StmtExpr - ExprEllipsisLiteral diff --git a/crates/ruff_python_ast/tests/snapshots/preorder__string_literals.snap b/crates/ruff_python_ast/tests/snapshots/preorder__string_literals.snap new file mode 100644 index 0000000000..7b62ce78b7 --- /dev/null +++ b/crates/ruff_python_ast/tests/snapshots/preorder__string_literals.snap @@ -0,0 +1,11 @@ +--- +source: crates/ruff_python_ast/tests/preorder.rs +expression: trace +--- +- ModModule + - StmtExpr + - ExprStringLiteral + - StringLiteral + - StringLiteral + - StringLiteral + diff --git a/crates/ruff_python_ast/tests/snapshots/preorder__type_aliases.snap b/crates/ruff_python_ast/tests/snapshots/preorder__type_aliases.snap index 56c170d9b8..56d6aec5c0 100644 --- a/crates/ruff_python_ast/tests/snapshots/preorder__type_aliases.snap +++ b/crates/ruff_python_ast/tests/snapshots/preorder__type_aliases.snap @@ -5,11 +5,12 @@ expression: trace - ModModule - StmtTypeAlias - ExprName - - TypeParamTypeVar - - ExprName - - TypeParamTypeVar - - TypeParamTypeVarTuple - - TypeParamParamSpec + - TypeParams + - TypeParamTypeVar + - ExprName + - TypeParamTypeVar + - TypeParamTypeVarTuple + - TypeParamParamSpec - ExprSubscript - ExprName - ExprName diff --git a/crates/ruff_python_ast/tests/snapshots/visitor__bytes_literals.snap b/crates/ruff_python_ast/tests/snapshots/visitor__bytes_literals.snap new file mode 100644 index 0000000000..57bed3a9c7 --- /dev/null +++ b/crates/ruff_python_ast/tests/snapshots/visitor__bytes_literals.snap @@ -0,0 +1,10 @@ +--- +source: crates/ruff_python_ast/tests/visitor.rs +expression: trace +--- +- StmtExpr + - ExprBytesLiteral + - BytesLiteral + - BytesLiteral + - BytesLiteral + diff --git a/crates/ruff_python_ast/tests/snapshots/visitor__f_strings.snap b/crates/ruff_python_ast/tests/snapshots/visitor__f_strings.snap new file mode 100644 index 0000000000..a5c8a8b905 --- /dev/null +++ b/crates/ruff_python_ast/tests/snapshots/visitor__f_strings.snap @@ -0,0 +1,17 @@ +--- +source: crates/ruff_python_ast/tests/visitor.rs +expression: trace +--- +- StmtExpr + - ExprFString + - StringLiteral + - FString + - FStringLiteralElement + - FStringExpressionElement + - ExprName + - FStringLiteralElement + - FStringExpressionElement + - ExprName + - FStringLiteralElement + - FStringLiteralElement + diff --git a/crates/ruff_python_ast/tests/snapshots/visitor__string_literals.snap b/crates/ruff_python_ast/tests/snapshots/visitor__string_literals.snap new file mode 100644 index 0000000000..7c066eae56 --- /dev/null +++ b/crates/ruff_python_ast/tests/snapshots/visitor__string_literals.snap @@ -0,0 +1,10 @@ +--- +source: crates/ruff_python_ast/tests/visitor.rs +expression: trace +--- +- StmtExpr + - ExprStringLiteral + - StringLiteral + - StringLiteral + - StringLiteral + diff --git a/crates/ruff_python_ast/tests/visitor.rs b/crates/ruff_python_ast/tests/visitor.rs index a503009566..46af0903b6 100644 --- a/crates/ruff_python_ast/tests/visitor.rs +++ b/crates/ruff_python_ast/tests/visitor.rs @@ -6,14 +6,16 @@ use ruff_python_parser::lexer::lex; use ruff_python_parser::{parse_tokens, Mode}; use ruff_python_ast::visitor::{ - walk_alias, walk_comprehension, walk_except_handler, walk_expr, walk_keyword, walk_match_case, - walk_parameter, walk_parameters, walk_pattern, walk_stmt, 
walk_type_param, walk_with_item, + walk_alias, walk_bytes_literal, walk_comprehension, walk_except_handler, walk_expr, + walk_f_string, walk_f_string_element, walk_keyword, walk_match_case, walk_parameter, + walk_parameters, walk_pattern, walk_stmt, walk_string_literal, walk_type_param, walk_with_item, Visitor, }; use ruff_python_ast::AnyNodeRef; use ruff_python_ast::{ - Alias, BoolOp, CmpOp, Comprehension, ExceptHandler, Expr, Keyword, MatchCase, Operator, - Parameter, Parameters, Pattern, Stmt, TypeParam, UnaryOp, WithItem, + Alias, BoolOp, BytesLiteral, CmpOp, Comprehension, ExceptHandler, Expr, FString, + FStringElement, Keyword, MatchCase, Operator, Parameter, Parameters, Pattern, Stmt, + StringLiteral, TypeParam, UnaryOp, WithItem, }; #[test] @@ -129,6 +131,33 @@ fn function_type_parameters() { assert_snapshot!(trace); } +#[test] +fn string_literals() { + let source = r"'a' 'b' 'c'"; + + let trace = trace_visitation(source); + + assert_snapshot!(trace); +} + +#[test] +fn bytes_literals() { + let source = r"b'a' b'b' b'c'"; + + let trace = trace_visitation(source); + + assert_snapshot!(trace); +} + +#[test] +fn f_strings() { + let source = r"'pre' f'foo {bar:.{x}f} baz'"; + + let trace = trace_visitation(source); + + assert_snapshot!(trace); +} + fn trace_visitation(source: &str) -> String { let tokens = lex(source, Mode::Module); let parsed = parse_tokens(tokens, source, Mode::Module, "test.py").unwrap(); @@ -228,12 +257,6 @@ impl Visitor<'_> for RecordVisitor { self.exit_node(); } - fn visit_format_spec(&mut self, format_spec: &Expr) { - self.enter_node(format_spec); - walk_expr(self, format_spec); - self.exit_node(); - } - fn visit_parameters(&mut self, parameters: &Parameters) { self.enter_node(parameters); walk_parameters(self, parameters); @@ -281,4 +304,28 @@ impl Visitor<'_> for RecordVisitor { walk_type_param(self, type_param); self.exit_node(); } + + fn visit_string_literal(&mut self, string_literal: &StringLiteral) { + self.enter_node(string_literal); + walk_string_literal(self, string_literal); + self.exit_node(); + } + + fn visit_bytes_literal(&mut self, bytes_literal: &BytesLiteral) { + self.enter_node(bytes_literal); + walk_bytes_literal(self, bytes_literal); + self.exit_node(); + } + + fn visit_f_string(&mut self, f_string: &FString) { + self.enter_node(f_string); + walk_f_string(self, f_string); + self.exit_node(); + } + + fn visit_f_string_element(&mut self, f_string_element: &FStringElement) { + self.enter_node(f_string_element); + walk_f_string_element(self, f_string_element); + self.exit_node(); + } } diff --git a/crates/ruff_python_codegen/src/generator.rs b/crates/ruff_python_codegen/src/generator.rs index cb1a8fedab..7f9d5d4f8e 100644 --- a/crates/ruff_python_codegen/src/generator.rs +++ b/crates/ruff_python_codegen/src/generator.rs @@ -1069,18 +1069,6 @@ impl<'a> Generator<'a> { } self.p(")"); } - Expr::FormattedValue(ast::ExprFormattedValue { - value, - debug_text, - conversion, - format_spec, - range: _, - }) => self.unparse_formatted( - value, - debug_text.as_ref(), - *conversion, - format_spec.as_deref(), - ), Expr::FString(ast::ExprFString { value, .. }) => { self.unparse_f_string_value(value, false); } @@ -1089,7 +1077,7 @@ impl<'a> Generator<'a> { } Expr::BytesLiteral(ast::ExprBytesLiteral { value, .. 
}) => { let mut first = true; - for bytes_literal in value.parts() { + for bytes_literal in value { self.p_delim(&mut first, " "); self.p_bytes_repr(&bytes_literal.value); } @@ -1285,7 +1273,7 @@ impl<'a> Generator<'a> { fn unparse_string_literal_value(&mut self, value: &ast::StringLiteralValue) { let mut first = true; - for string_literal in value.parts() { + for string_literal in value { self.p_delim(&mut first, " "); self.unparse_string_literal(string_literal); } @@ -1293,31 +1281,31 @@ impl<'a> Generator<'a> { fn unparse_f_string_value(&mut self, value: &ast::FStringValue, is_spec: bool) { let mut first = true; - for f_string_part in value.parts() { + for f_string_part in value { self.p_delim(&mut first, " "); match f_string_part { ast::FStringPart::Literal(string_literal) => { self.unparse_string_literal(string_literal); } ast::FStringPart::FString(f_string) => { - self.unparse_f_string(&f_string.values, is_spec); + self.unparse_f_string(&f_string.elements, is_spec); } } } } - fn unparse_f_string_body(&mut self, values: &[Expr], is_spec: bool) { + fn unparse_f_string_body(&mut self, values: &[ast::FStringElement]) { for value in values { - self.unparse_f_string_elem(value, is_spec); + self.unparse_f_string_element(value); } } - fn unparse_formatted( + fn unparse_f_string_expression_element( &mut self, val: &Expr, debug_text: Option<&DebugText>, conversion: ConversionFlag, - spec: Option<&Expr>, + spec: Option<&ast::FStringFormatSpec>, ) { let mut generator = Generator::new(self.indent, self.quote, self.line_ending); generator.unparse_expr(val, precedence::FORMATTED_VALUE); @@ -1347,44 +1335,40 @@ impl<'a> Generator<'a> { if let Some(spec) = spec { self.p(":"); - self.unparse_f_string_elem(spec, true); + self.unparse_f_string(&spec.elements, true); } self.p("}"); } - fn unparse_f_string_elem(&mut self, expr: &Expr, is_spec: bool) { - match expr { - Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => { - self.unparse_f_string_literal(value.to_str()); + fn unparse_f_string_element(&mut self, element: &ast::FStringElement) { + match element { + ast::FStringElement::Literal(ast::FStringLiteralElement { value, .. }) => { + self.unparse_f_string_literal_element(value); } - Expr::FString(ast::ExprFString { value, .. 
}) => { - self.unparse_f_string_value(value, is_spec); - } - Expr::FormattedValue(ast::ExprFormattedValue { - value, + ast::FStringElement::Expression(ast::FStringExpressionElement { + expression, debug_text, conversion, format_spec, range: _, - }) => self.unparse_formatted( - value, + }) => self.unparse_f_string_expression_element( + expression, debug_text.as_ref(), *conversion, format_spec.as_deref(), ), - _ => unreachable!(), } } - fn unparse_f_string_literal(&mut self, s: &str) { + fn unparse_f_string_literal_element(&mut self, s: &str) { let s = s.replace('{', "{{").replace('}', "}}"); self.p(&s); } - fn unparse_f_string(&mut self, values: &[Expr], is_spec: bool) { + fn unparse_f_string(&mut self, values: &[ast::FStringElement], is_spec: bool) { if is_spec { - self.unparse_f_string_body(values, is_spec); + self.unparse_f_string_body(values); } else { self.p("f"); let mut generator = Generator::new( @@ -1395,7 +1379,7 @@ impl<'a> Generator<'a> { }, self.line_ending, ); - generator.unparse_f_string_body(values, is_spec); + generator.unparse_f_string_body(values); let body = &generator.buffer; self.p_str_repr(body); } @@ -1716,7 +1700,7 @@ class Foo: } #[test] - fn self_documenting_f_string() { + fn self_documenting_fstring() { assert_round_trip!(r#"f"{ chr(65) = }""#); assert_round_trip!(r#"f"{ chr(65) = !s}""#); assert_round_trip!(r#"f"{ chr(65) = !r}""#); diff --git a/crates/ruff_python_formatter/CONTRIBUTING.md b/crates/ruff_python_formatter/CONTRIBUTING.md index c302a4dc49..193ff8ef32 100644 --- a/crates/ruff_python_formatter/CONTRIBUTING.md +++ b/crates/ruff_python_formatter/CONTRIBUTING.md @@ -218,7 +218,7 @@ call, for single items `.format().fmt(f)` or `.fmt(f)` is sufficient. impl FormatNodeRule for FormatStmtReturn { fn fmt_fields(&self, item: &StmtReturn, f: &mut PyFormatter) -> FormatResult<()> { // Here we destructure item and make sure each field is listed. - // We generally don't need range is it's underscore-ignored + // We generally don't need range if it's underscore-ignored let StmtReturn { range: _, value } = item; // Implement some formatting logic, in this case no space (and no value) after a return with // no value diff --git a/crates/ruff_python_formatter/Cargo.toml b/crates/ruff_python_formatter/Cargo.toml index 5bc38d4960..1c8825b3af 100644 --- a/crates/ruff_python_formatter/Cargo.toml +++ b/crates/ruff_python_formatter/Cargo.toml @@ -28,6 +28,7 @@ countme = "3.0.1" itertools = { workspace = true } memchr = { workspace = true } once_cell = { workspace = true } +regex = { workspace = true } rustc-hash = { workspace = true } serde = { workspace = true, optional = true } schemars = { workspace = true, optional = true } diff --git a/crates/ruff_python_formatter/generate.py b/crates/ruff_python_formatter/generate.py index 52c256dd17..bf89ac1a4b 100755 --- a/crates/ruff_python_formatter/generate.py +++ b/crates/ruff_python_formatter/generate.py @@ -30,10 +30,20 @@ nodes_file = ( node_lines = ( nodes_file.split("pub enum AnyNode {")[1].split("}")[0].strip().splitlines() ) -nodes = [ - node_line.split("(")[1].split(")")[0].split("::")[-1].split("<")[0] - for node_line in node_lines -] +nodes = [] +for node_line in node_lines: + node = node_line.split("(")[1].split(")")[0].split("::")[-1].split("<")[0] + # `FString` and `StringLiteral` has a custom implementation while the formatting for + # `FStringLiteralElement` and `FStringExpressionElement` are handled by the `FString` + # implementation. 
+ if node in ( + "FString", + "StringLiteral", + "FStringLiteralElement", + "FStringExpressionElement", + ): + continue + nodes.append(node) print(nodes) # %% diff --git a/crates/ruff_python_formatter/resources/test/fixtures/black/cases/ignore_pyi.pyi b/crates/ruff_python_formatter/resources/test/fixtures/black/cases/ignore_pyi.pyi index 6423059067..5e643ea38e 100644 --- a/crates/ruff_python_formatter/resources/test/fixtures/black/cases/ignore_pyi.pyi +++ b/crates/ruff_python_formatter/resources/test/fixtures/black/cases/ignore_pyi.pyi @@ -15,7 +15,6 @@ def g(): # hi ... -# FIXME(#8905): Uncomment, leads to unstable formatting -# def h(): -# ... -# # bye +def h(): + ... + # bye diff --git a/crates/ruff_python_formatter/resources/test/fixtures/black/cases/line_ranges_fmt_off_decorator.py b/crates/ruff_python_formatter/resources/test/fixtures/black/cases/line_ranges_fmt_off_decorator.py index 3c9e616d83..8ae63e2171 100644 --- a/crates/ruff_python_formatter/resources/test/fixtures/black/cases/line_ranges_fmt_off_decorator.py +++ b/crates/ruff_python_formatter/resources/test/fixtures/black/cases/line_ranges_fmt_off_decorator.py @@ -9,3 +9,12 @@ class MyClass: # fmt: on def method(): print ( "str" ) + + @decor( + a=1, + # fmt: off + b=(2, 3), + # fmt: on + ) + def func(): + pass diff --git a/crates/ruff_python_formatter/resources/test/fixtures/black/cases/line_ranges_fmt_off_decorator.py.expect b/crates/ruff_python_formatter/resources/test/fixtures/black/cases/line_ranges_fmt_off_decorator.py.expect index 326b48df6b..905336810b 100644 --- a/crates/ruff_python_formatter/resources/test/fixtures/black/cases/line_ranges_fmt_off_decorator.py.expect +++ b/crates/ruff_python_formatter/resources/test/fixtures/black/cases/line_ranges_fmt_off_decorator.py.expect @@ -1,4 +1,4 @@ -# flags: --line-ranges=12-12 +# flags: --line-ranges=12-12 --line-ranges=21-21 # NOTE: If you need to modify this file, pay special attention to the --line-ranges= # flag above as it's formatting specifically these lines. 
@@ -10,3 +10,12 @@ class MyClass: # fmt: on def method(): print("str") + + @decor( + a=1, + # fmt: off + b=(2, 3), + # fmt: on + ) + def func(): + pass diff --git a/crates/ruff_python_formatter/resources/test/fixtures/black/cases/linelength6.options.json b/crates/ruff_python_formatter/resources/test/fixtures/black/cases/linelength6.options.json index f6d0b5fa4c..d5eda5fc94 100644 --- a/crates/ruff_python_formatter/resources/test/fixtures/black/cases/linelength6.options.json +++ b/crates/ruff_python_formatter/resources/test/fixtures/black/cases/linelength6.options.json @@ -1 +1 @@ -{"line_length": 6} \ No newline at end of file +{"line_width": 6} \ No newline at end of file diff --git a/crates/ruff_python_formatter/resources/test/fixtures/black/cases/power_op_newline.options.json b/crates/ruff_python_formatter/resources/test/fixtures/black/cases/power_op_newline.options.json index 80ad04dcfc..70fa4ec1b9 100644 --- a/crates/ruff_python_formatter/resources/test/fixtures/black/cases/power_op_newline.options.json +++ b/crates/ruff_python_formatter/resources/test/fixtures/black/cases/power_op_newline.options.json @@ -1 +1 @@ -{"line_length": 0} \ No newline at end of file +{"line_width": 1} \ No newline at end of file diff --git a/crates/ruff_python_formatter/resources/test/fixtures/import_black_tests.py b/crates/ruff_python_formatter/resources/test/fixtures/import_black_tests.py index 31d1515aef..b196876e70 100755 --- a/crates/ruff_python_formatter/resources/test/fixtures/import_black_tests.py +++ b/crates/ruff_python_formatter/resources/test/fixtures/import_black_tests.py @@ -48,7 +48,8 @@ def import_fixture(fixture: Path, fixture_set: str): if "--line-length=" in flags: [_, length_and_rest] = flags.split("--line-length=", 1) length = length_and_rest.split(" ", 1)[0] - options["line_length"] = int(length) + length = int(length) + options["line_width"] = 1 if length == 0 else length if "--skip-magic-trailing-comma" in flags: options["magic_trailing_comma"] = "ignore" diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/blank_line_before_class_docstring.options.json b/crates/ruff_python_formatter/resources/test/fixtures/ruff/blank_line_before_class_docstring.options.json new file mode 100644 index 0000000000..8925dd0a82 --- /dev/null +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/blank_line_before_class_docstring.options.json @@ -0,0 +1,5 @@ +[ + { + "preview": "enabled" + } +] diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/blank_line_before_class_docstring.py b/crates/ruff_python_formatter/resources/test/fixtures/ruff/blank_line_before_class_docstring.py new file mode 100644 index 0000000000..a8dbbafebb --- /dev/null +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/blank_line_before_class_docstring.py @@ -0,0 +1,38 @@ +class NormalDocstring: + + """This is a docstring.""" + + +class DocstringWithComment0: + # This is a comment + """This is a docstring.""" + + +class DocstringWithComment1: + # This is a comment + + """This is a docstring.""" + + +class DocstringWithComment2: + + # This is a comment + """This is a docstring.""" + + +class DocstringWithComment3: + + # This is a comment + + """This is a docstring.""" + + +class DocstringWithComment4: + + + # This is a comment + + + """This is a docstring.""" + + diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring.options.json b/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring.options.json index 28553e727b..e1a76386a0 100644 --- 
a/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring.options.json +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring.options.json @@ -14,5 +14,8 @@ { "indent_style": "tab", "indent_width": 4 + }, + { + "quote_style": "single" } ] diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring.py b/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring.py index d7d4d9b119..98a5a730f4 100644 --- a/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring.py +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring.py @@ -150,3 +150,8 @@ class TabbedIndent: Normal indented line - autor """ + + +def single_quoted(): + ' content\ ' + return diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples.options.json b/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples.options.json index b2a2d8a4e5..9400ae4bea 100644 --- a/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples.options.json +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples.options.json @@ -38,5 +38,17 @@ "docstring_code": "enabled", "indent_style": "tab", "indent_width": 4 + }, + { + "docstring_code": "enabled", + "docstring_code_line_width": 60, + "indent_style": "space", + "indent_width": 4 + }, + { + "docstring_code": "enabled", + "docstring_code_line_width": "dynamic", + "indent_style": "space", + "indent_width": 4 } ] diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples.py b/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples.py index 6f5fc0b30b..381f04b757 100644 --- a/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples.py +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples.py @@ -67,6 +67,27 @@ def doctest_last_line_continued(): pass +# Test that a doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line(): + """ + Do cool stuff. + + >>> cool_stuff( x )""" + pass + + +# Test that a continued doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line_continued(): + """ + Do cool stuff. + + >>> cool_stuff( x ) + ... more( y )""" + pass + + # Test that a doctest is correctly identified and formatted with a blank # continuation line. def doctest_blank_continued(): @@ -323,3 +344,1007 @@ def doctest_invalid_skipped_with_triple_double_in_single_quote_string(): >>> x = '\"\"\"' """ pass + + +############################################################################### +# reStructuredText CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# reStructuredText formatted code blocks. +# +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#literal-blocks +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-code-block +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#literal-blocks +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-30 +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-38 +############################################################################### + + +def rst_literal_simple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. 
+ """ + pass + + +def rst_literal_simple_continued(): + """ + Do cool stuff:: + + def cool_stuff( x ): + print( f"hi {x}" ); + + Done. + """ + pass + + +# Tests that we can end the literal block on the second +# to last line of the docstring. +def rst_literal_second_to_last(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + """ + pass + + +# Tests that we can end the literal block on the actual +# last line of the docstring. +def rst_literal_actually_last(): + """ + Do cool stuff:: + + cool_stuff( 1 )""" + pass + + +def rst_literal_with_blank_lines(): + """ + Do cool stuff:: + + def cool_stuff( x ): + print( f"hi {x}" ); + + def other_stuff( y ): + print( y ) + + Done. + """ + pass + + +# Extra blanks should be preserved. +def rst_literal_extra_blanks(): + """ + Do cool stuff:: + + + + cool_stuff( 1 ) + + + + Done. + """ + pass + + +# If a literal block is never properly ended (via a non-empty unindented line), +# then the end of the block should be the last non-empty line. And subsequent +# empty lines should be preserved as-is. +def rst_literal_extra_blanks_at_end(): + """ + Do cool stuff:: + + + cool_stuff( 1 ) + + + + """ + pass + + +# A literal block can contain many empty lines and it should not end the block +# if it continues. +def rst_literal_extra_blanks_in_snippet(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + + cool_stuff( 2 ) + + Done. + """ + pass + + +# This tests that a unindented line appearing after an indented line (but where +# the indent is still beyond the minimum) gets formatted properly. +def rst_literal_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( ''' + hiya''' ) + + Done. + """ + pass + + +# This checks that if the first line in a code snippet has been indented with +# tabs, then so long as its "indentation length" is considered bigger than the +# line with `::`, it is reformatted as code. +# +# (If your tabwidth is set to 4, then it looks like the code snippet +# isn't indented at all, which is perhaps counter-intuitive. Indeed, reST +# itself also seems to recognize this as a code block, although it appears +# under-specified.) +def rst_literal_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Another test with tabs, except in this case, if your tabwidth is less than +# 8, than the code snippet actually looks like its indent is *less* than the +# opening line with a `::`. One might presume this means that the code snippet +# is not treated as a literal block and thus not reformatted, but since we +# assume all tabs have tabwidth=8 when computing indentation length, the code +# snippet is actually seen as being more indented than the opening `::` line. +# As with the above example, reST seems to behave the same way here. +def rst_literal_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that if two lines in a literal block are indented to the same level +# but by different means (tabs versus spaces), then we correctly recognize the +# block and format it. 
+def rst_literal_first_line_tab_second_line_spaces(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that when two lines in a code snippet have weird and inconsistent +# indentation, the code still gets formatted so long as the indent is greater +# than the indent of the `::` line. +# +# In this case, the minimum indent is 5 spaces (from the second line) where as +# the first line has an indent of 8 spaces via a tab (by assuming tabwidth=8). +# The minimum indent is stripped from each code line. Since tabs aren't +# divisible, the entire tab is stripped, which means the first and second lines +# wind up with the same level of indentation. +# +# An alternative behavior here would be that the tab is replaced with 3 spaces +# instead of being stripped entirely. The code snippet itself would then have +# inconsistent indentation to the point of being invalid Python, and thus code +# formatting would be skipped. +# +# I decided on the former behavior because it seems a bit easier to implement, +# but we might want to switch to the alternative if cases like this show up in +# the real world. ---AG +def rst_literal_odd_indentation(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that having a line with a lone `::` works as an introduction of a +# literal block. +def rst_literal_lone_colon(): + """ + Do cool stuff. + + :: + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_simple(): + """ + .. code-block:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_case_insensitive(): + """ + .. cOdE-bLoCk:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_sourcecode(): + """ + .. sourcecode:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_options(): + """ + .. code-block:: python + :linenos: + :emphasize-lines: 2,3 + :name: blah blah + + cool_stuff( 1 ) + cool_stuff( 2 ) + cool_stuff( 3 ) + cool_stuff( 4 ) + + Done. + """ + pass + + +# In this case, since `pycon` isn't recognized as a Python code snippet, the +# docstring reformatter ignores it. But it then picks up the doctest and +# reformats it. +def rst_directive_doctest(): + """ + .. code-block:: pycon + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +# This checks that if the first non-empty line after the start of a literal +# block is not indented more than the line containing the `::`, then it is not +# treated as a code snippet. +def rst_literal_skipped_first_line_not_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but inserts an indented line after the un-indented one. +# This should not cause the literal block to be resumed. +def rst_literal_skipped_first_line_not_indented_then_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# This also checks that a code snippet is not reformatted when the indentation +# of the first line is not more than the line with `::`, but this uses tabs to +# make it a little more confounding. It relies on the fact that indentation +# length is computed by assuming a tabwidth equal to 8. reST also rejects this +# and doesn't treat it as a literal block. +def rst_literal_skipped_first_line_not_indented_tab(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the previous test, but adds a second line. 
+def rst_literal_skipped_first_line_not_indented_tab_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that a code block with a second line that is not properly indented gets +# skipped. A valid code block needs to have an empty line separating these. +# +# One trick here is that we need to make sure the Python code in the snippet is +# valid, otherwise it would be skipped because of invalid Python. +def rst_literal_skipped_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( ''' + hiya''' ) + + Done. + """ + pass + + +# In this test, we write what looks like a code-block, but it should be treated +# as invalid due to the missing `language` argument. +# +# It does still look like it could be a literal block according to the literal +# rules, but we currently consider the `.. ` prefix to indicate that it is not +# a literal block. +def rst_literal_skipped_not_directive(): + """ + .. code-block:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# In this test, we start a line with `.. `, which makes it look like it might +# be a directive. But instead continue it as if it was just some periods from +# the previous line, and then try to end it by starting a literal block. +# +# But because of the `.. ` in the beginning, we wind up not treating this as a +# code snippet. The reST render I was using to test things does actually treat +# this as a code block, so we may be out of conformance here. +def rst_literal_skipped_possible_false_negative(): + """ + This is a test. + .. This is a test:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# This tests that a doctest inside of a reST literal block doesn't get +# reformatted. It's plausible this isn't the right behavior, but it also seems +# like it might be the right behavior since it is a literal block. (The doctest +# makes the Python code invalid.) +def rst_literal_skipped_doctest(): + """ + Do cool stuff:: + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_literal_skipped_markdown(): + """ + Do cool stuff:: + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def rst_directive_skipped_not_indented(): + """ + .. code-block:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_skipped_wrong_language(): + """ + .. code-block:: rust + + cool_stuff( 1 ) + + Done. + """ + pass + + +# This gets skipped for the same reason that the doctest in a literal block +# gets skipped. +def rst_directive_skipped_doctest(): + """ + .. code-block:: python + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +############################################################################### +# Markdown CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# Markdown fenced code blocks. +# +# See: https://spec.commonmark.org/0.30/#fenced-code-blocks +############################################################################### + + +def markdown_simple(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_simple_continued(): + """ + Do cool stuff. + + ```python + def cool_stuff( x ): + print( f"hi {x}" ); + ``` + + Done. + """ + pass + + +# Tests that unlabeled Markdown fenced code blocks are assumed to be Python. +def markdown_unlabeled(): + """ + Do cool stuff. + + ``` + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# Tests that fenced code blocks using tildes work. +def markdown_tildes(): + """ + Do cool stuff. 
+ + ~~~py + cool_stuff( 1 ) + ~~~ + + Done. + """ + pass + + +# Tests that a longer closing fence is just fine and dandy. +def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + `````` + + Done. + """ + pass + + +# Tests that an invalid closing fence is treated as invalid. +# +# We embed it into a docstring so that the surrounding Python +# remains valid. +def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ''' + ```invalid + ''' + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +# Tests that one can nest fenced code blocks by using different numbers of +# backticks. +def markdown_nested_fences(): + """ + Do cool stuff. + + `````` + do_something( ''' + ``` + did i trick you? + ``` + ''' ) + `````` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring. When it's only empty lines, those are passed into the formatter +# and thus stripped. +def markdown_unclosed_empty_lines(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + + + + """ + pass + + +# Tests that we can end the block on the second to last line of the +# docstring. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + """ + pass + + +# Tests that an unclosed block with one extra line at the end is treated +# correctly. As per the CommonMark spec, an unclosed fenced code block contains +# everything following the opening fences. Since formatting the code snippet +# trims lines, the last empty line is removed here. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + """ + pass + + +# Tests that we can end the block on the actual last line of the docstring. +def markdown_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ```""" + pass + + +# Tests that an unclosed block that ends on the last line of a docstring +# is handled correctly. +def markdown_unclosed_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 )""" + pass + + +def markdown_with_blank_lines(): + """ + Do cool stuff. + + ```py + def cool_stuff( x ): + print( f"hi {x}" ); + + def other_stuff( y ): + print( y ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_tab_second_line_spaces(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_odd_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +# Extra blanks should be *not* be preserved (unlike reST) because they are part +# of the code snippet (per CommonMark spec), and thus get trimmed as part of +# code formatting. +def markdown_extra_blanks(): + """ + Do cool stuff. + + ```py + + + cool_stuff( 1 ) + + + ``` + + Done. + """ + pass + + +# A block can contain many empty lines within it. +def markdown_extra_blanks_in_snippet(): + """ + Do cool stuff. 
+ + ```py + + cool_stuff( 1 ) + + + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_weird_closing(): + """ + Code block with weirdly placed closing fences. + + ```python + cool_stuff( 1 ) + + ``` + # The above fences look like it shouldn't close the block, but we + # allow it to. The fences below re-open a block (until the end of + # the docstring), but it's invalid Python and thus doesn't get + # reformatted. + a = 10 + ``` + + Now the code block is closed + """ + pass + + +def markdown_over_indented(): + """ + A docstring + over intended + ```python + print( 5 ) + ``` + """ + pass + + +# This tests that we can have additional text after the language specifier. +def markdown_additional_info_string(): + """ + Do cool stuff. + + ```python tab="plugin.py" + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring, even if it isn't valid Python. Since it isn't valid Python, +# reformatting fails and the entire thing is skipped. +def markdown_skipped_unclosed_non_python(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + + I forgot to close the code block, and this is definitely not + Python. So nothing here gets formatted. + """ + pass + + +# This has a Python snippet with a docstring that contains a closing fence. +# This splits the embedded docstring and makes the overall snippet invalid. +def markdown_skipped_accidental_closure(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ''' + ``` + ''' + ``` + + Done. + """ + pass + + +# When a line is unindented all the way out before the standard indent of the +# docstring, the code reformatting ends up interacting poorly with the standard +# docstring whitespace normalization logic. This is probably a bug, and we +# should probably treat the Markdown block as valid, but for now, we detect +# the unindented line and declare the block as invalid and thus do no code +# reformatting. +# +# FIXME: Fixing this (if we think it's a bug) probably requires refactoring the +# docstring whitespace normalization to be aware of code snippets. Or perhaps +# plausibly, to do normalization *after* code snippets have been formatted. +def markdown_skipped_unindented_completely(): + """ + Do cool stuff. + + ```py +cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This test is fallout from treating fenced code blocks with unindented lines +# as invalid. We probably should treat this as a valid block. Indeed, if we +# remove the logic that makes the `markdown_skipped_unindented_completely` test +# pass, then this code snippet will get reformatted correctly. +def markdown_skipped_unindented_somewhat(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This tests that if a Markdown block contains a line that has less of an +# indent than another line. +# +# There is some judgment involved in what the right behavior is here. We +# could "normalize" the indentation so that the minimum is the indent of the +# opening fence line. If we did that here, then the code snippet would become +# valid and format as Python. But at time of writing, we don't, which leads to +# inconsistent indentation and thus invalid Python. +def markdown_skipped_unindented_with_inconsistent_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_doctest(): + """ + Do cool stuff. + + ```py + >>> cool_stuff( 1 ) + ``` + + Done. 
+ """ + pass + + +def markdown_skipped_rst_literal(): + """ + Do cool stuff. + + ```py + And do this:: + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_directive(): + """ + Do cool stuff. + + ```py + .. code-block:: python + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_dynamic_line_width.options.json b/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_dynamic_line_width.options.json new file mode 100644 index 0000000000..3367effa00 --- /dev/null +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_dynamic_line_width.options.json @@ -0,0 +1,26 @@ +[ + { + "docstring_code": "enabled", + "docstring_code_line_width": "dynamic", + "indent_style": "space", + "indent_width": 4 + }, + { + "docstring_code": "enabled", + "docstring_code_line_width": "dynamic", + "indent_style": "space", + "indent_width": 2 + }, + { + "docstring_code": "enabled", + "docstring_code_line_width": "dynamic", + "indent_style": "tab", + "indent_width": 4 + }, + { + "docstring_code": "enabled", + "docstring_code_line_width": "dynamic", + "indent_style": "tab", + "indent_width": 8 + } +] diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_dynamic_line_width.py b/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_dynamic_line_width.py new file mode 100644 index 0000000000..e84d3b0707 --- /dev/null +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_dynamic_line_width.py @@ -0,0 +1,221 @@ +def simple(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz(Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5): + def abcdefghijklmnopqrstuvwxyz(self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + self.x = doit( 5 ) + ``` + + Done. + """ + pass + + +# Like simple, but we double everything up to ensure the indent level is +# tracked correctly. +def repeated(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz(Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5): + def abcdefghijklmnopqrstuvwxyz(self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + self.x = doit( 5 ) + + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. 
+ print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + self.x = doit( 5 ) + + def abcdefghijklmnopqrstuvwxyz(self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + self.x = doit( 5 ) + + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + self.x = doit( 5 ) + + + class Abcdefghijklmopqrstuvwxyz(Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5): + def abcdefghijklmnopqrstuvwxyz(self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + self.x = doit( 5 ) + + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + self.x = doit( 5 ) + + def abcdefghijklmnopqrstuvwxyz(self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + self.x = doit( 5 ) + + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + self.x = doit( 5 ) + ``` + + Done. + """ + pass + + +# Like simple, but we make one line exactly one character longer than the limit +# (for 4-space indents) and make sure it gets wrapped. +def barely_exceeds_limit(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz(Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5): + def abcdefghijklmnopqrstuvwxyz(self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is 89 columns, which is one + # more than the limit. Therefore, it should get wrapped for + # indent_width >= 4. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a5678) + return 5 + self.x = doit( 5 ) + ``` + + Done. + """ + pass + + +# This tests that if the code block is unindented, that it gets indented and +# the dynamic line width setting is applied correctly. +def unindented(): + """ + First line. 
+ +```py +class Abcdefghijklmopqrstuvwxyz(Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5): + def abcdefghijklmnopqrstuvwxyz(self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + self.x = doit( 5 ) +``` + + Done. + """ + pass + + +# Like unindented, but contains a `print` line where it just barely exceeds the +# globally configured line width *after* its indentation has been corrected. +def unindented_barely_exceeds_limit(): + """ + First line. + +```py +class Abcdefghijklmopqrstuvwxyz(Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5): + def abcdefghijklmnopqrstuvwxyz(self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is 89 columns, which is one + # more than the limit. Therefore, it should get wrapped for + # indent_width >= 4. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a5678) + return 5 + self.x = doit( 5 ) +``` + + Done. + """ + pass + + +# See: https://github.com/astral-sh/ruff/issues/9126 +def doctest_extra_indent1(): + """ + Docstring example containing a class. + + Examples + -------- + >>> @pl.api.register_dataframe_namespace("split") + ... class SplitFrame: + ... def __init__(self, df: pl.DataFrame): + ... self._df = df + ... + ... def by_first_letter_of_column_values(self, col: str) -> list[pl.DataFrame]: + ... return [ + ... self._df.filter(pl.col(col).str.starts_with(c)) + ... for c in sorted( + ... set(df.select(pl.col(col).str.slice(0, 1)).to_series()) + ... ) + ... ] + """ + + +# See: https://github.com/astral-sh/ruff/issues/9126 +class DoctestExtraIndent2: + def example2(): + """ + Regular docstring of class method. + + Examples + -------- + >>> df = pl.DataFrame( + ... {"foo": [1, 2, 3], "bar": [6, 7, 8], "ham": ["a", "b", "c"]} + ... ) + """ + + +# See: https://github.com/astral-sh/ruff/issues/9126 +def doctest_extra_indent3(): + """ + Pragma comment. + + Examples + -------- + >>> af1, af2, af3 = pl.align_frames( + ... df1, df2, df3, on="dt" + ... ) # doctest: +IGNORE_RESULT + """ diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/binary.py b/crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/binary.py index 5d91dc500a..83c6f0ff9f 100644 --- a/crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/binary.py +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/binary.py @@ -319,6 +319,18 @@ expected_content = ( ) ) +# Skip FString content when determining whether to omit optional parentheses or not.0 +# The below expression should be parenthesized because it ends with an fstring and starts with a name. +# (Call expressions at the beginning don't count as parenthesized because they don't start with parens). +assert ( + format.format_event(spec) + == f'Event("_remove_cookie", {{key:`testkey`,options:{json.dumps(options)}}})' +) +# Avoid parentheses for this example because it starts with a tuple expression. 
+assert ( + (spec, format) + == f'Event("_remove_cookie", {{key:`testkey`,options:{json.dumps(options)}}})' +) rowuses = [(1 << j) | # column ordinal (1 << (n + i-j + n-1)) | # NW-SE ordinal diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/quote_style.options.json b/crates/ruff_python_formatter/resources/test/fixtures/ruff/quote_style.options.json new file mode 100644 index 0000000000..59431bf1c4 --- /dev/null +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/quote_style.options.json @@ -0,0 +1,11 @@ +[ + { + "quote_style": "single" + }, + { + "quote_style": "double" + }, + { + "quote_style": "preserve" + } +] diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/quote_style.py b/crates/ruff_python_formatter/resources/test/fixtures/ruff/quote_style.py new file mode 100644 index 0000000000..8f0d159beb --- /dev/null +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/quote_style.py @@ -0,0 +1,50 @@ +'single' +"double" +r'r single' +r"r double" +f'f single' +f"f double" +fr'fr single' +fr"fr double" +rf'rf single' +rf"rf double" +b'b single' +b"b double" +rb'rb single' +rb"rb double" +br'br single' +br"br double" + +'''single triple''' +"""double triple""" +r'''r single triple''' +r"""r double triple""" +f'''f single triple''' +f"""f double triple""" +fr'''fr single triple''' +fr"""fr double triple""" +rf'''rf single triple''' +rf"""rf double triple""" +b'''b single triple''' +b"""b double triple""" +rb'''rb single triple''' +rb"""rb double triple""" +br'''br single triple''' +br"""br double triple""" + +'single1' 'single2' +'single1' "double2" +"double1" 'single2' +"double1" "double2" + +def docstring_single_triple(): + '''single triple''' + +def docstring_double_triple(): + """double triple""" + +def docstring_double(): + "double triple" + +def docstring_single(): + 'single' diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/assign.py b/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/assign.py index 36022ddd7f..e70ab7c9a6 100644 --- a/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/assign.py +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/assign.py @@ -67,3 +67,12 @@ def main() -> None: db_request.POST["name"] ) )[0] + + +c = b[dddddd, aaaaaa] = ( + a[ + aaaaaaa, + bbbbbbbbbbbbbbbbbbb + ] + # comment +) = xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/assignment_split_value_first.options.json b/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/assignment_split_value_first.options.json new file mode 100644 index 0000000000..8925dd0a82 --- /dev/null +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/assignment_split_value_first.options.json @@ -0,0 +1,5 @@ +[ + { + "preview": "enabled" + } +] diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/assignment_split_value_first.py b/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/assignment_split_value_first.py new file mode 100644 index 0000000000..4c266a1ad5 --- /dev/null +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/assignment_split_value_first.py @@ -0,0 +1,218 @@ +####### +# Unsplittable target and value + +# Only parenthesize the value if it makes it fit, otherwise avoid parentheses. 
+b = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvvee + +bbbbbbbbbbbbbbbb = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvv + +# Avoid parenthesizing the value even if the target exceeds the configured width +bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb = bbb + + +############ +# Splittable targets + +# Does not double-parenthesize tuples +( + first_item, + second_item, +) = some_looooooooong_module.some_loooooog_function_name( + first_argument, second_argument, third_argument +) + + +# Preserve parentheses around the first target +( + req["ticket"]["steps"]["step"][0]["tasks"]["task"]["fields"]["field"][ + "access_request" + ]["destinations"]["destination"][0]["ip_address"] +) = dst + +# Augmented assignment +req["ticket"]["steps"]["step"][0]["tasks"]["task"]["fields"]["field"][ + "access_request" +] += dst + +# Always parenthesize the value if it avoids splitting the target, regardless of the value's width. +_a: a[aaaa] = ( + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvv +) + +##### +# Avoid parenthesizing the value if the expression right before the `=` splits to avoid an unnecessary pair of parentheses + +# The type annotation is guaranteed to split because it is too long. +_a: a[ + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvv +] = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvv + +# The target is too long +( + aaaaaaaaaaa, + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb, +) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvv + +# The target splits because of a magic trailing comma +( + a, + b, +) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvvvvv + +# The targets split because of a comment +( + # leading + a +) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvvvvv + +( + a + # trailing +) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvvvvv + +( + a, # nested + b +) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvvvvv + +####### +# Multi targets + +# Black always parenthesizes the right if using multiple targets regardless if the parenthesized value exceeds the +# the configured line width or not +aaaa = bbbbbbbbbbbbbbbb = ( + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvvee +) + +# Black does parenthesize the target if the target itself exceeds the line width and only parenthesizes +# the values if it makes it fit. +# The second target is too long to ever fit into the configured line width. +aaaa = ( + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbdddd +) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvvee + +# Does also apply for other multi target assignments, as soon as a single target exceeds the configured +# width +aaaaaa = a["aaa"] = bbbbb[aa, bbb, cccc] = dddddddddd = eeeeee = ( + fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + + +###################### +# Call expressions: +# For unsplittable targets: Parenthesize the call expression if it makes it fit. 
+# +# For splittable targets: +# Only parenthesize a call expression if the parens of the call don't fit on the same line +# as the target. Don't parenthesize the call expression if the target (or annotation) right before +# splits. + +# Don't parenthesize the function call if the left is unsplittable. +this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = a.b.function( + arg1, arg2, arg3 +) +this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = function( + [1, 2, 3], arg1, [1, 2, 3], arg2, [1, 2, 3], arg3 +) +this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = function( + [1, 2, 3], + arg1, + [1, 2, 3], + arg2, + [1, 2, 3], + arg3, + dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd, + eeeeeeeeeeeeee, +) + +this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = ( + function() +) +this_is_a_ridiculously_long_name_and_nobodyddddddddddddddddddddddddddddddd = ( + a.b.function(arg1, arg2, arg3) +) +this_is_a_ridiculously_long_name_and_nobodyddddddddddddddddddddddddddddddd = function() +this_is_a_ridiculously_long_name_and_nobodyddddddddddddddddddddddddddddddd = function( + [1, 2, 3], arg1, [1, 2, 3], arg2, [1, 2, 3], arg3 +) +this_is_a_ridiculously_long_name_and_nobodyddddddddddddddddddddddddddddddd = function( + [1, 2, 3], + arg1, + [1, 2, 3], + arg2, + [1, 2, 3], + arg3, + dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd, + eeeeeeeeeeeeee, +) + +####### Fluent call expressions +# Uses the regular `Multiline` layout where the entire `value` gets parenthesized +# if it doesn't fit on the line. +this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use = ( + function().b().c([1, 2, 3], arg1, [1, 2, 3], arg2, [1, 2, 3], arg3) +) + + +####### +# Test comment inlining +value.__dict__[key] = ( + "test" # set some Thrift field to non-None in the struct aa bb cc dd ee +) +value.__dict__.keye = ( + "test" # set some Thrift field to non-None in the struct aa bb cc dd ee +) +value.__dict__.keye = ( + "test" # set some Thrift field to non-None in the struct aa bb cc dd ee +) + + +# Don't parenthesize the value because the target's trailing comma forces it to split. +a[ + aaaaaaa, + b, +] = cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc # comment + +# Parenthesize the value, but don't duplicate the comment. +a[aaaaaaa, b] = ( + cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc # comment +) + +# Format both as flat, but don't loos the comment. +a[aaaaaaa, b] = bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb # comment + +####################################################### +# Test the case where a parenthesized value now fits: +a[ + aaaaaaa, + b +] = ( + cccccccc # comment +) + +# Splits the target but not the value because of the magic trailing comma. +a[ + aaaaaaa, + b, +] = ( + cccccccc # comment +) + +# Splits the second target because of the comment and the first target because of the trailing comma. 
+a[ + aaaaaaa, + b, +] = ( + # leading comment + b +) = ( + cccccccc # comment +) + + +######## +# Type Alias Statement +type A[str, int, number] = VeryLongTypeNameThatShouldBreakFirstToTheRightBeforeSplitngtin + +type A[VeryLongTypeNameThatShouldBreakFirstToTheRightBeforeSplitngtinthatExceedsTheWidth] = str + diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/ellipsis.pyi b/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/ellipsis.pyi index 499ef0aacc..aa693c3443 100644 --- a/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/ellipsis.pyi +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/ellipsis.pyi @@ -68,6 +68,10 @@ with True: with True: ... # comment +with True: + ... + # comment + match x: case 1: ... @@ -99,4 +103,4 @@ try: except: ... # comment finally: - ... # comment \ No newline at end of file + ... # comment diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/function.py b/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/function.py index 98eb09eefa..20eca87039 100644 --- a/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/function.py +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/function.py @@ -410,3 +410,13 @@ def default_arg_comments2(# # ): print(x) + +def function_with_one_argument_and_a_positional_separator( + argument: str, / +) -> ReallyReallyReallyReallyReallyReallyReallyReallyLongName: + pass + +def function_with_one_argument_and_a_keyword_separator( + *, argument: str +) -> ReallyReallyReallyReallyReallyReallyReallyReallyLongName: + pass diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/match.py b/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/match.py index 3d9855a154..f24f9416cc 100644 --- a/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/match.py +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/match.py @@ -578,3 +578,8 @@ match n % 3, n % 5: print("Buzz") case _: print(n) + +# Unparenthesized tuples +match x: + case Child(aaaaaaaaa, bbbbbbbbbbbbbbb, cccccc), Doc(aaaaa, bbbbbbbbbb, ddddddddddddd): + pass diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/type_alias.py b/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/type_alias.py index 8e29cd3730..ad13b9d84c 100644 --- a/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/type_alias.py +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/type_alias.py @@ -16,10 +16,12 @@ type Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx type Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx[Aaaaaaaaaaaaaaaaaaaaaaaaaaaa] = int type Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx[Aaaaaaaaaaaaaaaaaaaaaaaaaaaa, Bbbbbbbbbbbbb] = int type Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx = Tttttttttttttttttttttttttttttttttttttttttttttttttttttttt +type Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx = Tttttttttttttttttttttttttttttttttttttttttttttttttttttttt # with comment # long value type X = 
Ttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttt type X = Aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa | Bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb | Ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc +type XXXXXXXXXXXXX = Tttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttt # with comment # soft keyword as alias name type type = int diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/with.py b/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/with.py index 4dc166009a..b222747733 100644 --- a/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/with.py +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/with.py @@ -303,3 +303,7 @@ if True: if True: with anyio.CancelScope(shield=True) if get_running_loop() else contextlib.nullcontext(): pass + + +with Child(aaaaaaaaa, bbbbbbbbbbbbbbb, cccccc), Document(aaaaa, bbbbbbbbbb, ddddddddddddd): + pass diff --git a/crates/ruff_python_formatter/src/context.rs b/crates/ruff_python_formatter/src/context.rs index eb8fe7edf4..b5dc85fcb3 100644 --- a/crates/ruff_python_formatter/src/context.rs +++ b/crates/ruff_python_formatter/src/context.rs @@ -1,6 +1,7 @@ use crate::comments::Comments; -use crate::{PyFormatOptions, QuoteStyle}; -use ruff_formatter::{Buffer, FormatContext, GroupId, SourceCode}; +use crate::string::QuoteChar; +use crate::PyFormatOptions; +use ruff_formatter::{Buffer, FormatContext, GroupId, IndentWidth, SourceCode}; use ruff_source_file::Locator; use std::fmt::{Debug, Formatter}; use std::ops::{Deref, DerefMut}; @@ -11,15 +12,16 @@ pub struct PyFormatContext<'a> { contents: &'a str, comments: Comments<'a>, node_level: NodeLevel, + indent_level: IndentLevel, /// Set to a non-None value when the formatter is running on a code - /// snippet within a docstring. The value should be the quote style of the + /// snippet within a docstring. The value should be the quote character of the /// docstring containing the code snippet. /// /// Various parts of the formatter may inspect this state to change how it /// works. For example, multi-line strings will always be written with a /// quote style that is inverted from the one here in order to ensure that /// the formatted Python code will be valid. - docstring: Option, + docstring: Option, } impl<'a> PyFormatContext<'a> { @@ -29,6 +31,7 @@ impl<'a> PyFormatContext<'a> { contents, comments, node_level: NodeLevel::TopLevel(TopLevelStatementPosition::Other), + indent_level: IndentLevel::new(0), docstring: None, } } @@ -50,6 +53,14 @@ impl<'a> PyFormatContext<'a> { self.node_level } + pub(crate) fn set_indent_level(&mut self, level: IndentLevel) { + self.indent_level = level; + } + + pub(crate) fn indent_level(&self) -> IndentLevel { + self.indent_level + } + pub(crate) fn comments(&self) -> &Comments<'a> { &self.comments } @@ -57,23 +68,28 @@ impl<'a> PyFormatContext<'a> { /// Returns a non-None value only if the formatter is running on a code /// snippet within a docstring. /// - /// The quote style returned corresponds to the quoting used for the + /// The quote character returned corresponds to the quoting used for the /// docstring containing the code snippet currently being formatted. - pub(crate) fn docstring(&self) -> Option { + pub(crate) fn docstring(&self) -> Option { self.docstring } /// Return a new context suitable for formatting code snippets within a /// docstring. 
/// - /// The quote style given should correspond to the style of quoting used + /// The quote character given should correspond to the quote character used /// for the docstring containing the code snippets. - pub(crate) fn in_docstring(self, style: QuoteStyle) -> PyFormatContext<'a> { + pub(crate) fn in_docstring(self, quote: QuoteChar) -> PyFormatContext<'a> { PyFormatContext { - docstring: Some(style), + docstring: Some(quote), ..self } } + + /// Returns `true` if preview mode is enabled. + pub(crate) const fn is_preview(&self) -> bool { + self.options.preview().is_enabled() + } } impl FormatContext for PyFormatContext<'_> { @@ -204,3 +220,115 @@ where .set_node_level(self.saved_level); } } + +/// The current indent level of the formatter. +/// +/// One can determine the width of the indent itself (in number of ASCII +/// space characters) by multiplying the indent level by the configured indent +/// width. +/// +/// This is specifically used inside the docstring code formatter for +/// implementing its "dynamic" line width mode. Namely, in the nested call to +/// the formatter, when "dynamic" mode is enabled, the line width is set to +/// `max(1, line_width - indent_level * indent_width)`, where `line_width` in +/// this context is the global line width setting. +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +pub(crate) struct IndentLevel { + /// The numeric level. It is incremented for every whole indent in Python + /// source code. + /// + /// Note that the first indentation level is actually 1, since this starts + /// at 0 and is incremented when the first top-level statement is seen. So + /// even though the first top-level statement in Python source will have no + /// indentation, its indentation level is 1. + level: u16, +} + +impl IndentLevel { + /// Returns a new indent level for the given value. + pub(crate) fn new(level: u16) -> IndentLevel { + IndentLevel { level } + } + + /// Returns the next indent level. + pub(crate) fn increment(self) -> IndentLevel { + IndentLevel { + level: self.level.saturating_add(1), + } + } + + /// Convert this indent level into a specific number of ASCII whitespace + /// characters based on the given indent width. + pub(crate) fn to_ascii_spaces(self, width: IndentWidth) -> u16 { + let width = u16::try_from(width.value()).unwrap_or(u16::MAX); + // Why the subtraction? IndentLevel starts at 0 and asks for the "next" + // indent level before seeing the first top-level statement. So it's + // always 1 more than what we expect it to be. + let level = self.level.saturating_sub(1); + width.saturating_mul(level) + } +} + +/// Change the [`IndentLevel`] of the formatter for the lifetime of this +/// struct.
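As a rough illustration of the width arithmetic described in the `IndentLevel` doc comment above, here is a minimal Python sketch, assuming the effective line width is clamped to at least 1; `dynamic_line_width` and `indent_in_spaces` are illustrative names, not formatter APIs.

```python
# Illustrative only: how a nested docstring-code formatting pass might derive
# its effective line width from the global settings (assumed lower bound of 1).
def dynamic_line_width(line_width: int, indent_level: int, indent_width: int) -> int:
    return max(1, line_width - indent_level * indent_width)


def indent_in_spaces(indent_level: int, indent_width: int) -> int:
    # Mirrors the intent of `IndentLevel::to_ascii_spaces`: the level runs one
    # ahead of the visible indentation, hence the "- 1".
    return max(0, indent_level - 1) * indent_width


assert dynamic_line_width(88, 3, 4) == 76
assert indent_in_spaces(3, 4) == 8
```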
+pub(crate) struct WithIndentLevel<'a, B, D> +where + D: DerefMut, + B: Buffer>, +{ + buffer: D, + saved_level: IndentLevel, +} + +impl<'a, B, D> WithIndentLevel<'a, B, D> +where + D: DerefMut, + B: Buffer>, +{ + pub(crate) fn new(level: IndentLevel, mut buffer: D) -> Self { + let context = buffer.state_mut().context_mut(); + let saved_level = context.indent_level(); + + context.set_indent_level(level); + + Self { + buffer, + saved_level, + } + } +} + +impl<'a, B, D> Deref for WithIndentLevel<'a, B, D> +where + D: DerefMut, + B: Buffer>, +{ + type Target = B; + + fn deref(&self) -> &Self::Target { + &self.buffer + } +} + +impl<'a, B, D> DerefMut for WithIndentLevel<'a, B, D> +where + D: DerefMut, + B: Buffer>, +{ + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.buffer + } +} + +impl<'a, B, D> Drop for WithIndentLevel<'a, B, D> +where + D: DerefMut, + B: Buffer>, +{ + fn drop(&mut self) { + self.buffer + .state_mut() + .context_mut() + .set_indent_level(self.saved_level); + } +} diff --git a/crates/ruff_python_formatter/src/expression/binary_like.rs b/crates/ruff_python_formatter/src/expression/binary_like.rs index 149449d61c..3e153ad8c2 100644 --- a/crates/ruff_python_formatter/src/expression/binary_like.rs +++ b/crates/ruff_python_formatter/src/expression/binary_like.rs @@ -3,7 +3,7 @@ use std::ops::{Deref, Index}; use smallvec::SmallVec; -use ruff_formatter::{write, FormatContext}; +use ruff_formatter::write; use ruff_python_ast::{ Expr, ExprAttribute, ExprBinOp, ExprBoolOp, ExprCompare, ExprUnaryOp, UnaryOp, }; @@ -18,9 +18,10 @@ use crate::expression::parentheses::{ is_expression_parenthesized, write_in_parentheses_only_group_end_tag, write_in_parentheses_only_group_start_tag, Parentheses, }; -use crate::expression::string::{AnyString, FormatString, StringLayout}; use crate::expression::OperatorPrecedence; use crate::prelude::*; +use crate::preview::is_fix_power_op_line_length_enabled; +use crate::string::{AnyString, FormatStringContinuation}; #[derive(Copy, Clone, Debug)] pub(super) enum BinaryLike<'a> { @@ -394,9 +395,10 @@ impl Format> for BinaryLike<'_> { [ operand.leading_binary_comments().map(leading_comments), leading_comments(comments.leading(&string_constant)), - FormatString::new(&string_constant).with_layout( - StringLayout::ImplicitConcatenatedStringInBinaryLike, - ), + // Call `FormatStringContinuation` directly to avoid formatting + // the implicitly concatenated string with the enclosing group + // because the group is added by the binary like formatting. + FormatStringContinuation::new(&string_constant), trailing_comments(comments.trailing(&string_constant)), operand.trailing_binary_comments().map(trailing_comments), line_suffix_boundary(), @@ -412,9 +414,10 @@ impl Format> for BinaryLike<'_> { f, [ leading_comments(comments.leading(&string_constant)), - FormatString::new(&string_constant).with_layout( - StringLayout::ImplicitConcatenatedStringInBinaryLike - ), + // Call `FormatStringContinuation` directly to avoid formatting + // the implicitly concatenated string with the enclosing group + // because the group is added by the binary like formatting. 
+ FormatStringContinuation::new(&string_constant), trailing_comments(comments.trailing(&string_constant)), ] )?; @@ -719,7 +722,7 @@ impl Format> for FlatBinaryExpressionSlice<'_> { { hard_line_break().fmt(f)?; } else if is_pow { - if f.context().options().preview().is_enabled() { + if is_fix_power_op_line_length_enabled(f.context()) { in_parentheses_only_if_group_breaks(&space()).fmt(f)?; } } else { diff --git a/crates/ruff_python_formatter/src/expression/expr_bytes_literal.rs b/crates/ruff_python_formatter/src/expression/expr_bytes_literal.rs index 2fc0cd474c..4869a2d536 100644 --- a/crates/ruff_python_formatter/src/expression/expr_bytes_literal.rs +++ b/crates/ruff_python_formatter/src/expression/expr_bytes_literal.rs @@ -3,16 +3,24 @@ use ruff_python_ast::ExprBytesLiteral; use crate::comments::SourceComment; use crate::expression::expr_string_literal::is_multiline_string; -use crate::expression::parentheses::{NeedsParentheses, OptionalParentheses}; -use crate::expression::string::{AnyString, FormatString}; +use crate::expression::parentheses::{ + in_parentheses_only_group, NeedsParentheses, OptionalParentheses, +}; use crate::prelude::*; +use crate::string::{AnyString, FormatStringContinuation}; #[derive(Default)] pub struct FormatExprBytesLiteral; impl FormatNodeRule for FormatExprBytesLiteral { fn fmt_fields(&self, item: &ExprBytesLiteral, f: &mut PyFormatter) -> FormatResult<()> { - FormatString::new(&AnyString::Bytes(item)).fmt(f) + let ExprBytesLiteral { value, .. } = item; + + match value.as_slice() { + [bytes_literal] => bytes_literal.format().fmt(f), + _ => in_parentheses_only_group(&FormatStringContinuation::new(&AnyString::Bytes(item))) + .fmt(f), + } } fn fmt_dangling_comments( diff --git a/crates/ruff_python_formatter/src/expression/expr_f_string.rs b/crates/ruff_python_formatter/src/expression/expr_f_string.rs index 12e112ecc1..8a8ac81d35 100644 --- a/crates/ruff_python_formatter/src/expression/expr_f_string.rs +++ b/crates/ruff_python_formatter/src/expression/expr_f_string.rs @@ -1,21 +1,35 @@ use memchr::memchr2; +use ruff_python_ast::{AnyNodeRef, ExprFString}; +use ruff_source_file::Locator; +use ruff_text_size::Ranged; + use crate::comments::SourceComment; -use ruff_formatter::FormatResult; -use ruff_python_ast::AnyNodeRef; -use ruff_python_ast::ExprFString; - -use crate::expression::parentheses::{NeedsParentheses, OptionalParentheses}; +use crate::expression::parentheses::{ + in_parentheses_only_group, NeedsParentheses, OptionalParentheses, +}; +use crate::other::f_string_part::FormatFStringPart; use crate::prelude::*; - -use super::string::{AnyString, FormatString}; +use crate::string::{AnyString, FormatStringContinuation, Quoting}; #[derive(Default)] pub struct FormatExprFString; impl FormatNodeRule for FormatExprFString { fn fmt_fields(&self, item: &ExprFString, f: &mut PyFormatter) -> FormatResult<()> { - FormatString::new(&AnyString::FString(item)).fmt(f) + let ExprFString { value, .. 
} = item; + + match value.as_slice() { + [f_string_part] => FormatFStringPart::new( + f_string_part, + f_string_quoting(item, &f.context().locator()), + ) + .fmt(f), + _ => { + in_parentheses_only_group(&FormatStringContinuation::new(&AnyString::FString(item))) + .fmt(f) + } + } } fn fmt_dangling_comments( @@ -43,3 +57,28 @@ impl NeedsParentheses for ExprFString { } } } + +pub(crate) fn f_string_quoting(f_string: &ExprFString, locator: &Locator) -> Quoting { + let unprefixed = locator + .slice(f_string.range()) + .trim_start_matches(|c| c != '"' && c != '\''); + let triple_quoted = unprefixed.starts_with(r#"""""#) || unprefixed.starts_with(r"'''"); + + if f_string + .value + .elements() + .filter_map(|element| element.as_expression()) + .any(|expression| { + let string_content = locator.slice(expression.range()); + if triple_quoted { + string_content.contains(r#"""""#) || string_content.contains("'''") + } else { + string_content.contains(['"', '\'']) + } + }) + { + Quoting::Preserve + } else { + Quoting::CanChange + } +} diff --git a/crates/ruff_python_formatter/src/expression/expr_formatted_value.rs b/crates/ruff_python_formatter/src/expression/expr_formatted_value.rs deleted file mode 100644 index a1939891a3..0000000000 --- a/crates/ruff_python_formatter/src/expression/expr_formatted_value.rs +++ /dev/null @@ -1,24 +0,0 @@ -use ruff_python_ast::AnyNodeRef; -use ruff_python_ast::ExprFormattedValue; - -use crate::expression::parentheses::{NeedsParentheses, OptionalParentheses}; -use crate::prelude::*; - -#[derive(Default)] -pub struct FormatExprFormattedValue; - -impl FormatNodeRule for FormatExprFormattedValue { - fn fmt_fields(&self, _item: &ExprFormattedValue, _f: &mut PyFormatter) -> FormatResult<()> { - unreachable!("Handled inside of `FormatExprFString"); - } -} - -impl NeedsParentheses for ExprFormattedValue { - fn needs_parentheses( - &self, - _parent: AnyNodeRef, - _context: &PyFormatContext, - ) -> OptionalParentheses { - OptionalParentheses::Multiline - } -} diff --git a/crates/ruff_python_formatter/src/expression/expr_string_literal.rs b/crates/ruff_python_formatter/src/expression/expr_string_literal.rs index 199fb740ef..442081886d 100644 --- a/crates/ruff_python_formatter/src/expression/expr_string_literal.rs +++ b/crates/ruff_python_formatter/src/expression/expr_string_literal.rs @@ -1,34 +1,66 @@ use ruff_formatter::FormatRuleWithOptions; -use ruff_python_ast::AnyNodeRef; -use ruff_python_ast::ExprStringLiteral; +use ruff_python_ast::{AnyNodeRef, ExprStringLiteral}; use ruff_text_size::{Ranged, TextLen, TextRange}; use crate::comments::SourceComment; -use crate::expression::parentheses::{NeedsParentheses, OptionalParentheses}; -use crate::expression::string::{ - AnyString, FormatString, StringLayout, StringPrefix, StringQuotes, +use crate::expression::parentheses::{ + in_parentheses_only_group, NeedsParentheses, OptionalParentheses, }; +use crate::other::string_literal::{FormatStringLiteral, StringLiteralKind}; use crate::prelude::*; +use crate::string::{AnyString, FormatStringContinuation, StringPrefix, StringQuotes}; #[derive(Default)] pub struct FormatExprStringLiteral { - layout: StringLayout, + kind: ExprStringLiteralKind, +} + +#[derive(Default, Copy, Clone, Debug)] +pub enum ExprStringLiteralKind { + #[default] + String, + Docstring, +} + +impl ExprStringLiteralKind { + const fn string_literal_kind(self) -> StringLiteralKind { + match self { + ExprStringLiteralKind::String => StringLiteralKind::String, + ExprStringLiteralKind::Docstring => 
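A simplified re-statement of the quoting decision that `f_string_quoting` encodes may help here: if any interpolated expression's source text contains quote characters that would clash after a quote change, the existing quotes are preserved; otherwise they may be normalized. This sketch assumes pre-PEP-701 semantics, and `can_change_quotes` is an illustrative helper, not part of the formatter.

```python
# Simplified sketch of the Preserve / CanChange decision for f-strings.
def can_change_quotes(is_triple_quoted: bool, expression_sources: list[str]) -> bool:
    for source in expression_sources:
        if is_triple_quoted:
            # Only a nested triple quote clashes with a triple-quoted f-string.
            if '"""' in source or "'''" in source:
                return False
        elif '"' in source or "'" in source:
            # Any quote inside the expression could clash after re-quoting.
            return False
    return True


# f'{x["key"]}' must keep its quotes; f'{x}' may safely become f"{x}".
assert can_change_quotes(False, ['x["key"]']) is False
assert can_change_quotes(False, ["x"]) is True
```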
StringLiteralKind::Docstring, + } + } + + const fn is_docstring(self) -> bool { + matches!(self, ExprStringLiteralKind::Docstring) + } } impl FormatRuleWithOptions> for FormatExprStringLiteral { - type Options = StringLayout; + type Options = ExprStringLiteralKind; fn with_options(mut self, options: Self::Options) -> Self { - self.layout = options; + self.kind = options; self } } impl FormatNodeRule for FormatExprStringLiteral { fn fmt_fields(&self, item: &ExprStringLiteral, f: &mut PyFormatter) -> FormatResult<()> { - FormatString::new(&AnyString::String(item)) - .with_layout(self.layout) - .fmt(f) + let ExprStringLiteral { value, .. } = item; + + match value.as_slice() { + [string_literal] => { + FormatStringLiteral::new(string_literal, self.kind.string_literal_kind()).fmt(f) + } + _ => { + // This is just a sanity check because [`DocstringStmt::try_from_statement`] + // ensures that the docstring is a *single* string literal. + assert!(!self.kind.is_docstring()); + + in_parentheses_only_group(&FormatStringContinuation::new(&AnyString::String(item))) + } + .fmt(f), + } } fn fmt_dangling_comments( diff --git a/crates/ruff_python_formatter/src/expression/mod.rs b/crates/ruff_python_formatter/src/expression/mod.rs index b9dc9e8520..7d858694c0 100644 --- a/crates/ruff_python_formatter/src/expression/mod.rs +++ b/crates/ruff_python_formatter/src/expression/mod.rs @@ -12,9 +12,7 @@ use ruff_python_trivia::CommentRanges; use ruff_text_size::Ranged; use crate::builders::parenthesize_if_expands; -use crate::comments::{ - leading_comments, trailing_comments, LeadingDanglingTrailingComments, SourceComment, -}; +use crate::comments::{leading_comments, trailing_comments, LeadingDanglingTrailingComments}; use crate::context::{NodeLevel, WithNodeLevel}; use crate::expression::expr_generator_exp::is_generator_parenthesized; use crate::expression::expr_tuple::is_tuple_parenthesized; @@ -23,7 +21,7 @@ use crate::expression::parentheses::{ OptionalParentheses, Parentheses, Parenthesize, }; use crate::prelude::*; -use crate::PyFormatOptions; +use crate::preview::is_hug_parens_with_braces_and_square_brackets_enabled; mod binary_like; pub(crate) mod expr_attribute; @@ -38,7 +36,6 @@ pub(crate) mod expr_dict; pub(crate) mod expr_dict_comp; pub(crate) mod expr_ellipsis_literal; pub(crate) mod expr_f_string; -pub(crate) mod expr_formatted_value; pub(crate) mod expr_generator_exp; pub(crate) mod expr_if_exp; pub(crate) mod expr_ipy_escape_command; @@ -61,7 +58,6 @@ pub(crate) mod expr_yield; pub(crate) mod expr_yield_from; mod operator; pub(crate) mod parentheses; -pub(crate) mod string; #[derive(Copy, Clone, PartialEq, Eq, Default)] pub struct FormatExpr { @@ -99,7 +95,6 @@ impl FormatRule> for FormatExpr { Expr::YieldFrom(expr) => expr.format().fmt(f), Expr::Compare(expr) => expr.format().fmt(f), Expr::Call(expr) => expr.format().fmt(f), - Expr::FormattedValue(expr) => expr.format().fmt(f), Expr::FString(expr) => expr.format().fmt(f), Expr::StringLiteral(expr) => expr.format().fmt(f), Expr::BytesLiteral(expr) => expr.format().fmt(f), @@ -131,7 +126,7 @@ impl FormatRule> for FormatExpr { let node_comments = comments.leading_dangling_trailing(expression); if !node_comments.has_leading() && !node_comments.has_trailing() { parenthesized("(", &format_expr, ")") - .with_indent(!is_expression_huggable(expression, f.options())) + .with_indent(!is_expression_huggable(expression, f.context())) .fmt(f) } else { format_with_parentheses_comments(expression, &node_comments, f) @@ -288,7 +283,6 @@ fn 
format_with_parentheses_comments( Expr::YieldFrom(expr) => FormatNodeRule::fmt_fields(expr.format().rule(), expr, f), Expr::Compare(expr) => FormatNodeRule::fmt_fields(expr.format().rule(), expr, f), Expr::Call(expr) => FormatNodeRule::fmt_fields(expr.format().rule(), expr, f), - Expr::FormattedValue(expr) => FormatNodeRule::fmt_fields(expr.format().rule(), expr, f), Expr::FString(expr) => FormatNodeRule::fmt_fields(expr.format().rule(), expr, f), Expr::StringLiteral(expr) => FormatNodeRule::fmt_fields(expr.format().rule(), expr, f), Expr::BytesLiteral(expr) => FormatNodeRule::fmt_fields(expr.format().rule(), expr, f), @@ -434,113 +428,23 @@ impl Format> for MaybeParenthesizeExpression<'_> { } Parenthesize::IfBreaks => { - // Is the expression the last token in the parent statement. - // Excludes `await` and `yield` for which Black doesn't seem to apply the layout? - let last_expression = parent.is_stmt_assign() - || parent.is_stmt_ann_assign() - || parent.is_stmt_aug_assign() - || parent.is_stmt_return(); - - // Format the statements and value's trailing end of line comments: - // * after the expression if the expression needs no parentheses (necessary or the `expand_parent` makes the group never fit). - // * inside the parentheses if the expression exceeds the line-width. - // - // ```python - // a = long # with_comment - // b = ( - // short # with_comment - // ) - // - // # formatted - // a = ( - // long # with comment - // ) - // b = short # with comment - // ``` - // This matches Black's formatting with the exception that ruff applies this style also for - // attribute chains and non-fluent call expressions. See https://github.com/psf/black/issues/4001#issuecomment-1786681792 - // - // This logic isn't implemented in [`place_comment`] by associating trailing statement comments to the expression because - // doing so breaks the suite empty lines formatting that relies on trailing comments to be stored on the statement. - let (inline_comments, expression_trailing_comments) = if last_expression - && !( - // Ignore non-fluent attribute chains for black compatibility. - // See https://github.com/psf/black/issues/4001#issuecomment-1786681792 - expression.is_attribute_expr() - || expression.is_call_expr() - || expression.is_yield_from_expr() - || expression.is_yield_expr() - || expression.is_await_expr() - ) { - let parent_trailing_comments = comments.trailing(*parent); - let after_end_of_line = parent_trailing_comments - .partition_point(|comment| comment.line_position().is_end_of_line()); - let (stmt_inline_comments, _) = - parent_trailing_comments.split_at(after_end_of_line); - - let after_end_of_line = node_comments - .trailing - .partition_point(|comment| comment.line_position().is_end_of_line()); - - let (expression_inline_comments, expression_trailing_comments) = - node_comments.trailing.split_at(after_end_of_line); - - ( - OptionalParenthesesInlinedComments { - expression: expression_inline_comments, - statement: stmt_inline_comments, - }, - expression_trailing_comments, - ) + if node_comments.has_trailing() { + expression.format().with_options(Parentheses::Always).fmt(f) } else { - ( - OptionalParenthesesInlinedComments::default(), - node_comments.trailing, - ) - }; - - if expression_trailing_comments.is_empty() { // The group id is necessary because the nested expressions may reference it. 
let group_id = f.group_id("optional_parentheses"); let f = &mut WithNodeLevel::new(NodeLevel::Expression(Some(group_id)), f); - best_fit_parenthesize(&format_with(|f| { - inline_comments.mark_formatted(); - - expression - .format() - .with_options(Parentheses::Never) - .fmt(f)?; - - if !inline_comments.is_empty() { - // If the expressions exceeds the line width, format the comments in the parentheses - if_group_breaks(&inline_comments) - .with_group_id(Some(group_id)) - .fmt(f)?; - } - - Ok(()) - })) - .with_group_id(Some(group_id)) - .fmt(f)?; - - if !inline_comments.is_empty() { - // If the line fits into the line width, format the comments after the parenthesized expression - if_group_fits_on_line(&inline_comments) - .with_group_id(Some(group_id)) - .fmt(f)?; - } - - Ok(()) - } else { - expression.format().with_options(Parentheses::Always).fmt(f) + best_fit_parenthesize(&expression.format().with_options(Parentheses::Never)) + .with_group_id(Some(group_id)) + .fmt(f) } } }, OptionalParentheses::Never => match parenthesize { Parenthesize::IfBreaksOrIfRequired => { parenthesize_if_expands(&expression.format().with_options(Parentheses::Never)) - .with_indent(!is_expression_huggable(expression, f.options())) + .with_indent(!is_expression_huggable(expression, f.context())) .fmt(f) } @@ -580,7 +484,6 @@ impl NeedsParentheses for Expr { Expr::YieldFrom(expr) => expr.needs_parentheses(parent, context), Expr::Compare(expr) => expr.needs_parentheses(parent, context), Expr::Call(expr) => expr.needs_parentheses(parent, context), - Expr::FormattedValue(expr) => expr.needs_parentheses(parent, context), Expr::FString(expr) => expr.needs_parentheses(parent, context), Expr::StringLiteral(expr) => expr.needs_parentheses(parent, context), Expr::BytesLiteral(expr) => expr.needs_parentheses(parent, context), @@ -625,19 +528,63 @@ impl<'ast> IntoFormat> for Expr { /// * The expression contains at least one parenthesized sub expression (optimization to avoid unnecessary work) /// /// This mimics Black's [`_maybe_split_omitting_optional_parens`](https://github.com/psf/black/blob/d1248ca9beaf0ba526d265f4108836d89cf551b7/src/black/linegen.py#L746-L820) +#[allow(clippy::if_same_then_else)] fn can_omit_optional_parentheses(expr: &Expr, context: &PyFormatContext) -> bool { let mut visitor = CanOmitOptionalParenthesesVisitor::new(context); visitor.visit_subexpression(expr); - if visitor.max_precedence == OperatorPrecedence::None { - true - } else if visitor.max_precedence_count > 1 { - false - } else if visitor.max_precedence == OperatorPrecedence::Attribute { - true - } else if !visitor.any_parenthesized_expressions { + if !visitor.any_parenthesized_expressions { // Only use the more complex IR when there is any expression that we can possibly split by false + } else if visitor.max_precedence_count > 1 { + false + } else if visitor.max_precedence == OperatorPrecedence::None && expr.is_lambda_expr() { + // Micha: This seems to exclusively apply for lambda expressions where the body ends in a subscript. + // Subscripts are excluded by default because breaking them looks odd, but it seems to be fine for lambda expression. 
+ // + // ```python + // mapper = lambda x: dict_with_default[ + // np.nan if isinstance(x, float) and np.isnan(x) else x + // ] + // ``` + // + // to prevent that it gets formatted as: + // + // ```python + // mapper = ( + // lambda x: dict_with_default[ + // np.nan if isinstance(x, float) and np.isnan(x) else x + // ] + // ) + // ``` + // I think we should remove this check in the future and instead parenthesize the body of the lambda expression: + // + // ```python + // mapper = lambda x: ( + // dict_with_default[ + // np.nan if isinstance(x, float) and np.isnan(x) else x + // ] + // ) + // ``` + true + } else if visitor.max_precedence == OperatorPrecedence::Attribute + && (expr.is_lambda_expr() || expr.is_named_expr_expr()) + { + // A single method call inside a named expression (`:=`) or as the body of a lambda function: + // ```python + // kwargs["open_with"] = lambda path, _: fsspec.open( + // path, "wb", **(storage_options or {}) + // ).open() + // + // if ret := subprocess.run( + // ["git", "rev-parse", "--short", "HEAD"], + // cwd=package_dir, + // capture_output=True, + // encoding="ascii", + // errors="surrogateescape", + // ).stdout: + // ``` + true } else { fn is_parenthesized(expr: &Expr, context: &PyFormatContext) -> bool { // Don't break subscripts except in parenthesized context. It looks weird. @@ -648,17 +595,13 @@ fn can_omit_optional_parentheses(expr: &Expr, context: &PyFormatContext) -> bool // Only use the layout if the first expression starts with parentheses // or the last expression ends with parentheses of some sort, and // those parentheses are non-empty. - if visitor + visitor .last .is_some_and(|last| is_parenthesized(last, context)) - { - true - } else { - visitor + || visitor .first .expression() .is_some_and(|first| is_parenthesized(first, context)) - } } } @@ -785,16 +728,6 @@ impl<'input> CanOmitOptionalParenthesesVisitor<'input> { // Don't walk the slice, because the slice is always parenthesized. return; } - Expr::UnaryOp(ast::ExprUnaryOp { - range: _, - op, - operand: _, - }) => { - if op.is_invert() { - self.update_max_precedence(OperatorPrecedence::BitwiseInversion); - } - self.first.set_if_none(First::Token); - } // `[a, b].test.test[300].dot` Expr::Attribute(ast::ExprAttribute { @@ -823,10 +756,26 @@ impl<'input> CanOmitOptionalParenthesesVisitor<'input> { } Expr::FString(ast::ExprFString { value, .. }) if value.is_implicit_concatenated() => { self.update_max_precedence(OperatorPrecedence::String); + return; } + // Non terminal nodes that don't have a termination token. + Expr::NamedExpr(_) | Expr::GeneratorExp(_) | Expr::Tuple(_) => {} + // Expressions with sub expressions but a preceding token // Mark this expression as first expression and not the sub expression. + // Visit the sub-expressions because the sub expressions may be the end of the entire expression. + Expr::UnaryOp(ast::ExprUnaryOp { + range: _, + op, + operand: _, + }) => { + if op.is_invert() { + self.update_max_precedence(OperatorPrecedence::BitwiseInversion); + } + self.first.set_if_none(First::Token); + } + Expr::Lambda(_) | Expr::Await(_) | Expr::Yield(_) @@ -835,11 +784,8 @@ impl<'input> CanOmitOptionalParenthesesVisitor<'input> { self.first.set_if_none(First::Token); } - Expr::Tuple(_) - | Expr::NamedExpr(_) - | Expr::GeneratorExp(_) - | Expr::FormattedValue(_) - | Expr::FString(_) + // Terminal nodes or nodes that wrap a sub-expression (where the sub expression can never be at the end). 
+ Expr::FString(_) | Expr::StringLiteral(_) | Expr::BytesLiteral(_) | Expr::NumberLiteral(_) @@ -848,7 +794,9 @@ impl<'input> CanOmitOptionalParenthesesVisitor<'input> { | Expr::EllipsisLiteral(_) | Expr::Name(_) | Expr::Slice(_) - | Expr::IpyEscapeCommand(_) => {} + | Expr::IpyEscapeCommand(_) => { + return; + } }; walk_expr(self, expr); @@ -1049,7 +997,7 @@ impl OwnParentheses { /// Differs from [`has_own_parentheses`] in that it returns [`OwnParentheses::NonEmpty`] for /// parenthesized expressions, like `(1)` or `([1])`, regardless of whether those expression have /// their _own_ parentheses. -fn has_parentheses(expr: &Expr, context: &PyFormatContext) -> Option { +pub(crate) fn has_parentheses(expr: &Expr, context: &PyFormatContext) -> Option { let own_parentheses = has_own_parentheses(expr, context); // If the node has its own non-empty parentheses, we don't need to check for surrounding @@ -1153,8 +1101,8 @@ pub(crate) fn has_own_parentheses( /// ] /// ) /// ``` -pub(crate) fn is_expression_huggable(expr: &Expr, options: &PyFormatOptions) -> bool { - if !options.preview().is_enabled() { +pub(crate) fn is_expression_huggable(expr: &Expr, context: &PyFormatContext) -> bool { + if !is_hug_parens_with_braces_and_square_brackets_enabled(context) { return false; } @@ -1190,7 +1138,6 @@ pub(crate) fn is_expression_huggable(expr: &Expr, options: &PyFormatOptions) -> | Expr::YieldFrom(_) | Expr::Compare(_) | Expr::Call(_) - | Expr::FormattedValue(_) | Expr::FString(_) | Expr::Attribute(_) | Expr::Subscript(_) @@ -1248,41 +1195,3 @@ impl From for OperatorPrecedence { } } } - -#[derive(Debug, Default)] -struct OptionalParenthesesInlinedComments<'a> { - expression: &'a [SourceComment], - statement: &'a [SourceComment], -} - -impl<'a> OptionalParenthesesInlinedComments<'a> { - fn is_empty(&self) -> bool { - self.expression.is_empty() && self.statement.is_empty() - } - - fn iter_comments(&self) -> impl Iterator { - self.expression.iter().chain(self.statement) - } - - fn mark_formatted(&self) { - for comment in self.iter_comments() { - comment.mark_formatted(); - } - } -} - -impl Format> for OptionalParenthesesInlinedComments<'_> { - fn fmt(&self, f: &mut Formatter>) -> FormatResult<()> { - for comment in self.iter_comments() { - comment.mark_unformatted(); - } - - write!( - f, - [ - trailing_comments(self.expression), - trailing_comments(self.statement) - ] - ) - } -} diff --git a/crates/ruff_python_formatter/src/expression/string/docstring.rs b/crates/ruff_python_formatter/src/expression/string/docstring.rs deleted file mode 100644 index bed24c3419..0000000000 --- a/crates/ruff_python_formatter/src/expression/string/docstring.rs +++ /dev/null @@ -1,871 +0,0 @@ -use std::borrow::Cow; - -use ruff_python_trivia::PythonWhitespace; -use { - ruff_formatter::{write, Printed}, - ruff_source_file::Locator, - ruff_text_size::{Ranged, TextLen, TextRange, TextSize}, -}; - -use crate::{prelude::*, FormatModuleError, QuoteStyle}; - -use super::NormalizedString; - -/// Format a docstring by trimming whitespace and adjusting the indentation. -/// -/// Summary of changes we make: -/// * Normalize the string like all other strings -/// * Ignore docstring that have an escaped newline -/// * Trim all trailing whitespace, except for a chaperone space that avoids quotes or backslashes -/// in the last line. 
-/// * Trim leading whitespace on the first line, again except for a chaperone space -/// * If there is only content in the first line and after that only whitespace, collapse the -/// docstring into one line -/// * Adjust the indentation (see below) -/// -/// # Docstring indentation -/// -/// Unlike any other string, like black we change the indentation of docstring lines. -/// -/// We want to preserve the indentation inside the docstring relative to the suite statement/block -/// indent that the docstring statement is in, but also want to apply the change of the outer -/// indentation in the docstring, e.g. -/// ```python -/// def sparkle_sky(): -/// """Make a pretty sparkly sky. -/// * * ✨ *. . -/// * * ✨ . -/// . * . ✨ * . . -/// """ -/// ``` -/// should become -/// ```python -/// def sparkle_sky(): -/// """Make a pretty sparkly sky. -/// * * ✨ *. . -/// * * ✨ . -/// . * . ✨ * . . -/// """ -/// ``` -/// We can't compute the full indentation here since we don't know what the block indent of -/// the doc comment will be yet and which we can only have added by formatting each line -/// separately with a hard line break. This means we need to strip shared indentation from -/// docstring while preserving the in-docstring bigger-than-suite-statement indentation. Example: -/// ```python -/// def f(): -/// """first line -/// line a -/// line b -/// """ -/// ``` -/// The docstring indentation is 2, the block indents will change this to 4 (but we can't -/// determine this at this point). The indentation of line a is 2, so we trim ` line a` -/// to `line a`. For line b it's 5, so we trim it to `line b` and pad with 5-2=3 spaces to -/// ` line b`. The closing quotes, being on their own line, are stripped get only the -/// default indentation. Fully formatted: -/// ```python -/// def f(): -/// """first line -/// line a -/// line b -/// """ -/// ``` -/// -/// Tabs are counted by padding them to the next multiple of 8 according to -/// [`str.expandtabs`](https://docs.python.org/3/library/stdtypes.html#str.expandtabs). When -/// we see indentation that contains a tab or any other none ascii-space whitespace we rewrite the -/// string. -/// -/// Additionally, if any line in the docstring has less indentation than the docstring -/// (effectively a negative indentation wrt. to the current level), we pad all lines to the -/// level of the docstring with spaces. 
-/// ```python -/// def f(): -/// """first line -/// line a -/// line b -/// line c -/// """ -/// ``` -/// Here line a is 3 columns negatively indented, so we pad all lines by an extra 3 spaces: -/// ```python -/// def f(): -/// """first line -/// line a -/// line b -/// line c -/// """ -/// ``` -pub(super) fn format(normalized: &NormalizedString, f: &mut PyFormatter) -> FormatResult<()> { - let docstring = &normalized.text; - - // Black doesn't change the indentation of docstrings that contain an escaped newline - if contains_unescaped_newline(docstring) { - return normalized.fmt(f); - } - - // is_borrowed is unstable :/ - let already_normalized = matches!(docstring, Cow::Borrowed(_)); - - let mut lines = docstring.lines().peekable(); - - // Start the string - write!( - f, - [ - normalized.prefix, - normalized.quotes, - source_position(normalized.start()), - ] - )?; - // We track where in the source docstring we are (in source code byte offsets) - let mut offset = normalized.start(); - - // The first line directly after the opening quotes has different rules than the rest, mainly - // that we remove all leading whitespace as there's no indentation - let first = lines.next().unwrap_or_default(); - // Black trims whitespace using [`str.strip()`](https://docs.python.org/3/library/stdtypes.html#str.strip) - // https://github.com/psf/black/blob/b4dca26c7d93f930bbd5a7b552807370b60d4298/src/black/strings.py#L77-L85 - // So we use the unicode whitespace definition through `trim_{start,end}` instead of the python - // tokenizer whitespace definition in `trim_whitespace_{start,end}`. - let trim_end = first.trim_end(); - let trim_both = trim_end.trim_start(); - - // Edge case: The first line is `""" "content`, so we need to insert chaperone space that keep - // inner quotes and closing quotes from getting to close to avoid `""""content` - if trim_both.starts_with(normalized.quotes.style.as_char()) { - space().fmt(f)?; - } - - if !trim_end.is_empty() { - // For the first line of the docstring we strip the leading and trailing whitespace, e.g. - // `""" content ` to `"""content` - let leading_whitespace = trim_end.text_len() - trim_both.text_len(); - let trimmed_line_range = - TextRange::at(offset, trim_end.text_len()).add_start(leading_whitespace); - if already_normalized { - source_text_slice(trimmed_line_range).fmt(f)?; - } else { - text(trim_both, Some(trimmed_line_range.start())).fmt(f)?; - } - } - offset += first.text_len(); - - // Check if we have a single line (or empty) docstring - if docstring[first.len()..].trim().is_empty() { - // For `"""\n"""` or other whitespace between the quotes, black keeps a single whitespace, - // but `""""""` doesn't get one inserted. - if needs_chaperone_space(normalized, trim_end) - || (trim_end.is_empty() && !docstring.is_empty()) - { - space().fmt(f)?; - } - normalized.quotes.fmt(f)?; - return Ok(()); - } - - hard_line_break().fmt(f)?; - // We know that the normalized string has \n line endings - offset += "\n".text_len(); - - // If some line of the docstring is less indented than the function body, we pad all lines to - // align it with the docstring statement. Conversely, if all lines are over-indented, we strip - // the extra indentation. We call this stripped indentation since it's relative to the block - // indent printer-made indentation. 
- let stripped_indentation_length = lines - .clone() - // We don't want to count whitespace-only lines as miss-indented - .filter(|line| !line.trim().is_empty()) - .map(indentation_length) - .min() - .unwrap_or_default(); - - DocstringLinePrinter { - f, - offset, - stripped_indentation_length, - already_normalized, - quote_style: normalized.quotes.style, - code_example: CodeExample::default(), - } - .add_iter(lines)?; - - // Same special case in the last line as for the first line - let trim_end = docstring - .as_ref() - .trim_end_matches(|c: char| c.is_whitespace() && c != '\n'); - if needs_chaperone_space(normalized, trim_end) { - space().fmt(f)?; - } - - write!(f, [source_position(normalized.end()), normalized.quotes]) -} - -fn contains_unescaped_newline(haystack: &str) -> bool { - let mut rest = haystack; - - while let Some(index) = memchr::memchr(b'\\', rest.as_bytes()) { - rest = &rest[index + 1..].trim_whitespace_start(); - - if rest.starts_with('\n') { - return true; - } - } - - false -} - -/// An abstraction for printing each line of a docstring. -struct DocstringLinePrinter<'ast, 'buf, 'fmt, 'src> { - f: &'fmt mut PyFormatter<'ast, 'buf>, - /// The source offset of the beginning of the line that is currently being - /// printed. - offset: TextSize, - /// Indentation alignment based on the least indented line in the - /// docstring. - stripped_indentation_length: TextSize, - /// Whether the docstring is overall already considered normalized. When it - /// is, the formatter can take a fast path. - already_normalized: bool, - /// The quote style used by the docstring being printed. - quote_style: QuoteStyle, - /// The current code example detected in the docstring. - code_example: CodeExample<'src>, -} - -impl<'ast, 'buf, 'fmt, 'src> DocstringLinePrinter<'ast, 'buf, 'fmt, 'src> { - /// Print all of the lines in the given iterator to this - /// printer's formatter. - /// - /// Note that callers may treat the first line specially, such that the - /// iterator given contains all lines except for the first. - fn add_iter( - &mut self, - mut lines: std::iter::Peekable>, - ) -> FormatResult<()> { - while let Some(line) = lines.next() { - let line = InputDocstringLine { - line, - offset: self.offset, - next: lines.peek().copied(), - }; - // We know that the normalized string has \n line endings. - self.offset += line.line.text_len() + "\n".text_len(); - self.add_one(line)?; - } - Ok(()) - } - - /// Adds the given line to this printer. - /// - /// Depending on what's in the line, this may or may not print the line - /// immediately to the underlying buffer. If the line starts or is part - /// of an existing code snippet, then the lines will get buffered until - /// the code snippet is complete. - fn add_one(&mut self, line: InputDocstringLine<'src>) -> FormatResult<()> { - // Just pass through the line as-is without looking for a code snippet - // when docstring code formatting is disabled. And also when we are - // formatting a code snippet so as to avoid arbitrarily nested code - // snippet formatting. We avoid this because it's likely quite tricky - // to get right 100% of the time, although perhaps not impossible. It's - // not clear that it's worth the effort to support. 
- if !self.f.options().docstring_code().is_enabled() || self.f.context().docstring().is_some() - { - return self.print_one(&line.as_output()); - } - match self.code_example.add(line) { - CodeExampleAddAction::Print { original } => self.print_one(&original.as_output())?, - CodeExampleAddAction::Kept => {} - CodeExampleAddAction::Reset { code, original } => { - for codeline in code { - self.print_one(&codeline.original.as_output())?; - } - self.print_one(&original.as_output())?; - } - CodeExampleAddAction::Format { mut kind, original } => { - let Some(formatted_lines) = self.format(kind.code())? else { - // If formatting failed in a way that should not be - // allowed, we back out what we're doing and print the - // original lines we found as-is as if we did nothing. - for codeline in kind.code() { - self.print_one(&codeline.original.as_output())?; - } - if let Some(original) = original { - self.print_one(&original.as_output())?; - } - return Ok(()); - }; - - self.already_normalized = false; - match kind { - CodeExampleKind::Doctest(CodeExampleDoctest { ps1_indent, .. }) => { - let mut lines = formatted_lines.into_iter(); - if let Some(first) = lines.next() { - self.print_one( - &first.map(|line| std::format!("{ps1_indent}>>> {line}")), - )?; - for docline in lines { - self.print_one( - &docline.map(|line| std::format!("{ps1_indent}... {line}")), - )?; - } - } - } - } - if let Some(original) = original { - self.print_one(&original.as_output())?; - } - } - } - Ok(()) - } - - /// Prints the single line given. - /// - /// This mostly just handles indentation and ensuring line breaks are - /// inserted as appropriate before passing it on to the formatter to - /// print to the buffer. - fn print_one(&mut self, line: &OutputDocstringLine<'_>) -> FormatResult<()> { - let trim_end = line.line.trim_end(); - if trim_end.is_empty() { - return if line.is_last { - // If the doc string ends with ` """`, the last line is - // ` `, but we don't want to insert an empty line (but close - // the docstring). - Ok(()) - } else { - empty_line().fmt(self.f) - }; - } - - let tab_or_non_ascii_space = trim_end - .chars() - .take_while(|c| c.is_whitespace()) - .any(|c| c != ' '); - - if tab_or_non_ascii_space { - // We strip the indentation that is shared with the docstring - // statement, unless a line was indented less than the docstring - // statement, in which case we strip only this much indentation to - // implicitly pad all lines by the difference, or all lines were - // overindented, in which case we strip the additional whitespace - // (see example in [`format_docstring`] doc comment). We then - // prepend the in-docstring indentation to the string. - let indent_len = indentation_length(trim_end) - self.stripped_indentation_length; - let in_docstring_indent = " ".repeat(usize::from(indent_len)) + trim_end.trim_start(); - text(&in_docstring_indent, Some(line.offset)).fmt(self.f)?; - } else { - // Take the string with the trailing whitespace removed, then also - // skip the leading whitespace. - let trimmed_line_range = TextRange::at(line.offset, trim_end.text_len()) - .add_start(self.stripped_indentation_length); - if self.already_normalized { - source_text_slice(trimmed_line_range).fmt(self.f)?; - } else { - // All indents are ascii spaces, so the slicing is correct. 
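A plausible before/after for the `Format` action handled above, assuming docstring code formatting is enabled: the code behind the prompts is reformatted, then the original PS1 indentation and the `>>> ` / `... ` prefixes are reattached. This is an illustration, not a captured snapshot.

```python
# Illustrative doctest reformatting inside a docstring.
before = '''
def f():
    """Example.

    >>> f( 1,2,
    ...   3 )
    """
'''

after = '''
def f():
    """Example.

    >>> f(1, 2, 3)
    """
'''
```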
- text( - &trim_end[usize::from(self.stripped_indentation_length)..], - Some(trimmed_line_range.start()), - ) - .fmt(self.f)?; - } - } - - // We handled the case that the closing quotes are on their own line - // above (the last line is empty except for whitespace). If they are on - // the same line as content, we don't insert a line break. - if !line.is_last { - hard_line_break().fmt(self.f)?; - } - - Ok(()) - } - - /// Given a sequence of lines from a code snippet, format them and return - /// the formatted code as a sequence of owned docstring lines. - /// - /// This routine generally only returns an error when the recursive call - /// to the formatter itself returns a `FormatError`. In all other cases - /// (for example, if the code snippet is invalid Python or even if the - /// resulting reformatted code snippet is invalid Python), then `Ok(None)` - /// is returned. In this case, callers should assume that a reformatted - /// code snippet is unavailable and bail out of trying to format it. - /// - /// Currently, when the above cases happen and `Ok(None)` is returned, the - /// routine is silent about it. So from the user's perspective, this will - /// fail silently. Ideally, this would at least emit a warning message, - /// but at time of writing, it wasn't clear to me how to best do that. - /// - /// # Panics - /// - /// This panics when the given slice is empty. - fn format( - &mut self, - code: &[CodeExampleLine<'_>], - ) -> FormatResult>>> { - use ruff_python_parser::AsMode; - - let offset = code - .get(0) - .expect("code blob must be non-empty") - .original - .offset; - let last_line_is_last = code - .last() - .expect("code blob must be non-empty") - .original - .is_last(); - let codeblob = code - .iter() - .map(|line| line.code) - .collect::>() - .join("\n"); - let printed = match docstring_format_source(self.f.options(), self.quote_style, &codeblob) { - Ok(printed) => printed, - Err(FormatModuleError::FormatError(err)) => return Err(err), - Err( - FormatModuleError::LexError(_) - | FormatModuleError::ParseError(_) - | FormatModuleError::PrintError(_), - ) => { - return Ok(None); - } - }; - // This is a little hokey, but we want to determine whether the - // reformatted code snippet will lead to an overall invalid docstring. - // So attempt to parse it as Python code, but ensure it is wrapped - // within a docstring using the same quotes as the docstring we're in - // right now. - // - // This is an unfortunate stop-gap to attempt to prevent us from - // writing invalid Python due to some oddity of the code snippet within - // a docstring. As we fix corner cases over time, we can perhaps - // remove this check. See the `doctest_invalid_skipped` tests in - // `docstring_code_examples.py` for when this check is relevant. - let wrapped = match self.quote_style { - QuoteStyle::Single => std::format!("'''{}'''", printed.as_code()), - QuoteStyle::Double => std::format!(r#""""{}""""#, printed.as_code()), - }; - let result = ruff_python_parser::parse( - &wrapped, - self.f.options().source_type().as_mode(), - "", - ); - // If the resulting code is not valid, then reset and pass through - // the docstring lines as-is. - if result.is_err() { - return Ok(None); - } - let mut lines = printed - .as_code() - .lines() - .map(|line| OutputDocstringLine { - line: Cow::Owned(line.to_string()), - offset, - is_last: false, - }) - .collect::>(); - if let Some(last) = lines.last_mut() { - last.is_last = last_line_is_last; - } - Ok(Some(lines)) - } -} - -/// Represents a single line in a docstring. 
-/// -/// This type is only used to represent the original lines in a docstring. -/// Specifically, the line contained in this type has no changes from the input -/// source. -#[derive(Clone, Copy, Debug)] -struct InputDocstringLine<'src> { - /// The actual text of the line, not including the line terminator. - /// - /// In practice, this line is borrowed when it corresponds to an original - /// unformatted line in a docstring, and owned when it corresponds to a - /// reformatted line (e.g., from a code snippet) in a docstring. - line: &'src str, - /// The offset into the source document which this line corresponds to. - offset: TextSize, - /// For any input line that isn't the last line, this contains a reference - /// to the line immediately following this one. - /// - /// This is `None` if and only if this is the last line in the docstring. - next: Option<&'src str>, -} - -impl<'src> InputDocstringLine<'src> { - /// Borrow this input docstring line as an output docstring line. - fn as_output(&self) -> OutputDocstringLine<'src> { - OutputDocstringLine { - line: Cow::Borrowed(self.line), - offset: self.offset, - is_last: self.is_last(), - } - } - - /// Whether this is the last line in the docstring or not. - fn is_last(&self) -> bool { - self.next.is_none() - } -} - -/// Represents a single reformatted code line in a docstring. -/// -/// An input source line may be cheaply converted to an output source line. -/// This is the common case: an input source line is printed pretty much as it -/// is, with perhaps some whitespace normalization applied. The less common -/// case is that the output docstring line owns its `line` because it was -/// produced by reformatting a code snippet. -#[derive(Clone, Debug)] -struct OutputDocstringLine<'src> { - /// The output line. - /// - /// This is an owned variant in precisely the cases where it corresponds to - /// a line from a reformatted code snippet. In other cases, it is borrowed - /// from the input docstring line as-is. - line: Cow<'src, str>, - /// The offset into the source document which this line corresponds to. - /// Currently, this is an estimate. - offset: TextSize, - /// Whether this is the last line in a docstring or not. This is determined - /// by whether the last line in the code snippet was also the last line in - /// the docstring. If it was, then it follows that the last line in the - /// reformatted code snippet is also the last line in the docstring. - is_last: bool, -} - -impl<'src> OutputDocstringLine<'src> { - /// Return this reformatted line, but with the given function applied to - /// the text of the line. - fn map(self, mut map: impl FnMut(&str) -> String) -> OutputDocstringLine<'static> { - OutputDocstringLine { - line: Cow::Owned(map(&self.line)), - ..self - } - } -} - -/// A single code example extracted from a docstring. -/// -/// This represents an intermediate state from when the code example was first -/// found all the way up until the point at which the code example has finished -/// and is reformatted. -/// -/// Its default state is "empty." That is, that no code example is currently -/// being collected. -#[derive(Debug, Default)] -struct CodeExample<'src> { - /// The kind of code example being collected, or `None` if no code example - /// has been observed. - /// - /// The kind is split out into a separate type so that we can pass it - /// around and have a guarantee that a code example actually exists. 
- kind: Option>, -} - -impl<'src> CodeExample<'src> { - /// Attempt to add an original line from a docstring to this code example. - /// - /// Based on the line and the internal state of whether a code example is - /// currently being collected or not, this will return an "action" for - /// the caller to perform. The typical case is a "print" action, which - /// instructs the caller to just print the line as though it were not part - /// of a code snippet. - fn add(&mut self, original: InputDocstringLine<'src>) -> CodeExampleAddAction<'src> { - match self.kind.take() { - // There's no existing code example being built, so we look for - // the start of one or otherwise tell the caller we couldn't find - // anything. - None => match self.add_start(original) { - None => CodeExampleAddAction::Kept, - Some(original) => CodeExampleAddAction::Print { original }, - }, - Some(CodeExampleKind::Doctest(mut doctest)) => { - if doctest.add_code_line(original) { - // Stay with the doctest kind while we accumulate all - // PS2 prompts. - self.kind = Some(CodeExampleKind::Doctest(doctest)); - return CodeExampleAddAction::Kept; - } - let original = self.add_start(original); - CodeExampleAddAction::Format { - kind: CodeExampleKind::Doctest(doctest), - original, - } - } - } - } - - /// Looks for the start of a code example. If one was found, then the given - /// line is kept and added as part of the code example. Otherwise, the line - /// is returned unchanged and no code example was found. - /// - /// # Panics - /// - /// This panics when the existing code-example is any non-None value. That - /// is, this routine assumes that there is no ongoing code example being - /// collected and looks for the beginning of another code example. - fn add_start( - &mut self, - original: InputDocstringLine<'src>, - ) -> Option> { - assert!(self.kind.is_none(), "expected no existing code example"); - if let Some(doctest) = CodeExampleDoctest::new(original) { - self.kind = Some(CodeExampleKind::Doctest(doctest)); - return None; - } - Some(original) - } -} - -/// The kind of code example observed in a docstring. -#[derive(Debug)] -enum CodeExampleKind<'src> { - /// Code found in Python "doctests." - /// - /// Documentation describing doctests and how they're recognized can be - /// found as part of the Python standard library: - /// https://docs.python.org/3/library/doctest.html. - /// - /// (You'll likely need to read the [regex matching] used internally by the - /// doctest module to determine more precisely how it works.) - /// - /// [regex matching]: https://github.com/python/cpython/blob/0ff6368519ed7542ad8b443de01108690102420a/Lib/doctest.py#L611-L622 - Doctest(CodeExampleDoctest<'src>), -} - -impl<'src> CodeExampleKind<'src> { - /// Return the lines of code collected so far for this example. - /// - /// This is borrowed mutably because it may need to mutate the code lines - /// based on the state accrued so far. - fn code(&mut self) -> &[CodeExampleLine<'src>] { - match *self { - CodeExampleKind::Doctest(ref doctest) => &doctest.lines, - } - } -} - -/// State corresponding to a single doctest code example found in a docstring. -#[derive(Debug)] -struct CodeExampleDoctest<'src> { - /// The lines that have been seen so far that make up the doctest. - lines: Vec>, - /// The indent observed in the first doctest line. - /// - /// More precisely, this corresponds to the whitespace observed before - /// the starting `>>> ` (the "PS1 prompt"). 
- ps1_indent: &'src str, -} - -impl<'src> CodeExampleDoctest<'src> { - /// Looks for a valid doctest PS1 prompt in the line given. - /// - /// If one was found, then state for a new doctest code example is - /// returned, along with the code example line. - fn new(original: InputDocstringLine<'src>) -> Option> { - let trim_start = original.line.trim_start(); - // Prompts must be followed by an ASCII space character[1]. - // - // [1]: https://github.com/python/cpython/blob/0ff6368519ed7542ad8b443de01108690102420a/Lib/doctest.py#L809-L812 - let code = trim_start.strip_prefix(">>> ")?; - let indent_len = original - .line - .len() - .checked_sub(trim_start.len()) - .expect("suffix is <= original"); - let lines = vec![CodeExampleLine { original, code }]; - let ps1_indent = &original.line[..indent_len]; - let doctest = CodeExampleDoctest { lines, ps1_indent }; - Some(doctest) - } - - /// Looks for a valid doctest PS2 prompt in the line given. - /// - /// If one is found, then the code portion of the line following the PS2 prompt - /// is returned. - /// - /// Callers must provide a string containing the original indentation of the - /// PS1 prompt that started the doctest containing the potential PS2 prompt - /// in the line given. If the line contains a PS2 prompt, its indentation must - /// match the indentation used for the corresponding PS1 prompt (otherwise - /// `None` will be returned). - fn add_code_line(&mut self, original: InputDocstringLine<'src>) -> bool { - let Some((ps2_indent, ps2_after)) = original.line.split_once("...") else { - return false; - }; - // PS2 prompts must have the same indentation as their - // corresponding PS1 prompt.[1] While the 'doctest' Python - // module will error in this case, we just treat this line as a - // non-doctest line. - // - // [1]: https://github.com/python/cpython/blob/0ff6368519ed7542ad8b443de01108690102420a/Lib/doctest.py#L733 - if self.ps1_indent != ps2_indent { - return false; - } - // PS2 prompts must be followed by an ASCII space character unless - // it's an otherwise empty line[1]. - // - // [1]: https://github.com/python/cpython/blob/0ff6368519ed7542ad8b443de01108690102420a/Lib/doctest.py#L809-L812 - let code = match ps2_after.strip_prefix(' ') { - None if ps2_after.is_empty() => "", - None => return false, - Some(code) => code, - }; - self.lines.push(CodeExampleLine { original, code }); - true - } -} - -/// A single line in a code example found in a docstring. -/// -/// A code example line exists prior to formatting, and is thus in full -/// correspondence with the original lines from the docstring. Indeed, a -/// code example line includes both the original line *and* the actual code -/// extracted from the line. For example, if a line in a docstring is `>>> -/// foo(x)`, then the original line is `>>> foo(x)` and the code portion is -/// `foo(x)`. -/// -/// The original line is kept for things like offset information, but also -/// because it may still be needed if it turns out that the code snippet is -/// not valid or otherwise could not be formatted. In which case, the original -/// lines are printed as-is. -#[derive(Debug)] -struct CodeExampleLine<'src> { - /// The normalized (but original) line from the doc string. This might, for - /// example, contain a `>>> ` or `... ` prefix if this code example is a - /// doctest. - original: InputDocstringLine<'src>, - /// The code extracted from the line. 
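A hedged Python sketch of the PS1/PS2 prompt rules described for `CodeExampleDoctest::new` and `add_code_line`: a line whose trimmed content starts with `>>> ` opens a doctest, and a continuation line must carry `...` at the same indentation as the PS1 prompt, followed by a space unless it is otherwise empty. `is_ps1` and `continues_doctest` are illustrative names only.

```python
# Simplified sketch of doctest prompt detection.
def is_ps1(line: str) -> bool:
    return line.lstrip().startswith(">>> ")


def continues_doctest(line: str, ps1_indent: str) -> bool:
    indent, sep, rest = line.partition("...")
    if not sep or indent != ps1_indent:
        return False
    # A PS2 prompt must be followed by a space unless the line is otherwise empty.
    return rest == "" or rest.startswith(" ")


assert is_ps1("    >>> x = 1")
assert continues_doctest("    ... x + 1", "    ")
assert not continues_doctest("  ... x + 1", "    ")  # indentation mismatch
```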
- code: &'src str, -} - -/// An action that a caller should perform after attempting to add a line from -/// a docstring to a code example. -/// -/// Callers are expected to add every line from a docstring to a code example, -/// and the state of the code example (and the line itself) will determine -/// how the caller should react. -#[derive(Debug)] -enum CodeExampleAddAction<'src> { - /// The line added was ignored by `CodeExample` and the caller should print - /// it to the formatter as-is. - /// - /// This is the common case. That is, most lines in most docstrings are not - /// part of a code example. - Print { original: InputDocstringLine<'src> }, - /// The line added was kept by `CodeExample` as part of a new or existing - /// code example. - /// - /// When this occurs, callers should not try to format the line and instead - /// move on to the next line. - Kept, - /// The line added indicated that the code example is finished and should - /// be formatted and printed. The line added is not treated as part of - /// the code example. If the line added indicated the start of another - /// code example, then is won't be returned to the caller here. Otherwise, - /// callers should pass it through to the formatter as-is. - Format { - /// The kind of code example that was found. - /// - /// This is guaranteed to have a non-empty code snippet. - kind: CodeExampleKind<'src>, - /// When set, the line is considered not part of any code example and - /// should be formatted as if the [`Print`] action were returned. - /// Otherwise, if there is no line, then either one does not exist - /// or it is part of another code example and should be treated as a - /// [`Kept`] action. - original: Option>, - }, - /// This occurs when adding a line to an existing code example - /// results in that code example becoming invalid. In this case, - /// we don't want to treat it as a code example, but instead write - /// back the lines to the docstring unchanged. - #[allow(dead_code)] // FIXME: remove when reStructuredText support is added - Reset { - /// The lines of code that we collected but should be printed back to - /// the docstring as-is and not formatted. - code: Vec>, - /// The line that was added and triggered this reset to occur. It - /// should be written back to the docstring as-is after the code lines. - original: InputDocstringLine<'src>, - }, -} - -/// Formats the given source code using the given options. -/// -/// The given quote style should correspond to the style used by the docstring -/// containing the code snippet being formatted. The formatter will use this -/// information to invert the quote style of any such strings contained within -/// the code snippet in order to avoid writing invalid Python code. -/// -/// This is similar to the top-level formatting entrypoint, except this -/// explicitly sets the context to indicate that formatting is taking place -/// inside of a docstring. 
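To make the quote-inversion note above concrete, here is a hedged before/after sketch (assuming a double-quote preferred style): string literals inside a snippet that is embedded in a double-quoted docstring are written with the opposite quote character so they cannot collide with the enclosing docstring quotes.

```python
# Input
def greet():
    """Say hello.

    >>> print("hello")
    hello
    """

# Roughly the result of formatting the embedded snippet: the inner string
# flips to the quote character opposite to the enclosing docstring.
def greet():
    """Say hello.

    >>> print('hello')
    hello
    """
```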
-fn docstring_format_source( - options: &crate::PyFormatOptions, - docstring_quote_style: QuoteStyle, - source: &str, -) -> Result { - use ruff_python_parser::AsMode; - - let source_type = options.source_type(); - let (tokens, comment_ranges) = ruff_python_index::tokens_and_ranges(source, source_type)?; - let module = - ruff_python_parser::parse_ok_tokens(tokens, source, source_type.as_mode(), "")?; - let source_code = ruff_formatter::SourceCode::new(source); - let comments = crate::Comments::from_ast(&module, source_code, &comment_ranges); - let locator = Locator::new(source); - - let ctx = PyFormatContext::new(options.clone(), locator.contents(), comments) - .in_docstring(docstring_quote_style); - let formatted = crate::format!(ctx, [module.format()])?; - formatted - .context() - .comments() - .assert_all_formatted(source_code); - Ok(formatted.print()?) -} - -/// If the last line of the docstring is `content" """` or `content\ """`, we need a chaperone space -/// that avoids `content""""` and `content\"""`. This does only applies to un-escaped backslashes, -/// so `content\\ """` doesn't need a space while `content\\\ """` does. -fn needs_chaperone_space(normalized: &NormalizedString, trim_end: &str) -> bool { - trim_end.ends_with(normalized.quotes.style.as_char()) - || trim_end.chars().rev().take_while(|c| *c == '\\').count() % 2 == 1 -} - -/// For docstring indentation, black counts spaces as 1 and tabs by increasing the indentation up -/// to the next multiple of 8. This is effectively a port of -/// [`str.expandtabs`](https://docs.python.org/3/library/stdtypes.html#str.expandtabs), -/// which black [calls with the default tab width of 8](https://github.com/psf/black/blob/c36e468794f9256d5e922c399240d49782ba04f1/src/black/strings.py#L61). -fn indentation_length(line: &str) -> TextSize { - let mut indentation = 0u32; - for char in line.chars() { - if char == '\t' { - // Pad to the next multiple of tab_width - indentation += 8 - (indentation.rem_euclid(8)); - } else if char.is_whitespace() { - indentation += u32::from(char.text_len()); - } else { - break; - } - } - TextSize::new(indentation) -} - -#[cfg(test)] -mod tests { - use ruff_text_size::TextSize; - - use super::indentation_length; - - #[test] - fn test_indentation_like_black() { - assert_eq!(indentation_length("\t \t \t"), TextSize::new(24)); - assert_eq!(indentation_length("\t \t"), TextSize::new(24)); - assert_eq!(indentation_length("\t\t\t"), TextSize::new(24)); - assert_eq!(indentation_length(" "), TextSize::new(4)); - } -} diff --git a/crates/ruff_python_formatter/src/generated.rs b/crates/ruff_python_formatter/src/generated.rs index 30b5bb122a..a5217a11d1 100644 --- a/crates/ruff_python_formatter/src/generated.rs +++ b/crates/ruff_python_formatter/src/generated.rs @@ -1534,42 +1534,6 @@ impl<'ast> IntoFormat> for ast::ExprCall { } } -impl FormatRule> - for crate::expression::expr_formatted_value::FormatExprFormattedValue -{ - #[inline] - fn fmt(&self, node: &ast::ExprFormattedValue, f: &mut PyFormatter) -> FormatResult<()> { - FormatNodeRule::::fmt(self, node, f) - } -} -impl<'ast> AsFormat> for ast::ExprFormattedValue { - type Format<'a> = FormatRefWithRule< - 'a, - ast::ExprFormattedValue, - crate::expression::expr_formatted_value::FormatExprFormattedValue, - PyFormatContext<'ast>, - >; - fn format(&self) -> Self::Format<'_> { - FormatRefWithRule::new( - self, - crate::expression::expr_formatted_value::FormatExprFormattedValue::default(), - ) - } -} -impl<'ast> IntoFormat> for ast::ExprFormattedValue { - type Format = 
FormatOwnedWithRule< - ast::ExprFormattedValue, - crate::expression::expr_formatted_value::FormatExprFormattedValue, - PyFormatContext<'ast>, - >; - fn into_format(self) -> Self::Format { - FormatOwnedWithRule::new( - self, - crate::expression::expr_formatted_value::FormatExprFormattedValue::default(), - ) - } -} - impl FormatRule> for crate::expression::expr_f_string::FormatExprFString { @@ -2979,70 +2943,6 @@ impl<'ast> IntoFormat> for ast::TypeParamParamSpec { } } -impl FormatRule> for crate::other::f_string::FormatFString { - #[inline] - fn fmt(&self, node: &ast::FString, f: &mut PyFormatter) -> FormatResult<()> { - FormatNodeRule::::fmt(self, node, f) - } -} -impl<'ast> AsFormat> for ast::FString { - type Format<'a> = FormatRefWithRule< - 'a, - ast::FString, - crate::other::f_string::FormatFString, - PyFormatContext<'ast>, - >; - fn format(&self) -> Self::Format<'_> { - FormatRefWithRule::new(self, crate::other::f_string::FormatFString::default()) - } -} -impl<'ast> IntoFormat> for ast::FString { - type Format = FormatOwnedWithRule< - ast::FString, - crate::other::f_string::FormatFString, - PyFormatContext<'ast>, - >; - fn into_format(self) -> Self::Format { - FormatOwnedWithRule::new(self, crate::other::f_string::FormatFString::default()) - } -} - -impl FormatRule> - for crate::other::string_literal::FormatStringLiteral -{ - #[inline] - fn fmt(&self, node: &ast::StringLiteral, f: &mut PyFormatter) -> FormatResult<()> { - FormatNodeRule::::fmt(self, node, f) - } -} -impl<'ast> AsFormat> for ast::StringLiteral { - type Format<'a> = FormatRefWithRule< - 'a, - ast::StringLiteral, - crate::other::string_literal::FormatStringLiteral, - PyFormatContext<'ast>, - >; - fn format(&self) -> Self::Format<'_> { - FormatRefWithRule::new( - self, - crate::other::string_literal::FormatStringLiteral::default(), - ) - } -} -impl<'ast> IntoFormat> for ast::StringLiteral { - type Format = FormatOwnedWithRule< - ast::StringLiteral, - crate::other::string_literal::FormatStringLiteral, - PyFormatContext<'ast>, - >; - fn into_format(self) -> Self::Format { - FormatOwnedWithRule::new( - self, - crate::other::string_literal::FormatStringLiteral::default(), - ) - } -} - impl FormatRule> for crate::other::bytes_literal::FormatBytesLiteral { diff --git a/crates/ruff_python_formatter/src/lib.rs b/crates/ruff_python_formatter/src/lib.rs index e9a3c34757..05f122606b 100644 --- a/crates/ruff_python_formatter/src/lib.rs +++ b/crates/ruff_python_formatter/src/lib.rs @@ -16,7 +16,8 @@ use crate::comments::{ }; pub use crate::context::PyFormatContext; pub use crate::options::{ - DocstringCode, MagicTrailingComma, PreviewMode, PyFormatOptions, QuoteStyle, + DocstringCode, DocstringCodeLineWidth, MagicTrailingComma, PreviewMode, PyFormatOptions, + QuoteStyle, }; pub use crate::shared_traits::{AsFormat, FormattedIter, FormattedIterExt, IntoFormat}; use crate::verbatim::suppressed_node; @@ -32,8 +33,10 @@ mod options; pub(crate) mod other; pub(crate) mod pattern; mod prelude; +mod preview; mod shared_traits; pub(crate) mod statement; +pub(crate) mod string; pub(crate) mod type_param; mod verbatim; diff --git a/crates/ruff_python_formatter/src/options.rs b/crates/ruff_python_formatter/src/options.rs index 261dfeab19..4f637dca9e 100644 --- a/crates/ruff_python_formatter/src/options.rs +++ b/crates/ruff_python_formatter/src/options.rs @@ -11,7 +11,7 @@ use std::str::FromStr; #[cfg_attr( feature = "serde", derive(serde::Serialize, serde::Deserialize), - serde(default) + serde(default, deny_unknown_fields) )] pub struct 
PyFormatOptions { /// Whether we're in a `.py` file or `.pyi` file, which have different rules. @@ -49,6 +49,11 @@ pub struct PyFormatOptions { /// enabled by default (opt-out) in the future. docstring_code: DocstringCode, + /// The preferred line width at which the formatter should wrap lines in + /// docstring code examples. This only has an impact when `docstring_code` + /// is enabled. + docstring_code_line_width: DocstringCodeLineWidth, + /// Whether preview style formatting is enabled or not preview: PreviewMode, } @@ -77,6 +82,7 @@ impl Default for PyFormatOptions { magic_trailing_comma: MagicTrailingComma::default(), source_map_generation: SourceMapGeneration::default(), docstring_code: DocstringCode::default(), + docstring_code_line_width: DocstringCodeLineWidth::default(), preview: PreviewMode::default(), } } @@ -119,7 +125,11 @@ impl PyFormatOptions { self.docstring_code } - pub fn preview(&self) -> PreviewMode { + pub fn docstring_code_line_width(&self) -> DocstringCodeLineWidth { + self.docstring_code_line_width + } + + pub const fn preview(&self) -> PreviewMode { self.preview } @@ -165,6 +175,12 @@ impl PyFormatOptions { self } + #[must_use] + pub fn with_docstring_code_line_width(mut self, line_width: DocstringCodeLineWidth) -> Self { + self.docstring_code_line_width = line_width; + self + } + #[must_use] pub fn with_preview(mut self, preview: PreviewMode) -> Self { self.preview = preview; @@ -207,35 +223,7 @@ pub enum QuoteStyle { Single, #[default] Double, -} - -impl QuoteStyle { - pub const fn as_char(self) -> char { - match self { - QuoteStyle::Single => '\'', - QuoteStyle::Double => '"', - } - } - - #[must_use] - pub const fn invert(self) -> QuoteStyle { - match self { - QuoteStyle::Single => QuoteStyle::Double, - QuoteStyle::Double => QuoteStyle::Single, - } - } -} - -impl TryFrom for QuoteStyle { - type Error = (); - - fn try_from(value: char) -> std::result::Result { - match value { - '\'' => Ok(QuoteStyle::Single), - '"' => Ok(QuoteStyle::Double), - _ => Err(()), - } - } + Preserve, } impl FromStr for QuoteStyle { @@ -245,6 +233,7 @@ impl FromStr for QuoteStyle { match s { "\"" | "double" | "Double" => Ok(Self::Double), "'" | "single" | "Single" => Ok(Self::Single), + "preserve" | "Preserve" => Ok(Self::Preserve), // TODO: replace this error with a diagnostic _ => Err("Value not supported for QuoteStyle"), } @@ -318,3 +307,45 @@ impl DocstringCode { matches!(self, DocstringCode::Enabled) } } + +#[derive(Copy, Clone, Default, Eq, PartialEq, CacheKey)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "serde", serde(rename_all = "lowercase"))] +#[cfg_attr(feature = "serde", serde(untagged))] +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +pub enum DocstringCodeLineWidth { + Fixed(LineWidth), + #[default] + #[cfg_attr( + feature = "serde", + serde(deserialize_with = "deserialize_docstring_code_line_width_dynamic") + )] + Dynamic, +} + +impl std::fmt::Debug for DocstringCodeLineWidth { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + match *self { + DocstringCodeLineWidth::Fixed(v) => v.value().fmt(f), + DocstringCodeLineWidth::Dynamic => "dynamic".fmt(f), + } + } +} + +/// Responsible for deserializing the `DocstringCodeLineWidth::Dynamic` +/// variant. 
+fn deserialize_docstring_code_line_width_dynamic<'de, D>(d: D) -> Result<(), D::Error> +where + D: serde::Deserializer<'de>, +{ + use serde::{de::Error, Deserialize}; + + let value = String::deserialize(d)?; + match &*value { + "dynamic" => Ok(()), + s => Err(D::Error::invalid_value( + serde::de::Unexpected::Str(s), + &"dynamic", + )), + } +} diff --git a/crates/ruff_python_formatter/src/other/arguments.rs b/crates/ruff_python_formatter/src/other/arguments.rs index a48596cac0..d57e168c89 100644 --- a/crates/ruff_python_formatter/src/other/arguments.rs +++ b/crates/ruff_python_formatter/src/other/arguments.rs @@ -9,6 +9,7 @@ use crate::expression::is_expression_huggable; use crate::expression::parentheses::{empty_parenthesized, parenthesized, Parentheses}; use crate::other::commas; use crate::prelude::*; +use crate::preview::is_hug_parens_with_braces_and_square_brackets_enabled; #[derive(Default)] pub struct FormatArguments; @@ -177,8 +178,7 @@ fn is_single_argument_parenthesized(argument: &Expr, call_end: TextSize, source: /// Hugging should only be applied to single-argument collections, like lists, or starred versions /// of those collections. fn is_argument_huggable(item: &Arguments, context: &PyFormatContext) -> bool { - let options = context.options(); - if !options.preview().is_enabled() { + if !is_hug_parens_with_braces_and_square_brackets_enabled(context) { return false; } @@ -192,7 +192,7 @@ fn is_argument_huggable(item: &Arguments, context: &PyFormatContext) -> bool { }; // If the expression itself isn't huggable, then we can't hug it. - if !is_expression_huggable(arg, options) { + if !is_expression_huggable(arg, context) { return false; } @@ -202,6 +202,8 @@ fn is_argument_huggable(item: &Arguments, context: &PyFormatContext) -> bool { return false; } + let options = context.options(); + // If the expression has a trailing comma, then we can't hug it. if options.magic_trailing_comma().is_respect() && commas::has_magic_trailing_comma(TextRange::new(arg.end(), item.end()), options, context) diff --git a/crates/ruff_python_formatter/src/other/bytes_literal.rs b/crates/ruff_python_formatter/src/other/bytes_literal.rs index 55117241f8..c6445c8d6a 100644 --- a/crates/ruff_python_formatter/src/other/bytes_literal.rs +++ b/crates/ruff_python_formatter/src/other/bytes_literal.rs @@ -1,12 +1,23 @@ use ruff_python_ast::BytesLiteral; +use ruff_text_size::Ranged; use crate::prelude::*; +use crate::string::{Quoting, StringPart}; #[derive(Default)] pub struct FormatBytesLiteral; impl FormatNodeRule for FormatBytesLiteral { - fn fmt_fields(&self, _item: &BytesLiteral, _f: &mut PyFormatter) -> FormatResult<()> { - unreachable!("Handled inside of `FormatExprBytesLiteral`"); + fn fmt_fields(&self, item: &BytesLiteral, f: &mut PyFormatter) -> FormatResult<()> { + let locator = f.context().locator(); + + StringPart::from_source(item.range(), &locator) + .normalize( + Quoting::CanChange, + &locator, + f.options().quote_style(), + f.context().docstring(), + ) + .fmt(f) } } diff --git a/crates/ruff_python_formatter/src/other/f_string.rs b/crates/ruff_python_formatter/src/other/f_string.rs index e08254aba7..da81162c2e 100644 --- a/crates/ruff_python_formatter/src/other/f_string.rs +++ b/crates/ruff_python_formatter/src/other/f_string.rs @@ -1,12 +1,49 @@ use ruff_python_ast::FString; +use ruff_text_size::Ranged; use crate::prelude::*; +use crate::string::{Quoting, StringPart}; -#[derive(Default)] -pub struct FormatFString; +/// Formats an f-string which is part of a larger f-string expression. 
+/// +/// For example, this would be used to format the f-string part in `"foo" f"bar {x}"` +/// or the standalone f-string in `f"foo {x} bar"`. +pub(crate) struct FormatFString<'a> { + value: &'a FString, + /// The quoting of an f-string. This is determined by the parent node + /// (f-string expression) and is required to format an f-string correctly. + quoting: Quoting, +} -impl FormatNodeRule for FormatFString { - fn fmt_fields(&self, _item: &FString, _f: &mut PyFormatter) -> FormatResult<()> { - unreachable!("Handled inside of `FormatExprFString`"); +impl<'a> FormatFString<'a> { + pub(crate) fn new(value: &'a FString, quoting: Quoting) -> Self { + Self { value, quoting } + } +} + +impl Format> for FormatFString<'_> { + fn fmt(&self, f: &mut PyFormatter) -> FormatResult<()> { + let locator = f.context().locator(); + + let result = StringPart::from_source(self.value.range(), &locator) + .normalize( + self.quoting, + &locator, + f.options().quote_style(), + f.context().docstring(), + ) + .fmt(f); + + // TODO(dhruvmanila): With PEP 701, comments can be inside f-strings. + // This is to mark all of those comments as formatted but we need to + // figure out how to handle them. Note that this needs to be done only + // after the f-string is formatted, so only for all the non-formatted + // comments. + let comments = f.context().comments(); + self.value.elements.iter().for_each(|value| { + comments.mark_verbatim_node_comments_formatted(value.into()); + }); + + result } } diff --git a/crates/ruff_python_formatter/src/other/f_string_part.rs b/crates/ruff_python_formatter/src/other/f_string_part.rs new file mode 100644 index 0000000000..c471b5fc8c --- /dev/null +++ b/crates/ruff_python_formatter/src/other/f_string_part.rs @@ -0,0 +1,39 @@ +use ruff_python_ast::FStringPart; + +use crate::other::f_string::FormatFString; +use crate::other::string_literal::{FormatStringLiteral, StringLiteralKind}; +use crate::prelude::*; +use crate::string::Quoting; + +/// Formats an f-string part which is either a string literal or an f-string. +/// +/// This delegates the actual formatting to the appropriate formatter. +pub(crate) struct FormatFStringPart<'a> { + part: &'a FStringPart, + /// The quoting to be used for all the f-string parts. This is determined by + /// the parent node (f-string expression) and is required to format all parts + /// correctly. + quoting: Quoting, +} + +impl<'a> FormatFStringPart<'a> { + pub(crate) fn new(part: &'a FStringPart, quoting: Quoting) -> Self { + Self { part, quoting } + } +} + +impl Format> for FormatFStringPart<'_> { + fn fmt(&self, f: &mut PyFormatter) -> FormatResult<()> { + match self.part { + FStringPart::Literal(string_literal) => FormatStringLiteral::new( + string_literal, + // If an f-string part is a string literal, the f-string is always + // implicitly concatenated e.g., `"foo" f"bar {x}"`. A standalone + // string literal would be a string expression, not an f-string. 
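A hedged example of why the quoting decision comes from the parent f-string expression rather than from each part in isolation (invented names; relevant before PEP 701): a quote character used inside a replacement field prevents flipping the outer quotes, and an implicitly concatenated literal part follows the same decision as the f-string part next to it.

```python
name = {"key": "value"}

# The outer quotes of this f-string cannot be changed to double quotes,
# because the replacement field already uses double quotes (pre-3.12 rules):
greeting = f'hello {name["key"]}'

# The plain literal in this implicit concatenation inherits the same quoting
# decision as the f-string part it is concatenated with:
message = 'prefix ' f'hello {name["key"]}'
```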
+ StringLiteralKind::InImplicitlyConcatenatedFString(self.quoting), + ) + .fmt(f), + FStringPart::FString(f_string) => FormatFString::new(f_string, self.quoting).fmt(f), + } + } +} diff --git a/crates/ruff_python_formatter/src/other/mod.rs b/crates/ruff_python_formatter/src/other/mod.rs index c980a14c0f..d07339f717 100644 --- a/crates/ruff_python_formatter/src/other/mod.rs +++ b/crates/ruff_python_formatter/src/other/mod.rs @@ -7,6 +7,7 @@ pub(crate) mod decorator; pub(crate) mod elif_else_clause; pub(crate) mod except_handler_except_handler; pub(crate) mod f_string; +pub(crate) mod f_string_part; pub(crate) mod identifier; pub(crate) mod keyword; pub(crate) mod match_case; diff --git a/crates/ruff_python_formatter/src/other/parameters.rs b/crates/ruff_python_formatter/src/other/parameters.rs index d095d33ae1..c3e251ebe9 100644 --- a/crates/ruff_python_formatter/src/other/parameters.rs +++ b/crates/ruff_python_formatter/src/other/parameters.rs @@ -252,7 +252,7 @@ impl FormatNodeRule for FormatParameters { let mut f = WithNodeLevel::new(NodeLevel::ParenthesizedExpression, f); // No parameters, format any dangling comments between `()` write!(f, [empty_parenthesized("(", dangling, ")")]) - } else if num_parameters == 1 { + } else if num_parameters == 1 && posonlyargs.is_empty() && kwonlyargs.is_empty() { // If we have a single argument, avoid the inner group, to ensure that we insert a // trailing comma if the outer group breaks. let mut f = WithNodeLevel::new(NodeLevel::ParenthesizedExpression, f); diff --git a/crates/ruff_python_formatter/src/other/string_literal.rs b/crates/ruff_python_formatter/src/other/string_literal.rs index 291552db73..e23db85707 100644 --- a/crates/ruff_python_formatter/src/other/string_literal.rs +++ b/crates/ruff_python_formatter/src/other/string_literal.rs @@ -1,12 +1,72 @@ use ruff_python_ast::StringLiteral; +use ruff_text_size::Ranged; use crate::prelude::*; +use crate::string::{docstring, Quoting, StringPart}; +use crate::QuoteStyle; -#[derive(Default)] -pub struct FormatStringLiteral; +pub(crate) struct FormatStringLiteral<'a> { + value: &'a StringLiteral, + layout: StringLiteralKind, +} -impl FormatNodeRule for FormatStringLiteral { - fn fmt_fields(&self, _item: &StringLiteral, _f: &mut PyFormatter) -> FormatResult<()> { - unreachable!("Handled inside of `FormatExprStringLiteral`"); +impl<'a> FormatStringLiteral<'a> { + pub(crate) fn new(value: &'a StringLiteral, layout: StringLiteralKind) -> Self { + Self { value, layout } + } +} + +/// The kind of a string literal. +#[derive(Copy, Clone, Debug, Default)] +pub(crate) enum StringLiteralKind { + /// A normal string literal e.g., `"foo"`. + #[default] + String, + /// A string literal used as a docstring. + Docstring, + /// A string literal that is implicitly concatenated with an f-string. This + /// makes the overall expression an f-string whose quoting detection comes + /// from the parent node (f-string expression). + InImplicitlyConcatenatedFString(Quoting), +} + +impl StringLiteralKind { + /// Checks if this string literal is a docstring. + pub(crate) const fn is_docstring(self) -> bool { + matches!(self, StringLiteralKind::Docstring) + } + + /// Returns the quoting to be used for this string literal. 
+ fn quoting(self) -> Quoting { + match self { + StringLiteralKind::String | StringLiteralKind::Docstring => Quoting::CanChange, + StringLiteralKind::InImplicitlyConcatenatedFString(quoting) => quoting, + } + } +} + +impl Format> for FormatStringLiteral<'_> { + fn fmt(&self, f: &mut PyFormatter) -> FormatResult<()> { + let locator = f.context().locator(); + + let quote_style = if self.layout.is_docstring() { + // Per PEP 8 and PEP 257, always prefer double quotes for docstrings + QuoteStyle::Double + } else { + f.options().quote_style() + }; + + let normalized = StringPart::from_source(self.value.range(), &locator).normalize( + self.layout.quoting(), + &locator, + quote_style, + f.context().docstring(), + ); + + if self.layout.is_docstring() { + docstring::format(&normalized, f) + } else { + normalized.fmt(f) + } } } diff --git a/crates/ruff_python_formatter/src/preview.rs b/crates/ruff_python_formatter/src/preview.rs new file mode 100644 index 0000000000..4de87cf05c --- /dev/null +++ b/crates/ruff_python_formatter/src/preview.rs @@ -0,0 +1,35 @@ +//! Helpers to test if a specific preview style is enabled or not. +//! +//! The motivation for these functions isn't to avoid code duplication but to ease promoting preview styles +//! to stable. The challenge with directly using [`is_preview`](PyFormatContext::is_preview) is that it is unclear +//! for which specific feature this preview check is for. Having named functions simplifies the promotion: +//! Simply delete the function and let Rust tell you which checks you have to remove. +use crate::PyFormatContext; + +/// Returns `true` if the [`fix_power_op_line_length`](https://github.com/astral-sh/ruff/issues/8938) preview style is enabled. +pub(crate) const fn is_fix_power_op_line_length_enabled(context: &PyFormatContext) -> bool { + context.is_preview() +} + +/// Returns `true` if the [`hug_parens_with_braces_and_square_brackets`](https://github.com/astral-sh/ruff/issues/8279) preview style is enabled. +pub(crate) const fn is_hug_parens_with_braces_and_square_brackets_enabled( + context: &PyFormatContext, +) -> bool { + context.is_preview() +} + +/// Returns `true` if the [`prefer_splitting_right_hand_side_of_assignments`](https://github.com/astral-sh/ruff/issues/6975) preview style is enabled. +pub(crate) const fn is_prefer_splitting_right_hand_side_of_assignments_enabled( + context: &PyFormatContext, +) -> bool { + context.is_preview() +} + +/// Returns `true` if the [`no_blank_line_before_class_docstring`] preview style is enabled. 
+/// +/// [`no_blank_line_before_class_docstring`]: https://github.com/astral-sh/ruff/issues/8888 +pub(crate) const fn is_no_blank_line_before_class_docstring_enabled( + context: &PyFormatContext, +) -> bool { + context.is_preview() +} diff --git a/crates/ruff_python_formatter/src/statement/stmt_ann_assign.rs b/crates/ruff_python_formatter/src/statement/stmt_ann_assign.rs index cb5f5fa745..89a97acc6f 100644 --- a/crates/ruff_python_formatter/src/statement/stmt_ann_assign.rs +++ b/crates/ruff_python_formatter/src/statement/stmt_ann_assign.rs @@ -2,10 +2,12 @@ use ruff_formatter::write; use ruff_python_ast::StmtAnnAssign; use crate::comments::{SourceComment, SuppressionKind}; - -use crate::expression::maybe_parenthesize_expression; -use crate::expression::parentheses::Parenthesize; +use crate::expression::has_parentheses; use crate::prelude::*; +use crate::preview::is_prefer_splitting_right_hand_side_of_assignments_enabled; +use crate::statement::stmt_assign::{ + AnyAssignmentOperator, AnyBeforeOperator, FormatStatementsLastExpression, +}; use crate::statement::trailing_semicolon; #[derive(Default)] @@ -21,21 +23,33 @@ impl FormatNodeRule for FormatStmtAnnAssign { simple: _, } = item; - write!( - f, - [target.format(), token(":"), space(), annotation.format(),] - )?; + write!(f, [target.format(), token(":"), space()])?; if let Some(value) = value { - write!( - f, - [ - space(), - token("="), - space(), - maybe_parenthesize_expression(value, item, Parenthesize::IfBreaks) - ] - )?; + if is_prefer_splitting_right_hand_side_of_assignments_enabled(f.context()) + && has_parentheses(annotation, f.context()).is_some() + { + FormatStatementsLastExpression::RightToLeft { + before_operator: AnyBeforeOperator::Expression(annotation), + operator: AnyAssignmentOperator::Assign, + value, + statement: item.into(), + } + .fmt(f)?; + } else { + write!( + f, + [ + annotation.format(), + space(), + token("="), + space(), + FormatStatementsLastExpression::left_to_right(value, item) + ] + )?; + } + } else { + annotation.format().fmt(f)?; } if f.options().source_type().is_ipynb() diff --git a/crates/ruff_python_formatter/src/statement/stmt_assign.rs b/crates/ruff_python_formatter/src/statement/stmt_assign.rs index 7a8a5fd2be..9430e7289a 100644 --- a/crates/ruff_python_formatter/src/statement/stmt_assign.rs +++ b/crates/ruff_python_formatter/src/statement/stmt_assign.rs @@ -1,11 +1,17 @@ use ruff_formatter::{format_args, write, FormatError}; -use ruff_python_ast::{Expr, StmtAssign}; +use ruff_python_ast::{AnyNodeRef, Expr, Operator, StmtAssign, TypeParams}; -use crate::comments::{SourceComment, SuppressionKind}; +use crate::builders::parenthesize_if_expands; +use crate::comments::{ + trailing_comments, Comments, LeadingDanglingTrailingComments, SourceComment, SuppressionKind, +}; use crate::context::{NodeLevel, WithNodeLevel}; -use crate::expression::parentheses::{Parentheses, Parenthesize}; +use crate::expression::parentheses::{ + is_expression_parenthesized, NeedsParentheses, OptionalParentheses, Parentheses, Parenthesize, +}; use crate::expression::{has_own_parentheses, maybe_parenthesize_expression}; use crate::prelude::*; +use crate::preview::is_prefer_splitting_right_hand_side_of_assignments_enabled; use crate::statement::trailing_semicolon; #[derive(Default)] @@ -23,31 +29,66 @@ impl FormatNodeRule for FormatStmtAssign { "Expected at least on assignment target", ))?; - write!( - f, - [ - first.format(), - space(), - token("="), - space(), - FormatTargets { targets: rest } - ] - )?; + // The first target is 
special because it never gets parenthesized nor does the formatter remove parentheses if unnecessary. + let format_first = FormatTargetWithEqualOperator { + target: first, + preserve_parentheses: true, + }; - write!( - f, - [maybe_parenthesize_expression( - value, - item, - Parenthesize::IfBreaks - )] - )?; + if is_prefer_splitting_right_hand_side_of_assignments_enabled(f.context()) { + // Avoid parenthesizing the value if the last target before the assigned value expands. + if let Some((last, head)) = rest.split_last() { + format_first.fmt(f)?; + + for target in head { + FormatTargetWithEqualOperator { + target, + preserve_parentheses: false, + } + .fmt(f)?; + } + + FormatStatementsLastExpression::RightToLeft { + before_operator: AnyBeforeOperator::Expression(last), + operator: AnyAssignmentOperator::Assign, + value, + statement: item.into(), + } + .fmt(f)?; + } + // Avoid parenthesizing the value for single-target assignments that where the + // target has its own parentheses (list, dict, tuple, ...) and the target expands. + else if has_target_own_parentheses(first, f.context()) + && !is_expression_parenthesized( + first.into(), + f.context().comments().ranges(), + f.context().source(), + ) + { + FormatStatementsLastExpression::RightToLeft { + before_operator: AnyBeforeOperator::Expression(first), + operator: AnyAssignmentOperator::Assign, + value, + statement: item.into(), + } + .fmt(f)?; + } + // For single targets that have no split points, parenthesize the value only + // if it makes it fit. Otherwise omit the parentheses. + else { + format_first.fmt(f)?; + FormatStatementsLastExpression::left_to_right(value, item).fmt(f)?; + } + } else { + write!(f, [format_first, FormatTargets { targets: rest }])?; + + FormatStatementsLastExpression::left_to_right(value, item).fmt(f)?; + } if f.options().source_type().is_ipynb() && f.context().node_level().is_last_top_level_statement() - && rest.is_empty() - && first.is_name_expr() && trailing_semicolon(item.into(), f.context().source()).is_some() + && matches!(targets.as_slice(), [Expr::Name(_)]) { token(";").fmt(f)?; } @@ -64,6 +105,7 @@ impl FormatNodeRule for FormatStmtAssign { } } +/// Formats the targets so that they split left-to right. #[derive(Debug)] struct FormatTargets<'a> { targets: &'a [Expr], @@ -74,9 +116,9 @@ impl Format> for FormatTargets<'_> { if let Some((first, rest)) = self.targets.split_first() { let comments = f.context().comments(); - let parenthesize = if comments.has_leading(first) { + let parenthesize = if comments.has_leading(first) || comments.has_trailing(first) { ParenthesizeTarget::Always - } else if has_own_parentheses(first, f.context()).is_some() { + } else if has_target_own_parentheses(first, f.context()) { ParenthesizeTarget::Never } else { ParenthesizeTarget::IfBreaks @@ -133,3 +175,565 @@ enum ParenthesizeTarget { Never, IfBreaks, } + +/// Formats a single target with the equal operator. +struct FormatTargetWithEqualOperator<'a> { + target: &'a Expr, + + /// Whether parentheses should be preserved as in the source or if the target + /// should only be parenthesized if necessary (because of comments or because it doesn't fit). + preserve_parentheses: bool, +} + +impl Format> for FormatTargetWithEqualOperator<'_> { + fn fmt(&self, f: &mut Formatter>) -> FormatResult<()> { + // Preserve parentheses for the first target or around targets with leading or trailing comments. 
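Two hedged illustrations of details in this hunk (invented values): parentheses written around the first assignment target are preserved as-is, and in notebook (`.ipynb`) sources a trailing semicolon is kept when the only target is a plain name, since it is conventionally used to suppress cell output.

```python
# Parentheses the user wrote around the first target are left alone:
(first) = rest = "value"

# In a notebook cell, the trailing semicolon on a simple name assignment is
# preserved (it suppresses the cell's output display):
figure = "imagine a plot object here";
```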
+ if self.preserve_parentheses + || f.context().comments().has_leading(self.target) + || f.context().comments().has_trailing(self.target) + { + self.target.format().fmt(f)?; + } else if has_target_own_parentheses(self.target, f.context()) { + self.target + .format() + .with_options(Parentheses::Never) + .fmt(f)?; + } else { + parenthesize_if_expands(&self.target.format().with_options(Parentheses::Never)) + .fmt(f)?; + } + + write!(f, [space(), token("="), space()]) + } +} + +/// Formats the last expression in statements that start with a keyword (like `return`) or after an operator (assignments). +/// +/// The implementation avoids parenthesizing unsplittable values (like `None`, `True`, `False`, Names, a subset of strings) +/// if the value won't fit even when parenthesized. +/// +/// ## Trailing comments +/// Trailing comments are inlined inside the `value`'s parentheses rather than formatted at the end +/// of the statement for unsplittable values if the `value` gets parenthesized. +/// +/// Inlining the trailing comments prevent situations where the parenthesized value +/// still exceeds the configured line width, but parenthesizing helps to make the trailing comment fit. +/// Instead, it only parenthesizes `value` if it makes both the `value` and the trailing comment fit. +/// See [PR 8431](https://github.com/astral-sh/ruff/pull/8431) for more details. +/// +/// The implementation formats the statement's and value's trailing end of line comments: +/// * after the expression if the expression needs no parentheses (necessary or the `expand_parent` makes the group never fit). +/// * inside the parentheses if the expression exceeds the line-width. +/// +/// ```python +/// a = loooooooooooooooooooooooooooong # with_comment +/// b = ( +/// short # with_comment +/// ) +/// ``` +/// +/// Which gets formatted to: +/// +/// ```python +/// # formatted +/// a = ( +/// loooooooooooooooooooooooooooong # with comment +/// ) +/// b = short # with comment +/// ``` +/// +/// The long name gets parenthesized because it exceeds the configured line width and the trailing comment of the +/// statement gets formatted inside (instead of outside) the parentheses. +/// +/// No parentheses are added for `short` because it fits into the configured line length, regardless of whether +/// the comment exceeds the line width or not. +/// +/// This logic isn't implemented in [`place_comment`] by associating trailing statement comments to the expression because +/// doing so breaks the suite empty lines formatting that relies on trailing comments to be stored on the statement. +pub(super) enum FormatStatementsLastExpression<'a> { + /// Prefers to split what's left of `value` before splitting the value. + /// + /// ```python + /// aaaaaaa[bbbbbbbb] = some_long_value + /// ``` + /// + /// This layout splits `aaaaaaa[bbbbbbbb]` first assuming the whole statements exceeds the line width, resulting in + /// + /// ```python + /// aaaaaaa[ + /// bbbbbbbb + /// ] = some_long_value + /// ``` + /// + /// This layout is preferred over [`RightToLeft`] if the left is unsplittable (single keyword like `return` or a Name) + /// because it has better performance characteristics. + LeftToRight { + /// The right side of an assignment or the value returned in a return statement. + value: &'a Expr, + + /// The parent statement that encloses the `value` expression. + statement: AnyNodeRef<'a>, + }, + + /// Prefers parenthesizing the value before splitting the left side. Specific to assignments. 
+    ///
+    /// Formats what's left of `value` together with the assignment operator and the assigned `value`.
+    /// This layout prefers parenthesizing the value over parenthesizing the left (target or type annotation):
+    ///
+    /// ```python
+    /// aaaaaaa[bbbbbbbb] = some_long_value
+    /// ```
+    ///
+    /// gets formatted to...
+    ///
+    /// ```python
+    /// aaaaaaa[bbbbbbbb] = (
+    ///     some_long_value
+    /// )
+    /// ```
+    ///
+    /// ... regardless of whether the value will fit or not.
+    ///
+    /// The left only gets parenthesized if the left exceeds the configured line width on its own or
+    /// is forced to split because of a magical trailing comma or contains comments:
+    ///
+    /// ```python
+    /// aaaaaaa[bbbbbbbb_exceeds_the_line_width] = some_long_value
+    /// ```
+    ///
+    /// gets formatted to
+    /// ```python
+    /// aaaaaaa[
+    ///     bbbbbbbb_exceeds_the_line_width
+    /// ] = some_long_value
+    /// ```
+    ///
+    /// The layout avoids parenthesizing the value when the left splits to avoid
+    /// unnecessary parentheses. Adding the parentheses, as shown in the example below, reduces readability.
+    ///
+    /// ```python
+    /// aaaaaaa[
+    ///     bbbbbbbb_exceeds_the_line_width
+    /// ] = (
+    ///     some_long_value
+    /// )
+    /// ```
+    ///
+    /// ## Non-fluent Call Expressions
+    /// Non-fluent call expressions in the `value` position are only parenthesized if the opening parentheses
+    /// exceed the configured line length. The layout prefers splitting after the opening parentheses
+    /// if the `callee` expression and the opening parentheses fit on the line.
+    RightToLeft {
+        /// The expression that comes before the assignment operator. This is either
+        /// the last target, or the type annotation of an annotated assignment.
+        before_operator: AnyBeforeOperator<'a>,
+
+        /// The assignment operator. Either `Assign` (`=`) or the operator used by the augmented assignment statement.
+        operator: AnyAssignmentOperator,
+
+        /// The assigned `value`.
+        value: &'a Expr,
+
+        /// The assignment statement.
+ statement: AnyNodeRef<'a>, + }, +} + +impl<'a> FormatStatementsLastExpression<'a> { + pub(super) fn left_to_right>>(value: &'a Expr, statement: S) -> Self { + Self::LeftToRight { + value, + statement: statement.into(), + } + } +} + +impl Format> for FormatStatementsLastExpression<'_> { + fn fmt(&self, f: &mut Formatter>) -> FormatResult<()> { + match self { + FormatStatementsLastExpression::LeftToRight { value, statement } => { + let can_inline_comment = should_inline_comments(value, *statement, f.context()); + + if !can_inline_comment { + return maybe_parenthesize_expression( + value, + *statement, + Parenthesize::IfBreaks, + ) + .fmt(f); + } + + let comments = f.context().comments().clone(); + let expression_comments = comments.leading_dangling_trailing(*value); + + if let Some(inline_comments) = OptionalParenthesesInlinedComments::new( + &expression_comments, + *statement, + &comments, + ) { + let group_id = f.group_id("optional_parentheses"); + + let f = &mut WithNodeLevel::new(NodeLevel::Expression(Some(group_id)), f); + + best_fit_parenthesize(&format_with(|f| { + inline_comments.mark_formatted(); + + value.format().with_options(Parentheses::Never).fmt(f)?; + + if !inline_comments.is_empty() { + // If the expressions exceeds the line width, format the comments in the parentheses + if_group_breaks(&inline_comments).fmt(f)?; + } + + Ok(()) + })) + .with_group_id(Some(group_id)) + .fmt(f)?; + + if !inline_comments.is_empty() { + // If the line fits into the line width, format the comments after the parenthesized expression + if_group_fits_on_line(&inline_comments) + .with_group_id(Some(group_id)) + .fmt(f)?; + } + + Ok(()) + } else { + // Preserve the parentheses if the expression has any leading or trailing comments, + // to avoid syntax errors, similar to `maybe_parenthesize_expression`. + value.format().with_options(Parentheses::Always).fmt(f) + } + } + FormatStatementsLastExpression::RightToLeft { + before_operator, + operator, + value, + statement, + } => { + let should_inline_comments = should_inline_comments(value, *statement, f.context()); + + // Use the normal `maybe_parenthesize_layout` for splittable `value`s. + if !should_inline_comments + && !should_non_inlineable_use_best_fit(value, *statement, f.context()) + { + return write!( + f, + [ + before_operator, + space(), + operator, + space(), + maybe_parenthesize_expression( + value, + *statement, + Parenthesize::IfBreaks + ) + ] + ); + } + + let comments = f.context().comments().clone(); + let expression_comments = comments.leading_dangling_trailing(*value); + + // Don't inline comments for attribute and call expressions for black compatibility + let inline_comments = if should_inline_comments { + OptionalParenthesesInlinedComments::new( + &expression_comments, + *statement, + &comments, + ) + } else if expression_comments.has_leading() + || expression_comments.has_trailing_own_line() + { + None + } else { + Some(OptionalParenthesesInlinedComments::default()) + }; + + let Some(inline_comments) = inline_comments else { + // Preserve the parentheses if the expression has any leading or trailing own line comments + // same as `maybe_parenthesize_expression` + return write!( + f, + [ + before_operator, + space(), + operator, + space(), + value.format().with_options(Parentheses::Always) + ] + ); + }; + + // Prevent inline comments to be formatted as part of the expression. 
+ inline_comments.mark_formatted(); + + let mut last_target = before_operator.memoized(); + + // Don't parenthesize the `value` if it is known that the target will break. + // This is mainly a performance optimisation that avoids unnecessary memoization + // and using the costly `BestFitting` layout if it is already known that only the last variant + // can ever fit because the left breaks. + if last_target.inspect(f)?.will_break() { + return write!( + f, + [ + last_target, + space(), + operator, + space(), + value.format().with_options(Parentheses::Never), + inline_comments + ] + ); + } + + let format_value = value.format().with_options(Parentheses::Never).memoized(); + + // Tries to fit the `left` and the `value` on a single line: + // ```python + // a = b = c + // ``` + let format_flat = format_with(|f| { + write!( + f, + [ + last_target, + space(), + operator, + space(), + format_value, + inline_comments + ] + ) + }); + + // Don't break the last assignment target but parenthesize the value to see if it fits (break right first). + // + // ```python + // a["bbbbb"] = ( + // c + // ) + // ``` + let format_parenthesize_value = format_with(|f| { + write!( + f, + [ + last_target, + space(), + operator, + space(), + token("("), + block_indent(&format_args![format_value, inline_comments]), + token(")") + ] + ) + }); + + // Fall back to parenthesizing (or splitting) the last target part if we can't make the value + // fit. Don't parenthesize the value to avoid unnecessary parentheses. + // + // ```python + // a[ + // "bbbbb" + // ] = c + // ``` + let format_split_left = format_with(|f| { + write!( + f, + [ + last_target, + space(), + operator, + space(), + format_value, + inline_comments + ] + ) + }); + + // For call expressions, prefer breaking after the call expression's opening parentheses + // over parenthesizing the entire call expression. 
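A hedged sketch of the call-expression preference described in the comment above (invented names): when the value is a call, breaking inside the call's own parentheses is tried before wrapping the whole call in a new pair.

```python
def combine(first_argument, second_argument, third_argument):
    return (first_argument, second_argument, third_argument)

# Preferred when the statement is too long: split inside the call's own parentheses.
result = combine(
    "a fairly long first argument", "a second argument", "a third argument"
)

# Avoided where possible: parenthesizing the entire call expression, as in
# result = (
#     combine("a fairly long first argument", "a second argument", "a third argument")
# )
```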
+ if value.is_call_expr() { + best_fitting![ + format_flat, + // Avoid parenthesizing the call expression if the `(` fit on the line + format_args![ + last_target, + space(), + operator, + space(), + group(&format_value).should_expand(true), + ], + format_parenthesize_value, + format_split_left + ] + .fmt(f) + } else { + best_fitting![format_flat, format_parenthesize_value, format_split_left].fmt(f) + } + } + } + } +} + +#[derive(Debug, Default)] +struct OptionalParenthesesInlinedComments<'a> { + expression: &'a [SourceComment], + statement: &'a [SourceComment], +} + +impl<'a> OptionalParenthesesInlinedComments<'a> { + fn new( + expression_comments: &LeadingDanglingTrailingComments<'a>, + statement: AnyNodeRef<'a>, + comments: &'a Comments<'a>, + ) -> Option { + if expression_comments.has_leading() || expression_comments.has_trailing_own_line() { + return None; + } + + let statement_trailing_comments = comments.trailing(statement); + let after_end_of_line = statement_trailing_comments + .partition_point(|comment| comment.line_position().is_end_of_line()); + let (stmt_inline_comments, _) = statement_trailing_comments.split_at(after_end_of_line); + + let after_end_of_line = expression_comments + .trailing + .partition_point(|comment| comment.line_position().is_end_of_line()); + + let (expression_inline_comments, trailing_own_line_comments) = + expression_comments.trailing.split_at(after_end_of_line); + + debug_assert!(trailing_own_line_comments.is_empty(), "The method should have returned early if the expression has trailing own line comments"); + + Some(OptionalParenthesesInlinedComments { + expression: expression_inline_comments, + statement: stmt_inline_comments, + }) + } + + fn is_empty(&self) -> bool { + self.expression.is_empty() && self.statement.is_empty() + } + + fn iter_comments(&self) -> impl Iterator { + self.expression.iter().chain(self.statement) + } + + fn mark_formatted(&self) { + for comment in self.expression { + comment.mark_formatted(); + } + } +} + +impl Format> for OptionalParenthesesInlinedComments<'_> { + fn fmt(&self, f: &mut Formatter>) -> FormatResult<()> { + for comment in self.iter_comments() { + comment.mark_unformatted(); + } + + write!( + f, + [ + trailing_comments(self.expression), + trailing_comments(self.statement) + ] + ) + } +} + +#[derive(Copy, Clone, Debug)] +pub(super) enum AnyAssignmentOperator { + Assign, + AugAssign(Operator), +} + +impl Format> for AnyAssignmentOperator { + fn fmt(&self, f: &mut Formatter>) -> FormatResult<()> { + match self { + AnyAssignmentOperator::Assign => token("=").fmt(f), + AnyAssignmentOperator::AugAssign(operator) => { + write!(f, [operator.format(), token("=")]) + } + } + } +} + +#[derive(Copy, Clone, Debug)] +pub(super) enum AnyBeforeOperator<'a> { + Expression(&'a Expr), + TypeParams(&'a TypeParams), +} + +impl Format> for AnyBeforeOperator<'_> { + fn fmt(&self, f: &mut Formatter>) -> FormatResult<()> { + match self { + AnyBeforeOperator::Expression(expression) => { + // Preserve parentheses around targets with comments. + if f.context().comments().has_leading(*expression) + || f.context().comments().has_trailing(*expression) + { + expression + .format() + .with_options(Parentheses::Preserve) + .fmt(f) + } + // Never parenthesize targets that come with their own parentheses, e.g. don't parenthesize lists or dictionary literals. 
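A hedged sketch of the two `AnyBeforeOperator` cases above (invented names; the `type` statement needs Python 3.12+): an expression target or annotation that carries its own brackets splits inside them instead of gaining parentheses, and type parameter lists are never parenthesized.

```python
# A list target breaks inside its own brackets rather than being wrapped in
# an extra pair of parentheses:
[
    first_element,
    second_element,
] = ["a fairly long first value", "a fairly long second value"]

# Type parameter lists split inside their own square brackets when needed
# (Python 3.12+ syntax) and never receive added parentheses:
type LongLookupTable[
    KeyType,
    ValueType,
] = dict[KeyType, ValueType]
```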
+ else if has_target_own_parentheses(expression, f.context()) { + expression.format().with_options(Parentheses::Never).fmt(f) + } else { + parenthesize_if_expands(&expression.format().with_options(Parentheses::Never)) + .fmt(f) + } + } + // Never parenthesize type params + AnyBeforeOperator::TypeParams(type_params) => type_params.format().fmt(f), + } + } +} + +/// Returns `true` for unsplittable expressions for which comments should be inlined. +fn should_inline_comments( + expression: &Expr, + parent: AnyNodeRef, + context: &PyFormatContext, +) -> bool { + match expression { + Expr::Name(_) | Expr::NoneLiteral(_) | Expr::NumberLiteral(_) | Expr::BooleanLiteral(_) => { + true + } + Expr::StringLiteral(string) => { + string.needs_parentheses(parent, context) == OptionalParentheses::BestFit + } + Expr::BytesLiteral(bytes) => { + bytes.needs_parentheses(parent, context) == OptionalParentheses::BestFit + } + Expr::FString(fstring) => { + fstring.needs_parentheses(parent, context) == OptionalParentheses::BestFit + } + _ => false, + } +} + +/// Tests whether an expression that for which comments shouldn't be inlined should use the best fit layout +fn should_non_inlineable_use_best_fit( + expr: &Expr, + parent: AnyNodeRef, + context: &PyFormatContext, +) -> bool { + match expr { + Expr::Attribute(attribute) => { + attribute.needs_parentheses(parent, context) == OptionalParentheses::BestFit + } + Expr::Call(call) => call.needs_parentheses(parent, context) == OptionalParentheses::BestFit, + _ => false, + } +} + +/// Returns `true` for targets that should not be parenthesized if they split because their expanded +/// layout comes with their own set of parentheses. +pub(super) fn has_target_own_parentheses(target: &Expr, context: &PyFormatContext) -> bool { + matches!(target, Expr::Tuple(_)) || has_own_parentheses(target, context).is_some() +} diff --git a/crates/ruff_python_formatter/src/statement/stmt_aug_assign.rs b/crates/ruff_python_formatter/src/statement/stmt_aug_assign.rs index 65260c5fec..003c10a32a 100644 --- a/crates/ruff_python_formatter/src/statement/stmt_aug_assign.rs +++ b/crates/ruff_python_formatter/src/statement/stmt_aug_assign.rs @@ -2,10 +2,13 @@ use ruff_formatter::write; use ruff_python_ast::StmtAugAssign; use crate::comments::{SourceComment, SuppressionKind}; - -use crate::expression::maybe_parenthesize_expression; -use crate::expression::parentheses::Parenthesize; +use crate::expression::parentheses::is_expression_parenthesized; use crate::prelude::*; +use crate::preview::is_prefer_splitting_right_hand_side_of_assignments_enabled; +use crate::statement::stmt_assign::{ + has_target_own_parentheses, AnyAssignmentOperator, AnyBeforeOperator, + FormatStatementsLastExpression, +}; use crate::statement::trailing_semicolon; use crate::{AsFormat, FormatNodeRule}; @@ -20,17 +23,35 @@ impl FormatNodeRule for FormatStmtAugAssign { value, range: _, } = item; - write!( - f, - [ - target.format(), - space(), - op.format(), - token("="), - space(), - maybe_parenthesize_expression(value, item, Parenthesize::IfBreaks) - ] - )?; + + if is_prefer_splitting_right_hand_side_of_assignments_enabled(f.context()) + && has_target_own_parentheses(target, f.context()) + && !is_expression_parenthesized( + target.into(), + f.context().comments().ranges(), + f.context().source(), + ) + { + FormatStatementsLastExpression::RightToLeft { + before_operator: AnyBeforeOperator::Expression(target), + operator: AnyAssignmentOperator::AugAssign(*op), + value, + statement: item.into(), + } + .fmt(f)?; + } else { + 
write!( + f, + [ + target.format(), + space(), + op.format(), + token("="), + space(), + FormatStatementsLastExpression::left_to_right(value, item) + ] + )?; + } if f.options().source_type().is_ipynb() && f.context().node_level().is_last_top_level_statement() diff --git a/crates/ruff_python_formatter/src/statement/stmt_return.rs b/crates/ruff_python_formatter/src/statement/stmt_return.rs index be63db2e73..4eaa3d9261 100644 --- a/crates/ruff_python_formatter/src/statement/stmt_return.rs +++ b/crates/ruff_python_formatter/src/statement/stmt_return.rs @@ -3,9 +3,8 @@ use ruff_python_ast::{Expr, StmtReturn}; use crate::comments::{SourceComment, SuppressionKind}; use crate::expression::expr_tuple::TupleParentheses; -use crate::expression::maybe_parenthesize_expression; -use crate::expression::parentheses::Parenthesize; use crate::prelude::*; +use crate::statement::stmt_assign::FormatStatementsLastExpression; #[derive(Default)] pub struct FormatStmtReturn; @@ -33,7 +32,7 @@ impl FormatNodeRule for FormatStmtReturn { f, [ space(), - maybe_parenthesize_expression(value, item, Parenthesize::IfBreaks) + FormatStatementsLastExpression::left_to_right(value, item) ] ) } diff --git a/crates/ruff_python_formatter/src/statement/stmt_type_alias.rs b/crates/ruff_python_formatter/src/statement/stmt_type_alias.rs index c2daaf8528..b6ae69b443 100644 --- a/crates/ruff_python_formatter/src/statement/stmt_type_alias.rs +++ b/crates/ruff_python_formatter/src/statement/stmt_type_alias.rs @@ -2,9 +2,11 @@ use ruff_formatter::write; use ruff_python_ast::StmtTypeAlias; use crate::comments::{SourceComment, SuppressionKind}; -use crate::expression::maybe_parenthesize_expression; -use crate::expression::parentheses::Parenthesize; use crate::prelude::*; +use crate::preview::is_prefer_splitting_right_hand_side_of_assignments_enabled; +use crate::statement::stmt_assign::{ + AnyAssignmentOperator, AnyBeforeOperator, FormatStatementsLastExpression, +}; #[derive(Default)] pub struct FormatStmtTypeAlias; @@ -21,6 +23,16 @@ impl FormatNodeRule for FormatStmtTypeAlias { write!(f, [token("type"), space(), name.as_ref().format()])?; if let Some(type_params) = type_params { + if is_prefer_splitting_right_hand_side_of_assignments_enabled(f.context()) { + return FormatStatementsLastExpression::RightToLeft { + before_operator: AnyBeforeOperator::TypeParams(type_params), + operator: AnyAssignmentOperator::Assign, + value, + statement: item.into(), + } + .fmt(f); + }; + write!(f, [type_params.format()])?; } @@ -30,7 +42,7 @@ impl FormatNodeRule for FormatStmtTypeAlias { space(), token("="), space(), - maybe_parenthesize_expression(value, item, Parenthesize::IfBreaks) + FormatStatementsLastExpression::left_to_right(value, item) ] ) } diff --git a/crates/ruff_python_formatter/src/statement/stmt_with.rs b/crates/ruff_python_formatter/src/statement/stmt_with.rs index af2389279b..06dc9a5f88 100644 --- a/crates/ruff_python_formatter/src/statement/stmt_with.rs +++ b/crates/ruff_python_formatter/src/statement/stmt_with.rs @@ -6,9 +6,7 @@ use ruff_text_size::{Ranged, TextRange}; use crate::builders::parenthesize_if_expands; use crate::comments::SourceComment; -use crate::expression::parentheses::{ - in_parentheses_only_soft_line_break_or_space, optional_parentheses, parenthesized, -}; +use crate::expression::parentheses::parenthesized; use crate::other::commas; use crate::prelude::*; use crate::statement::clause::{clause_body, clause_header, ClauseHeader}; @@ -77,7 +75,7 @@ impl FormatNodeRule for FormatStmtWith { 
joiner.entry_with_line_separator( item, &item.format(), - in_parentheses_only_soft_line_break_or_space(), + soft_line_break_or_space(), ); } joiner.finish() @@ -87,7 +85,7 @@ impl FormatNodeRule for FormatStmtWith { // This is similar to `maybe_parenthesize_expression`, but we're not // dealing with an expression here, it's a `WithItem`. if comments.has_leading(item) || comments.has_trailing(item) { - optional_parentheses(&item.format()).fmt(f)?; + parenthesized("(", &item.format(), ")").fmt(f)?; } else { item.format().fmt(f)?; } diff --git a/crates/ruff_python_formatter/src/statement/suite.rs b/crates/ruff_python_formatter/src/statement/suite.rs index d8001cebe3..f811e882f2 100644 --- a/crates/ruff_python_formatter/src/statement/suite.rs +++ b/crates/ruff_python_formatter/src/statement/suite.rs @@ -8,9 +8,10 @@ use ruff_text_size::{Ranged, TextRange}; use crate::comments::{ leading_comments, trailing_comments, Comments, LeadingDanglingTrailingComments, }; -use crate::context::{NodeLevel, TopLevelStatementPosition, WithNodeLevel}; -use crate::expression::string::StringLayout; +use crate::context::{NodeLevel, TopLevelStatementPosition, WithIndentLevel, WithNodeLevel}; +use crate::expression::expr_string_literal::ExprStringLiteralKind; use crate::prelude::*; +use crate::preview::is_no_blank_line_before_class_docstring_enabled; use crate::statement::stmt_expr::FormatStmtExpr; use crate::verbatim::{ suppressed_node, write_suppressed_statements_starting_with_leading_comment, @@ -71,7 +72,8 @@ impl FormatRule> for FormatSuite { let source = f.context().source(); let source_type = f.options().source_type(); - let f = &mut WithNodeLevel::new(node_level, f); + let f = WithNodeLevel::new(node_level, f); + let f = &mut WithIndentLevel::new(f.context().indent_level().increment(), f); // Format the first statement in the body, which often has special formatting rules. let first = match self.kind { @@ -107,14 +109,24 @@ impl FormatRule> for FormatSuite { if !comments.has_leading(first) && lines_before(first.start(), source) > 1 && !source_type.is_stub() + && !is_no_blank_line_before_class_docstring_enabled(f.context()) { // Allow up to one empty line before a class docstring, e.g., this is // stable formatting: + // // ```python // class Test: // // """Docstring""" // ``` + // + // But, in preview mode, we don't want to allow any empty lines before a + // class docstring, e.g., this is preview formatting: + // + // ```python + // class Test: + // """Docstring""" + // ``` empty_line().fmt(f)?; } @@ -511,7 +523,9 @@ pub(crate) fn contains_only_an_ellipsis(body: &[Stmt], comments: &Comments) -> b let [node] = body else { return false; }; - value.is_ellipsis_literal_expr() && !comments.has_leading(node) + value.is_ellipsis_literal_expr() + && !comments.has_leading(node) + && !comments.has_trailing_own_line(node) } _ => false, } @@ -606,7 +620,7 @@ impl Format> for DocstringStmt<'_> { leading_comments(node_comments.leading), string_literal .format() - .with_options(StringLayout::DocString), + .with_options(ExprStringLiteralKind::Docstring), ] )?; diff --git a/crates/ruff_python_formatter/src/string/docstring.rs b/crates/ruff_python_formatter/src/string/docstring.rs new file mode 100644 index 0000000000..51fee063ca --- /dev/null +++ b/crates/ruff_python_formatter/src/string/docstring.rs @@ -0,0 +1,1635 @@ +// This gives tons of false positives in this file because of +// "reStructuredText." 
+#![allow(clippy::doc_markdown)] + +use std::{borrow::Cow, collections::VecDeque}; + +use {once_cell::sync::Lazy, regex::Regex}; + +use { + ruff_formatter::{write, FormatOptions, IndentStyle, LineWidth, Printed}, + ruff_python_trivia::{is_python_whitespace, PythonWhitespace}, + ruff_source_file::Locator, + ruff_text_size::{Ranged, TextLen, TextRange, TextSize}, +}; + +use crate::{prelude::*, DocstringCodeLineWidth, FormatModuleError}; + +use super::{NormalizedString, QuoteChar}; + +/// Format a docstring by trimming whitespace and adjusting the indentation. +/// +/// Summary of changes we make: +/// * Normalize the string like all other strings +/// * Ignore docstring that have an escaped newline +/// * Trim all trailing whitespace, except for a chaperone space that avoids quotes or backslashes +/// in the last line. +/// * Trim leading whitespace on the first line, again except for a chaperone space +/// * If there is only content in the first line and after that only whitespace, collapse the +/// docstring into one line +/// * Adjust the indentation (see below) +/// +/// # Docstring indentation +/// +/// Unlike any other string, like black we change the indentation of docstring lines. +/// +/// We want to preserve the indentation inside the docstring relative to the suite statement/block +/// indent that the docstring statement is in, but also want to apply the change of the outer +/// indentation in the docstring, e.g. +/// ```python +/// def sparkle_sky(): +/// """Make a pretty sparkly sky. +/// * * ✨ *. . +/// * * ✨ . +/// . * . ✨ * . . +/// """ +/// ``` +/// should become +/// ```python +/// def sparkle_sky(): +/// """Make a pretty sparkly sky. +/// * * ✨ *. . +/// * * ✨ . +/// . * . ✨ * . . +/// """ +/// ``` +/// We can't compute the full indentation here since we don't know what the block indent of +/// the doc comment will be yet and which we can only have added by formatting each line +/// separately with a hard line break. This means we need to strip shared indentation from +/// docstring while preserving the in-docstring bigger-than-suite-statement indentation. Example: +/// ```python +/// def f(): +/// """first line +/// line a +/// line b +/// """ +/// ``` +/// The docstring indentation is 2, the block indents will change this to 4 (but we can't +/// determine this at this point). The indentation of line a is 2, so we trim ` line a` +/// to `line a`. For line b it's 5, so we trim it to `line b` and pad with 5-2=3 spaces to +/// ` line b`. The closing quotes, being on their own line, are stripped get only the +/// default indentation. Fully formatted: +/// ```python +/// def f(): +/// """first line +/// line a +/// line b +/// """ +/// ``` +/// +/// Tabs are counted by padding them to the next multiple of 8 according to +/// [`str.expandtabs`](https://docs.python.org/3/library/stdtypes.html#str.expandtabs). When +/// we see indentation that contains a tab or any other none ascii-space whitespace we rewrite the +/// string. +/// +/// Additionally, if any line in the docstring has less indentation than the docstring +/// (effectively a negative indentation wrt. to the current level), we pad all lines to the +/// level of the docstring with spaces. 
+/// ```python +/// def f(): +/// """first line +/// line a +/// line b +/// line c +/// """ +/// ``` +/// Here line a is 3 columns negatively indented, so we pad all lines by an extra 3 spaces: +/// ```python +/// def f(): +/// """first line +/// line a +/// line b +/// line c +/// """ +/// ``` +pub(crate) fn format(normalized: &NormalizedString, f: &mut PyFormatter) -> FormatResult<()> { + let docstring = &normalized.text; + + // Black doesn't change the indentation of docstrings that contain an escaped newline + if contains_unescaped_newline(docstring) { + return normalized.fmt(f); + } + + // is_borrowed is unstable :/ + let already_normalized = matches!(docstring, Cow::Borrowed(_)); + + let mut lines = docstring.lines().peekable(); + + // Start the string + write!( + f, + [ + normalized.prefix, + normalized.quotes, + source_position(normalized.start()), + ] + )?; + // We track where in the source docstring we are (in source code byte offsets) + let mut offset = normalized.start(); + + // The first line directly after the opening quotes has different rules than the rest, mainly + // that we remove all leading whitespace as there's no indentation + let first = lines.next().unwrap_or_default(); + // Black trims whitespace using [`str.strip()`](https://docs.python.org/3/library/stdtypes.html#str.strip) + // https://github.com/psf/black/blob/b4dca26c7d93f930bbd5a7b552807370b60d4298/src/black/strings.py#L77-L85 + // So we use the unicode whitespace definition through `trim_{start,end}` instead of the python + // tokenizer whitespace definition in `trim_whitespace_{start,end}`. + let trim_end = first.trim_end(); + let trim_both = trim_end.trim_start(); + + // Edge case: The first line is `""" "content`, so we need to insert chaperone space that keep + // inner quotes and closing quotes from getting to close to avoid `""""content` + if trim_both.starts_with(normalized.quotes.quote_char.as_char()) { + space().fmt(f)?; + } + + if !trim_end.is_empty() { + // For the first line of the docstring we strip the leading and trailing whitespace, e.g. + // `""" content ` to `"""content` + let leading_whitespace = trim_end.text_len() - trim_both.text_len(); + let trimmed_line_range = + TextRange::at(offset, trim_end.text_len()).add_start(leading_whitespace); + if already_normalized { + source_text_slice(trimmed_line_range).fmt(f)?; + } else { + text(trim_both, Some(trimmed_line_range.start())).fmt(f)?; + } + } + offset += first.text_len(); + + // Check if we have a single line (or empty) docstring + if docstring[first.len()..].trim().is_empty() { + // For `"""\n"""` or other whitespace between the quotes, black keeps a single whitespace, + // but `""""""` doesn't get one inserted. + if needs_chaperone_space(normalized, trim_end) + || (trim_end.is_empty() && !docstring.is_empty()) + { + space().fmt(f)?; + } + normalized.quotes.fmt(f)?; + return Ok(()); + } + + hard_line_break().fmt(f)?; + // We know that the normalized string has \n line endings + offset += "\n".text_len(); + + // If some line of the docstring is less indented than the function body, we pad all lines to + // align it with the docstring statement. Conversely, if all lines are over-indented, we strip + // the extra indentation. We call this stripped indentation since it's relative to the block + // indent printer-made indentation. 
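An illustrative aside, not part of this diff: the shared-indentation rule described in the doc comment above can be sketched as a small standalone function. The helper name and the space-only handling are assumptions for the example; the shipped code also expands tabs to multiples of 8 and works on `TextSize` offsets rather than byte counts.

```rust
/// Minimal sketch: strip the smallest indentation shared by all non-blank
/// lines, so the printer can later re-apply the block indent uniformly.
/// Tabs are ignored here for brevity.
fn strip_shared_indent(lines: &[&str]) -> Vec<String> {
    // The minimum indent over non-blank lines; blank lines never count.
    let min_indent = lines
        .iter()
        .copied()
        .filter(|line| !line.trim().is_empty())
        .map(|line| line.len() - line.trim_start().len())
        .min()
        .unwrap_or(0);

    lines
        .iter()
        .copied()
        .map(|line| {
            if line.trim().is_empty() {
                String::new()
            } else {
                // Every non-blank line has at least `min_indent` leading
                // spaces by construction, so slicing is safe.
                line[min_indent..].to_string()
            }
        })
        .collect()
}

fn main() {
    // "line a" sets the minimum; "line b" keeps its extra indentation.
    let stripped = strip_shared_indent(&["  line a", "     line b", ""]);
    assert_eq!(stripped, vec!["line a", "   line b", ""]);
}
```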
+ let stripped_indentation_length = lines + .clone() + // We don't want to count whitespace-only lines as miss-indented + .filter(|line| !line.trim().is_empty()) + .map(indentation_length) + .min() + .unwrap_or_default(); + + DocstringLinePrinter { + f, + action_queue: VecDeque::new(), + offset, + stripped_indentation_length, + already_normalized, + quote_char: normalized.quotes.quote_char, + code_example: CodeExample::default(), + } + .add_iter(lines)?; + + // Same special case in the last line as for the first line + let trim_end = docstring + .as_ref() + .trim_end_matches(|c: char| c.is_whitespace() && c != '\n'); + if needs_chaperone_space(normalized, trim_end) { + space().fmt(f)?; + } + + write!(f, [source_position(normalized.end()), normalized.quotes]) +} + +fn contains_unescaped_newline(haystack: &str) -> bool { + let mut rest = haystack; + + while let Some(index) = memchr::memchr(b'\\', rest.as_bytes()) { + rest = &rest[index + 1..].trim_whitespace_start(); + + if rest.starts_with('\n') { + return true; + } + } + + false +} + +/// An abstraction for printing each line of a docstring. +struct DocstringLinePrinter<'ast, 'buf, 'fmt, 'src> { + f: &'fmt mut PyFormatter<'ast, 'buf>, + + /// A queue of actions to perform. + /// + /// Whenever we process a line, it is possible for it to generate multiple + /// actions to take. The most basic, and most common case, is for the line + /// to just simply be printed as-is. But in some cases, a line is part of + /// a code example that we'd like to reformat. In those cases, the actions + /// can be more complicated. + /// + /// Actions are pushed on to the end of the queue and popped from the + /// beginning. + action_queue: VecDeque>, + + /// The source offset of the beginning of the line that is currently being + /// printed. + offset: TextSize, + + /// Indentation alignment based on the least indented line in the + /// docstring. + stripped_indentation_length: TextSize, + + /// Whether the docstring is overall already considered normalized. When it + /// is, the formatter can take a fast path. + already_normalized: bool, + + /// The quote character used by the docstring being printed. + quote_char: QuoteChar, + + /// The current code example detected in the docstring. + code_example: CodeExample<'src>, +} + +impl<'ast, 'buf, 'fmt, 'src> DocstringLinePrinter<'ast, 'buf, 'fmt, 'src> { + /// Print all of the lines in the given iterator to this + /// printer's formatter. + /// + /// Note that callers may treat the first line specially, such that the + /// iterator given contains all lines except for the first. + fn add_iter( + &mut self, + mut lines: std::iter::Peekable>, + ) -> FormatResult<()> { + while let Some(line) = lines.next() { + let line = InputDocstringLine { + line, + offset: self.offset, + next: lines.peek().copied(), + }; + // We know that the normalized string has \n line endings. + self.offset += line.line.text_len() + "\n".text_len(); + self.add_one(line)?; + } + self.code_example.finish(&mut self.action_queue); + self.run_action_queue() + } + + /// Adds the given line to this printer. + /// + /// Depending on what's in the line, this may or may not print the line + /// immediately to the underlying buffer. If the line starts or is part + /// of an existing code snippet, then the lines will get buffered until + /// the code snippet is complete. 
+ fn add_one(&mut self, line: InputDocstringLine<'src>) -> FormatResult<()> { + // Just pass through the line as-is without looking for a code snippet + // when docstring code formatting is disabled. And also when we are + // formatting a code snippet so as to avoid arbitrarily nested code + // snippet formatting. We avoid this because it's likely quite tricky + // to get right 100% of the time, although perhaps not impossible. It's + // not clear that it's worth the effort to support. + if !self.f.options().docstring_code().is_enabled() || self.f.context().docstring().is_some() + { + return self.print_one(&line.as_output()); + } + self.code_example.add(line, &mut self.action_queue); + self.run_action_queue() + } + + /// Process any actions in this printer's queue until the queue is empty. + fn run_action_queue(&mut self) -> FormatResult<()> { + while let Some(action) = self.action_queue.pop_front() { + match action { + CodeExampleAddAction::Print { original } => { + self.print_one(&original.as_output())?; + } + CodeExampleAddAction::Kept => {} + CodeExampleAddAction::Reset { code } => { + for codeline in code { + self.print_one(&codeline.original.as_output())?; + } + } + CodeExampleAddAction::Format { mut kind } => { + let Some(formatted_lines) = self.format(&mut kind)? else { + // Since we've failed to emit these lines, we need to + // put them back in the queue but have them jump to the + // front of the queue to get processed before any other + // action. + self.action_queue.push_front(CodeExampleAddAction::Reset { + code: kind.into_code(), + }); + continue; + }; + + self.already_normalized = false; + match kind { + CodeExampleKind::Doctest(CodeExampleDoctest { ps1_indent, .. }) => { + let mut lines = formatted_lines.into_iter(); + let Some(first) = lines.next() else { continue }; + self.print_one( + &first.map(|line| std::format!("{ps1_indent}>>> {line}")), + )?; + for docline in lines { + self.print_one( + &docline.map(|line| std::format!("{ps1_indent}... {line}")), + )?; + } + } + CodeExampleKind::Rst(litblock) => { + let Some(min_indent) = litblock.min_indent else { + continue; + }; + // This looks suspicious, but it's consistent with the whitespace + // normalization that will occur anyway. + let indent = " ".repeat(min_indent.to_usize()); + for docline in formatted_lines { + self.print_one( + &docline.map(|line| std::format!("{indent}{line}")), + )?; + } + } + CodeExampleKind::Markdown(fenced) => { + // This looks suspicious, but it's consistent with the whitespace + // normalization that will occur anyway. + let indent = " ".repeat(fenced.opening_fence_indent.to_usize()); + for docline in formatted_lines { + self.print_one( + &docline.map(|line| std::format!("{indent}{line}")), + )?; + } + } + } + } + } + } + Ok(()) + } + + /// Prints the single line given. + /// + /// This mostly just handles indentation and ensuring line breaks are + /// inserted as appropriate before passing it on to the formatter to + /// print to the buffer. + fn print_one(&mut self, line: &OutputDocstringLine<'_>) -> FormatResult<()> { + let trim_end = line.line.trim_end(); + if trim_end.is_empty() { + return if line.is_last { + // If the doc string ends with ` """`, the last line is + // ` `, but we don't want to insert an empty line (but close + // the docstring). 
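As a side note, the queue discipline described above (actions appended at the back, consumed from the front, and a failed format replayed from the front as a reset) can be sketched with stand-in types; the `Action` enum and string payloads below are invented for the example and are not the real `CodeExampleAddAction`.

```rust
use std::collections::VecDeque;

/// Stand-in for the real action type; only the queue discipline is shown.
#[derive(Debug, PartialEq)]
enum Action {
    Print(&'static str),
    Format(&'static str),
    Reset(&'static str),
}

fn main() {
    let mut queue: VecDeque<Action> = VecDeque::new();

    // Lines normally enqueue actions at the back...
    queue.push_back(Action::Print("prose line"));
    queue.push_back(Action::Format("code snippet"));

    let mut printed = Vec::new();
    while let Some(action) = queue.pop_front() {
        match action {
            Action::Print(line) => printed.push(line),
            Action::Format(code) => {
                // ...but if formatting a snippet fails, the collected lines
                // jump to the *front* so they are replayed verbatim before
                // anything that was queued later.
                queue.push_front(Action::Reset(code));
            }
            Action::Reset(code) => printed.push(code),
        }
    }

    assert_eq!(printed, vec!["prose line", "code snippet"]);
}
```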
+ Ok(()) + } else { + empty_line().fmt(self.f) + }; + } + + let tab_or_non_ascii_space = trim_end + .chars() + .take_while(|c| c.is_whitespace()) + .any(|c| c != ' '); + + if tab_or_non_ascii_space { + // We strip the indentation that is shared with the docstring + // statement, unless a line was indented less than the docstring + // statement, in which case we strip only this much indentation to + // implicitly pad all lines by the difference, or all lines were + // overindented, in which case we strip the additional whitespace + // (see example in [`format_docstring`] doc comment). We then + // prepend the in-docstring indentation to the string. + let indent_len = indentation_length(trim_end) - self.stripped_indentation_length; + let in_docstring_indent = " ".repeat(usize::from(indent_len)) + trim_end.trim_start(); + text(&in_docstring_indent, Some(line.offset)).fmt(self.f)?; + } else { + // Take the string with the trailing whitespace removed, then also + // skip the leading whitespace. + let trimmed_line_range = TextRange::at(line.offset, trim_end.text_len()) + .add_start(self.stripped_indentation_length); + if self.already_normalized { + source_text_slice(trimmed_line_range).fmt(self.f)?; + } else { + // All indents are ascii spaces, so the slicing is correct. + text( + &trim_end[usize::from(self.stripped_indentation_length)..], + Some(trimmed_line_range.start()), + ) + .fmt(self.f)?; + } + } + + // We handled the case that the closing quotes are on their own line + // above (the last line is empty except for whitespace). If they are on + // the same line as content, we don't insert a line break. + if !line.is_last { + hard_line_break().fmt(self.f)?; + } + + Ok(()) + } + + /// Given a code example, format them and return + /// the formatted code as a sequence of owned docstring lines. + /// + /// This may mutate the code example in place if extracting the lines of + /// code requires adjusting which part of each line is used for the actual + /// code bit. + /// + /// This routine generally only returns an error when the recursive call + /// to the formatter itself returns a `FormatError`. In all other cases + /// (for example, if the code snippet is invalid Python or even if the + /// resulting reformatted code snippet is invalid Python), then `Ok(None)` + /// is returned. In this case, callers should assume that a reformatted + /// code snippet is unavailable and bail out of trying to format it. + /// + /// Currently, when the above cases happen and `Ok(None)` is returned, the + /// routine is silent about it. So from the user's perspective, this will + /// fail silently. Ideally, this would at least emit a warning message, + /// but at time of writing, it wasn't clear to me how to best do that. 
+ fn format( + &mut self, + kind: &mut CodeExampleKind<'_>, + ) -> FormatResult>>> { + use ruff_python_parser::AsMode; + + let line_width = match self.f.options().docstring_code_line_width() { + DocstringCodeLineWidth::Fixed(width) => width, + DocstringCodeLineWidth::Dynamic => { + let global_line_width = self.f.options().line_width().value(); + let indent_width = self.f.options().indent_width(); + let indent_level = self.f.context().indent_level(); + let current_indent = indent_level + .to_ascii_spaces(indent_width) + .saturating_add(kind.extra_indent_ascii_spaces()); + let width = std::cmp::max(1, global_line_width.saturating_sub(current_indent)); + LineWidth::try_from(width).expect("width is capped at a minimum of 1") + } + }; + + let code = kind.code(); + let (Some(unformatted_first), Some(unformatted_last)) = (code.first(), code.last()) else { + return Ok(None); + }; + let codeblob = code + .iter() + .map(|line| line.code) + .collect::>() + .join("\n"); + let options = self + .f + .options() + .clone() + .with_line_width(line_width) + // It's perhaps a little odd to be hard-coding the indent + // style here, but I believe it is necessary as a result + // of the whitespace normalization otherwise done in + // docstrings. Namely, tabs are rewritten with ASCII + // spaces. If code examples in docstrings are formatted + // with tabs and those tabs end up getting rewritten, this + // winds up screwing with the indentation in ways that + // results in formatting no longer being idempotent. Since + // tabs will get erased anyway, we just clobber them here + // instead of later, and as a result, get more consistent + // results. + .with_indent_style(IndentStyle::Space); + let printed = match docstring_format_source(options, self.quote_char, &codeblob) { + Ok(printed) => printed, + Err(FormatModuleError::FormatError(err)) => return Err(err), + Err( + FormatModuleError::LexError(_) + | FormatModuleError::ParseError(_) + | FormatModuleError::PrintError(_), + ) => { + return Ok(None); + } + }; + // This is a little hokey, but we want to determine whether the + // reformatted code snippet will lead to an overall invalid docstring. + // So attempt to parse it as Python code, but ensure it is wrapped + // within a docstring using the same quotes as the docstring we're in + // right now. + // + // This is an unfortunate stop-gap to attempt to prevent us from + // writing invalid Python due to some oddity of the code snippet within + // a docstring. As we fix corner cases over time, we can perhaps + // remove this check. See the `doctest_invalid_skipped` tests in + // `docstring_code_examples.py` for when this check is relevant. + let wrapped = match self.quote_char { + QuoteChar::Single => std::format!("'''{}'''", printed.as_code()), + QuoteChar::Double => { + std::format!(r#""""{}""""#, printed.as_code()) + } + }; + let result = ruff_python_parser::parse( + &wrapped, + self.f.options().source_type().as_mode(), + "", + ); + // If the resulting code is not valid, then reset and pass through + // the docstring lines as-is. + if result.is_err() { + return Ok(None); + } + let mut lines = printed + .as_code() + .lines() + .map(|line| OutputDocstringLine { + line: Cow::Owned(line.to_string()), + offset: unformatted_first.original.offset, + is_last: false, + }) + .collect::>(); + if let Some(reformatted_last) = lines.last_mut() { + reformatted_last.is_last = unformatted_last.original.is_last(); + } + Ok(Some(lines)) + } +} + +/// Represents a single line in a docstring. 
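For illustration only: the `DocstringCodeLineWidth::Dynamic` arithmetic used above boils down to shrinking the configured line width by the indentation the snippet will receive, clamped at one column. The function below is a hedged, standalone restatement of that calculation; its name and the hard-coded doctest offset of 4 (for the `>>> ` prefix restored after formatting) are assumptions drawn from the surrounding code.

```rust
/// Sketch of the dynamic code line width: subtract the snippet's eventual
/// indentation from the global width, never going below one column.
fn dynamic_code_width(global_width: u16, indent_spaces: u16, is_doctest: bool) -> u16 {
    // Doctest lines regain a four-character prompt after formatting.
    let extra = if is_doctest { 4 } else { 0 };
    let current_indent = indent_spaces.saturating_add(extra);
    std::cmp::max(1, global_width.saturating_sub(current_indent))
}

fn main() {
    // A doctest nested 8 spaces deep under an 88-column limit gets 76 columns.
    assert_eq!(dynamic_code_width(88, 8, true), 76);
    // Pathologically deep indentation still leaves at least one column.
    assert_eq!(dynamic_code_width(88, 200, false), 1);
}
```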
+/// +/// This type is only used to represent the original lines in a docstring. +/// Specifically, the line contained in this type has no changes from the input +/// source. +#[derive(Clone, Copy, Debug)] +struct InputDocstringLine<'src> { + /// The actual text of the line, not including the line terminator. + /// + /// In practice, this line is borrowed when it corresponds to an original + /// unformatted line in a docstring, and owned when it corresponds to a + /// reformatted line (e.g., from a code snippet) in a docstring. + line: &'src str, + + /// The offset into the source document which this line corresponds to. + offset: TextSize, + + /// For any input line that isn't the last line, this contains a reference + /// to the line immediately following this one. + /// + /// This is `None` if and only if this is the last line in the docstring. + next: Option<&'src str>, +} + +impl<'src> InputDocstringLine<'src> { + /// Borrow this input docstring line as an output docstring line. + fn as_output(&self) -> OutputDocstringLine<'src> { + OutputDocstringLine { + line: Cow::Borrowed(self.line), + offset: self.offset, + is_last: self.is_last(), + } + } + + /// Whether this is the last line in the docstring or not. + fn is_last(&self) -> bool { + self.next.is_none() + } +} + +/// Represents a single reformatted code line in a docstring. +/// +/// An input source line may be cheaply converted to an output source line. +/// This is the common case: an input source line is printed pretty much as it +/// is, with perhaps some whitespace normalization applied. The less common +/// case is that the output docstring line owns its `line` because it was +/// produced by reformatting a code snippet. +#[derive(Clone, Debug)] +struct OutputDocstringLine<'src> { + /// The output line. + /// + /// This is an owned variant in precisely the cases where it corresponds to + /// a line from a reformatted code snippet. In other cases, it is borrowed + /// from the input docstring line as-is. + line: Cow<'src, str>, + + /// The offset into the source document which this line corresponds to. + /// Currently, this is an estimate. + offset: TextSize, + + /// Whether this is the last line in a docstring or not. This is determined + /// by whether the last line in the code snippet was also the last line in + /// the docstring. If it was, then it follows that the last line in the + /// reformatted code snippet is also the last line in the docstring. + is_last: bool, +} + +impl<'src> OutputDocstringLine<'src> { + /// Return this reformatted line, but with the given function applied to + /// the text of the line. + fn map(self, mut map: impl FnMut(&str) -> String) -> OutputDocstringLine<'static> { + OutputDocstringLine { + line: Cow::Owned(map(&self.line)), + ..self + } + } +} + +/// A single code example extracted from a docstring. +/// +/// This represents an intermediate state from when the code example was first +/// found all the way up until the point at which the code example has finished +/// and is reformatted. +/// +/// Its default state is "empty." That is, that no code example is currently +/// being collected. +#[derive(Debug, Default)] +struct CodeExample<'src> { + /// The kind of code example being collected, or `None` if no code example + /// has been observed. + /// + /// The kind is split out into a separate type so that we can pass it + /// around and have a guarantee that a code example actually exists. 
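A minimal sketch, outside this diff, of the borrowed-versus-owned split that the two line structs above encode: untouched lines stay borrowed from the source docstring, while reformatted lines must own their text. The `OutputLine`, `passthrough`, and `reformatted` names are invented for the example.

```rust
use std::borrow::Cow;

/// Lines that pass through untouched borrow from the original docstring;
/// lines produced by reformatting a code snippet own their text.
struct OutputLine<'src> {
    line: Cow<'src, str>,
}

fn passthrough(original: &str) -> OutputLine<'_> {
    // No allocation: the output borrows the input line as-is.
    OutputLine {
        line: Cow::Borrowed(original),
    }
}

fn reformatted(code: &str) -> OutputLine<'static> {
    // The reformatted text is freshly built, so it must be owned.
    OutputLine {
        line: Cow::Owned(format!(">>> {code}")),
    }
}

fn main() {
    assert!(matches!(passthrough("plain prose").line, Cow::Borrowed(_)));
    assert!(matches!(reformatted("foo(x)").line, Cow::Owned(_)));
}
```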
+ kind: Option>, +} + +impl<'src> CodeExample<'src> { + /// Attempt to add an original line from a docstring to this code example. + /// + /// Based on the line and the internal state of whether a code example is + /// currently being collected or not, this will push an "action" to the + /// given queue for the caller to perform. The typical case is a "print" + /// action, which instructs the caller to just print the line as though it + /// were not part of a code snippet. + fn add( + &mut self, + original: InputDocstringLine<'src>, + queue: &mut VecDeque>, + ) { + match self.kind.take() { + // There's no existing code example being built, so we look for + // the start of one or otherwise tell the caller we couldn't find + // anything. + None => { + self.add_start(original, queue); + } + Some(CodeExampleKind::Doctest(doctest)) => { + let Some(doctest) = doctest.add_code_line(original, queue) else { + self.add_start(original, queue); + return; + }; + self.kind = Some(CodeExampleKind::Doctest(doctest)); + } + Some(CodeExampleKind::Rst(litblock)) => { + let Some(litblock) = litblock.add_code_line(original, queue) else { + self.add_start(original, queue); + return; + }; + self.kind = Some(CodeExampleKind::Rst(litblock)); + } + Some(CodeExampleKind::Markdown(fenced)) => { + let Some(fenced) = fenced.add_code_line(original, queue) else { + // For Markdown, the last line in a block should be printed + // as-is. Especially since the last line in many Markdown + // fenced code blocks is identical to the start of a code + // block. So if we try to start a new code block with + // the last line, we risk opening another Markdown block + // inappropriately. + return; + }; + self.kind = Some(CodeExampleKind::Markdown(fenced)); + } + } + } + + /// Finish the code example by generating any final actions if applicable. + /// + /// This typically adds an action when the end of a code example coincides + /// with the end of the docstring. + fn finish(&mut self, queue: &mut VecDeque>) { + let Some(kind) = self.kind.take() else { return }; + queue.push_back(CodeExampleAddAction::Format { kind }); + } + + /// Looks for the start of a code example. If one was found, then the given + /// line is kept and added as part of the code example. Otherwise, the line + /// is pushed onto the queue unchanged to be printed as-is. + /// + /// # Panics + /// + /// This panics when the existing code-example is any non-None value. That + /// is, this routine assumes that there is no ongoing code example being + /// collected and looks for the beginning of another code example. + fn add_start( + &mut self, + original: InputDocstringLine<'src>, + queue: &mut VecDeque>, + ) { + assert!(self.kind.is_none(), "expected no existing code example"); + if let Some(doctest) = CodeExampleDoctest::new(original) { + self.kind = Some(CodeExampleKind::Doctest(doctest)); + queue.push_back(CodeExampleAddAction::Kept); + } else if let Some(litblock) = CodeExampleRst::new(original) { + self.kind = Some(CodeExampleKind::Rst(litblock)); + queue.push_back(CodeExampleAddAction::Print { original }); + } else if let Some(fenced) = CodeExampleMarkdown::new(original) { + self.kind = Some(CodeExampleKind::Markdown(fenced)); + queue.push_back(CodeExampleAddAction::Print { original }); + } else { + queue.push_back(CodeExampleAddAction::Print { original }); + } + } +} + +/// The kind of code example observed in a docstring. +#[derive(Debug)] +enum CodeExampleKind<'src> { + /// Code found in Python "doctests." 
+ /// + /// Documentation describing doctests and how they're recognized can be + /// found as part of the Python standard library: + /// https://docs.python.org/3/library/doctest.html. + /// + /// (You'll likely need to read the [regex matching] used internally by the + /// doctest module to determine more precisely how it works.) + /// + /// [regex matching]: https://github.com/python/cpython/blob/0ff6368519ed7542ad8b443de01108690102420a/Lib/doctest.py#L611-L622 + Doctest(CodeExampleDoctest<'src>), + /// Code found from a reStructuredText "[literal block]" or "[code block + /// directive]". + /// + /// [literal block]: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#literal-blocks + /// [code block directive]: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-code-block + Rst(CodeExampleRst<'src>), + /// Code found from a Markdown "[fenced code block]". + /// + /// [fenced code block]: https://spec.commonmark.org/0.30/#fenced-code-blocks + Markdown(CodeExampleMarkdown<'src>), +} + +impl<'src> CodeExampleKind<'src> { + /// Return the lines of code collected so far for this example. + /// + /// This is borrowed mutably because it may need to mutate the code lines + /// based on the state accrued so far. + fn code(&mut self) -> &[CodeExampleLine<'src>] { + match *self { + CodeExampleKind::Doctest(ref doctest) => &doctest.lines, + CodeExampleKind::Rst(ref mut litblock) => litblock.indented_code(), + CodeExampleKind::Markdown(ref fenced) => &fenced.lines, + } + } + + /// Consume this code example and return only the lines that have been + /// accrued so far. + /// + /// This is useful when the code example being collected has been + /// determined to be invalid, and one wants to "give up" and print the + /// original lines through unchanged without attempting formatting. + fn into_code(self) -> Vec> { + match self { + CodeExampleKind::Doctest(doctest) => doctest.lines, + CodeExampleKind::Rst(litblock) => litblock.lines, + CodeExampleKind::Markdown(fenced) => fenced.lines, + } + } + + /// This returns any extra indent that will be added after formatting this + /// code example. + /// + /// The extra indent is expressed in units of ASCII space characters. + fn extra_indent_ascii_spaces(&self) -> u16 { + match *self { + CodeExampleKind::Doctest(_) => 4, + _ => 0, + } + } +} + +/// State corresponding to a single doctest code example found in a docstring. +#[derive(Debug)] +struct CodeExampleDoctest<'src> { + /// The lines that have been seen so far that make up the doctest. + lines: Vec>, + + /// The indent observed in the first doctest line. + /// + /// More precisely, this corresponds to the whitespace observed before + /// the starting `>>> ` (the "PS1 prompt"). + ps1_indent: &'src str, +} + +impl<'src> CodeExampleDoctest<'src> { + /// Looks for a valid doctest PS1 prompt in the line given. + /// + /// If one was found, then state for a new doctest code example is + /// returned, along with the code example line. + fn new(original: InputDocstringLine<'src>) -> Option> { + let trim_start = original.line.trim_start(); + // Prompts must be followed by an ASCII space character[1]. 
+ // + // [1]: https://github.com/python/cpython/blob/0ff6368519ed7542ad8b443de01108690102420a/Lib/doctest.py#L809-L812 + let code = trim_start.strip_prefix(">>> ")?; + let indent_len = original + .line + .len() + .checked_sub(trim_start.len()) + .expect("suffix is <= original"); + let lines = vec![CodeExampleLine { original, code }]; + let ps1_indent = &original.line[..indent_len]; + let doctest = CodeExampleDoctest { lines, ps1_indent }; + Some(doctest) + } + + /// Looks for a valid doctest PS2 prompt in the line given. If one is + /// found, it is added to this code example and ownership of the example is + /// returned to the caller. In this case, callers should continue trying to + /// add PS2 prompt lines. + /// + /// But if one isn't found, then the given line is not part of the code + /// example and ownership of this example is not returned. + /// + /// In either case, relevant actions will be added to the given queue to + /// process. + fn add_code_line( + mut self, + original: InputDocstringLine<'src>, + queue: &mut VecDeque>, + ) -> Option> { + let Some((ps2_indent, ps2_after)) = original.line.split_once("...") else { + queue.push_back(self.into_format_action()); + return None; + }; + // PS2 prompts must have the same indentation as their + // corresponding PS1 prompt.[1] While the 'doctest' Python + // module will error in this case, we just treat this line as a + // non-doctest line. + // + // [1]: https://github.com/python/cpython/blob/0ff6368519ed7542ad8b443de01108690102420a/Lib/doctest.py#L733 + if self.ps1_indent != ps2_indent { + queue.push_back(self.into_format_action()); + return None; + } + // PS2 prompts must be followed by an ASCII space character unless + // it's an otherwise empty line[1]. + // + // [1]: https://github.com/python/cpython/blob/0ff6368519ed7542ad8b443de01108690102420a/Lib/doctest.py#L809-L812 + let code = match ps2_after.strip_prefix(' ') { + None if ps2_after.is_empty() => "", + None => { + queue.push_back(self.into_format_action()); + return None; + } + Some(code) => code, + }; + self.lines.push(CodeExampleLine { original, code }); + queue.push_back(CodeExampleAddAction::Kept); + Some(self) + } + + /// Consume this doctest and turn it into a formatting action. + fn into_format_action(self) -> CodeExampleAddAction<'src> { + CodeExampleAddAction::Format { + kind: CodeExampleKind::Doctest(self), + } + } +} + +/// State corresponding to a single reStructuredText literal block or +/// code-block directive. +/// +/// While a literal block and code-block directive are technically two +/// different reStructuredText constructs, we use one type to represent +/// both because they are exceptionally similar. Basically, they are +/// the same with two main differences: +/// +/// 1. Literal blocks are began with a line that ends with `::`. Code block +/// directives are began with a line like `.. code-block:: python`. +/// 2. Code block directives permit a list of options as a "field list" +/// immediately after the opening line. Literal blocks have no options. +/// +/// Otherwise, everything else, including the indentation structure, is the +/// same. +#[derive(Debug)] +struct CodeExampleRst<'src> { + /// The lines that have been seen so far that make up the block. + lines: Vec>, + + /// The indent of the line "opening" this block measured via + /// `indentation_length`. + /// + /// It can either be the indent of a line ending with `::` (for a literal + /// block) or the indent of a line starting with `.. ` (a directive). 
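As an aside, the PS2 continuation rules handled above can be condensed into a small standalone check. This is a simplified sketch, not the shipped logic: the function name is invented, and the bookkeeping that turns a mismatch into a format action is omitted.

```rust
/// Returns the code portion of a line if it continues a doctest whose
/// `>>> ` prompt was indented by `ps1_indent`, mirroring the prompt rules
/// described in the surrounding comments.
fn doctest_continuation<'a>(line: &'a str, ps1_indent: &str) -> Option<&'a str> {
    let (indent, after) = line.split_once("...")?;
    // PS2 prompts must line up exactly with their PS1 prompt.
    if indent != ps1_indent {
        return None;
    }
    // `...` must be followed by a space unless the rest of the line is empty.
    match after.strip_prefix(' ') {
        Some(code) => Some(code),
        None if after.is_empty() => Some(""),
        None => None,
    }
}

fn main() {
    assert_eq!(doctest_continuation("    ... bar(y)", "    "), Some("bar(y)"));
    // Indented differently than the `>>> ` prompt: not part of the example.
    assert_eq!(doctest_continuation("  ... bar(y)", "    "), None);
    // An ellipsis in prose (no following space, non-empty rest): not a prompt.
    assert_eq!(doctest_continuation("    ...and so on", "    "), None);
}
```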
+ /// + /// The content body of a block needs to be indented more than the line + /// opening the block, so we use this indentation to look for indentation + /// that is "more than" it. + opening_indent: TextSize, + + /// The minimum indent of the block measured via `indentation_length`. + /// + /// This is `None` until the first such line is seen. If no such line is + /// found, then we consider it an invalid block and bail out of trying to + /// find a code snippet. Otherwise, we update this indentation as we see + /// lines in the block with less indentation. (Usually, the minimum is the + /// indentation of the first block, but this is not required.) + /// + /// By construction, all lines part of the block must have at least this + /// indentation. Additionally, it is guaranteed that the indentation length + /// of the opening indent is strictly less than the indentation of the + /// minimum indent. Namely, the block ends once we find a line that has + /// been unindented to at most the indent of the opening line. + /// + /// When the code snippet has been extracted, it is re-built before being + /// reformatted. The minimum indent is stripped from each line when it is + /// re-built. + min_indent: Option, + + /// Whether this is a directive block or not. When not a directive, this is + /// a literal block. The main difference between them is that they start + /// differently. A literal block is started merely by trailing a line with + /// `::`. A directive block is started with `.. code-block:: python`. + /// + /// The other difference is that directive blocks can have options + /// (represented as a reStructuredText "field list") after the beginning of + /// the directive and before the body content of the directive. + is_directive: bool, +} + +impl<'src> CodeExampleRst<'src> { + /// Looks for the start of a reStructuredText [literal block] or [code + /// block directive]. + /// + /// If the start of a block is found, then this returns a correctly + /// initialized reStructuredText block. Callers should print the line as + /// given as it is not retained as part of the block. + /// + /// [literal block]: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#literal-blocks + /// [code block directive]: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-code-block + fn new(original: InputDocstringLine<'src>) -> Option { + let (opening_indent, rest) = indent_with_suffix(original.line); + if rest.starts_with(".. ") { + if let Some(litblock) = CodeExampleRst::new_code_block(original) { + return Some(litblock); + } + // In theory, we could still have something that looks like a literal block, + // but if the line starts with `.. `, then it seems like it probably shouldn't + // be a literal block. For example: + // + // .. code-block:: + // + // cool_stuff( 1 ) + // + // The above is not valid because the `language` argument is missing from + // the `code-block` directive. Because of how we handle it here, the above + // is not treated as a code snippet. + return None; + } + // At this point, we know we didn't find a code block, so the only + // thing we can hope for is a literal block which must end with a `::`. + if !rest.trim_end().ends_with("::") { + return None; + } + Some(CodeExampleRst { + lines: vec![], + opening_indent: indentation_length(opening_indent), + min_indent: None, + is_directive: false, + }) + } + + /// Attempts to create a new reStructuredText code example from a + /// `code-block` or `sourcecode` directive. 
If one couldn't be found, then + /// `None` is returned. + fn new_code_block(original: InputDocstringLine<'src>) -> Option { + // This regex attempts to parse the start of a reStructuredText code + // block [directive]. From the reStructuredText spec: + // + // > Directives are indicated by an explicit markup start (".. ") + // > followed by the directive type, two colons, and whitespace + // > (together called the "directive marker"). Directive types + // > are case-insensitive single words (alphanumerics plus + // > isolated internal hyphens, underscores, plus signs, colons, + // > and periods; no whitespace). + // + // The language names matched here (e.g., `python` or `py`) are taken + // from the [Pygments lexer names], which is referenced from the docs + // for the [code-block] directive. + // + // [directives]: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#directives + // [Pygments lexer names]: https://pygments.org/docs/lexers/ + // [code-block]: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-code-block + static DIRECTIVE_START: Lazy = Lazy::new(|| { + Regex::new( + r"(?m)^\s*\.\. \s*(?i:code-block|sourcecode)::\s*(?i:python|py|python3|py3)$", + ) + .unwrap() + }); + if !DIRECTIVE_START.is_match(original.line) { + return None; + } + Some(CodeExampleRst { + lines: vec![], + opening_indent: indentation_length(original.line), + min_indent: None, + is_directive: true, + }) + } + + /// Returns the code collected in this example as a sequence of lines. + /// + /// The lines returned have the minimum indentation stripped from their + /// prefix in-place. Based on the definition of minimum indentation, this + /// implies there is at least one line in the slice returned with no + /// whitespace prefix. + fn indented_code(&mut self) -> &[CodeExampleLine<'src>] { + let Some(min_indent) = self.min_indent else { + return &[]; + }; + for line in &mut self.lines { + line.code = if line.original.line.trim().is_empty() { + "" + } else { + indentation_trim(min_indent, line.original.line) + }; + } + &self.lines + } + + /// Attempts to add the given line from a docstring to the reStructuredText + /// code snippet being collected. + /// + /// This takes ownership of `self`, and if ownership is returned to the + /// caller, that means the caller should continue trying to add lines to + /// this code snippet. Otherwise, if ownership is not returned, then this + /// implies at least one action was added to the give queue to either reset + /// the code block or format. That is, the code snippet was either found to + /// be invalid or it was completed and should be reformatted. + /// + /// Note that actions may be added even if ownership is returned. For + /// example, empty lines immediately preceding the actual code snippet will + /// be returned back as an action to print them verbatim, but the caller + /// should still continue to try to add lines to this code snippet. + fn add_code_line( + mut self, + original: InputDocstringLine<'src>, + queue: &mut VecDeque>, + ) -> Option> { + // If we haven't started populating the minimum indent yet, then + // we haven't found the first code line and may need to find and + // pass through leading empty lines. + let Some(min_indent) = self.min_indent else { + return self.add_first_line(original, queue); + }; + let (indent, rest) = indent_with_suffix(original.line); + if rest.is_empty() { + // This is the standard way we close a block: when we see + // an empty line followed by an unindented non-empty line. 
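For orientation, the two reStructuredText openers recognized above can be approximated without the regex machinery. The sketch below is deliberately simplified (case-sensitive, no whitespace variations, no field-list handling); `RstOpener` and `rst_opener` are names invented for the example.

```rust
/// Simplified sketch of the reStructuredText opener rules: a directive with
/// a recognized Python language starts a code-block example, any other line
/// ending in `::` starts a literal block, and a `code-block` directive with
/// no language is not treated as a code example at all.
#[derive(Debug, PartialEq)]
enum RstOpener {
    LiteralBlock,
    CodeBlockDirective,
}

fn rst_opener(line: &str) -> Option<RstOpener> {
    let rest = line.trim_start();
    if let Some(directive) = rest.strip_prefix(".. ") {
        let directive = directive.trim_start();
        for marker in ["code-block::", "sourcecode::"] {
            if let Some(lang) = directive.strip_prefix(marker) {
                // Only Python snippets are formatted; a missing language
                // means the directive is ignored.
                return matches!(lang.trim(), "python" | "py" | "python3" | "py3")
                    .then_some(RstOpener::CodeBlockDirective);
            }
        }
        // Other directives never open a literal block.
        return None;
    }
    // A literal block is opened by any line ending in `::`.
    rest.trim_end().ends_with("::").then_some(RstOpener::LiteralBlock)
}

fn main() {
    assert_eq!(rst_opener(".. code-block:: python"), Some(RstOpener::CodeBlockDirective));
    assert_eq!(rst_opener(".. code-block::"), None); // no language
    assert_eq!(rst_opener("Example::"), Some(RstOpener::LiteralBlock));
    assert_eq!(rst_opener("Just prose."), None);
}
```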
+ if let Some(next) = original.next { + let (next_indent, next_rest) = indent_with_suffix(next); + if !next_rest.is_empty() && indentation_length(next_indent) <= self.opening_indent { + self.push_format_action(queue); + return None; + } + } else { + self.push_format_action(queue); + return None; + } + self.push(original); + queue.push_back(CodeExampleAddAction::Kept); + return Some(self); + } + let indent_len = indentation_length(indent); + if indent_len <= self.opening_indent { + // If we find an unindented non-empty line at the same (or less) + // indentation of the opening line at this point, then we know it + // must be wrong because we didn't see it immediately following an + // empty line. + queue.push_back(self.into_reset_action()); + return None; + } else if indent_len < min_indent { + // While the minimum indent is usually the indentation of the first + // line in a code snippet, it is not guaranteed to be the case. + // And indeed, reST is happy to let blocks have a first line whose + // indentation is greater than a subsequent line in the block. The + // only real restriction is that every line in the block must be + // indented at least past the indentation of the `::` line. + self.min_indent = Some(indent_len); + } + self.push(original); + queue.push_back(CodeExampleAddAction::Kept); + Some(self) + } + + /// Looks for the first line in a literal or code block. + /// + /// If a first line is found, then this returns true. Otherwise, an empty + /// line has been found and the caller should pass it through to the + /// docstring unchanged. (Empty lines are allowed to precede a + /// block. And there must be at least one of them.) + /// + /// If the given line is invalid for a reStructuredText block (i.e., no + /// empty lines seen between the opening line), then an error variant is + /// returned. In this case, callers should bail out of parsing this code + /// example. + /// + /// When this returns `true`, it is guaranteed that `self.min_indent` is + /// set to a non-None value. + /// + /// # Panics + /// + /// Callers must only call this when the first indentation has not yet been + /// found. If it has, then this panics. + fn add_first_line( + mut self, + original: InputDocstringLine<'src>, + queue: &mut VecDeque>, + ) -> Option> { + assert!(self.min_indent.is_none()); + + // While the rst spec isn't completely clear on this point, through + // experimentation, I found that multiple empty lines before the first + // non-empty line are ignored. + let (indent, rest) = indent_with_suffix(original.line); + if rest.is_empty() { + queue.push_back(CodeExampleAddAction::Print { original }); + return Some(self); + } + // Ignore parameters in field lists. These can only occur in + // directives, not literal blocks. + if self.is_directive && is_rst_option(rest) { + queue.push_back(CodeExampleAddAction::Print { original }); + return Some(self); + } + let min_indent = indentation_length(indent); + // At this point, we found a non-empty line. The only thing we require + // is that its indentation is strictly greater than the indentation of + // the line containing the `::`. Otherwise, we treat this as an invalid + // block and bail. + if min_indent <= self.opening_indent { + queue.push_back(self.into_reset_action()); + return None; + } + self.min_indent = Some(min_indent); + self.push(original); + queue.push_back(CodeExampleAddAction::Kept); + Some(self) + } + + /// Pushes the given line as part of this code example. + fn push(&mut self, original: InputDocstringLine<'src>) { + // N.B. 
We record the code portion as identical to the original line. + // When we go to reformat the code lines, we change them by removing + // the `min_indent`. This design is necessary because the true value of + // `min_indent` isn't known until the entire block has been parsed. + let code = original.line; + self.lines.push(CodeExampleLine { original, code }); + } + + /// Consume this block and add actions to the give queue for formatting. + /// + /// This may trim lines from the end of the block and add them to the queue + /// for printing as-is. For example, this happens when there are trailing + /// empty lines, as we would like to preserve those since they aren't + /// generally treated as part of the code block. + fn push_format_action(mut self, queue: &mut VecDeque>) { + let has_non_whitespace = |line: &CodeExampleLine| { + line.original + .line + .chars() + .any(|ch| !is_python_whitespace(ch)) + }; + let first_trailing_empty_line = self + .lines + .iter() + .rposition(has_non_whitespace) + .map_or(0, |i| i + 1); + let trailing_lines = self.lines.split_off(first_trailing_empty_line); + queue.push_back(CodeExampleAddAction::Format { + kind: CodeExampleKind::Rst(self), + }); + queue.extend( + trailing_lines + .into_iter() + .map(|line| CodeExampleAddAction::Print { + original: line.original, + }), + ); + } + + /// Consume this block and turn it into a reset action. + /// + /// This occurs when we started collecting a code example from something + /// that looked like a block, but later determined that it wasn't a valid + /// block. + fn into_reset_action(self) -> CodeExampleAddAction<'src> { + CodeExampleAddAction::Reset { code: self.lines } + } +} + +/// Represents a code example extracted from a Markdown [fenced code block]. +/// +/// [fenced code block]: https://spec.commonmark.org/0.30/#fenced-code-blocks +#[derive(Debug)] +struct CodeExampleMarkdown<'src> { + /// The lines that have been seen so far that make up the block. + lines: Vec>, + + /// The indent of the line "opening" fence of this block measured via + /// `indentation_length`. + /// + /// This indentation is trimmed from the indentation of every line in the + /// body of the code block, + opening_fence_indent: TextSize, + + /// The kind of fence, backticks or tildes, used for this block. We need to + /// keep track of which kind was used to open the block in order to look + /// for a correct close of the block. + fence_kind: MarkdownFenceKind, + + /// The size of the fence, in codepoints, in the opening line. A correct + /// close of the fence must use *at least* this many characters. In other + /// words, this is the number of backticks or tildes that opened the fenced + /// code block. + fence_len: usize, +} + +impl<'src> CodeExampleMarkdown<'src> { + /// Looks for the start of a Markdown [fenced code block]. + /// + /// If the start of a block is found, then this returns a correctly + /// initialized Markdown code block. Callers should print the line as given + /// as it is not retained as part of the block. + /// + /// [fenced code block]: https://spec.commonmark.org/0.30/#fenced-code-blocks + fn new(original: InputDocstringLine<'src>) -> Option> { + static FENCE_START: Lazy = Lazy::new(|| { + Regex::new( + r"(?xm) + ^ + (?: + # In the backtick case, info strings (following the fence) + # cannot contain backticks themselves, since it would + # introduce ambiguity with parsing inline code. 
In other + # words, if we didn't specifically exclude matching ` + # in the info string for backtick fences, then we might + # erroneously consider something to be a code fence block + # that is actually inline code. + # + # NOTE: The `ticklang` and `tildlang` capture groups are + # currently unused, but there was some discussion about not + # assuming unlabeled blocks were Python. At the time of + # writing, we do assume unlabeled blocks are Python, but + # one could inspect the `ticklang` and `tildlang` capture + # groups to determine whether the block is labeled or not. + (?```+)(?:\s*(?(?i:python|py|python3|py3))[^`]*)? + | + (?~~~+)(?:\s*(?(?i:python|py|python3|py3))\p{any}*)? + ) + $ + ", + ) + .unwrap() + }); + + let (opening_fence_indent, rest) = indent_with_suffix(original.line); + // Quit quickly in the vast majority of cases. + if !rest.starts_with("```") && !rest.starts_with("~~~") { + return None; + } + + let caps = FENCE_START.captures(rest)?; + let (fence_kind, fence_len) = if let Some(ticks) = caps.name("ticks") { + (MarkdownFenceKind::Backtick, ticks.as_str().chars().count()) + } else { + let tildes = caps + .name("tilds") + .expect("no ticks means it must be tildes"); + (MarkdownFenceKind::Tilde, tildes.as_str().chars().count()) + }; + Some(CodeExampleMarkdown { + lines: vec![], + opening_fence_indent: indentation_length(opening_fence_indent), + fence_kind, + fence_len, + }) + } + + /// Attempts to add the given line from a docstring to the Markdown code + /// snippet being collected. + /// + /// In this case, ownership is only not returned when the end of the block + /// was found, or if the block was determined to be invalid. A formatting + /// action is then pushed onto the queue. + fn add_code_line( + mut self, + original: InputDocstringLine<'src>, + queue: &mut VecDeque>, + ) -> Option> { + if self.is_end(original) { + queue.push_back(self.into_format_action()); + queue.push_back(CodeExampleAddAction::Print { original }); + return None; + } + // When a line in a Markdown fenced closed block is indented *less* + // than the opening indent, we treat the entire block as invalid. + // + // I believe that code blocks of this form are actually valid Markdown + // in some cases, but the interplay between it and our docstring + // whitespace normalization leads to undesirable outcomes. For example, + // if the line here is unindented out beyond the initial indent of the + // docstring itself, then this causes the entire docstring to have + // its indent normalized. And, at the time of writing, a subsequent + // formatting run undoes this indentation, thus violating idempotency. + if !original.line.trim_whitespace().is_empty() + && indentation_length(original.line) < self.opening_fence_indent + { + queue.push_back(self.into_reset_action()); + queue.push_back(CodeExampleAddAction::Print { original }); + return None; + } + self.push(original); + queue.push_back(CodeExampleAddAction::Kept); + Some(self) + } + + /// Returns true when given line ends this fenced code block. + fn is_end(&self, original: InputDocstringLine<'src>) -> bool { + let (_, rest) = indent_with_suffix(original.line); + // We can bail early if we don't have at least three backticks or + // tildes. + if !rest.starts_with("```") && !rest.starts_with("~~~") { + return false; + } + // We do need to check that we have the right number of + // backticks/tildes... 
+ let fence_len = rest + .chars() + .take_while(|&ch| ch == self.fence_kind.to_char()) + .count(); + // A closing fence only needs *at least* the number of ticks/tildes + // that are in the opening fence. + if fence_len < self.fence_len { + return false; + } + // And, also, there can only be trailing whitespace. Nothing else. + assert!( + self.fence_kind.to_char().is_ascii(), + "fence char should be ASCII", + ); + if !rest[fence_len..].chars().all(is_python_whitespace) { + return false; + } + true + } + + /// Pushes the given line as part of this code example. + fn push(&mut self, original: InputDocstringLine<'src>) { + // Unlike reStructuredText blocks, for Markdown fenced code blocks, the + // indentation that we want to strip from each line is known when the + // block is opened. So we can strip it as we collect lines. + let code = indentation_trim(self.opening_fence_indent, original.line); + self.lines.push(CodeExampleLine { original, code }); + } + + /// Consume this block and turn it into a reset action. + /// + /// This occurs when we started collecting a code example from something + /// that looked like a block, but later determined that it wasn't a valid + /// block. + fn into_format_action(self) -> CodeExampleAddAction<'src> { + // Note that unlike in reStructuredText blocks, if a Markdown fenced + // code block is unclosed, then *all* remaining lines should be treated + // as part of the block[1]: + // + // > If the end of the containing block (or document) is reached and no + // > closing code fence has been found, the code block contains all of the + // > lines after the opening code fence until the end of the containing + // > block (or document). + // + // This means that we don't need to try and trim trailing empty lines. + // Those will get fed into the code formatter and ultimately stripped, + // which is what you'd expect if those lines are treated as part of the + // block. + // + // [1]: https://spec.commonmark.org/0.30/#fenced-code-blocks + CodeExampleAddAction::Format { + kind: CodeExampleKind::Markdown(self), + } + } + + /// Consume this block and turn it into a reset action. + /// + /// This occurs when we started collecting a code example from something + /// that looked like a code fence, but later determined that it wasn't a + /// valid. + fn into_reset_action(self) -> CodeExampleAddAction<'src> { + CodeExampleAddAction::Reset { code: self.lines } + } +} + +/// The kind of fence used in a Markdown code block. +/// +/// This indicates that the fence is either surrounded by fences made from +/// backticks, or fences made from tildes. +#[derive(Clone, Copy, Debug)] +enum MarkdownFenceKind { + Backtick, + Tilde, +} + +impl MarkdownFenceKind { + /// Convert the fence kind to the actual character used to build the fence. + fn to_char(self) -> char { + match self { + MarkdownFenceKind::Backtick => '`', + MarkdownFenceKind::Tilde => '~', + } + } +} + +/// A single line in a code example found in a docstring. +/// +/// A code example line exists prior to formatting, and is thus in full +/// correspondence with the original lines from the docstring. Indeed, a +/// code example line includes both the original line *and* the actual code +/// extracted from the line. For example, if a line in a docstring is `>>> +/// foo(x)`, then the original line is `>>> foo(x)` and the code portion is +/// `foo(x)`. 
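A quick illustrative sketch of the fence-closing rule implemented in `is_end` above, with an invented `closes_fence` helper; the real code additionally tracks the fence kind and the opening indentation.

```rust
/// A closing fence must reuse the opening fence character, be at least as
/// long as the opening fence, and carry nothing but whitespace afterwards.
fn closes_fence(line: &str, fence_char: char, opening_len: usize) -> bool {
    let rest = line.trim_start();
    let len = rest.chars().take_while(|&ch| ch == fence_char).count();
    // Fence characters are ASCII, so the char count doubles as a byte index.
    len >= opening_len && rest[len..].trim().is_empty()
}

fn main() {
    // Opened with a fence of three backticks:
    assert!(closes_fence("```", '`', 3));
    assert!(closes_fence("   `````", '`', 3)); // longer fences also close
    assert!(!closes_fence("``", '`', 3)); // too short
    assert!(!closes_fence("~~~", '`', 3)); // wrong fence character
    assert!(!closes_fence("``` trailing text", '`', 3)); // junk after the fence
}
```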
+/// +/// The original line is kept for things like offset information, but also +/// because it may still be needed if it turns out that the code snippet is +/// not valid or otherwise could not be formatted. In which case, the original +/// lines are printed as-is. +#[derive(Debug)] +struct CodeExampleLine<'src> { + /// The normalized (but original) line from the doc string. This might, for + /// example, contain a `>>> ` or `... ` prefix if this code example is a + /// doctest. + original: InputDocstringLine<'src>, + + /// The code extracted from the line. + code: &'src str, +} + +/// An action that a caller should perform after attempting to add a line from +/// a docstring to a code example. +/// +/// Callers are expected to add every line from a docstring to a code example, +/// and the state of the code example (and the line itself) will determine +/// how the caller should react. +#[derive(Debug)] +enum CodeExampleAddAction<'src> { + /// The line added was ignored by `CodeExample` and the caller should print + /// it to the formatter as-is. + /// + /// This is the common case. That is, most lines in most docstrings are not + /// part of a code example. + Print { original: InputDocstringLine<'src> }, + /// The line added was kept by `CodeExample` as part of a new or existing + /// code example. + /// + /// When this occurs, callers should not try to format the line and instead + /// move on to the next line. + Kept, + /// The line added indicated that the code example is finished and should + /// be formatted and printed. The line added is not treated as part of + /// the code example. + Format { + /// The kind of code example that was found. + kind: CodeExampleKind<'src>, + }, + /// This occurs when adding a line to an existing code example + /// results in that code example becoming invalid. In this case, + /// we don't want to treat it as a code example, but instead write + /// back the lines to the docstring unchanged. + #[allow(dead_code)] // FIXME: remove when reStructuredText support is added + Reset { + /// The lines of code that we collected but should be printed back to + /// the docstring as-is and not formatted. + code: Vec>, + }, +} + +/// Formats the given source code using the given options. +/// +/// The given quote style should correspond to the style used by the docstring +/// containing the code snippet being formatted. The formatter will use this +/// information to invert the quote style of any such strings contained within +/// the code snippet in order to avoid writing invalid Python code. +/// +/// This is similar to the top-level formatting entrypoint, except this +/// explicitly sets the context to indicate that formatting is taking place +/// inside of a docstring. 
+fn docstring_format_source( + options: crate::PyFormatOptions, + docstring_quote_style: QuoteChar, + source: &str, +) -> Result { + use ruff_python_parser::AsMode; + + let source_type = options.source_type(); + let (tokens, comment_ranges) = ruff_python_index::tokens_and_ranges(source, source_type)?; + let module = + ruff_python_parser::parse_ok_tokens(tokens, source, source_type.as_mode(), "")?; + let source_code = ruff_formatter::SourceCode::new(source); + let comments = crate::Comments::from_ast(&module, source_code, &comment_ranges); + let locator = Locator::new(source); + + let ctx = PyFormatContext::new(options, locator.contents(), comments) + .in_docstring(docstring_quote_style); + let formatted = crate::format!(ctx, [module.format()])?; + formatted + .context() + .comments() + .assert_all_formatted(source_code); + Ok(formatted.print()?) +} + +/// If the last line of the docstring is `content" """` or `content\ """`, we need a chaperone space +/// that avoids `content""""` and `content\"""`. This does only applies to un-escaped backslashes, +/// so `content\\ """` doesn't need a space while `content\\\ """` does. +fn needs_chaperone_space(normalized: &NormalizedString, trim_end: &str) -> bool { + trim_end.ends_with(normalized.quotes.quote_char.as_char()) + || trim_end.chars().rev().take_while(|c| *c == '\\').count() % 2 == 1 +} + +/// For docstring indentation, black counts spaces as 1 and tabs by increasing the indentation up +/// to the next multiple of 8. This is effectively a port of +/// [`str.expandtabs`](https://docs.python.org/3/library/stdtypes.html#str.expandtabs), +/// which black [calls with the default tab width of 8](https://github.com/psf/black/blob/c36e468794f9256d5e922c399240d49782ba04f1/src/black/strings.py#L61). +fn indentation_length(line: &str) -> TextSize { + let mut indentation = 0u32; + for char in line.chars() { + if char == '\t' { + // Pad to the next multiple of tab_width + indentation += 8 - (indentation.rem_euclid(8)); + } else if char.is_whitespace() { + indentation += u32::from(char.text_len()); + } else { + break; + } + } + TextSize::new(indentation) +} + +/// Trims at most `indent_len` indentation from the beginning of `line`. +/// +/// This treats indentation in precisely the same way as `indentation_length`. +/// As such, it is expected that `indent_len` is computed from +/// `indentation_length`. This is useful when one needs to trim some minimum +/// level of indentation from a code snippet collected from a docstring before +/// attempting to reformat it. +fn indentation_trim(indent_len: TextSize, line: &str) -> &str { + let mut seen_indent_len = 0u32; + let mut trimmed = line; + for char in line.chars() { + if seen_indent_len >= indent_len.to_u32() { + return trimmed; + } + if char == '\t' { + // Pad to the next multiple of tab_width + seen_indent_len += 8 - (seen_indent_len.rem_euclid(8)); + trimmed = &trimmed[1..]; + } else if char.is_whitespace() { + seen_indent_len += u32::from(char.text_len()); + trimmed = &trimmed[char.len_utf8()..]; + } else { + break; + } + } + trimmed +} + +/// Returns the indentation of the given line and everything following it. +fn indent_with_suffix(line: &str) -> (&str, &str) { + let suffix = line.trim_whitespace_start(); + let indent_len = line + .len() + .checked_sub(suffix.len()) + .expect("suffix <= line"); + let indent = &line[..indent_len]; + (indent, suffix) +} + +/// Returns true if this line looks like a reStructuredText option in a +/// field list. 
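To make the chaperone-space rule above concrete, here is a standalone sketch of the parity check on trailing backslashes; it mirrors `needs_chaperone_space` but takes plain arguments instead of the normalized string type, so the signature is an assumption for the example.

```rust
/// A trailing quote character or an odd (i.e. unescaped) run of trailing
/// backslashes would fuse with the closing quotes, so a single space is
/// kept as a buffer.
fn needs_chaperone_space(trimmed_last_line: &str, quote_char: char) -> bool {
    let trailing_backslashes = trimmed_last_line
        .chars()
        .rev()
        .take_while(|&c| c == '\\')
        .count();
    trimmed_last_line.ends_with(quote_char) || trailing_backslashes % 2 == 1
}

fn main() {
    // `content"` would otherwise render as `content""""`.
    assert!(needs_chaperone_space("content\"", '"'));
    // A single (unescaped) trailing backslash would escape the closing quote.
    assert!(needs_chaperone_space("content\\", '"'));
    // Two backslashes are a literal backslash; no space needed.
    assert!(!needs_chaperone_space("content\\\\", '"'));
    assert!(!needs_chaperone_space("content", '"'));
}
```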
+/// +/// That is, a line that looks like `:name: optional-value`. +fn is_rst_option(line: &str) -> bool { + let line = line.trim_start(); + if !line.starts_with(':') { + return false; + } + line.chars() + .take_while(|&ch| !is_python_whitespace(ch)) + .any(|ch| ch == ':') +} + +#[cfg(test)] +mod tests { + use ruff_text_size::TextSize; + + use super::indentation_length; + + #[test] + fn test_indentation_like_black() { + assert_eq!(indentation_length("\t \t \t"), TextSize::new(24)); + assert_eq!(indentation_length("\t \t"), TextSize::new(24)); + assert_eq!(indentation_length("\t\t\t"), TextSize::new(24)); + assert_eq!(indentation_length(" "), TextSize::new(4)); + } +} diff --git a/crates/ruff_python_formatter/src/expression/string/mod.rs b/crates/ruff_python_formatter/src/string/mod.rs similarity index 72% rename from crates/ruff_python_formatter/src/expression/string/mod.rs rename to crates/ruff_python_formatter/src/string/mod.rs index 5a7a6a1b46..57c11cd622 100644 --- a/crates/ruff_python_formatter/src/expression/string/mod.rs +++ b/crates/ruff_python_formatter/src/string/mod.rs @@ -5,35 +5,41 @@ use bitflags::bitflags; use ruff_formatter::{format_args, write}; use ruff_python_ast::AnyNodeRef; use ruff_python_ast::{ - self as ast, ExprBytesLiteral, ExprFString, ExprStringLiteral, ExpressionRef, + self as ast, Expr, ExprBytesLiteral, ExprFString, ExprStringLiteral, ExpressionRef, }; use ruff_source_file::Locator; use ruff_text_size::{Ranged, TextLen, TextRange, TextSize}; use crate::comments::{leading_comments, trailing_comments}; -use crate::expression::parentheses::{ - in_parentheses_only_group, in_parentheses_only_soft_line_break_or_space, -}; -use crate::expression::Expr; +use crate::expression::expr_f_string::f_string_quoting; +use crate::expression::parentheses::in_parentheses_only_soft_line_break_or_space; +use crate::other::f_string::FormatFString; +use crate::other::string_literal::{FormatStringLiteral, StringLiteralKind}; use crate::prelude::*; use crate::QuoteStyle; -mod docstring; +pub(crate) mod docstring; -#[derive(Copy, Clone, Debug)] -enum Quoting { +#[derive(Copy, Clone, Debug, Default)] +pub(crate) enum Quoting { + #[default] CanChange, Preserve, } +/// Represents any kind of string expression. This could be either a string, +/// bytes or f-string. #[derive(Clone, Debug)] -pub(super) enum AnyString<'a> { +pub(crate) enum AnyString<'a> { String(&'a ExprStringLiteral), Bytes(&'a ExprBytesLiteral), FString(&'a ExprFString), } impl<'a> AnyString<'a> { + /// Creates a new [`AnyString`] from the given [`Expr`]. + /// + /// Returns `None` if the expression is not either a string, bytes or f-string. pub(crate) fn from_expression(expression: &'a Expr) -> Option> { match expression { Expr::StringLiteral(string) => Some(AnyString::String(string)), @@ -43,36 +49,8 @@ impl<'a> AnyString<'a> { } } - fn quoting(&self, locator: &Locator) -> Quoting { - match self { - Self::String(_) | Self::Bytes(_) => Quoting::CanChange, - Self::FString(f_string) => { - let unprefixed = locator - .slice(f_string.range) - .trim_start_matches(|c| c != '"' && c != '\''); - let triple_quoted = - unprefixed.starts_with(r#"""""#) || unprefixed.starts_with(r"'''"); - if f_string.value.elements().any(|value| match value { - Expr::FormattedValue(ast::ExprFormattedValue { range, .. 
}) => { - let string_content = locator.slice(*range); - if triple_quoted { - string_content.contains(r#"""""#) || string_content.contains("'''") - } else { - string_content.contains(['"', '\'']) - } - } - _ => false, - }) { - Quoting::Preserve - } else { - Quoting::CanChange - } - } - } - } - /// Returns `true` if the string is implicitly concatenated. - pub(super) fn is_implicit_concatenated(&self) -> bool { + pub(crate) fn is_implicit_concatenated(&self) -> bool { match self { Self::String(ExprStringLiteral { value, .. }) => value.is_implicit_concatenated(), Self::Bytes(ExprBytesLiteral { value, .. }) => value.is_implicit_concatenated(), @@ -80,21 +58,38 @@ impl<'a> AnyString<'a> { } } - fn parts(&self) -> Vec> { + /// Returns the quoting to be used for this string. + fn quoting(&self, locator: &Locator<'_>) -> Quoting { match self { - Self::String(ExprStringLiteral { value, .. }) => { - value.parts().map(AnyStringPart::String).collect() - } + Self::String(_) | Self::Bytes(_) => Quoting::CanChange, + Self::FString(f_string) => f_string_quoting(f_string, locator), + } + } + + /// Returns a vector of all the [`AnyStringPart`] of this string. + fn parts(&self, quoting: Quoting) -> Vec> { + match self { + Self::String(ExprStringLiteral { value, .. }) => value + .iter() + .map(|part| AnyStringPart::String { + part, + layout: StringLiteralKind::String, + }) + .collect(), Self::Bytes(ExprBytesLiteral { value, .. }) => { - value.parts().map(AnyStringPart::Bytes).collect() + value.iter().map(AnyStringPart::Bytes).collect() } Self::FString(ExprFString { value, .. }) => value - .parts() + .iter() .map(|f_string_part| match f_string_part { - ast::FStringPart::Literal(string_literal) => { - AnyStringPart::String(string_literal) - } - ast::FStringPart::FString(f_string) => AnyStringPart::FString(f_string), + ast::FStringPart::Literal(string_literal) => AnyStringPart::String { + part: string_literal, + layout: StringLiteralKind::InImplicitlyConcatenatedFString(quoting), + }, + ast::FStringPart::FString(f_string) => AnyStringPart::FString { + part: f_string, + quoting, + }, }) .collect(), } @@ -131,19 +126,29 @@ impl<'a> From<&AnyString<'a>> for ExpressionRef<'a> { } } +/// Represents any kind of string which is part of an implicitly concatenated +/// string. This could be either a string, bytes or f-string. +/// +/// This is constructed from the [`AnyString::parts`] method on [`AnyString`]. #[derive(Clone, Debug)] enum AnyStringPart<'a> { - String(&'a ast::StringLiteral), + String { + part: &'a ast::StringLiteral, + layout: StringLiteralKind, + }, Bytes(&'a ast::BytesLiteral), - FString(&'a ast::FString), + FString { + part: &'a ast::FString, + quoting: Quoting, + }, } impl<'a> From<&AnyStringPart<'a>> for AnyNodeRef<'a> { fn from(value: &AnyStringPart<'a>) -> Self { match value { - AnyStringPart::String(part) => AnyNodeRef::StringLiteral(part), + AnyStringPart::String { part, .. } => AnyNodeRef::StringLiteral(part), AnyStringPart::Bytes(part) => AnyNodeRef::BytesLiteral(part), - AnyStringPart::FString(part) => AnyNodeRef::FString(part), + AnyStringPart::FString { part, .. } => AnyNodeRef::FString(part), } } } @@ -151,98 +156,33 @@ impl<'a> From<&AnyStringPart<'a>> for AnyNodeRef<'a> { impl Ranged for AnyStringPart<'_> { fn range(&self) -> TextRange { match self { - Self::String(part) => part.range(), + Self::String { part, .. } => part.range(), Self::Bytes(part) => part.range(), - Self::FString(part) => part.range(), + Self::FString { part, .. 
} => part.range(), } } } -pub(super) struct FormatString<'a> { - string: &'a AnyString<'a>, - layout: StringLayout, -} - -#[derive(Default, Copy, Clone, Debug)] -pub enum StringLayout { - #[default] - Default, - DocString, - /// An implicit concatenated string in a binary like (e.g. `a + b` or `a < b`) expression. - /// - /// Formats the implicit concatenated string parts without the enclosing group because the group - /// is added by the binary like formatting. - ImplicitConcatenatedStringInBinaryLike, -} - -impl<'a> FormatString<'a> { - pub(super) fn new(string: &'a AnyString<'a>) -> Self { - Self { - string, - layout: StringLayout::Default, - } - } - - pub(super) fn with_layout(mut self, layout: StringLayout) -> Self { - self.layout = layout; - self - } -} - -impl<'a> Format> for FormatString<'a> { +impl Format> for AnyStringPart<'_> { fn fmt(&self, f: &mut PyFormatter) -> FormatResult<()> { - let parent_docstring_quote_style = f.context().docstring(); - let locator = f.context().locator(); - let result = match self.layout { - StringLayout::Default => { - if self.string.is_implicit_concatenated() { - in_parentheses_only_group(&FormatStringContinuation::new(self.string)).fmt(f) - } else { - StringPart::from_source(self.string.range(), &locator) - .normalize( - self.string.quoting(&locator), - &locator, - f.options().quote_style(), - parent_docstring_quote_style, - ) - .fmt(f) - } + match self { + AnyStringPart::String { part, layout } => { + FormatStringLiteral::new(part, *layout).fmt(f) } - StringLayout::DocString => { - let string_part = StringPart::from_source(self.string.range(), &locator); - let normalized = string_part.normalize( - Quoting::CanChange, - &locator, - f.options().quote_style(), - parent_docstring_quote_style, - ); - docstring::format(&normalized, f) - } - StringLayout::ImplicitConcatenatedStringInBinaryLike => { - FormatStringContinuation::new(self.string).fmt(f) - } - }; - // TODO(dhruvmanila): With PEP 701, comments can be inside f-strings. - // This is to mark all of those comments as formatted but we need to - // figure out how to handle them. Note that this needs to be done only - // after the f-string is formatted, so only for all the non-formatted - // comments. - if let AnyString::FString(fstring) = self.string { - let comments = f.context().comments(); - fstring.value.elements().for_each(|value| { - comments.mark_verbatim_node_comments_formatted(value.into()); - }); + AnyStringPart::Bytes(bytes_literal) => bytes_literal.format().fmt(f), + AnyStringPart::FString { part, quoting } => FormatFString::new(part, *quoting).fmt(f), } - result } } -struct FormatStringContinuation<'a> { +/// Formats any implicitly concatenated string. This could be any valid combination +/// of string, bytes or f-string literals. 
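///
/// For example (illustrative), the parts of an implicitly concatenated value
/// such as `"a" f"{b}" "c"` are formatted one at a time and joined with a
/// space, or with soft line breaks when the surrounding group is
/// parenthesized and the sequence does not fit on one line.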
+pub(crate) struct FormatStringContinuation<'a> { string: &'a AnyString<'a>, } impl<'a> FormatStringContinuation<'a> { - fn new(string: &'a AnyString<'a>) -> Self { + pub(crate) fn new(string: &'a AnyString<'a>) -> Self { Self { string } } } @@ -250,24 +190,15 @@ impl<'a> FormatStringContinuation<'a> { impl Format> for FormatStringContinuation<'_> { fn fmt(&self, f: &mut PyFormatter) -> FormatResult<()> { let comments = f.context().comments().clone(); - let locator = f.context().locator(); - let in_docstring = f.context().docstring(); - let quote_style = f.options().quote_style(); + let quoting = self.string.quoting(&f.context().locator()); let mut joiner = f.join_with(in_parentheses_only_soft_line_break_or_space()); - for part in self.string.parts() { - let normalized = StringPart::from_source(part.range(), &locator).normalize( - self.string.quoting(&locator), - &locator, - quote_style, - in_docstring, - ); - + for part in self.string.parts(quoting) { joiner.entry(&format_args![ line_suffix_boundary(), leading_comments(comments.leading(&part)), - normalized, + part, trailing_comments(comments.trailing(&part)) ]); } @@ -277,7 +208,7 @@ impl Format> for FormatStringContinuation<'_> { } #[derive(Debug)] -struct StringPart { +pub(crate) struct StringPart { /// The prefix. prefix: StringPrefix, @@ -289,7 +220,7 @@ struct StringPart { } impl StringPart { - fn from_source(range: TextRange, locator: &Locator) -> Self { + pub(crate) fn from_source(range: TextRange, locator: &Locator) -> Self { let string_content = locator.slice(range); let prefix = StringPrefix::parse(string_content); @@ -316,16 +247,14 @@ impl StringPart { /// snippet within the docstring. The quote style should correspond to the /// style of quotes used by said docstring. Normalization will ensure the /// quoting styles don't conflict. - fn normalize<'a>( + pub(crate) fn normalize<'a>( self, quoting: Quoting, locator: &'a Locator, configured_style: QuoteStyle, - parent_docstring_quote_style: Option, + parent_docstring_quote_char: Option, ) -> NormalizedString<'a> { - // Per PEP 8 and PEP 257, always prefer double quotes for docstrings - // and triple-quoted strings. (We assume docstrings are always - // triple-quoted.) + // Per PEP 8, always prefer double quotes for triple-quoted strings. let preferred_style = if self.quotes.triple { // ... unless we're formatting a code snippet inside a docstring, // then we specifically want to invert our quote style to avoid @@ -372,8 +301,8 @@ impl StringPart { // Overall this is a bit of a corner case and just inverting the // style from what the parent ultimately decided upon works, even // if it doesn't have perfect alignment with PEP8. 
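            // Illustrative example: for a snippet embedded in a docstring
            // quoted with `'''`, a triple-quoted string inside the snippet is
            // emitted with `"""`, so it cannot close the surrounding
            // docstring prematurely.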
- if let Some(style) = parent_docstring_quote_style { - style.invert() + if let Some(quote) = parent_docstring_quote_char { + QuoteStyle::from(quote.invert()) } else { QuoteStyle::Double } @@ -386,10 +315,14 @@ impl StringPart { let quotes = match quoting { Quoting::Preserve => self.quotes, Quoting::CanChange => { - if self.prefix.is_raw_string() { - choose_quotes_raw(raw_content, self.quotes, preferred_style) + if let Some(preferred_quote) = QuoteChar::from_style(preferred_style) { + if self.prefix.is_raw_string() { + choose_quotes_raw(raw_content, self.quotes, preferred_quote) + } else { + choose_quotes(raw_content, self.quotes, preferred_quote) + } } else { - choose_quotes(raw_content, self.quotes, preferred_style) + self.quotes } } }; @@ -406,7 +339,7 @@ impl StringPart { } #[derive(Debug)] -struct NormalizedString<'a> { +pub(crate) struct NormalizedString<'a> { prefix: StringPrefix, /// The quotes of the normalized string (preferred quotes) @@ -442,7 +375,7 @@ impl Format> for NormalizedString<'_> { bitflags! { #[derive(Copy, Clone, Debug, PartialEq, Eq)] - pub(super) struct StringPrefix: u8 { + pub(crate) struct StringPrefix: u8 { const UNICODE = 0b0000_0001; /// `r"test"` const RAW = 0b0000_0010; @@ -454,7 +387,7 @@ bitflags! { } impl StringPrefix { - pub(super) fn parse(input: &str) -> StringPrefix { + pub(crate) fn parse(input: &str) -> StringPrefix { let chars = input.chars(); let mut prefix = StringPrefix::empty(); @@ -479,7 +412,7 @@ impl StringPrefix { prefix } - pub(super) const fn text_len(self) -> TextSize { + pub(crate) const fn text_len(self) -> TextSize { TextSize::new(self.bits().count_ones()) } @@ -524,9 +457,9 @@ impl Format> for StringPrefix { fn choose_quotes_raw( input: &str, quotes: StringQuotes, - preferred_style: QuoteStyle, + preferred_quote: QuoteChar, ) -> StringQuotes { - let preferred_quote_char = preferred_style.as_char(); + let preferred_quote_char = preferred_quote.as_char(); let mut chars = input.chars().peekable(); let contains_unescaped_configured_quotes = loop { match chars.next() { @@ -564,10 +497,10 @@ fn choose_quotes_raw( StringQuotes { triple: quotes.triple, - style: if contains_unescaped_configured_quotes { - quotes.style + quote_char: if contains_unescaped_configured_quotes { + quotes.quote_char } else { - preferred_style + preferred_quote }, } } @@ -580,14 +513,14 @@ fn choose_quotes_raw( /// For triple quoted strings, the preferred quote style is always used, unless the string contains /// a triplet of the quote character (e.g., if double quotes are preferred, double quotes will be /// used unless the string contains `"""`). -fn choose_quotes(input: &str, quotes: StringQuotes, preferred_style: QuoteStyle) -> StringQuotes { - let style = if quotes.triple { +fn choose_quotes(input: &str, quotes: StringQuotes, preferred_quote: QuoteChar) -> StringQuotes { + let quote = if quotes.triple { // True if the string contains a triple quote sequence of the configured quote style. let mut uses_triple_quotes = false; let mut chars = input.chars().peekable(); while let Some(c) = chars.next() { - let preferred_quote_char = preferred_style.as_char(); + let preferred_quote_char = preferred_quote.as_char(); match c { '\\' => { if matches!(chars.peek(), Some('"' | '\\')) { @@ -635,9 +568,9 @@ fn choose_quotes(input: &str, quotes: StringQuotes, preferred_style: QuoteStyle) if uses_triple_quotes { // String contains a triple quote sequence of the configured quote style. // Keep the existing quote style. 
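            // Illustrative example: with double quotes preferred, a literal
            // like `'''he said """hello"""'''` keeps its single quotes, since
            // switching would nest `"""` inside a `"""`-delimited string.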
- quotes.style + quotes.quote_char } else { - preferred_style + preferred_quote } } else { let mut single_quotes = 0u32; @@ -657,19 +590,19 @@ fn choose_quotes(input: &str, quotes: StringQuotes, preferred_style: QuoteStyle) } } - match preferred_style { - QuoteStyle::Single => { + match preferred_quote { + QuoteChar::Single => { if single_quotes > double_quotes { - QuoteStyle::Double + QuoteChar::Double } else { - QuoteStyle::Single + QuoteChar::Single } } - QuoteStyle::Double => { + QuoteChar::Double => { if double_quotes > single_quotes { - QuoteStyle::Single + QuoteChar::Single } else { - QuoteStyle::Double + QuoteChar::Double } } } @@ -677,29 +610,32 @@ fn choose_quotes(input: &str, quotes: StringQuotes, preferred_style: QuoteStyle) StringQuotes { triple: quotes.triple, - style, + quote_char: quote, } } #[derive(Copy, Clone, Debug)] -pub(super) struct StringQuotes { +pub(crate) struct StringQuotes { triple: bool, - style: QuoteStyle, + quote_char: QuoteChar, } impl StringQuotes { - pub(super) fn parse(input: &str) -> Option { + pub(crate) fn parse(input: &str) -> Option { let mut chars = input.chars(); let quote_char = chars.next()?; - let style = QuoteStyle::try_from(quote_char).ok()?; + let quote = QuoteChar::try_from(quote_char).ok()?; let triple = chars.next() == Some(quote_char) && chars.next() == Some(quote_char); - Some(Self { triple, style }) + Some(Self { + triple, + quote_char: quote, + }) } - pub(super) const fn is_triple(self) -> bool { + pub(crate) const fn is_triple(self) -> bool { self.triple } @@ -714,17 +650,74 @@ impl StringQuotes { impl Format> for StringQuotes { fn fmt(&self, f: &mut PyFormatter) -> FormatResult<()> { - let quotes = match (self.style, self.triple) { - (QuoteStyle::Single, false) => "'", - (QuoteStyle::Single, true) => "'''", - (QuoteStyle::Double, false) => "\"", - (QuoteStyle::Double, true) => "\"\"\"", + let quotes = match (self.quote_char, self.triple) { + (QuoteChar::Single, false) => "'", + (QuoteChar::Single, true) => "'''", + (QuoteChar::Double, false) => "\"", + (QuoteChar::Double, true) => "\"\"\"", }; token(quotes).fmt(f) } } +/// The quotation character used to quote a string, byte, or fstring literal. +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +pub enum QuoteChar { + /// A single quote: `'` + Single, + + /// A double quote: '"' + Double, +} + +impl QuoteChar { + pub const fn as_char(self) -> char { + match self { + QuoteChar::Single => '\'', + QuoteChar::Double => '"', + } + } + + #[must_use] + pub const fn invert(self) -> QuoteChar { + match self { + QuoteChar::Single => QuoteChar::Double, + QuoteChar::Double => QuoteChar::Single, + } + } + + #[must_use] + pub const fn from_style(style: QuoteStyle) -> Option { + match style { + QuoteStyle::Single => Some(QuoteChar::Single), + QuoteStyle::Double => Some(QuoteChar::Double), + QuoteStyle::Preserve => None, + } + } +} + +impl From for QuoteStyle { + fn from(value: QuoteChar) -> Self { + match value { + QuoteChar::Single => QuoteStyle::Single, + QuoteChar::Double => QuoteStyle::Double, + } + } +} + +impl TryFrom for QuoteChar { + type Error = (); + + fn try_from(value: char) -> Result { + match value { + '\'' => Ok(QuoteChar::Single), + '"' => Ok(QuoteChar::Double), + _ => Err(()), + } + } +} + /// Adds the necessary quote escapes and removes unnecessary escape sequences when quoting `input` /// with the provided [`StringQuotes`] style. 
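/// For example (illustrative), when the provided quotes are double quotes,
/// `'It\'s'` is rewritten as `"It's"` (the now-unnecessary `\'` escape is
/// dropped), while `'say "hi"'` becomes `"say \"hi\""` (the inner double
/// quotes gain escapes).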
/// @@ -737,9 +730,9 @@ fn normalize_string(input: &str, quotes: StringQuotes, prefix: StringPrefix) -> // If `last_index` is `0` at the end, then the input is already normalized and can be returned as is. let mut last_index = 0; - let style = quotes.style; - let preferred_quote = style.as_char(); - let opposite_quote = style.invert().as_char(); + let quote = quotes.quote_char; + let preferred_quote = quote.as_char(); + let opposite_quote = quote.invert().as_char(); let mut chars = input.char_indices().peekable(); diff --git a/crates/ruff_python_formatter/tests/fixtures.rs b/crates/ruff_python_formatter/tests/fixtures.rs index 3c48027d98..3845385d73 100644 --- a/crates/ruff_python_formatter/tests/fixtures.rs +++ b/crates/ruff_python_formatter/tests/fixtures.rs @@ -347,14 +347,15 @@ impl fmt::Display for DisplayPyOptions<'_> { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { writeln!( f, - r#"indent-style = {indent_style} -line-width = {line_width} -indent-width = {indent_width} -quote-style = {quote_style:?} -line-ending = {line_ending:?} -magic-trailing-comma = {magic_trailing_comma:?} -docstring-code = {docstring_code:?} -preview = {preview:?}"#, + r#"indent-style = {indent_style} +line-width = {line_width} +indent-width = {indent_width} +quote-style = {quote_style:?} +line-ending = {line_ending:?} +magic-trailing-comma = {magic_trailing_comma:?} +docstring-code = {docstring_code:?} +docstring-code-line-width = {docstring_code_line_width:?} +preview = {preview:?}"#, indent_style = self.0.indent_style(), indent_width = self.0.indent_width().value(), line_width = self.0.line_width().value(), @@ -362,6 +363,7 @@ preview = {preview:?}"#, line_ending = self.0.line_ending(), magic_trailing_comma = self.0.magic_trailing_comma(), docstring_code = self.0.docstring_code(), + docstring_code_line_width = self.0.docstring_code_line_width(), preview = self.0.preview() ) } diff --git a/crates/ruff_python_formatter/tests/normalizer.rs b/crates/ruff_python_formatter/tests/normalizer.rs index d0f2ba0b9c..2bab8915cc 100644 --- a/crates/ruff_python_formatter/tests/normalizer.rs +++ b/crates/ruff_python_formatter/tests/normalizer.rs @@ -60,7 +60,7 @@ impl Transformer for Normalizer { } fn visit_string_literal(&self, string_literal: &mut ast::StringLiteral) { - static STRIP_CODE_SNIPPETS: Lazy = Lazy::new(|| { + static STRIP_DOC_TESTS: Lazy = Lazy::new(|| { Regex::new( r#"(?mx) ( @@ -75,14 +75,37 @@ impl Transformer for Normalizer { ) .unwrap() }); + static STRIP_RST_BLOCKS: Lazy = Lazy::new(|| { + // This is kind of unfortunate, but it's pretty tricky (likely + // impossible) to detect a reStructuredText block with a simple + // regex. So we just look for the start of a block and remove + // everything after it. Talk about a hammer. + Regex::new(r#"::(?s:.*)"#).unwrap() + }); + static STRIP_MARKDOWN_BLOCKS: Lazy = Lazy::new(|| { + // This covers more than valid Markdown blocks, but that's OK. + Regex::new(r#"(```|~~~)\p{any}*(```|~~~|$)"#).unwrap() + }); // Start by (1) stripping everything that looks like a code // snippet, since code snippets may be completely reformatted if // they are Python code. 
- string_literal.value = STRIP_CODE_SNIPPETS + string_literal.value = STRIP_DOC_TESTS .replace_all( &string_literal.value, - "\n", + "\n", + ) + .into_owned(); + string_literal.value = STRIP_RST_BLOCKS + .replace_all( + &string_literal.value, + "\n", + ) + .into_owned(); + string_literal.value = STRIP_MARKDOWN_BLOCKS + .replace_all( + &string_literal.value, + "\n", ) .into_owned(); // Normalize a string by (2) stripping any leading and trailing space from each diff --git a/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__ignore_pyi.pyi.snap b/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__ignore_pyi.pyi.snap index ed9ee11309..0d405b5b57 100644 --- a/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__ignore_pyi.pyi.snap +++ b/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__ignore_pyi.pyi.snap @@ -22,10 +22,9 @@ def g(): # hi ... -# FIXME(#8905): Uncomment, leads to unstable formatting -# def h(): -# ... -# # bye +def h(): + ... + # bye ``` ## Black Differences @@ -41,17 +40,6 @@ def g(): class y: ... # comment # whitespace doesn't matter (note the next line has a trailing space and tab) -@@ -13,6 +12,7 @@ - # hi - ... - --def h(): -- ... -- # bye -+# FIXME(#8905): Uncomment, leads to unstable formatting -+# def h(): -+# ... -+# # bye ``` ## Ruff Output @@ -71,10 +59,9 @@ def g(): # hi ... -# FIXME(#8905): Uncomment, leads to unstable formatting -# def h(): -# ... -# # bye +def h(): + ... + # bye ``` ## Black Output diff --git a/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__line_ranges_fmt_off_decorator.py.snap b/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__line_ranges_fmt_off_decorator.py.snap index 4ac42448f3..d14b92cc08 100644 --- a/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__line_ranges_fmt_off_decorator.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__line_ranges_fmt_off_decorator.py.snap @@ -16,6 +16,15 @@ class MyClass: # fmt: on def method(): print ( "str" ) + + @decor( + a=1, + # fmt: off + b=(2, 3), + # fmt: on + ) + def func(): + pass ``` ## Black Differences @@ -23,8 +32,8 @@ class MyClass: ```diff --- Black +++ Ruff -@@ -1,12 +1,10 @@ --# flags: --line-ranges=12-12 +@@ -1,15 +1,13 @@ +-# flags: --line-ranges=12-12 --line-ranges=21-21 # NOTE: If you need to modify this file, pay special attention to the --line-ranges= # flag above as it's formatting specifically these lines. @@ -37,6 +46,15 @@ class MyClass: def method(): - print("str") + print ( "str" ) + + @decor( + a=1, +@@ -18,4 +16,4 @@ + # fmt: on + ) + def func(): +- pass ++ pass ``` ## Ruff Output @@ -52,12 +70,21 @@ class MyClass: # fmt: on def method(): print ( "str" ) + + @decor( + a=1, + # fmt: off + b=(2, 3), + # fmt: on + ) + def func(): + pass ``` ## Black Output ```python -# flags: --line-ranges=12-12 +# flags: --line-ranges=12-12 --line-ranges=21-21 # NOTE: If you need to modify this file, pay special attention to the --line-ranges= # flag above as it's formatting specifically these lines. 
@@ -69,6 +96,15 @@ class MyClass: # fmt: on def method(): print("str") + + @decor( + a=1, + # fmt: off + b=(2, 3), + # fmt: on + ) + def func(): + pass ``` diff --git a/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__pep604_union_types_line_breaks.py.snap b/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__pep604_union_types_line_breaks.py.snap index 278a20e4a6..bf6f676be0 100644 --- a/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__pep604_union_types_line_breaks.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__pep604_union_types_line_breaks.py.snap @@ -95,7 +95,7 @@ def f( ```diff --- Black +++ Ruff -@@ -7,26 +7,16 @@ +@@ -7,23 +7,13 @@ ) # "AnnAssign"s now also work @@ -120,16 +120,10 @@ def f( - | Loooooooooooooooooooooooong - | Loooooooooooooooooooooooong -) = 7 --z: Short | Short2 | Short3 | Short4 = 8 --z: int = 2.3 --z: int = foo() +z: Loooooooooooooooooooooooong | Loooooooooooooooooooooooong | Loooooooooooooooooooooooong | Loooooooooooooooooooooooong = 7 -+z: (Short | Short2 | Short3 | Short4) = 8 -+z: (int) = 2.3 -+z: (int) = foo() - - # In case I go for not enforcing parantheses, this might get improved at the same time - x = ( + z: Short | Short2 | Short3 | Short4 = 8 + z: int = 2.3 + z: int = foo() @@ -63,7 +53,7 @@ @@ -186,9 +180,9 @@ z: (int) z: Loooooooooooooooooooooooong | Loooooooooooooooooooooooong | Loooooooooooooooooooooooong | Loooooooooooooooooooooooong = 7 -z: (Short | Short2 | Short3 | Short4) = 8 -z: (int) = 2.3 -z: (int) = foo() +z: Short | Short2 | Short3 | Short4 = 8 +z: int = 2.3 +z: int = foo() # In case I go for not enforcing parantheses, this might get improved at the same time x = ( diff --git a/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__power_op_newline.py.snap b/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__power_op_newline.py.snap index 1796017091..bb1b6eed95 100644 --- a/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__power_op_newline.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__power_op_newline.py.snap @@ -13,21 +13,24 @@ importA;()<<0**0# ```diff --- Black +++ Ruff -@@ -1,6 +1,2 @@ - importA --( -- () -- << 0 +@@ -2,5 +2,5 @@ + ( + () + << 0 - ** 0 --) # -+() << 0**0 # ++ **0 + ) # ``` ## Ruff Output ```python importA -() << 0**0 # +( + () + << 0 + **0 +) # ``` ## Black Output diff --git a/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__preview_long_strings.py.snap b/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__preview_long_strings.py.snap index b3a03820fa..a589aeffed 100644 --- a/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__preview_long_strings.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__preview_long_strings.py.snap @@ -789,14 +789,12 @@ log.info(f"""Skipping: {'a' == 'b'} {desc['ms_name']} {money=} {dte=} {pos_share - "This is a large string that has a type annotation attached to it. A type" - " annotation should NOT stop a long string from being wrapped." -) --annotated_variable: Literal["fakse_literal"] = ( ++annotated_variable: Final = "This is a large string that has a type annotation attached to it. A type annotation should NOT stop a long string from being wrapped." + annotated_variable: Literal["fakse_literal"] = ( - "This is a large string that has a type annotation attached to it. 
A type" - " annotation should NOT stop a long string from being wrapped." --) -+annotated_variable: Final = "This is a large string that has a type annotation attached to it. A type annotation should NOT stop a long string from being wrapped." -+annotated_variable: Literal[ -+ "fakse_literal" -+] = "This is a large string that has a type annotation attached to it. A type annotation should NOT stop a long string from being wrapped." ++ "This is a large string that has a type annotation attached to it. A type annotation should NOT stop a long string from being wrapped." + ) -backslashes = ( - "This is a really long string with \"embedded\" double quotes and 'single' quotes" @@ -1308,9 +1306,9 @@ annotated_variable: Final = ( + "using the '+' operator." ) annotated_variable: Final = "This is a large string that has a type annotation attached to it. A type annotation should NOT stop a long string from being wrapped." -annotated_variable: Literal[ - "fakse_literal" -] = "This is a large string that has a type annotation attached to it. A type annotation should NOT stop a long string from being wrapped." +annotated_variable: Literal["fakse_literal"] = ( + "This is a large string that has a type annotation attached to it. A type annotation should NOT stop a long string from being wrapped." +) backslashes = "This is a really long string with \"embedded\" double quotes and 'single' quotes that also handles checking for an even number of backslashes \\" backslashes = "This is a really long string with \"embedded\" double quotes and 'single' quotes that also handles checking for an even number of backslashes \\\\" diff --git a/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__preview_long_strings__regression.py.snap b/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__preview_long_strings__regression.py.snap index 1cde924609..762af6aa16 100644 --- a/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__preview_long_strings__regression.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__preview_long_strings__regression.py.snap @@ -832,7 +832,7 @@ s = f'Lorem Ipsum is simply dummy text of the printing and typesetting industry: some_commented_string = ( # This comment stays at the top. 
"This string is long but not so long that it needs hahahah toooooo be so greatttt" -@@ -279,38 +280,27 @@ +@@ -279,36 +280,25 @@ ) lpar_and_rpar_have_comments = func_call( # LPAR Comment @@ -852,33 +852,31 @@ s = f'Lorem Ipsum is simply dummy text of the printing and typesetting industry: - f" {'' if ID is None else ID} | perl -nE 'print if /^{field}:/'" -) +cmd_fstring = f"sudo -E deluge-console info --detailed --sort-reverse=time_added {'' if ID is None else ID} | perl -nE 'print if /^{field}:/'" -+ -+cmd_fstring = f"sudo -E deluge-console info --detailed --sort-reverse=time_added {'{{}}' if ID is None else ID} | perl -nE 'print if /^{field}:/'" -cmd_fstring = ( - "sudo -E deluge-console info --detailed --sort-reverse=time_added" - f" {'{{}}' if ID is None else ID} | perl -nE 'print if /^{field}:/'" -) -+cmd_fstring = f"sudo -E deluge-console info --detailed --sort-reverse=time_added {{'' if ID is None else ID}} | perl -nE 'print if /^{field}:/'" ++cmd_fstring = f"sudo -E deluge-console info --detailed --sort-reverse=time_added {'{{}}' if ID is None else ID} | perl -nE 'print if /^{field}:/'" -cmd_fstring = ( - "sudo -E deluge-console info --detailed --sort-reverse=time_added {'' if ID is" - f" None else ID}} | perl -nE 'print if /^{field}:/'" -) -+fstring = f"This string really doesn't need to be an {{{{fstring}}}}, but this one most certainly, absolutely {does}." ++cmd_fstring = f"sudo -E deluge-console info --detailed --sort-reverse=time_added {{'' if ID is None else ID}} | perl -nE 'print if /^{field}:/'" ++fstring = f"This string really doesn't need to be an {{{{fstring}}}}, but this one most certainly, absolutely {does}." ++ fstring = ( - "This string really doesn't need to be an {{fstring}}, but this one most" - f" certainly, absolutely {does}." + f"We have to remember to escape {braces}." " Like {these}." f" But not {this}." ) - +- -fstring = f"We have to remember to escape {braces}. Like {{these}}. But not {this}." 
-- + class A: - class B: - def foo(): @@ -364,10 +354,7 @@ def foo(): if not hasattr(module, name): @@ -933,7 +931,7 @@ s = f'Lorem Ipsum is simply dummy text of the printing and typesetting industry: ) -@@ -432,14 +415,12 @@ +@@ -432,9 +415,7 @@ assert xxxxxxx_xxxx in [ x.xxxxx.xxxxxx.xxxxx.xxxxxx, x.xxxxx.xxxxxx.xxxxx.xxxx, @@ -943,15 +941,7 @@ s = f'Lorem Ipsum is simply dummy text of the printing and typesetting industry: + ], "xxxxxxxxxxx xxxxxxx xxxx (xxxxxx xxxx) %x xxx xxxxx" % xxxxxxx_xxxx --value.__dict__[key] = ( -- "test" # set some Thrift field to non-None in the struct aa bb cc dd ee --) -+value.__dict__[ -+ key -+] = "test" # set some Thrift field to non-None in the struct aa bb cc dd ee - - RE_ONE_BACKSLASH = { - "asdf_hjkl_jkl": re.compile( + value.__dict__[key] = ( @@ -449,8 +430,7 @@ RE_TWO_BACKSLASHES = { @@ -1627,9 +1617,9 @@ class xxxxxxxxxxxxxxxxxxxxx(xxxx.xxxxxxxxxxxxx): ], "xxxxxxxxxxx xxxxxxx xxxx (xxxxxx xxxx) %x xxx xxxxx" % xxxxxxx_xxxx -value.__dict__[ - key -] = "test" # set some Thrift field to non-None in the struct aa bb cc dd ee +value.__dict__[key] = ( + "test" # set some Thrift field to non-None in the struct aa bb cc dd ee +) RE_ONE_BACKSLASH = { "asdf_hjkl_jkl": re.compile( diff --git a/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__preview_no_blank_line_before_docstring.py.snap b/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__preview_no_blank_line_before_docstring.py.snap deleted file mode 100644 index bd93e24292..0000000000 --- a/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__preview_no_blank_line_before_docstring.py.snap +++ /dev/null @@ -1,134 +0,0 @@ ---- -source: crates/ruff_python_formatter/tests/fixtures.rs -input_file: crates/ruff_python_formatter/resources/test/fixtures/black/cases/preview_no_blank_line_before_docstring.py ---- -## Input - -```python -def line_before_docstring(): - - """Please move me up""" - - -class LineBeforeDocstring: - - """Please move me up""" - - -class EvenIfThereIsAMethodAfter: - - """I'm the docstring""" - def method(self): - pass - - -class TwoLinesBeforeDocstring: - - - """I want to be treated the same as if I were closer""" - - -class MultilineDocstringsAsWell: - - """I'm so far - - and on so many lines... - """ -``` - -## Black Differences - -```diff ---- Black -+++ Ruff -@@ -3,10 +3,12 @@ - - - class LineBeforeDocstring: -+ - """Please move me up""" - - - class EvenIfThereIsAMethodAfter: -+ - """I'm the docstring""" - - def method(self): -@@ -14,10 +16,12 @@ - - - class TwoLinesBeforeDocstring: -+ - """I want to be treated the same as if I were closer""" - - - class MultilineDocstringsAsWell: -+ - """I'm so far - - and on so many lines... -``` - -## Ruff Output - -```python -def line_before_docstring(): - """Please move me up""" - - -class LineBeforeDocstring: - - """Please move me up""" - - -class EvenIfThereIsAMethodAfter: - - """I'm the docstring""" - - def method(self): - pass - - -class TwoLinesBeforeDocstring: - - """I want to be treated the same as if I were closer""" - - -class MultilineDocstringsAsWell: - - """I'm so far - - and on so many lines... 
- """ -``` - -## Black Output - -```python -def line_before_docstring(): - """Please move me up""" - - -class LineBeforeDocstring: - """Please move me up""" - - -class EvenIfThereIsAMethodAfter: - """I'm the docstring""" - - def method(self): - pass - - -class TwoLinesBeforeDocstring: - """I want to be treated the same as if I were closer""" - - -class MultilineDocstringsAsWell: - """I'm so far - - and on so many lines... - """ -``` - - diff --git a/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__preview_prefer_rhs_split.py.snap b/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__preview_prefer_rhs_split.py.snap index 18b9fa9a06..e799efc314 100644 --- a/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__preview_prefer_rhs_split.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__preview_prefer_rhs_split.py.snap @@ -118,57 +118,7 @@ a = ( ```diff --- Black +++ Ruff -@@ -1,29 +1,31 @@ --first_item, second_item = ( -- some_looooooooong_module.some_looooooooooooooong_function_name( -- first_argument, second_argument, third_argument -- ) -+( -+ first_item, -+ second_item, -+) = some_looooooooong_module.some_looooooooooooooong_function_name( -+ first_argument, second_argument, third_argument - ) - --some_dict["with_a_long_key"] = ( -- some_looooooooong_module.some_looooooooooooooong_function_name( -- first_argument, second_argument, third_argument -- ) -+some_dict[ -+ "with_a_long_key" -+] = some_looooooooong_module.some_looooooooooooooong_function_name( -+ first_argument, second_argument, third_argument - ) - - # Make sure it works when the RHS only has one pair of (optional) parens. --first_item, second_item = ( -- some_looooooooong_module.SomeClass.some_looooooooooooooong_variable_name --) -+( -+ first_item, -+ second_item, -+) = some_looooooooong_module.SomeClass.some_looooooooooooooong_variable_name - --some_dict["with_a_long_key"] = ( -- some_looooooooong_module.SomeClass.some_looooooooooooooong_variable_name --) -+some_dict[ -+ "with_a_long_key" -+] = some_looooooooong_module.SomeClass.some_looooooooooooooong_variable_name - - # Make sure chaining assignments work. 
--first_item, second_item, third_item, forth_item = m["everything"] = ( -- some_looooooooong_module.some_looooooooooooooong_function_name( -- first_argument, second_argument, third_argument -- ) -+first_item, second_item, third_item, forth_item = m[ -+ "everything" -+] = some_looooooooong_module.some_looooooooooooooong_function_name( -+ first_argument, second_argument, third_argument - ) - - # Make sure when the RHS's first split at the non-optional paren fits, -@@ -60,9 +62,7 @@ +@@ -60,9 +60,7 @@ some_arg ).intersection(pk_cols) @@ -179,76 +129,37 @@ a = ( some_kind_of_table[ some_key # type: ignore # noqa: E501 -@@ -85,15 +85,29 @@ - ) - - # Multiple targets --a = b = ( -- ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc --) -+a = ( -+ b -+) = ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc - --a = b = c = d = e = f = g = ( -+a = ( -+ b -+) = ( -+ c -+) = ( -+ d -+) = ( -+ e -+) = ( -+ f -+) = ( -+ g -+) = ( - hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh --) = i = j = ( -- kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk --) -+) = ( -+ i -+) = ( -+ j -+) = kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk - - a = ( - bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb ``` ## Ruff Output ```python -( - first_item, - second_item, -) = some_looooooooong_module.some_looooooooooooooong_function_name( - first_argument, second_argument, third_argument +first_item, second_item = ( + some_looooooooong_module.some_looooooooooooooong_function_name( + first_argument, second_argument, third_argument + ) ) -some_dict[ - "with_a_long_key" -] = some_looooooooong_module.some_looooooooooooooong_function_name( - first_argument, second_argument, third_argument +some_dict["with_a_long_key"] = ( + some_looooooooong_module.some_looooooooooooooong_function_name( + first_argument, second_argument, third_argument + ) ) # Make sure it works when the RHS only has one pair of (optional) parens. -( - first_item, - second_item, -) = some_looooooooong_module.SomeClass.some_looooooooooooooong_variable_name +first_item, second_item = ( + some_looooooooong_module.SomeClass.some_looooooooooooooong_variable_name +) -some_dict[ - "with_a_long_key" -] = some_looooooooong_module.SomeClass.some_looooooooooooooong_variable_name +some_dict["with_a_long_key"] = ( + some_looooooooong_module.SomeClass.some_looooooooooooooong_variable_name +) # Make sure chaining assignments work. 
-first_item, second_item, third_item, forth_item = m[ - "everything" -] = some_looooooooong_module.some_looooooooooooooong_function_name( - first_argument, second_argument, third_argument +first_item, second_item, third_item, forth_item = m["everything"] = ( + some_looooooooong_module.some_looooooooooooooong_function_name( + first_argument, second_argument, third_argument + ) ) # Make sure when the RHS's first split at the non-optional paren fits, @@ -308,29 +219,15 @@ some_kind_of_instance.some_kind_of_map[a_key] = ( ) # Multiple targets -a = ( - b -) = ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc +a = b = ( + ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc +) -a = ( - b -) = ( - c -) = ( - d -) = ( - e -) = ( - f -) = ( - g -) = ( +a = b = c = d = e = f = g = ( hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh -) = ( - i -) = ( - j -) = kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk +) = i = j = ( + kkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk +) a = ( bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb diff --git a/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__raw_docstring.py.snap b/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__raw_docstring.py.snap index 8b27585265..bc66189376 100644 --- a/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__raw_docstring.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/black_compatibility@cases__raw_docstring.py.snap @@ -27,30 +27,21 @@ class UpperCaseR: ```diff --- Black +++ Ruff -@@ -1,4 +1,5 @@ - class C: -+ - r"""Raw""" - - -@@ -7,8 +8,9 @@ +@@ -7,7 +7,7 @@ class SingleQuotes: - r'''Raw''' - + r"""Raw""" -+ + class UpperCaseR: - R"""Raw""" ``` ## Ruff Output ```python class C: - r"""Raw""" @@ -59,7 +50,6 @@ def f(): class SingleQuotes: - r"""Raw""" diff --git a/crates/ruff_python_formatter/tests/snapshots/format@blank_line_before_class_docstring.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@blank_line_before_class_docstring.py.snap new file mode 100644 index 0000000000..b3986d0395 --- /dev/null +++ b/crates/ruff_python_formatter/tests/snapshots/format@blank_line_before_class_docstring.py.snap @@ -0,0 +1,95 @@ +--- +source: crates/ruff_python_formatter/tests/fixtures.rs +input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/blank_line_before_class_docstring.py +--- +## Input +```python +class NormalDocstring: + + """This is a docstring.""" + + +class DocstringWithComment0: + # This is a comment + """This is a docstring.""" + + +class DocstringWithComment1: + # This is a comment + + """This is a docstring.""" + + +class DocstringWithComment2: + + # This is a comment + """This is a docstring.""" + + +class DocstringWithComment3: + + # This is a comment + + """This is a docstring.""" + + +class DocstringWithComment4: + + + # This is a comment + + + """This is a docstring.""" + + +``` + +## Outputs +### Output 1 +``` +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Enabled +``` + +```python +class NormalDocstring: + """This is a docstring.""" + + +class DocstringWithComment0: + # This is a comment + """This is a docstring.""" + + 
+class DocstringWithComment1: + # This is a comment + + """This is a docstring.""" + + +class DocstringWithComment2: + # This is a comment + """This is a docstring.""" + + +class DocstringWithComment3: + # This is a comment + + """This is a docstring.""" + + +class DocstringWithComment4: + # This is a comment + + """This is a docstring.""" +``` + + + diff --git a/crates/ruff_python_formatter/tests/snapshots/format@docstring.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@docstring.py.snap index bac8a446b1..bf8fa7f40d 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@docstring.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@docstring.py.snap @@ -156,19 +156,25 @@ class TabbedIndent: Normal indented line - autor """ + + +def single_quoted(): + ' content\ ' + return ``` ## Outputs ### Output 1 ``` -indent-style = space -line-width = 88 -indent-width = 4 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -324,19 +330,25 @@ class TabbedIndent: Normal indented line - autor """ + + +def single_quoted(): + "content\ " + return ``` ### Output 2 ``` -indent-style = space -line-width = 88 -indent-width = 2 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 2 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -492,19 +504,25 @@ class TabbedIndent: Normal indented line - autor """ + + +def single_quoted(): + "content\ " + return ``` ### Output 3 ``` -indent-style = tab -line-width = 88 -indent-width = 8 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = tab +line-width = 88 +indent-width = 8 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -660,19 +678,25 @@ class TabbedIndent: Normal indented line - autor """ + + +def single_quoted(): + "content\ " + return ``` ### Output 4 ``` -indent-style = tab -line-width = 88 -indent-width = 4 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = tab +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -828,6 +852,185 @@ class TabbedIndent: Normal indented line - autor """ + + +def single_quoted(): + "content\ " + return +``` + + +### Output 5 +``` +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Single +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled +``` + +```python +def single_line_backslashes1(): + """content\ """ + return + + +def single_line_backslashes2(): + """content\\""" + return + + +def single_line_backslashes3(): + """content\\\ """ + return + + +def 
multiline_backslashes1(): + """This is a docstring with + some lines of text\ """ + return + + +def multiline_backslashes2(): + """This is a docstring with + some lines of text\\""" + return + + +def multiline_backslashes3(): + """This is a docstring with + some lines of text\\\ """ + return + + +def multiple_negatively_indented_docstring_lines(): + """a + b + c + d + e + """ + + +def overindentend_docstring(): + """a + over-indented + """ + + +def comment_before_docstring(): + # don't lose this function comment ... + """Does nothing. + + But it has comments + """ # ... neither lose this function comment + + +class CommentBeforeDocstring: + # don't lose this class comment ... + """Empty class. + + But it has comments + """ # ... neither lose this class comment + + +class IndentMeSome: + def doc_string_without_linebreak_after_colon(self): + """This is somewhat strange + a + b + We format this a is the docstring had started properly indented on the next + line if the target indentation. This may we incorrect since source and target + indentation can be incorrect, but this is also an edge case. + """ + + +class IgnoreImplicitlyConcatenatedStrings: + """""" '' + + +def docstring_that_ends_with_quote_and_a_line_break1(): + """ + he said "the news of my death have been greatly exaggerated" + """ + + +def docstring_that_ends_with_quote_and_a_line_break2(): + """he said "the news of my death have been greatly exaggerated" """ + + +def docstring_that_ends_with_quote_and_a_line_break3(): + """he said "the news of my death have been greatly exaggerated" """ + + +class ByteDocstring: + b""" has leading whitespace""" + first_statement = 1 + + +class CommentAfterDocstring1: + """Browse module classes and functions in IDLE.""" + + # This class is also the base class for pathbrowser.PathBrowser. + + def __init__(self): + pass + + +class CommentAfterDocstring2: + """Browse module classes and functions in IDLE.""" + + # This class is also the base class for pathbrowser.PathBrowser. + + def __init__(self): + pass + + +class CommentAfterDocstring3: + """Browse module classes and functions in IDLE.""" + + # This class is also the base class for pathbrowser.PathBrowser. + def __init__(self): + pass + + +class CommentAfterDocstring4: + """Browse module classes and functions in IDLE.""" + + # This class is also the base class for pathbrowser.PathBrowser. + def __init__(self): + pass + + +class CommentAfterDocstring5: + """Browse module classes and functions in IDLE.""" + + # This class is also the base class for pathbrowser.PathBrowser. + + +def f(): + """Browse module classes and functions in IDLE.""" + # ^ Do not insert a newline above here + + pass + + +class TabbedIndent: + def tabbed_indent(self): + """check for correct tabbed formatting + ^^^^^^^^^^ + Normal indented line + - autor + """ + + +def single_quoted(): + "content\ " + return ``` diff --git a/crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples.py.snap index 5eeafa914f..672b7a715d 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples.py.snap @@ -73,6 +73,27 @@ def doctest_last_line_continued(): pass +# Test that a doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line(): + """ + Do cool stuff. 
+ + >>> cool_stuff( x )""" + pass + + +# Test that a continued doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line_continued(): + """ + Do cool stuff. + + >>> cool_stuff( x ) + ... more( y )""" + pass + + # Test that a doctest is correctly identified and formatted with a blank # continuation line. def doctest_blank_continued(): @@ -329,19 +350,1024 @@ def doctest_invalid_skipped_with_triple_double_in_single_quote_string(): >>> x = '\"\"\"' """ pass + + +############################################################################### +# reStructuredText CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# reStructuredText formatted code blocks. +# +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#literal-blocks +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-code-block +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#literal-blocks +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-30 +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-38 +############################################################################### + + +def rst_literal_simple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_literal_simple_continued(): + """ + Do cool stuff:: + + def cool_stuff( x ): + print( f"hi {x}" ); + + Done. + """ + pass + + +# Tests that we can end the literal block on the second +# to last line of the docstring. +def rst_literal_second_to_last(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + """ + pass + + +# Tests that we can end the literal block on the actual +# last line of the docstring. +def rst_literal_actually_last(): + """ + Do cool stuff:: + + cool_stuff( 1 )""" + pass + + +def rst_literal_with_blank_lines(): + """ + Do cool stuff:: + + def cool_stuff( x ): + print( f"hi {x}" ); + + def other_stuff( y ): + print( y ) + + Done. + """ + pass + + +# Extra blanks should be preserved. +def rst_literal_extra_blanks(): + """ + Do cool stuff:: + + + + cool_stuff( 1 ) + + + + Done. + """ + pass + + +# If a literal block is never properly ended (via a non-empty unindented line), +# then the end of the block should be the last non-empty line. And subsequent +# empty lines should be preserved as-is. +def rst_literal_extra_blanks_at_end(): + """ + Do cool stuff:: + + + cool_stuff( 1 ) + + + + """ + pass + + +# A literal block can contain many empty lines and it should not end the block +# if it continues. +def rst_literal_extra_blanks_in_snippet(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + + cool_stuff( 2 ) + + Done. + """ + pass + + +# This tests that a unindented line appearing after an indented line (but where +# the indent is still beyond the minimum) gets formatted properly. +def rst_literal_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( ''' + hiya''' ) + + Done. + """ + pass + + +# This checks that if the first line in a code snippet has been indented with +# tabs, then so long as its "indentation length" is considered bigger than the +# line with `::`, it is reformatted as code. +# +# (If your tabwidth is set to 4, then it looks like the code snippet +# isn't indented at all, which is perhaps counter-intuitive. Indeed, reST +# itself also seems to recognize this as a code block, although it appears +# under-specified.) 
+def rst_literal_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Another test with tabs, except in this case, if your tabwidth is less than +# 8, than the code snippet actually looks like its indent is *less* than the +# opening line with a `::`. One might presume this means that the code snippet +# is not treated as a literal block and thus not reformatted, but since we +# assume all tabs have tabwidth=8 when computing indentation length, the code +# snippet is actually seen as being more indented than the opening `::` line. +# As with the above example, reST seems to behave the same way here. +def rst_literal_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that if two lines in a literal block are indented to the same level +# but by different means (tabs versus spaces), then we correctly recognize the +# block and format it. +def rst_literal_first_line_tab_second_line_spaces(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that when two lines in a code snippet have weird and inconsistent +# indentation, the code still gets formatted so long as the indent is greater +# than the indent of the `::` line. +# +# In this case, the minimum indent is 5 spaces (from the second line) where as +# the first line has an indent of 8 spaces via a tab (by assuming tabwidth=8). +# The minimum indent is stripped from each code line. Since tabs aren't +# divisible, the entire tab is stripped, which means the first and second lines +# wind up with the same level of indentation. +# +# An alternative behavior here would be that the tab is replaced with 3 spaces +# instead of being stripped entirely. The code snippet itself would then have +# inconsistent indentation to the point of being invalid Python, and thus code +# formatting would be skipped. +# +# I decided on the former behavior because it seems a bit easier to implement, +# but we might want to switch to the alternative if cases like this show up in +# the real world. ---AG +def rst_literal_odd_indentation(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that having a line with a lone `::` works as an introduction of a +# literal block. +def rst_literal_lone_colon(): + """ + Do cool stuff. + + :: + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_simple(): + """ + .. code-block:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_case_insensitive(): + """ + .. cOdE-bLoCk:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_sourcecode(): + """ + .. sourcecode:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_options(): + """ + .. code-block:: python + :linenos: + :emphasize-lines: 2,3 + :name: blah blah + + cool_stuff( 1 ) + cool_stuff( 2 ) + cool_stuff( 3 ) + cool_stuff( 4 ) + + Done. + """ + pass + + +# In this case, since `pycon` isn't recognized as a Python code snippet, the +# docstring reformatter ignores it. 
But it then picks up the doctest and +# reformats it. +def rst_directive_doctest(): + """ + .. code-block:: pycon + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +# This checks that if the first non-empty line after the start of a literal +# block is not indented more than the line containing the `::`, then it is not +# treated as a code snippet. +def rst_literal_skipped_first_line_not_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but inserts an indented line after the un-indented one. +# This should not cause the literal block to be resumed. +def rst_literal_skipped_first_line_not_indented_then_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# This also checks that a code snippet is not reformatted when the indentation +# of the first line is not more than the line with `::`, but this uses tabs to +# make it a little more confounding. It relies on the fact that indentation +# length is computed by assuming a tabwidth equal to 8. reST also rejects this +# and doesn't treat it as a literal block. +def rst_literal_skipped_first_line_not_indented_tab(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the previous test, but adds a second line. +def rst_literal_skipped_first_line_not_indented_tab_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that a code block with a second line that is not properly indented gets +# skipped. A valid code block needs to have an empty line separating these. +# +# One trick here is that we need to make sure the Python code in the snippet is +# valid, otherwise it would be skipped because of invalid Python. +def rst_literal_skipped_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( ''' + hiya''' ) + + Done. + """ + pass + + +# In this test, we write what looks like a code-block, but it should be treated +# as invalid due to the missing `language` argument. +# +# It does still look like it could be a literal block according to the literal +# rules, but we currently consider the `.. ` prefix to indicate that it is not +# a literal block. +def rst_literal_skipped_not_directive(): + """ + .. code-block:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# In this test, we start a line with `.. `, which makes it look like it might +# be a directive. But instead continue it as if it was just some periods from +# the previous line, and then try to end it by starting a literal block. +# +# But because of the `.. ` in the beginning, we wind up not treating this as a +# code snippet. The reST render I was using to test things does actually treat +# this as a code block, so we may be out of conformance here. +def rst_literal_skipped_possible_false_negative(): + """ + This is a test. + .. This is a test:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# This tests that a doctest inside of a reST literal block doesn't get +# reformatted. It's plausible this isn't the right behavior, but it also seems +# like it might be the right behavior since it is a literal block. (The doctest +# makes the Python code invalid.) +def rst_literal_skipped_doctest(): + """ + Do cool stuff:: + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_literal_skipped_markdown(): + """ + Do cool stuff:: + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def rst_directive_skipped_not_indented(): + """ + .. 
code-block:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_skipped_wrong_language(): + """ + .. code-block:: rust + + cool_stuff( 1 ) + + Done. + """ + pass + + +# This gets skipped for the same reason that the doctest in a literal block +# gets skipped. +def rst_directive_skipped_doctest(): + """ + .. code-block:: python + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +############################################################################### +# Markdown CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# Markdown fenced code blocks. +# +# See: https://spec.commonmark.org/0.30/#fenced-code-blocks +############################################################################### + + +def markdown_simple(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_simple_continued(): + """ + Do cool stuff. + + ```python + def cool_stuff( x ): + print( f"hi {x}" ); + ``` + + Done. + """ + pass + + +# Tests that unlabeled Markdown fenced code blocks are assumed to be Python. +def markdown_unlabeled(): + """ + Do cool stuff. + + ``` + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# Tests that fenced code blocks using tildes work. +def markdown_tildes(): + """ + Do cool stuff. + + ~~~py + cool_stuff( 1 ) + ~~~ + + Done. + """ + pass + + +# Tests that a longer closing fence is just fine and dandy. +def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + `````` + + Done. + """ + pass + + +# Tests that an invalid closing fence is treated as invalid. +# +# We embed it into a docstring so that the surrounding Python +# remains valid. +def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ''' + ```invalid + ''' + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +# Tests that one can nest fenced code blocks by using different numbers of +# backticks. +def markdown_nested_fences(): + """ + Do cool stuff. + + `````` + do_something( ''' + ``` + did i trick you? + ``` + ''' ) + `````` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring. When it's only empty lines, those are passed into the formatter +# and thus stripped. +def markdown_unclosed_empty_lines(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + + + + """ + pass + + +# Tests that we can end the block on the second to last line of the +# docstring. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + """ + pass + + +# Tests that an unclosed block with one extra line at the end is treated +# correctly. As per the CommonMark spec, an unclosed fenced code block contains +# everything following the opening fences. Since formatting the code snippet +# trims lines, the last empty line is removed here. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + """ + pass + + +# Tests that we can end the block on the actual last line of the docstring. +def markdown_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ```""" + pass + + +# Tests that an unclosed block that ends on the last line of a docstring +# is handled correctly. +def markdown_unclosed_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 )""" + pass + + +def markdown_with_blank_lines(): + """ + Do cool stuff. + + ```py + def cool_stuff( x ): + print( f"hi {x}" ); + + def other_stuff( y ): + print( y ) + ``` + + Done. 
+ """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_tab_second_line_spaces(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_odd_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +# Extra blanks should be *not* be preserved (unlike reST) because they are part +# of the code snippet (per CommonMark spec), and thus get trimmed as part of +# code formatting. +def markdown_extra_blanks(): + """ + Do cool stuff. + + ```py + + + cool_stuff( 1 ) + + + ``` + + Done. + """ + pass + + +# A block can contain many empty lines within it. +def markdown_extra_blanks_in_snippet(): + """ + Do cool stuff. + + ```py + + cool_stuff( 1 ) + + + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_weird_closing(): + """ + Code block with weirdly placed closing fences. + + ```python + cool_stuff( 1 ) + + ``` + # The above fences look like it shouldn't close the block, but we + # allow it to. The fences below re-open a block (until the end of + # the docstring), but it's invalid Python and thus doesn't get + # reformatted. + a = 10 + ``` + + Now the code block is closed + """ + pass + + +def markdown_over_indented(): + """ + A docstring + over intended + ```python + print( 5 ) + ``` + """ + pass + + +# This tests that we can have additional text after the language specifier. +def markdown_additional_info_string(): + """ + Do cool stuff. + + ```python tab="plugin.py" + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring, even if it isn't valid Python. Since it isn't valid Python, +# reformatting fails and the entire thing is skipped. +def markdown_skipped_unclosed_non_python(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + + I forgot to close the code block, and this is definitely not + Python. So nothing here gets formatted. + """ + pass + + +# This has a Python snippet with a docstring that contains a closing fence. +# This splits the embedded docstring and makes the overall snippet invalid. +def markdown_skipped_accidental_closure(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ''' + ``` + ''' + ``` + + Done. + """ + pass + + +# When a line is unindented all the way out before the standard indent of the +# docstring, the code reformatting ends up interacting poorly with the standard +# docstring whitespace normalization logic. This is probably a bug, and we +# should probably treat the Markdown block as valid, but for now, we detect +# the unindented line and declare the block as invalid and thus do no code +# reformatting. +# +# FIXME: Fixing this (if we think it's a bug) probably requires refactoring the +# docstring whitespace normalization to be aware of code snippets. Or perhaps +# plausibly, to do normalization *after* code snippets have been formatted. 
+def markdown_skipped_unindented_completely(): + """ + Do cool stuff. + + ```py +cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This test is fallout from treating fenced code blocks with unindented lines +# as invalid. We probably should treat this as a valid block. Indeed, if we +# remove the logic that makes the `markdown_skipped_unindented_completely` test +# pass, then this code snippet will get reformatted correctly. +def markdown_skipped_unindented_somewhat(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This tests that if a Markdown block contains a line that has less of an +# indent than another line. +# +# There is some judgment involved in what the right behavior is here. We +# could "normalize" the indentation so that the minimum is the indent of the +# opening fence line. If we did that here, then the code snippet would become +# valid and format as Python. But at time of writing, we don't, which leads to +# inconsistent indentation and thus invalid Python. +def markdown_skipped_unindented_with_inconsistent_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_doctest(): + """ + Do cool stuff. + + ```py + >>> cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_literal(): + """ + Do cool stuff. + + ```py + And do this:: + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_directive(): + """ + Do cool stuff. + + ```py + .. code-block:: python + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass ``` ## Outputs ### Output 1 ``` -indent-style = space -line-width = 88 -indent-width = 4 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -414,6 +1440,27 @@ def doctest_last_line_continued(): pass +# Test that a doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line(): + """ + Do cool stuff. + + >>> cool_stuff( x )""" + pass + + +# Test that a continued doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line_continued(): + """ + Do cool stuff. + + >>> cool_stuff( x ) + ... more( y )""" + pass + + # Test that a doctest is correctly identified and formatted with a blank # continuation line. def doctest_blank_continued(): @@ -673,19 +1720,1024 @@ def doctest_invalid_skipped_with_triple_double_in_single_quote_string(): >>> x = '\"\"\"' """ pass + + +############################################################################### +# reStructuredText CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# reStructuredText formatted code blocks. 
+# +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#literal-blocks +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-code-block +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#literal-blocks +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-30 +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-38 +############################################################################### + + +def rst_literal_simple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_literal_simple_continued(): + """ + Do cool stuff:: + + def cool_stuff( x ): + print( f"hi {x}" ); + + Done. + """ + pass + + +# Tests that we can end the literal block on the second +# to last line of the docstring. +def rst_literal_second_to_last(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + """ + pass + + +# Tests that we can end the literal block on the actual +# last line of the docstring. +def rst_literal_actually_last(): + """ + Do cool stuff:: + + cool_stuff( 1 )""" + pass + + +def rst_literal_with_blank_lines(): + """ + Do cool stuff:: + + def cool_stuff( x ): + print( f"hi {x}" ); + + def other_stuff( y ): + print( y ) + + Done. + """ + pass + + +# Extra blanks should be preserved. +def rst_literal_extra_blanks(): + """ + Do cool stuff:: + + + + cool_stuff( 1 ) + + + + Done. + """ + pass + + +# If a literal block is never properly ended (via a non-empty unindented line), +# then the end of the block should be the last non-empty line. And subsequent +# empty lines should be preserved as-is. +def rst_literal_extra_blanks_at_end(): + """ + Do cool stuff:: + + + cool_stuff( 1 ) + + + + """ + pass + + +# A literal block can contain many empty lines and it should not end the block +# if it continues. +def rst_literal_extra_blanks_in_snippet(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + + cool_stuff( 2 ) + + Done. + """ + pass + + +# This tests that a unindented line appearing after an indented line (but where +# the indent is still beyond the minimum) gets formatted properly. +def rst_literal_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( ''' + hiya''' ) + + Done. + """ + pass + + +# This checks that if the first line in a code snippet has been indented with +# tabs, then so long as its "indentation length" is considered bigger than the +# line with `::`, it is reformatted as code. +# +# (If your tabwidth is set to 4, then it looks like the code snippet +# isn't indented at all, which is perhaps counter-intuitive. Indeed, reST +# itself also seems to recognize this as a code block, although it appears +# under-specified.) +def rst_literal_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Another test with tabs, except in this case, if your tabwidth is less than +# 8, than the code snippet actually looks like its indent is *less* than the +# opening line with a `::`. 
One might presume this means that the code snippet +# is not treated as a literal block and thus not reformatted, but since we +# assume all tabs have tabwidth=8 when computing indentation length, the code +# snippet is actually seen as being more indented than the opening `::` line. +# As with the above example, reST seems to behave the same way here. +def rst_literal_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that if two lines in a literal block are indented to the same level +# but by different means (tabs versus spaces), then we correctly recognize the +# block and format it. +def rst_literal_first_line_tab_second_line_spaces(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that when two lines in a code snippet have weird and inconsistent +# indentation, the code still gets formatted so long as the indent is greater +# than the indent of the `::` line. +# +# In this case, the minimum indent is 5 spaces (from the second line) where as +# the first line has an indent of 8 spaces via a tab (by assuming tabwidth=8). +# The minimum indent is stripped from each code line. Since tabs aren't +# divisible, the entire tab is stripped, which means the first and second lines +# wind up with the same level of indentation. +# +# An alternative behavior here would be that the tab is replaced with 3 spaces +# instead of being stripped entirely. The code snippet itself would then have +# inconsistent indentation to the point of being invalid Python, and thus code +# formatting would be skipped. +# +# I decided on the former behavior because it seems a bit easier to implement, +# but we might want to switch to the alternative if cases like this show up in +# the real world. ---AG +def rst_literal_odd_indentation(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that having a line with a lone `::` works as an introduction of a +# literal block. +def rst_literal_lone_colon(): + """ + Do cool stuff. + + :: + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_simple(): + """ + .. code-block:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_case_insensitive(): + """ + .. cOdE-bLoCk:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_sourcecode(): + """ + .. sourcecode:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_options(): + """ + .. code-block:: python + :linenos: + :emphasize-lines: 2,3 + :name: blah blah + + cool_stuff( 1 ) + cool_stuff( 2 ) + cool_stuff( 3 ) + cool_stuff( 4 ) + + Done. + """ + pass + + +# In this case, since `pycon` isn't recognized as a Python code snippet, the +# docstring reformatter ignores it. But it then picks up the doctest and +# reformats it. +def rst_directive_doctest(): + """ + .. code-block:: pycon + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +# This checks that if the first non-empty line after the start of a literal +# block is not indented more than the line containing the `::`, then it is not +# treated as a code snippet. +def rst_literal_skipped_first_line_not_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. 
+ """ + pass + + +# Like the test above, but inserts an indented line after the un-indented one. +# This should not cause the literal block to be resumed. +def rst_literal_skipped_first_line_not_indented_then_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# This also checks that a code snippet is not reformatted when the indentation +# of the first line is not more than the line with `::`, but this uses tabs to +# make it a little more confounding. It relies on the fact that indentation +# length is computed by assuming a tabwidth equal to 8. reST also rejects this +# and doesn't treat it as a literal block. +def rst_literal_skipped_first_line_not_indented_tab(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the previous test, but adds a second line. +def rst_literal_skipped_first_line_not_indented_tab_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that a code block with a second line that is not properly indented gets +# skipped. A valid code block needs to have an empty line separating these. +# +# One trick here is that we need to make sure the Python code in the snippet is +# valid, otherwise it would be skipped because of invalid Python. +def rst_literal_skipped_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( ''' + hiya''' ) + + Done. + """ + pass + + +# In this test, we write what looks like a code-block, but it should be treated +# as invalid due to the missing `language` argument. +# +# It does still look like it could be a literal block according to the literal +# rules, but we currently consider the `.. ` prefix to indicate that it is not +# a literal block. +def rst_literal_skipped_not_directive(): + """ + .. code-block:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# In this test, we start a line with `.. `, which makes it look like it might +# be a directive. But instead continue it as if it was just some periods from +# the previous line, and then try to end it by starting a literal block. +# +# But because of the `.. ` in the beginning, we wind up not treating this as a +# code snippet. The reST render I was using to test things does actually treat +# this as a code block, so we may be out of conformance here. +def rst_literal_skipped_possible_false_negative(): + """ + This is a test. + .. This is a test:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# This tests that a doctest inside of a reST literal block doesn't get +# reformatted. It's plausible this isn't the right behavior, but it also seems +# like it might be the right behavior since it is a literal block. (The doctest +# makes the Python code invalid.) +def rst_literal_skipped_doctest(): + """ + Do cool stuff:: + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_literal_skipped_markdown(): + """ + Do cool stuff:: + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def rst_directive_skipped_not_indented(): + """ + .. code-block:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_skipped_wrong_language(): + """ + .. code-block:: rust + + cool_stuff( 1 ) + + Done. + """ + pass + + +# This gets skipped for the same reason that the doctest in a literal block +# gets skipped. +def rst_directive_skipped_doctest(): + """ + .. code-block:: python + + >>> cool_stuff( 1 ) + + Done. 
+ """ + pass + + +############################################################################### +# Markdown CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# Markdown fenced code blocks. +# +# See: https://spec.commonmark.org/0.30/#fenced-code-blocks +############################################################################### + + +def markdown_simple(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_simple_continued(): + """ + Do cool stuff. + + ```python + def cool_stuff( x ): + print( f"hi {x}" ); + ``` + + Done. + """ + pass + + +# Tests that unlabeled Markdown fenced code blocks are assumed to be Python. +def markdown_unlabeled(): + """ + Do cool stuff. + + ``` + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# Tests that fenced code blocks using tildes work. +def markdown_tildes(): + """ + Do cool stuff. + + ~~~py + cool_stuff( 1 ) + ~~~ + + Done. + """ + pass + + +# Tests that a longer closing fence is just fine and dandy. +def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + `````` + + Done. + """ + pass + + +# Tests that an invalid closing fence is treated as invalid. +# +# We embed it into a docstring so that the surrounding Python +# remains valid. +def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ''' + ```invalid + ''' + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +# Tests that one can nest fenced code blocks by using different numbers of +# backticks. +def markdown_nested_fences(): + """ + Do cool stuff. + + `````` + do_something( ''' + ``` + did i trick you? + ``` + ''' ) + `````` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring. When it's only empty lines, those are passed into the formatter +# and thus stripped. +def markdown_unclosed_empty_lines(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + + + + """ + pass + + +# Tests that we can end the block on the second to last line of the +# docstring. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + """ + pass + + +# Tests that an unclosed block with one extra line at the end is treated +# correctly. As per the CommonMark spec, an unclosed fenced code block contains +# everything following the opening fences. Since formatting the code snippet +# trims lines, the last empty line is removed here. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + """ + pass + + +# Tests that we can end the block on the actual last line of the docstring. +def markdown_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ```""" + pass + + +# Tests that an unclosed block that ends on the last line of a docstring +# is handled correctly. +def markdown_unclosed_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 )""" + pass + + +def markdown_with_blank_lines(): + """ + Do cool stuff. + + ```py + def cool_stuff( x ): + print( f"hi {x}" ); + + def other_stuff( y ): + print( y ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff. 
+ + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_tab_second_line_spaces(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_odd_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +# Extra blanks should be *not* be preserved (unlike reST) because they are part +# of the code snippet (per CommonMark spec), and thus get trimmed as part of +# code formatting. +def markdown_extra_blanks(): + """ + Do cool stuff. + + ```py + + + cool_stuff( 1 ) + + + ``` + + Done. + """ + pass + + +# A block can contain many empty lines within it. +def markdown_extra_blanks_in_snippet(): + """ + Do cool stuff. + + ```py + + cool_stuff( 1 ) + + + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_weird_closing(): + """ + Code block with weirdly placed closing fences. + + ```python + cool_stuff( 1 ) + + ``` + # The above fences look like it shouldn't close the block, but we + # allow it to. The fences below re-open a block (until the end of + # the docstring), but it's invalid Python and thus doesn't get + # reformatted. + a = 10 + ``` + + Now the code block is closed + """ + pass + + +def markdown_over_indented(): + """ + A docstring + over intended + ```python + print( 5 ) + ``` + """ + pass + + +# This tests that we can have additional text after the language specifier. +def markdown_additional_info_string(): + """ + Do cool stuff. + + ```python tab="plugin.py" + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring, even if it isn't valid Python. Since it isn't valid Python, +# reformatting fails and the entire thing is skipped. +def markdown_skipped_unclosed_non_python(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + + I forgot to close the code block, and this is definitely not + Python. So nothing here gets formatted. + """ + pass + + +# This has a Python snippet with a docstring that contains a closing fence. +# This splits the embedded docstring and makes the overall snippet invalid. +def markdown_skipped_accidental_closure(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ''' + ``` + ''' + ``` + + Done. + """ + pass + + +# When a line is unindented all the way out before the standard indent of the +# docstring, the code reformatting ends up interacting poorly with the standard +# docstring whitespace normalization logic. This is probably a bug, and we +# should probably treat the Markdown block as valid, but for now, we detect +# the unindented line and declare the block as invalid and thus do no code +# reformatting. +# +# FIXME: Fixing this (if we think it's a bug) probably requires refactoring the +# docstring whitespace normalization to be aware of code snippets. Or perhaps +# plausibly, to do normalization *after* code snippets have been formatted. +def markdown_skipped_unindented_completely(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This test is fallout from treating fenced code blocks with unindented lines +# as invalid. We probably should treat this as a valid block. 
Indeed, if we +# remove the logic that makes the `markdown_skipped_unindented_completely` test +# pass, then this code snippet will get reformatted correctly. +def markdown_skipped_unindented_somewhat(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This tests that if a Markdown block contains a line that has less of an +# indent than another line. +# +# There is some judgment involved in what the right behavior is here. We +# could "normalize" the indentation so that the minimum is the indent of the +# opening fence line. If we did that here, then the code snippet would become +# valid and format as Python. But at time of writing, we don't, which leads to +# inconsistent indentation and thus invalid Python. +def markdown_skipped_unindented_with_inconsistent_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_doctest(): + """ + Do cool stuff. + + ```py + >>> cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_literal(): + """ + Do cool stuff. + + ```py + And do this:: + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_directive(): + """ + Do cool stuff. + + ```py + .. code-block:: python + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass ``` ### Output 2 ``` -indent-style = space -line-width = 88 -indent-width = 2 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 2 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -758,6 +2810,27 @@ def doctest_last_line_continued(): pass +# Test that a doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line(): + """ + Do cool stuff. + + >>> cool_stuff( x )""" + pass + + +# Test that a continued doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line_continued(): + """ + Do cool stuff. + + >>> cool_stuff( x ) + ... more( y )""" + pass + + # Test that a doctest is correctly identified and formatted with a blank # continuation line. def doctest_blank_continued(): @@ -1017,19 +3090,1024 @@ def doctest_invalid_skipped_with_triple_double_in_single_quote_string(): >>> x = '\"\"\"' """ pass + + +############################################################################### +# reStructuredText CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# reStructuredText formatted code blocks. +# +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#literal-blocks +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-code-block +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#literal-blocks +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-30 +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-38 +############################################################################### + + +def rst_literal_simple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_literal_simple_continued(): + """ + Do cool stuff:: + + def cool_stuff( x ): + print( f"hi {x}" ); + + Done. 
+ """ + pass + + +# Tests that we can end the literal block on the second +# to last line of the docstring. +def rst_literal_second_to_last(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + """ + pass + + +# Tests that we can end the literal block on the actual +# last line of the docstring. +def rst_literal_actually_last(): + """ + Do cool stuff:: + + cool_stuff( 1 )""" + pass + + +def rst_literal_with_blank_lines(): + """ + Do cool stuff:: + + def cool_stuff( x ): + print( f"hi {x}" ); + + def other_stuff( y ): + print( y ) + + Done. + """ + pass + + +# Extra blanks should be preserved. +def rst_literal_extra_blanks(): + """ + Do cool stuff:: + + + + cool_stuff( 1 ) + + + + Done. + """ + pass + + +# If a literal block is never properly ended (via a non-empty unindented line), +# then the end of the block should be the last non-empty line. And subsequent +# empty lines should be preserved as-is. +def rst_literal_extra_blanks_at_end(): + """ + Do cool stuff:: + + + cool_stuff( 1 ) + + + + """ + pass + + +# A literal block can contain many empty lines and it should not end the block +# if it continues. +def rst_literal_extra_blanks_in_snippet(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + + cool_stuff( 2 ) + + Done. + """ + pass + + +# This tests that a unindented line appearing after an indented line (but where +# the indent is still beyond the minimum) gets formatted properly. +def rst_literal_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( ''' + hiya''' ) + + Done. + """ + pass + + +# This checks that if the first line in a code snippet has been indented with +# tabs, then so long as its "indentation length" is considered bigger than the +# line with `::`, it is reformatted as code. +# +# (If your tabwidth is set to 4, then it looks like the code snippet +# isn't indented at all, which is perhaps counter-intuitive. Indeed, reST +# itself also seems to recognize this as a code block, although it appears +# under-specified.) +def rst_literal_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Another test with tabs, except in this case, if your tabwidth is less than +# 8, than the code snippet actually looks like its indent is *less* than the +# opening line with a `::`. One might presume this means that the code snippet +# is not treated as a literal block and thus not reformatted, but since we +# assume all tabs have tabwidth=8 when computing indentation length, the code +# snippet is actually seen as being more indented than the opening `::` line. +# As with the above example, reST seems to behave the same way here. +def rst_literal_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that if two lines in a literal block are indented to the same level +# but by different means (tabs versus spaces), then we correctly recognize the +# block and format it. +def rst_literal_first_line_tab_second_line_spaces(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. 
+ """ + pass + + +# Tests that when two lines in a code snippet have weird and inconsistent +# indentation, the code still gets formatted so long as the indent is greater +# than the indent of the `::` line. +# +# In this case, the minimum indent is 5 spaces (from the second line) where as +# the first line has an indent of 8 spaces via a tab (by assuming tabwidth=8). +# The minimum indent is stripped from each code line. Since tabs aren't +# divisible, the entire tab is stripped, which means the first and second lines +# wind up with the same level of indentation. +# +# An alternative behavior here would be that the tab is replaced with 3 spaces +# instead of being stripped entirely. The code snippet itself would then have +# inconsistent indentation to the point of being invalid Python, and thus code +# formatting would be skipped. +# +# I decided on the former behavior because it seems a bit easier to implement, +# but we might want to switch to the alternative if cases like this show up in +# the real world. ---AG +def rst_literal_odd_indentation(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that having a line with a lone `::` works as an introduction of a +# literal block. +def rst_literal_lone_colon(): + """ + Do cool stuff. + + :: + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_simple(): + """ + .. code-block:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_case_insensitive(): + """ + .. cOdE-bLoCk:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_sourcecode(): + """ + .. sourcecode:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_options(): + """ + .. code-block:: python + :linenos: + :emphasize-lines: 2,3 + :name: blah blah + + cool_stuff( 1 ) + cool_stuff( 2 ) + cool_stuff( 3 ) + cool_stuff( 4 ) + + Done. + """ + pass + + +# In this case, since `pycon` isn't recognized as a Python code snippet, the +# docstring reformatter ignores it. But it then picks up the doctest and +# reformats it. +def rst_directive_doctest(): + """ + .. code-block:: pycon + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +# This checks that if the first non-empty line after the start of a literal +# block is not indented more than the line containing the `::`, then it is not +# treated as a code snippet. +def rst_literal_skipped_first_line_not_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but inserts an indented line after the un-indented one. +# This should not cause the literal block to be resumed. +def rst_literal_skipped_first_line_not_indented_then_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# This also checks that a code snippet is not reformatted when the indentation +# of the first line is not more than the line with `::`, but this uses tabs to +# make it a little more confounding. It relies on the fact that indentation +# length is computed by assuming a tabwidth equal to 8. reST also rejects this +# and doesn't treat it as a literal block. +def rst_literal_skipped_first_line_not_indented_tab(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the previous test, but adds a second line. +def rst_literal_skipped_first_line_not_indented_tab_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. 
+ """ + pass + + +# Tests that a code block with a second line that is not properly indented gets +# skipped. A valid code block needs to have an empty line separating these. +# +# One trick here is that we need to make sure the Python code in the snippet is +# valid, otherwise it would be skipped because of invalid Python. +def rst_literal_skipped_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( ''' + hiya''' ) + + Done. + """ + pass + + +# In this test, we write what looks like a code-block, but it should be treated +# as invalid due to the missing `language` argument. +# +# It does still look like it could be a literal block according to the literal +# rules, but we currently consider the `.. ` prefix to indicate that it is not +# a literal block. +def rst_literal_skipped_not_directive(): + """ + .. code-block:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# In this test, we start a line with `.. `, which makes it look like it might +# be a directive. But instead continue it as if it was just some periods from +# the previous line, and then try to end it by starting a literal block. +# +# But because of the `.. ` in the beginning, we wind up not treating this as a +# code snippet. The reST render I was using to test things does actually treat +# this as a code block, so we may be out of conformance here. +def rst_literal_skipped_possible_false_negative(): + """ + This is a test. + .. This is a test:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# This tests that a doctest inside of a reST literal block doesn't get +# reformatted. It's plausible this isn't the right behavior, but it also seems +# like it might be the right behavior since it is a literal block. (The doctest +# makes the Python code invalid.) +def rst_literal_skipped_doctest(): + """ + Do cool stuff:: + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_literal_skipped_markdown(): + """ + Do cool stuff:: + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def rst_directive_skipped_not_indented(): + """ + .. code-block:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_skipped_wrong_language(): + """ + .. code-block:: rust + + cool_stuff( 1 ) + + Done. + """ + pass + + +# This gets skipped for the same reason that the doctest in a literal block +# gets skipped. +def rst_directive_skipped_doctest(): + """ + .. code-block:: python + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +############################################################################### +# Markdown CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# Markdown fenced code blocks. +# +# See: https://spec.commonmark.org/0.30/#fenced-code-blocks +############################################################################### + + +def markdown_simple(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_simple_continued(): + """ + Do cool stuff. + + ```python + def cool_stuff( x ): + print( f"hi {x}" ); + ``` + + Done. + """ + pass + + +# Tests that unlabeled Markdown fenced code blocks are assumed to be Python. +def markdown_unlabeled(): + """ + Do cool stuff. + + ``` + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# Tests that fenced code blocks using tildes work. +def markdown_tildes(): + """ + Do cool stuff. + + ~~~py + cool_stuff( 1 ) + ~~~ + + Done. + """ + pass + + +# Tests that a longer closing fence is just fine and dandy. 
+def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + `````` + + Done. + """ + pass + + +# Tests that an invalid closing fence is treated as invalid. +# +# We embed it into a docstring so that the surrounding Python +# remains valid. +def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ''' + ```invalid + ''' + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +# Tests that one can nest fenced code blocks by using different numbers of +# backticks. +def markdown_nested_fences(): + """ + Do cool stuff. + + `````` + do_something( ''' + ``` + did i trick you? + ``` + ''' ) + `````` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring. When it's only empty lines, those are passed into the formatter +# and thus stripped. +def markdown_unclosed_empty_lines(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + + + + """ + pass + + +# Tests that we can end the block on the second to last line of the +# docstring. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + """ + pass + + +# Tests that an unclosed block with one extra line at the end is treated +# correctly. As per the CommonMark spec, an unclosed fenced code block contains +# everything following the opening fences. Since formatting the code snippet +# trims lines, the last empty line is removed here. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + """ + pass + + +# Tests that we can end the block on the actual last line of the docstring. +def markdown_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ```""" + pass + + +# Tests that an unclosed block that ends on the last line of a docstring +# is handled correctly. +def markdown_unclosed_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 )""" + pass + + +def markdown_with_blank_lines(): + """ + Do cool stuff. + + ```py + def cool_stuff( x ): + print( f"hi {x}" ); + + def other_stuff( y ): + print( y ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_tab_second_line_spaces(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_odd_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +# Extra blanks should be *not* be preserved (unlike reST) because they are part +# of the code snippet (per CommonMark spec), and thus get trimmed as part of +# code formatting. +def markdown_extra_blanks(): + """ + Do cool stuff. + + ```py + + + cool_stuff( 1 ) + + + ``` + + Done. + """ + pass + + +# A block can contain many empty lines within it. +def markdown_extra_blanks_in_snippet(): + """ + Do cool stuff. + + ```py + + cool_stuff( 1 ) + + + cool_stuff( 2 ) + ``` + + Done. 
+ """ + pass + + +def markdown_weird_closing(): + """ + Code block with weirdly placed closing fences. + + ```python + cool_stuff( 1 ) + + ``` + # The above fences look like it shouldn't close the block, but we + # allow it to. The fences below re-open a block (until the end of + # the docstring), but it's invalid Python and thus doesn't get + # reformatted. + a = 10 + ``` + + Now the code block is closed + """ + pass + + +def markdown_over_indented(): + """ + A docstring + over intended + ```python + print( 5 ) + ``` + """ + pass + + +# This tests that we can have additional text after the language specifier. +def markdown_additional_info_string(): + """ + Do cool stuff. + + ```python tab="plugin.py" + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring, even if it isn't valid Python. Since it isn't valid Python, +# reformatting fails and the entire thing is skipped. +def markdown_skipped_unclosed_non_python(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + + I forgot to close the code block, and this is definitely not + Python. So nothing here gets formatted. + """ + pass + + +# This has a Python snippet with a docstring that contains a closing fence. +# This splits the embedded docstring and makes the overall snippet invalid. +def markdown_skipped_accidental_closure(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ''' + ``` + ''' + ``` + + Done. + """ + pass + + +# When a line is unindented all the way out before the standard indent of the +# docstring, the code reformatting ends up interacting poorly with the standard +# docstring whitespace normalization logic. This is probably a bug, and we +# should probably treat the Markdown block as valid, but for now, we detect +# the unindented line and declare the block as invalid and thus do no code +# reformatting. +# +# FIXME: Fixing this (if we think it's a bug) probably requires refactoring the +# docstring whitespace normalization to be aware of code snippets. Or perhaps +# plausibly, to do normalization *after* code snippets have been formatted. +def markdown_skipped_unindented_completely(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This test is fallout from treating fenced code blocks with unindented lines +# as invalid. We probably should treat this as a valid block. Indeed, if we +# remove the logic that makes the `markdown_skipped_unindented_completely` test +# pass, then this code snippet will get reformatted correctly. +def markdown_skipped_unindented_somewhat(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This tests that if a Markdown block contains a line that has less of an +# indent than another line. +# +# There is some judgment involved in what the right behavior is here. We +# could "normalize" the indentation so that the minimum is the indent of the +# opening fence line. If we did that here, then the code snippet would become +# valid and format as Python. But at time of writing, we don't, which leads to +# inconsistent indentation and thus invalid Python. +def markdown_skipped_unindented_with_inconsistent_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_doctest(): + """ + Do cool stuff. + + ```py + >>> cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_literal(): + """ + Do cool stuff. 
+ + ```py + And do this:: + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_directive(): + """ + Do cool stuff. + + ```py + .. code-block:: python + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass ``` ### Output 3 ``` -indent-style = tab -line-width = 88 -indent-width = 8 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = tab +line-width = 88 +indent-width = 8 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -1102,6 +4180,27 @@ def doctest_last_line_continued(): pass +# Test that a doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line(): + """ + Do cool stuff. + + >>> cool_stuff( x )""" + pass + + +# Test that a continued doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line_continued(): + """ + Do cool stuff. + + >>> cool_stuff( x ) + ... more( y )""" + pass + + # Test that a doctest is correctly identified and formatted with a blank # continuation line. def doctest_blank_continued(): @@ -1361,19 +4460,1024 @@ def doctest_invalid_skipped_with_triple_double_in_single_quote_string(): >>> x = '\"\"\"' """ pass + + +############################################################################### +# reStructuredText CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# reStructuredText formatted code blocks. +# +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#literal-blocks +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-code-block +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#literal-blocks +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-30 +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-38 +############################################################################### + + +def rst_literal_simple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_literal_simple_continued(): + """ + Do cool stuff:: + + def cool_stuff( x ): + print( f"hi {x}" ); + + Done. + """ + pass + + +# Tests that we can end the literal block on the second +# to last line of the docstring. +def rst_literal_second_to_last(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + """ + pass + + +# Tests that we can end the literal block on the actual +# last line of the docstring. +def rst_literal_actually_last(): + """ + Do cool stuff:: + + cool_stuff( 1 )""" + pass + + +def rst_literal_with_blank_lines(): + """ + Do cool stuff:: + + def cool_stuff( x ): + print( f"hi {x}" ); + + def other_stuff( y ): + print( y ) + + Done. + """ + pass + + +# Extra blanks should be preserved. +def rst_literal_extra_blanks(): + """ + Do cool stuff:: + + + + cool_stuff( 1 ) + + + + Done. + """ + pass + + +# If a literal block is never properly ended (via a non-empty unindented line), +# then the end of the block should be the last non-empty line. And subsequent +# empty lines should be preserved as-is. +def rst_literal_extra_blanks_at_end(): + """ + Do cool stuff:: + + + cool_stuff( 1 ) + + + + """ + pass + + +# A literal block can contain many empty lines and it should not end the block +# if it continues. 
+def rst_literal_extra_blanks_in_snippet(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + + cool_stuff( 2 ) + + Done. + """ + pass + + +# This tests that a unindented line appearing after an indented line (but where +# the indent is still beyond the minimum) gets formatted properly. +def rst_literal_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( ''' + hiya''' ) + + Done. + """ + pass + + +# This checks that if the first line in a code snippet has been indented with +# tabs, then so long as its "indentation length" is considered bigger than the +# line with `::`, it is reformatted as code. +# +# (If your tabwidth is set to 4, then it looks like the code snippet +# isn't indented at all, which is perhaps counter-intuitive. Indeed, reST +# itself also seems to recognize this as a code block, although it appears +# under-specified.) +def rst_literal_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Another test with tabs, except in this case, if your tabwidth is less than +# 8, than the code snippet actually looks like its indent is *less* than the +# opening line with a `::`. One might presume this means that the code snippet +# is not treated as a literal block and thus not reformatted, but since we +# assume all tabs have tabwidth=8 when computing indentation length, the code +# snippet is actually seen as being more indented than the opening `::` line. +# As with the above example, reST seems to behave the same way here. +def rst_literal_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that if two lines in a literal block are indented to the same level +# but by different means (tabs versus spaces), then we correctly recognize the +# block and format it. +def rst_literal_first_line_tab_second_line_spaces(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that when two lines in a code snippet have weird and inconsistent +# indentation, the code still gets formatted so long as the indent is greater +# than the indent of the `::` line. +# +# In this case, the minimum indent is 5 spaces (from the second line) where as +# the first line has an indent of 8 spaces via a tab (by assuming tabwidth=8). +# The minimum indent is stripped from each code line. Since tabs aren't +# divisible, the entire tab is stripped, which means the first and second lines +# wind up with the same level of indentation. +# +# An alternative behavior here would be that the tab is replaced with 3 spaces +# instead of being stripped entirely. The code snippet itself would then have +# inconsistent indentation to the point of being invalid Python, and thus code +# formatting would be skipped. +# +# I decided on the former behavior because it seems a bit easier to implement, +# but we might want to switch to the alternative if cases like this show up in +# the real world. ---AG +def rst_literal_odd_indentation(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. 
+ """ + pass + + +# Tests that having a line with a lone `::` works as an introduction of a +# literal block. +def rst_literal_lone_colon(): + """ + Do cool stuff. + + :: + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_simple(): + """ + .. code-block:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_case_insensitive(): + """ + .. cOdE-bLoCk:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_sourcecode(): + """ + .. sourcecode:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_options(): + """ + .. code-block:: python + :linenos: + :emphasize-lines: 2,3 + :name: blah blah + + cool_stuff( 1 ) + cool_stuff( 2 ) + cool_stuff( 3 ) + cool_stuff( 4 ) + + Done. + """ + pass + + +# In this case, since `pycon` isn't recognized as a Python code snippet, the +# docstring reformatter ignores it. But it then picks up the doctest and +# reformats it. +def rst_directive_doctest(): + """ + .. code-block:: pycon + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +# This checks that if the first non-empty line after the start of a literal +# block is not indented more than the line containing the `::`, then it is not +# treated as a code snippet. +def rst_literal_skipped_first_line_not_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but inserts an indented line after the un-indented one. +# This should not cause the literal block to be resumed. +def rst_literal_skipped_first_line_not_indented_then_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# This also checks that a code snippet is not reformatted when the indentation +# of the first line is not more than the line with `::`, but this uses tabs to +# make it a little more confounding. It relies on the fact that indentation +# length is computed by assuming a tabwidth equal to 8. reST also rejects this +# and doesn't treat it as a literal block. +def rst_literal_skipped_first_line_not_indented_tab(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the previous test, but adds a second line. +def rst_literal_skipped_first_line_not_indented_tab_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that a code block with a second line that is not properly indented gets +# skipped. A valid code block needs to have an empty line separating these. +# +# One trick here is that we need to make sure the Python code in the snippet is +# valid, otherwise it would be skipped because of invalid Python. +def rst_literal_skipped_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( ''' + hiya''' ) + + Done. + """ + pass + + +# In this test, we write what looks like a code-block, but it should be treated +# as invalid due to the missing `language` argument. +# +# It does still look like it could be a literal block according to the literal +# rules, but we currently consider the `.. ` prefix to indicate that it is not +# a literal block. +def rst_literal_skipped_not_directive(): + """ + .. code-block:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# In this test, we start a line with `.. `, which makes it look like it might +# be a directive. But instead continue it as if it was just some periods from +# the previous line, and then try to end it by starting a literal block. +# +# But because of the `.. 
` in the beginning, we wind up not treating this as a +# code snippet. The reST render I was using to test things does actually treat +# this as a code block, so we may be out of conformance here. +def rst_literal_skipped_possible_false_negative(): + """ + This is a test. + .. This is a test:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# This tests that a doctest inside of a reST literal block doesn't get +# reformatted. It's plausible this isn't the right behavior, but it also seems +# like it might be the right behavior since it is a literal block. (The doctest +# makes the Python code invalid.) +def rst_literal_skipped_doctest(): + """ + Do cool stuff:: + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_literal_skipped_markdown(): + """ + Do cool stuff:: + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def rst_directive_skipped_not_indented(): + """ + .. code-block:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_skipped_wrong_language(): + """ + .. code-block:: rust + + cool_stuff( 1 ) + + Done. + """ + pass + + +# This gets skipped for the same reason that the doctest in a literal block +# gets skipped. +def rst_directive_skipped_doctest(): + """ + .. code-block:: python + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +############################################################################### +# Markdown CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# Markdown fenced code blocks. +# +# See: https://spec.commonmark.org/0.30/#fenced-code-blocks +############################################################################### + + +def markdown_simple(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_simple_continued(): + """ + Do cool stuff. + + ```python + def cool_stuff( x ): + print( f"hi {x}" ); + ``` + + Done. + """ + pass + + +# Tests that unlabeled Markdown fenced code blocks are assumed to be Python. +def markdown_unlabeled(): + """ + Do cool stuff. + + ``` + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# Tests that fenced code blocks using tildes work. +def markdown_tildes(): + """ + Do cool stuff. + + ~~~py + cool_stuff( 1 ) + ~~~ + + Done. + """ + pass + + +# Tests that a longer closing fence is just fine and dandy. +def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + `````` + + Done. + """ + pass + + +# Tests that an invalid closing fence is treated as invalid. +# +# We embed it into a docstring so that the surrounding Python +# remains valid. +def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ''' + ```invalid + ''' + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +# Tests that one can nest fenced code blocks by using different numbers of +# backticks. +def markdown_nested_fences(): + """ + Do cool stuff. + + `````` + do_something( ''' + ``` + did i trick you? + ``` + ''' ) + `````` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring. When it's only empty lines, those are passed into the formatter +# and thus stripped. +def markdown_unclosed_empty_lines(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + + + + """ + pass + + +# Tests that we can end the block on the second to last line of the +# docstring. +def markdown_second_to_last(): + """ + Do cool stuff. 
+ + ```py + cool_stuff( 1 ) + ``` + """ + pass + + +# Tests that an unclosed block with one extra line at the end is treated +# correctly. As per the CommonMark spec, an unclosed fenced code block contains +# everything following the opening fences. Since formatting the code snippet +# trims lines, the last empty line is removed here. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + """ + pass + + +# Tests that we can end the block on the actual last line of the docstring. +def markdown_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ```""" + pass + + +# Tests that an unclosed block that ends on the last line of a docstring +# is handled correctly. +def markdown_unclosed_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 )""" + pass + + +def markdown_with_blank_lines(): + """ + Do cool stuff. + + ```py + def cool_stuff( x ): + print( f"hi {x}" ); + + def other_stuff( y ): + print( y ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_tab_second_line_spaces(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_odd_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +# Extra blanks should be *not* be preserved (unlike reST) because they are part +# of the code snippet (per CommonMark spec), and thus get trimmed as part of +# code formatting. +def markdown_extra_blanks(): + """ + Do cool stuff. + + ```py + + + cool_stuff( 1 ) + + + ``` + + Done. + """ + pass + + +# A block can contain many empty lines within it. +def markdown_extra_blanks_in_snippet(): + """ + Do cool stuff. + + ```py + + cool_stuff( 1 ) + + + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_weird_closing(): + """ + Code block with weirdly placed closing fences. + + ```python + cool_stuff( 1 ) + + ``` + # The above fences look like it shouldn't close the block, but we + # allow it to. The fences below re-open a block (until the end of + # the docstring), but it's invalid Python and thus doesn't get + # reformatted. + a = 10 + ``` + + Now the code block is closed + """ + pass + + +def markdown_over_indented(): + """ + A docstring + over intended + ```python + print( 5 ) + ``` + """ + pass + + +# This tests that we can have additional text after the language specifier. +def markdown_additional_info_string(): + """ + Do cool stuff. + + ```python tab="plugin.py" + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring, even if it isn't valid Python. Since it isn't valid Python, +# reformatting fails and the entire thing is skipped. +def markdown_skipped_unclosed_non_python(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + + I forgot to close the code block, and this is definitely not + Python. So nothing here gets formatted. 
+ """ + pass + + +# This has a Python snippet with a docstring that contains a closing fence. +# This splits the embedded docstring and makes the overall snippet invalid. +def markdown_skipped_accidental_closure(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ''' + ``` + ''' + ``` + + Done. + """ + pass + + +# When a line is unindented all the way out before the standard indent of the +# docstring, the code reformatting ends up interacting poorly with the standard +# docstring whitespace normalization logic. This is probably a bug, and we +# should probably treat the Markdown block as valid, but for now, we detect +# the unindented line and declare the block as invalid and thus do no code +# reformatting. +# +# FIXME: Fixing this (if we think it's a bug) probably requires refactoring the +# docstring whitespace normalization to be aware of code snippets. Or perhaps +# plausibly, to do normalization *after* code snippets have been formatted. +def markdown_skipped_unindented_completely(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This test is fallout from treating fenced code blocks with unindented lines +# as invalid. We probably should treat this as a valid block. Indeed, if we +# remove the logic that makes the `markdown_skipped_unindented_completely` test +# pass, then this code snippet will get reformatted correctly. +def markdown_skipped_unindented_somewhat(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This tests that if a Markdown block contains a line that has less of an +# indent than another line. +# +# There is some judgment involved in what the right behavior is here. We +# could "normalize" the indentation so that the minimum is the indent of the +# opening fence line. If we did that here, then the code snippet would become +# valid and format as Python. But at time of writing, we don't, which leads to +# inconsistent indentation and thus invalid Python. +def markdown_skipped_unindented_with_inconsistent_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_doctest(): + """ + Do cool stuff. + + ```py + >>> cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_literal(): + """ + Do cool stuff. + + ```py + And do this:: + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_directive(): + """ + Do cool stuff. + + ```py + .. code-block:: python + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass ``` ### Output 4 ``` -indent-style = tab -line-width = 88 -indent-width = 4 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = tab +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -1446,6 +5550,27 @@ def doctest_last_line_continued(): pass +# Test that a doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line(): + """ + Do cool stuff. + + >>> cool_stuff( x )""" + pass + + +# Test that a continued doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line_continued(): + """ + Do cool stuff. + + >>> cool_stuff( x ) + ... 
more( y )""" + pass + + # Test that a doctest is correctly identified and formatted with a blank # continuation line. def doctest_blank_continued(): @@ -1705,19 +5830,1024 @@ def doctest_invalid_skipped_with_triple_double_in_single_quote_string(): >>> x = '\"\"\"' """ pass + + +############################################################################### +# reStructuredText CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# reStructuredText formatted code blocks. +# +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#literal-blocks +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-code-block +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#literal-blocks +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-30 +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-38 +############################################################################### + + +def rst_literal_simple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_literal_simple_continued(): + """ + Do cool stuff:: + + def cool_stuff( x ): + print( f"hi {x}" ); + + Done. + """ + pass + + +# Tests that we can end the literal block on the second +# to last line of the docstring. +def rst_literal_second_to_last(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + """ + pass + + +# Tests that we can end the literal block on the actual +# last line of the docstring. +def rst_literal_actually_last(): + """ + Do cool stuff:: + + cool_stuff( 1 )""" + pass + + +def rst_literal_with_blank_lines(): + """ + Do cool stuff:: + + def cool_stuff( x ): + print( f"hi {x}" ); + + def other_stuff( y ): + print( y ) + + Done. + """ + pass + + +# Extra blanks should be preserved. +def rst_literal_extra_blanks(): + """ + Do cool stuff:: + + + + cool_stuff( 1 ) + + + + Done. + """ + pass + + +# If a literal block is never properly ended (via a non-empty unindented line), +# then the end of the block should be the last non-empty line. And subsequent +# empty lines should be preserved as-is. +def rst_literal_extra_blanks_at_end(): + """ + Do cool stuff:: + + + cool_stuff( 1 ) + + + + """ + pass + + +# A literal block can contain many empty lines and it should not end the block +# if it continues. +def rst_literal_extra_blanks_in_snippet(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + + cool_stuff( 2 ) + + Done. + """ + pass + + +# This tests that a unindented line appearing after an indented line (but where +# the indent is still beyond the minimum) gets formatted properly. +def rst_literal_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( ''' + hiya''' ) + + Done. + """ + pass + + +# This checks that if the first line in a code snippet has been indented with +# tabs, then so long as its "indentation length" is considered bigger than the +# line with `::`, it is reformatted as code. +# +# (If your tabwidth is set to 4, then it looks like the code snippet +# isn't indented at all, which is perhaps counter-intuitive. Indeed, reST +# itself also seems to recognize this as a code block, although it appears +# under-specified.) +def rst_literal_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. 
+def rst_literal_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Another test with tabs, except in this case, if your tabwidth is less than +# 8, than the code snippet actually looks like its indent is *less* than the +# opening line with a `::`. One might presume this means that the code snippet +# is not treated as a literal block and thus not reformatted, but since we +# assume all tabs have tabwidth=8 when computing indentation length, the code +# snippet is actually seen as being more indented than the opening `::` line. +# As with the above example, reST seems to behave the same way here. +def rst_literal_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that if two lines in a literal block are indented to the same level +# but by different means (tabs versus spaces), then we correctly recognize the +# block and format it. +def rst_literal_first_line_tab_second_line_spaces(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that when two lines in a code snippet have weird and inconsistent +# indentation, the code still gets formatted so long as the indent is greater +# than the indent of the `::` line. +# +# In this case, the minimum indent is 5 spaces (from the second line) where as +# the first line has an indent of 8 spaces via a tab (by assuming tabwidth=8). +# The minimum indent is stripped from each code line. Since tabs aren't +# divisible, the entire tab is stripped, which means the first and second lines +# wind up with the same level of indentation. +# +# An alternative behavior here would be that the tab is replaced with 3 spaces +# instead of being stripped entirely. The code snippet itself would then have +# inconsistent indentation to the point of being invalid Python, and thus code +# formatting would be skipped. +# +# I decided on the former behavior because it seems a bit easier to implement, +# but we might want to switch to the alternative if cases like this show up in +# the real world. ---AG +def rst_literal_odd_indentation(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that having a line with a lone `::` works as an introduction of a +# literal block. +def rst_literal_lone_colon(): + """ + Do cool stuff. + + :: + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_simple(): + """ + .. code-block:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_case_insensitive(): + """ + .. cOdE-bLoCk:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_sourcecode(): + """ + .. sourcecode:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_options(): + """ + .. code-block:: python + :linenos: + :emphasize-lines: 2,3 + :name: blah blah + + cool_stuff( 1 ) + cool_stuff( 2 ) + cool_stuff( 3 ) + cool_stuff( 4 ) + + Done. + """ + pass + + +# In this case, since `pycon` isn't recognized as a Python code snippet, the +# docstring reformatter ignores it. But it then picks up the doctest and +# reformats it. +def rst_directive_doctest(): + """ + .. code-block:: pycon + + >>> cool_stuff( 1 ) + + Done. 
+ """ + pass + + +# This checks that if the first non-empty line after the start of a literal +# block is not indented more than the line containing the `::`, then it is not +# treated as a code snippet. +def rst_literal_skipped_first_line_not_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but inserts an indented line after the un-indented one. +# This should not cause the literal block to be resumed. +def rst_literal_skipped_first_line_not_indented_then_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# This also checks that a code snippet is not reformatted when the indentation +# of the first line is not more than the line with `::`, but this uses tabs to +# make it a little more confounding. It relies on the fact that indentation +# length is computed by assuming a tabwidth equal to 8. reST also rejects this +# and doesn't treat it as a literal block. +def rst_literal_skipped_first_line_not_indented_tab(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the previous test, but adds a second line. +def rst_literal_skipped_first_line_not_indented_tab_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that a code block with a second line that is not properly indented gets +# skipped. A valid code block needs to have an empty line separating these. +# +# One trick here is that we need to make sure the Python code in the snippet is +# valid, otherwise it would be skipped because of invalid Python. +def rst_literal_skipped_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( ''' + hiya''' ) + + Done. + """ + pass + + +# In this test, we write what looks like a code-block, but it should be treated +# as invalid due to the missing `language` argument. +# +# It does still look like it could be a literal block according to the literal +# rules, but we currently consider the `.. ` prefix to indicate that it is not +# a literal block. +def rst_literal_skipped_not_directive(): + """ + .. code-block:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# In this test, we start a line with `.. `, which makes it look like it might +# be a directive. But instead continue it as if it was just some periods from +# the previous line, and then try to end it by starting a literal block. +# +# But because of the `.. ` in the beginning, we wind up not treating this as a +# code snippet. The reST render I was using to test things does actually treat +# this as a code block, so we may be out of conformance here. +def rst_literal_skipped_possible_false_negative(): + """ + This is a test. + .. This is a test:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# This tests that a doctest inside of a reST literal block doesn't get +# reformatted. It's plausible this isn't the right behavior, but it also seems +# like it might be the right behavior since it is a literal block. (The doctest +# makes the Python code invalid.) +def rst_literal_skipped_doctest(): + """ + Do cool stuff:: + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_literal_skipped_markdown(): + """ + Do cool stuff:: + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def rst_directive_skipped_not_indented(): + """ + .. code-block:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_skipped_wrong_language(): + """ + .. code-block:: rust + + cool_stuff( 1 ) + + Done. 
+ """ + pass + + +# This gets skipped for the same reason that the doctest in a literal block +# gets skipped. +def rst_directive_skipped_doctest(): + """ + .. code-block:: python + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +############################################################################### +# Markdown CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# Markdown fenced code blocks. +# +# See: https://spec.commonmark.org/0.30/#fenced-code-blocks +############################################################################### + + +def markdown_simple(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_simple_continued(): + """ + Do cool stuff. + + ```python + def cool_stuff( x ): + print( f"hi {x}" ); + ``` + + Done. + """ + pass + + +# Tests that unlabeled Markdown fenced code blocks are assumed to be Python. +def markdown_unlabeled(): + """ + Do cool stuff. + + ``` + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# Tests that fenced code blocks using tildes work. +def markdown_tildes(): + """ + Do cool stuff. + + ~~~py + cool_stuff( 1 ) + ~~~ + + Done. + """ + pass + + +# Tests that a longer closing fence is just fine and dandy. +def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + `````` + + Done. + """ + pass + + +# Tests that an invalid closing fence is treated as invalid. +# +# We embed it into a docstring so that the surrounding Python +# remains valid. +def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ''' + ```invalid + ''' + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +# Tests that one can nest fenced code blocks by using different numbers of +# backticks. +def markdown_nested_fences(): + """ + Do cool stuff. + + `````` + do_something( ''' + ``` + did i trick you? + ``` + ''' ) + `````` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring. When it's only empty lines, those are passed into the formatter +# and thus stripped. +def markdown_unclosed_empty_lines(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + + + + """ + pass + + +# Tests that we can end the block on the second to last line of the +# docstring. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + """ + pass + + +# Tests that an unclosed block with one extra line at the end is treated +# correctly. As per the CommonMark spec, an unclosed fenced code block contains +# everything following the opening fences. Since formatting the code snippet +# trims lines, the last empty line is removed here. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + """ + pass + + +# Tests that we can end the block on the actual last line of the docstring. +def markdown_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ```""" + pass + + +# Tests that an unclosed block that ends on the last line of a docstring +# is handled correctly. +def markdown_unclosed_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 )""" + pass + + +def markdown_with_blank_lines(): + """ + Do cool stuff. + + ```py + def cool_stuff( x ): + print( f"hi {x}" ); + + def other_stuff( y ): + print( y ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. 
+ """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_first_line_tab_second_line_spaces(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_odd_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +# Extra blanks should be *not* be preserved (unlike reST) because they are part +# of the code snippet (per CommonMark spec), and thus get trimmed as part of +# code formatting. +def markdown_extra_blanks(): + """ + Do cool stuff. + + ```py + + + cool_stuff( 1 ) + + + ``` + + Done. + """ + pass + + +# A block can contain many empty lines within it. +def markdown_extra_blanks_in_snippet(): + """ + Do cool stuff. + + ```py + + cool_stuff( 1 ) + + + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_weird_closing(): + """ + Code block with weirdly placed closing fences. + + ```python + cool_stuff( 1 ) + + ``` + # The above fences look like it shouldn't close the block, but we + # allow it to. The fences below re-open a block (until the end of + # the docstring), but it's invalid Python and thus doesn't get + # reformatted. + a = 10 + ``` + + Now the code block is closed + """ + pass + + +def markdown_over_indented(): + """ + A docstring + over intended + ```python + print( 5 ) + ``` + """ + pass + + +# This tests that we can have additional text after the language specifier. +def markdown_additional_info_string(): + """ + Do cool stuff. + + ```python tab="plugin.py" + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring, even if it isn't valid Python. Since it isn't valid Python, +# reformatting fails and the entire thing is skipped. +def markdown_skipped_unclosed_non_python(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + + I forgot to close the code block, and this is definitely not + Python. So nothing here gets formatted. + """ + pass + + +# This has a Python snippet with a docstring that contains a closing fence. +# This splits the embedded docstring and makes the overall snippet invalid. +def markdown_skipped_accidental_closure(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ''' + ``` + ''' + ``` + + Done. + """ + pass + + +# When a line is unindented all the way out before the standard indent of the +# docstring, the code reformatting ends up interacting poorly with the standard +# docstring whitespace normalization logic. This is probably a bug, and we +# should probably treat the Markdown block as valid, but for now, we detect +# the unindented line and declare the block as invalid and thus do no code +# reformatting. +# +# FIXME: Fixing this (if we think it's a bug) probably requires refactoring the +# docstring whitespace normalization to be aware of code snippets. Or perhaps +# plausibly, to do normalization *after* code snippets have been formatted. +def markdown_skipped_unindented_completely(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. 
+ """ + pass + + +# This test is fallout from treating fenced code blocks with unindented lines +# as invalid. We probably should treat this as a valid block. Indeed, if we +# remove the logic that makes the `markdown_skipped_unindented_completely` test +# pass, then this code snippet will get reformatted correctly. +def markdown_skipped_unindented_somewhat(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This tests that if a Markdown block contains a line that has less of an +# indent than another line. +# +# There is some judgment involved in what the right behavior is here. We +# could "normalize" the indentation so that the minimum is the indent of the +# opening fence line. If we did that here, then the code snippet would become +# valid and format as Python. But at time of writing, we don't, which leads to +# inconsistent indentation and thus invalid Python. +def markdown_skipped_unindented_with_inconsistent_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_doctest(): + """ + Do cool stuff. + + ```py + >>> cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_literal(): + """ + Do cool stuff. + + ```py + And do this:: + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_directive(): + """ + Do cool stuff. + + ```py + .. code-block:: python + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass ``` ### Output 5 ``` -indent-style = space -line-width = 88 -indent-width = 4 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Enabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Enabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -1790,6 +6920,27 @@ def doctest_last_line_continued(): pass +# Test that a doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line(): + """ + Do cool stuff. + + >>> cool_stuff(x)""" + pass + + +# Test that a continued doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line_continued(): + """ + Do cool stuff. + + >>> cool_stuff(x) + ... more(y)""" + pass + + # Test that a doctest is correctly identified and formatted with a blank # continuation line. def doctest_blank_continued(): @@ -1939,7 +7090,9 @@ def doctest_long_lines(): This won't get wrapped even though it exceeds our configured line width because it doesn't exceed the line width within this docstring. e.g, the `f` in `foo` is treated as the first column. - >>> foo, bar, quux = this_is_a_long_line(lion, giraffe, hippo, zeba, lemur, penguin, monkey) + >>> foo, bar, quux = this_is_a_long_line( + ... lion, giraffe, hippo, zeba, lemur, penguin, monkey + ... ) But this one is long enough to get wrapped. >>> foo, bar, quux = this_is_a_long_line( @@ -2049,19 +7202,1019 @@ def doctest_invalid_skipped_with_triple_double_in_single_quote_string(): >>> x = '\"\"\"' """ pass + + +############################################################################### +# reStructuredText CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# reStructuredText formatted code blocks. 
+# +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#literal-blocks +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-code-block +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#literal-blocks +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-30 +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-38 +############################################################################### + + +def rst_literal_simple(): + """ + Do cool stuff:: + + cool_stuff(1) + + Done. + """ + pass + + +def rst_literal_simple_continued(): + """ + Do cool stuff:: + + def cool_stuff(x): + print(f"hi {x}") + + Done. + """ + pass + + +# Tests that we can end the literal block on the second +# to last line of the docstring. +def rst_literal_second_to_last(): + """ + Do cool stuff:: + + cool_stuff(1) + """ + pass + + +# Tests that we can end the literal block on the actual +# last line of the docstring. +def rst_literal_actually_last(): + """ + Do cool stuff:: + + cool_stuff(1)""" + pass + + +def rst_literal_with_blank_lines(): + """ + Do cool stuff:: + + def cool_stuff(x): + print(f"hi {x}") + + + def other_stuff(y): + print(y) + + Done. + """ + pass + + +# Extra blanks should be preserved. +def rst_literal_extra_blanks(): + """ + Do cool stuff:: + + + + cool_stuff(1) + + + + Done. + """ + pass + + +# If a literal block is never properly ended (via a non-empty unindented line), +# then the end of the block should be the last non-empty line. And subsequent +# empty lines should be preserved as-is. +def rst_literal_extra_blanks_at_end(): + """ + Do cool stuff:: + + + cool_stuff(1) + + + + """ + pass + + +# A literal block can contain many empty lines and it should not end the block +# if it continues. +def rst_literal_extra_blanks_in_snippet(): + """ + Do cool stuff:: + + cool_stuff(1) + + + cool_stuff(2) + + Done. + """ + pass + + +# This tests that a unindented line appearing after an indented line (but where +# the indent is still beyond the minimum) gets formatted properly. +def rst_literal_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( + ''' + hiya''' + ) + + Done. + """ + pass + + +# This checks that if the first line in a code snippet has been indented with +# tabs, then so long as its "indentation length" is considered bigger than the +# line with `::`, it is reformatted as code. +# +# (If your tabwidth is set to 4, then it looks like the code snippet +# isn't indented at all, which is perhaps counter-intuitive. Indeed, reST +# itself also seems to recognize this as a code block, although it appears +# under-specified.) +def rst_literal_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff:: + + cool_stuff(1) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Another test with tabs, except in this case, if your tabwidth is less than +# 8, than the code snippet actually looks like its indent is *less* than the +# opening line with a `::`. One might presume this means that the code snippet +# is not treated as a literal block and thus not reformatted, but since we +# assume all tabs have tabwidth=8 when computing indentation length, the code +# snippet is actually seen as being more indented than the opening `::` line. 
+# As with the above example, reST seems to behave the same way here. +def rst_literal_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff:: + + cool_stuff(1) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Tests that if two lines in a literal block are indented to the same level +# but by different means (tabs versus spaces), then we correctly recognize the +# block and format it. +def rst_literal_first_line_tab_second_line_spaces(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Tests that when two lines in a code snippet have weird and inconsistent +# indentation, the code still gets formatted so long as the indent is greater +# than the indent of the `::` line. +# +# In this case, the minimum indent is 5 spaces (from the second line) where as +# the first line has an indent of 8 spaces via a tab (by assuming tabwidth=8). +# The minimum indent is stripped from each code line. Since tabs aren't +# divisible, the entire tab is stripped, which means the first and second lines +# wind up with the same level of indentation. +# +# An alternative behavior here would be that the tab is replaced with 3 spaces +# instead of being stripped entirely. The code snippet itself would then have +# inconsistent indentation to the point of being invalid Python, and thus code +# formatting would be skipped. +# +# I decided on the former behavior because it seems a bit easier to implement, +# but we might want to switch to the alternative if cases like this show up in +# the real world. ---AG +def rst_literal_odd_indentation(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Tests that having a line with a lone `::` works as an introduction of a +# literal block. +def rst_literal_lone_colon(): + """ + Do cool stuff. + + :: + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_simple(): + """ + .. code-block:: python + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_case_insensitive(): + """ + .. cOdE-bLoCk:: python + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_sourcecode(): + """ + .. sourcecode:: python + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_options(): + """ + .. code-block:: python + :linenos: + :emphasize-lines: 2,3 + :name: blah blah + + cool_stuff(1) + cool_stuff(2) + cool_stuff(3) + cool_stuff(4) + + Done. + """ + pass + + +# In this case, since `pycon` isn't recognized as a Python code snippet, the +# docstring reformatter ignores it. But it then picks up the doctest and +# reformats it. +def rst_directive_doctest(): + """ + .. code-block:: pycon + + >>> cool_stuff(1) + + Done. + """ + pass + + +# This checks that if the first non-empty line after the start of a literal +# block is not indented more than the line containing the `::`, then it is not +# treated as a code snippet. +def rst_literal_skipped_first_line_not_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but inserts an indented line after the un-indented one. +# This should not cause the literal block to be resumed. +def rst_literal_skipped_first_line_not_indented_then_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. 
+ """ + pass + + +# This also checks that a code snippet is not reformatted when the indentation +# of the first line is not more than the line with `::`, but this uses tabs to +# make it a little more confounding. It relies on the fact that indentation +# length is computed by assuming a tabwidth equal to 8. reST also rejects this +# and doesn't treat it as a literal block. +def rst_literal_skipped_first_line_not_indented_tab(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the previous test, but adds a second line. +def rst_literal_skipped_first_line_not_indented_tab_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that a code block with a second line that is not properly indented gets +# skipped. A valid code block needs to have an empty line separating these. +# +# One trick here is that we need to make sure the Python code in the snippet is +# valid, otherwise it would be skipped because of invalid Python. +def rst_literal_skipped_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( ''' + hiya''' ) + + Done. + """ + pass + + +# In this test, we write what looks like a code-block, but it should be treated +# as invalid due to the missing `language` argument. +# +# It does still look like it could be a literal block according to the literal +# rules, but we currently consider the `.. ` prefix to indicate that it is not +# a literal block. +def rst_literal_skipped_not_directive(): + """ + .. code-block:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# In this test, we start a line with `.. `, which makes it look like it might +# be a directive. But instead continue it as if it was just some periods from +# the previous line, and then try to end it by starting a literal block. +# +# But because of the `.. ` in the beginning, we wind up not treating this as a +# code snippet. The reST render I was using to test things does actually treat +# this as a code block, so we may be out of conformance here. +def rst_literal_skipped_possible_false_negative(): + """ + This is a test. + .. This is a test:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# This tests that a doctest inside of a reST literal block doesn't get +# reformatted. It's plausible this isn't the right behavior, but it also seems +# like it might be the right behavior since it is a literal block. (The doctest +# makes the Python code invalid.) +def rst_literal_skipped_doctest(): + """ + Do cool stuff:: + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_literal_skipped_markdown(): + """ + Do cool stuff:: + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def rst_directive_skipped_not_indented(): + """ + .. code-block:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_skipped_wrong_language(): + """ + .. code-block:: rust + + cool_stuff( 1 ) + + Done. + """ + pass + + +# This gets skipped for the same reason that the doctest in a literal block +# gets skipped. +def rst_directive_skipped_doctest(): + """ + .. code-block:: python + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +############################################################################### +# Markdown CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# Markdown fenced code blocks. 
+# +# See: https://spec.commonmark.org/0.30/#fenced-code-blocks +############################################################################### + + +def markdown_simple(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +def markdown_simple_continued(): + """ + Do cool stuff. + + ```python + def cool_stuff(x): + print(f"hi {x}") + ``` + + Done. + """ + pass + + +# Tests that unlabeled Markdown fenced code blocks are assumed to be Python. +def markdown_unlabeled(): + """ + Do cool stuff. + + ``` + cool_stuff(1) + ``` + + Done. + """ + pass + + +# Tests that fenced code blocks using tildes work. +def markdown_tildes(): + """ + Do cool stuff. + + ~~~py + cool_stuff(1) + ~~~ + + Done. + """ + pass + + +# Tests that a longer closing fence is just fine and dandy. +def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + `````` + + Done. + """ + pass + + +# Tests that an invalid closing fence is treated as invalid. +# +# We embed it into a docstring so that the surrounding Python +# remains valid. +def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ''' + ```invalid + ''' + cool_stuff(2) + ``` + + Done. + """ + pass + + +# Tests that one can nest fenced code blocks by using different numbers of +# backticks. +def markdown_nested_fences(): + """ + Do cool stuff. + + `````` + do_something( + ''' + ``` + did i trick you? + ``` + ''' + ) + `````` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring. When it's only empty lines, those are passed into the formatter +# and thus stripped. +def markdown_unclosed_empty_lines(): + """ + Do cool stuff. + + ```py + cool_stuff(1)""" + pass + + +# Tests that we can end the block on the second to last line of the +# docstring. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + """ + pass + + +# Tests that an unclosed block with one extra line at the end is treated +# correctly. As per the CommonMark spec, an unclosed fenced code block contains +# everything following the opening fences. Since formatting the code snippet +# trims lines, the last empty line is removed here. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1)""" + pass + + +# Tests that we can end the block on the actual last line of the docstring. +def markdown_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ```""" + pass + + +# Tests that an unclosed block that ends on the last line of a docstring +# is handled correctly. +def markdown_unclosed_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1)""" + pass + + +def markdown_with_blank_lines(): + """ + Do cool stuff. + + ```py + def cool_stuff(x): + print(f"hi {x}") + + + def other_stuff(y): + print(y) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_first_line_tab_second_line_spaces(): + """ + Do cool stuff. 
+ + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_odd_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +# Extra blanks should be *not* be preserved (unlike reST) because they are part +# of the code snippet (per CommonMark spec), and thus get trimmed as part of +# code formatting. +def markdown_extra_blanks(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +# A block can contain many empty lines within it. +def markdown_extra_blanks_in_snippet(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + + + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_weird_closing(): + """ + Code block with weirdly placed closing fences. + + ```python + cool_stuff(1) + ``` + # The above fences look like it shouldn't close the block, but we + # allow it to. The fences below re-open a block (until the end of + # the docstring), but it's invalid Python and thus doesn't get + # reformatted. + a = 10 + ``` + + Now the code block is closed + """ + pass + + +def markdown_over_indented(): + """ + A docstring + over intended + ```python + print(5) + ``` + """ + pass + + +# This tests that we can have additional text after the language specifier. +def markdown_additional_info_string(): + """ + Do cool stuff. + + ```python tab="plugin.py" + cool_stuff(1) + ``` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring, even if it isn't valid Python. Since it isn't valid Python, +# reformatting fails and the entire thing is skipped. +def markdown_skipped_unclosed_non_python(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + + I forgot to close the code block, and this is definitely not + Python. So nothing here gets formatted. + """ + pass + + +# This has a Python snippet with a docstring that contains a closing fence. +# This splits the embedded docstring and makes the overall snippet invalid. +def markdown_skipped_accidental_closure(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ''' + ``` + ''' + ``` + + Done. + """ + pass + + +# When a line is unindented all the way out before the standard indent of the +# docstring, the code reformatting ends up interacting poorly with the standard +# docstring whitespace normalization logic. This is probably a bug, and we +# should probably treat the Markdown block as valid, but for now, we detect +# the unindented line and declare the block as invalid and thus do no code +# reformatting. +# +# FIXME: Fixing this (if we think it's a bug) probably requires refactoring the +# docstring whitespace normalization to be aware of code snippets. Or perhaps +# plausibly, to do normalization *after* code snippets have been formatted. +def markdown_skipped_unindented_completely(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This test is fallout from treating fenced code blocks with unindented lines +# as invalid. We probably should treat this as a valid block. Indeed, if we +# remove the logic that makes the `markdown_skipped_unindented_completely` test +# pass, then this code snippet will get reformatted correctly. +def markdown_skipped_unindented_somewhat(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This tests that if a Markdown block contains a line that has less of an +# indent than another line. +# +# There is some judgment involved in what the right behavior is here. 
We +# could "normalize" the indentation so that the minimum is the indent of the +# opening fence line. If we did that here, then the code snippet would become +# valid and format as Python. But at time of writing, we don't, which leads to +# inconsistent indentation and thus invalid Python. +def markdown_skipped_unindented_with_inconsistent_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_doctest(): + """ + Do cool stuff. + + ```py + >>> cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_literal(): + """ + Do cool stuff. + + ```py + And do this:: + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_directive(): + """ + Do cool stuff. + + ```py + .. code-block:: python + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass ``` ### Output 6 ``` -indent-style = space -line-width = 88 -indent-width = 2 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Enabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 2 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Enabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -2134,6 +8287,27 @@ def doctest_last_line_continued(): pass +# Test that a doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line(): + """ + Do cool stuff. + + >>> cool_stuff(x)""" + pass + + +# Test that a continued doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line_continued(): + """ + Do cool stuff. + + >>> cool_stuff(x) + ... more(y)""" + pass + + # Test that a doctest is correctly identified and formatted with a blank # continuation line. def doctest_blank_continued(): @@ -2283,7 +8457,9 @@ def doctest_long_lines(): This won't get wrapped even though it exceeds our configured line width because it doesn't exceed the line width within this docstring. e.g, the `f` in `foo` is treated as the first column. - >>> foo, bar, quux = this_is_a_long_line(lion, giraffe, hippo, zeba, lemur, penguin, monkey) + >>> foo, bar, quux = this_is_a_long_line( + ... lion, giraffe, hippo, zeba, lemur, penguin, monkey + ... ) But this one is long enough to get wrapped. >>> foo, bar, quux = this_is_a_long_line( @@ -2393,19 +8569,1019 @@ def doctest_invalid_skipped_with_triple_double_in_single_quote_string(): >>> x = '\"\"\"' """ pass + + +############################################################################### +# reStructuredText CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# reStructuredText formatted code blocks. +# +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#literal-blocks +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-code-block +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#literal-blocks +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-30 +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-38 +############################################################################### + + +def rst_literal_simple(): + """ + Do cool stuff:: + + cool_stuff(1) + + Done. + """ + pass + + +def rst_literal_simple_continued(): + """ + Do cool stuff:: + + def cool_stuff(x): + print(f"hi {x}") + + Done. 
+ """ + pass + + +# Tests that we can end the literal block on the second +# to last line of the docstring. +def rst_literal_second_to_last(): + """ + Do cool stuff:: + + cool_stuff(1) + """ + pass + + +# Tests that we can end the literal block on the actual +# last line of the docstring. +def rst_literal_actually_last(): + """ + Do cool stuff:: + + cool_stuff(1)""" + pass + + +def rst_literal_with_blank_lines(): + """ + Do cool stuff:: + + def cool_stuff(x): + print(f"hi {x}") + + + def other_stuff(y): + print(y) + + Done. + """ + pass + + +# Extra blanks should be preserved. +def rst_literal_extra_blanks(): + """ + Do cool stuff:: + + + + cool_stuff(1) + + + + Done. + """ + pass + + +# If a literal block is never properly ended (via a non-empty unindented line), +# then the end of the block should be the last non-empty line. And subsequent +# empty lines should be preserved as-is. +def rst_literal_extra_blanks_at_end(): + """ + Do cool stuff:: + + + cool_stuff(1) + + + + """ + pass + + +# A literal block can contain many empty lines and it should not end the block +# if it continues. +def rst_literal_extra_blanks_in_snippet(): + """ + Do cool stuff:: + + cool_stuff(1) + + + cool_stuff(2) + + Done. + """ + pass + + +# This tests that a unindented line appearing after an indented line (but where +# the indent is still beyond the minimum) gets formatted properly. +def rst_literal_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( + ''' + hiya''' + ) + + Done. + """ + pass + + +# This checks that if the first line in a code snippet has been indented with +# tabs, then so long as its "indentation length" is considered bigger than the +# line with `::`, it is reformatted as code. +# +# (If your tabwidth is set to 4, then it looks like the code snippet +# isn't indented at all, which is perhaps counter-intuitive. Indeed, reST +# itself also seems to recognize this as a code block, although it appears +# under-specified.) +def rst_literal_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff:: + + cool_stuff(1) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Another test with tabs, except in this case, if your tabwidth is less than +# 8, than the code snippet actually looks like its indent is *less* than the +# opening line with a `::`. One might presume this means that the code snippet +# is not treated as a literal block and thus not reformatted, but since we +# assume all tabs have tabwidth=8 when computing indentation length, the code +# snippet is actually seen as being more indented than the opening `::` line. +# As with the above example, reST seems to behave the same way here. +def rst_literal_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff:: + + cool_stuff(1) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Tests that if two lines in a literal block are indented to the same level +# but by different means (tabs versus spaces), then we correctly recognize the +# block and format it. +def rst_literal_first_line_tab_second_line_spaces(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. 
+ """ + pass + + +# Tests that when two lines in a code snippet have weird and inconsistent +# indentation, the code still gets formatted so long as the indent is greater +# than the indent of the `::` line. +# +# In this case, the minimum indent is 5 spaces (from the second line) where as +# the first line has an indent of 8 spaces via a tab (by assuming tabwidth=8). +# The minimum indent is stripped from each code line. Since tabs aren't +# divisible, the entire tab is stripped, which means the first and second lines +# wind up with the same level of indentation. +# +# An alternative behavior here would be that the tab is replaced with 3 spaces +# instead of being stripped entirely. The code snippet itself would then have +# inconsistent indentation to the point of being invalid Python, and thus code +# formatting would be skipped. +# +# I decided on the former behavior because it seems a bit easier to implement, +# but we might want to switch to the alternative if cases like this show up in +# the real world. ---AG +def rst_literal_odd_indentation(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Tests that having a line with a lone `::` works as an introduction of a +# literal block. +def rst_literal_lone_colon(): + """ + Do cool stuff. + + :: + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_simple(): + """ + .. code-block:: python + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_case_insensitive(): + """ + .. cOdE-bLoCk:: python + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_sourcecode(): + """ + .. sourcecode:: python + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_options(): + """ + .. code-block:: python + :linenos: + :emphasize-lines: 2,3 + :name: blah blah + + cool_stuff(1) + cool_stuff(2) + cool_stuff(3) + cool_stuff(4) + + Done. + """ + pass + + +# In this case, since `pycon` isn't recognized as a Python code snippet, the +# docstring reformatter ignores it. But it then picks up the doctest and +# reformats it. +def rst_directive_doctest(): + """ + .. code-block:: pycon + + >>> cool_stuff(1) + + Done. + """ + pass + + +# This checks that if the first non-empty line after the start of a literal +# block is not indented more than the line containing the `::`, then it is not +# treated as a code snippet. +def rst_literal_skipped_first_line_not_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but inserts an indented line after the un-indented one. +# This should not cause the literal block to be resumed. +def rst_literal_skipped_first_line_not_indented_then_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# This also checks that a code snippet is not reformatted when the indentation +# of the first line is not more than the line with `::`, but this uses tabs to +# make it a little more confounding. It relies on the fact that indentation +# length is computed by assuming a tabwidth equal to 8. reST also rejects this +# and doesn't treat it as a literal block. +def rst_literal_skipped_first_line_not_indented_tab(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the previous test, but adds a second line. +def rst_literal_skipped_first_line_not_indented_tab_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. 
+ """ + pass + + +# Tests that a code block with a second line that is not properly indented gets +# skipped. A valid code block needs to have an empty line separating these. +# +# One trick here is that we need to make sure the Python code in the snippet is +# valid, otherwise it would be skipped because of invalid Python. +def rst_literal_skipped_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( ''' + hiya''' ) + + Done. + """ + pass + + +# In this test, we write what looks like a code-block, but it should be treated +# as invalid due to the missing `language` argument. +# +# It does still look like it could be a literal block according to the literal +# rules, but we currently consider the `.. ` prefix to indicate that it is not +# a literal block. +def rst_literal_skipped_not_directive(): + """ + .. code-block:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# In this test, we start a line with `.. `, which makes it look like it might +# be a directive. But instead continue it as if it was just some periods from +# the previous line, and then try to end it by starting a literal block. +# +# But because of the `.. ` in the beginning, we wind up not treating this as a +# code snippet. The reST render I was using to test things does actually treat +# this as a code block, so we may be out of conformance here. +def rst_literal_skipped_possible_false_negative(): + """ + This is a test. + .. This is a test:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# This tests that a doctest inside of a reST literal block doesn't get +# reformatted. It's plausible this isn't the right behavior, but it also seems +# like it might be the right behavior since it is a literal block. (The doctest +# makes the Python code invalid.) +def rst_literal_skipped_doctest(): + """ + Do cool stuff:: + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_literal_skipped_markdown(): + """ + Do cool stuff:: + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def rst_directive_skipped_not_indented(): + """ + .. code-block:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_skipped_wrong_language(): + """ + .. code-block:: rust + + cool_stuff( 1 ) + + Done. + """ + pass + + +# This gets skipped for the same reason that the doctest in a literal block +# gets skipped. +def rst_directive_skipped_doctest(): + """ + .. code-block:: python + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +############################################################################### +# Markdown CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# Markdown fenced code blocks. +# +# See: https://spec.commonmark.org/0.30/#fenced-code-blocks +############################################################################### + + +def markdown_simple(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +def markdown_simple_continued(): + """ + Do cool stuff. + + ```python + def cool_stuff(x): + print(f"hi {x}") + ``` + + Done. + """ + pass + + +# Tests that unlabeled Markdown fenced code blocks are assumed to be Python. +def markdown_unlabeled(): + """ + Do cool stuff. + + ``` + cool_stuff(1) + ``` + + Done. + """ + pass + + +# Tests that fenced code blocks using tildes work. +def markdown_tildes(): + """ + Do cool stuff. + + ~~~py + cool_stuff(1) + ~~~ + + Done. + """ + pass + + +# Tests that a longer closing fence is just fine and dandy. +def markdown_longer_closing_fence(): + """ + Do cool stuff. 
+ + ```py + cool_stuff(1) + `````` + + Done. + """ + pass + + +# Tests that an invalid closing fence is treated as invalid. +# +# We embed it into a docstring so that the surrounding Python +# remains valid. +def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ''' + ```invalid + ''' + cool_stuff(2) + ``` + + Done. + """ + pass + + +# Tests that one can nest fenced code blocks by using different numbers of +# backticks. +def markdown_nested_fences(): + """ + Do cool stuff. + + `````` + do_something( + ''' + ``` + did i trick you? + ``` + ''' + ) + `````` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring. When it's only empty lines, those are passed into the formatter +# and thus stripped. +def markdown_unclosed_empty_lines(): + """ + Do cool stuff. + + ```py + cool_stuff(1)""" + pass + + +# Tests that we can end the block on the second to last line of the +# docstring. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + """ + pass + + +# Tests that an unclosed block with one extra line at the end is treated +# correctly. As per the CommonMark spec, an unclosed fenced code block contains +# everything following the opening fences. Since formatting the code snippet +# trims lines, the last empty line is removed here. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1)""" + pass + + +# Tests that we can end the block on the actual last line of the docstring. +def markdown_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ```""" + pass + + +# Tests that an unclosed block that ends on the last line of a docstring +# is handled correctly. +def markdown_unclosed_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1)""" + pass + + +def markdown_with_blank_lines(): + """ + Do cool stuff. + + ```py + def cool_stuff(x): + print(f"hi {x}") + + + def other_stuff(y): + print(y) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_first_line_tab_second_line_spaces(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_odd_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +# Extra blanks should be *not* be preserved (unlike reST) because they are part +# of the code snippet (per CommonMark spec), and thus get trimmed as part of +# code formatting. +def markdown_extra_blanks(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +# A block can contain many empty lines within it. +def markdown_extra_blanks_in_snippet(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + + + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_weird_closing(): + """ + Code block with weirdly placed closing fences. 
+ + ```python + cool_stuff(1) + ``` + # The above fences look like it shouldn't close the block, but we + # allow it to. The fences below re-open a block (until the end of + # the docstring), but it's invalid Python and thus doesn't get + # reformatted. + a = 10 + ``` + + Now the code block is closed + """ + pass + + +def markdown_over_indented(): + """ + A docstring + over intended + ```python + print(5) + ``` + """ + pass + + +# This tests that we can have additional text after the language specifier. +def markdown_additional_info_string(): + """ + Do cool stuff. + + ```python tab="plugin.py" + cool_stuff(1) + ``` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring, even if it isn't valid Python. Since it isn't valid Python, +# reformatting fails and the entire thing is skipped. +def markdown_skipped_unclosed_non_python(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + + I forgot to close the code block, and this is definitely not + Python. So nothing here gets formatted. + """ + pass + + +# This has a Python snippet with a docstring that contains a closing fence. +# This splits the embedded docstring and makes the overall snippet invalid. +def markdown_skipped_accidental_closure(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ''' + ``` + ''' + ``` + + Done. + """ + pass + + +# When a line is unindented all the way out before the standard indent of the +# docstring, the code reformatting ends up interacting poorly with the standard +# docstring whitespace normalization logic. This is probably a bug, and we +# should probably treat the Markdown block as valid, but for now, we detect +# the unindented line and declare the block as invalid and thus do no code +# reformatting. +# +# FIXME: Fixing this (if we think it's a bug) probably requires refactoring the +# docstring whitespace normalization to be aware of code snippets. Or perhaps +# plausibly, to do normalization *after* code snippets have been formatted. +def markdown_skipped_unindented_completely(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This test is fallout from treating fenced code blocks with unindented lines +# as invalid. We probably should treat this as a valid block. Indeed, if we +# remove the logic that makes the `markdown_skipped_unindented_completely` test +# pass, then this code snippet will get reformatted correctly. +def markdown_skipped_unindented_somewhat(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This tests that if a Markdown block contains a line that has less of an +# indent than another line. +# +# There is some judgment involved in what the right behavior is here. We +# could "normalize" the indentation so that the minimum is the indent of the +# opening fence line. If we did that here, then the code snippet would become +# valid and format as Python. But at time of writing, we don't, which leads to +# inconsistent indentation and thus invalid Python. +def markdown_skipped_unindented_with_inconsistent_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_doctest(): + """ + Do cool stuff. + + ```py + >>> cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_literal(): + """ + Do cool stuff. + + ```py + And do this:: + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_directive(): + """ + Do cool stuff. 
+ + ```py + .. code-block:: python + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass ``` ### Output 7 ``` -indent-style = tab -line-width = 88 -indent-width = 8 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Enabled -preview = Disabled +indent-style = tab +line-width = 88 +indent-width = 8 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Enabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -2436,7 +9612,7 @@ def doctest_simple_continued(): Do cool stuff. >>> def cool_stuff(x): - ... print(f"hi {x}") + ... print(f"hi {x}") hi 2 """ pass @@ -2473,11 +9649,32 @@ def doctest_last_line_continued(): Do cool stuff. >>> def cool_stuff(x): - ... print(f"hi {x}") + ... print(f"hi {x}") """ pass +# Test that a doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line(): + """ + Do cool stuff. + + >>> cool_stuff(x)""" + pass + + +# Test that a continued doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line_continued(): + """ + Do cool stuff. + + >>> cool_stuff(x) + ... more(y)""" + pass + + # Test that a doctest is correctly identified and formatted with a blank # continuation line. def doctest_blank_continued(): @@ -2485,9 +9682,9 @@ def doctest_blank_continued(): Do cool stuff. >>> def cool_stuff(x): - ... print(x) + ... print(x) ... - ... print(x) + ... print(x) """ pass @@ -2500,8 +9697,8 @@ def doctest_blank_end(): Do cool stuff. >>> def cool_stuff(x): - ... print(x) - ... print(x) + ... print(x) + ... print(x) """ pass @@ -2513,8 +9710,8 @@ def doctest_blank_end_then_some_text(): Do cool stuff. >>> def cool_stuff(x): - ... print(x) - ... print(x) + ... print(x) + ... print(x) And say something else. """ @@ -2586,11 +9783,11 @@ def doctest_nested_doctest_not_formatted(): Do cool stuff. >>> def nested(x): - ... """ - ... Do nested cool stuff. - ... >>> func_call( 5 ) - ... """ - ... pass + ... """ + ... Do nested cool stuff. + ... >>> func_call( 5 ) + ... """ + ... pass ''' pass @@ -2627,11 +9824,22 @@ def doctest_long_lines(): This won't get wrapped even though it exceeds our configured line width because it doesn't exceed the line width within this docstring. e.g, the `f` in `foo` is treated as the first column. - >>> foo, bar, quux = this_is_a_long_line(lion, giraffe, hippo, zeba, lemur, penguin, monkey) + >>> foo, bar, quux = this_is_a_long_line( + ... lion, giraffe, hippo, zeba, lemur, penguin, monkey + ... ) But this one is long enough to get wrapped. >>> foo, bar, quux = this_is_a_long_line( - ... lion, giraffe, hippo, zeba, lemur, penguin, monkey, spider, bear, leopard + ... lion, + ... giraffe, + ... hippo, + ... zeba, + ... lemur, + ... penguin, + ... monkey, + ... spider, + ... bear, + ... leopard, ... ) """ # This demostrates a normal line that will get wrapped but won't @@ -2687,7 +9895,7 @@ def doctest_skipped_partial_inconsistent_indent(): Do cool stuff. >>> def cool_stuff(x): - ... print(x) + ... print(x) ... print( f"hi {x}" ); hi 2 """ @@ -2737,19 +9945,1019 @@ def doctest_invalid_skipped_with_triple_double_in_single_quote_string(): >>> x = '\"\"\"' """ pass + + +############################################################################### +# reStructuredText CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# reStructuredText formatted code blocks. 
+# +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#literal-blocks +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-code-block +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#literal-blocks +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-30 +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-38 +############################################################################### + + +def rst_literal_simple(): + """ + Do cool stuff:: + + cool_stuff(1) + + Done. + """ + pass + + +def rst_literal_simple_continued(): + """ + Do cool stuff:: + + def cool_stuff(x): + print(f"hi {x}") + + Done. + """ + pass + + +# Tests that we can end the literal block on the second +# to last line of the docstring. +def rst_literal_second_to_last(): + """ + Do cool stuff:: + + cool_stuff(1) + """ + pass + + +# Tests that we can end the literal block on the actual +# last line of the docstring. +def rst_literal_actually_last(): + """ + Do cool stuff:: + + cool_stuff(1)""" + pass + + +def rst_literal_with_blank_lines(): + """ + Do cool stuff:: + + def cool_stuff(x): + print(f"hi {x}") + + + def other_stuff(y): + print(y) + + Done. + """ + pass + + +# Extra blanks should be preserved. +def rst_literal_extra_blanks(): + """ + Do cool stuff:: + + + + cool_stuff(1) + + + + Done. + """ + pass + + +# If a literal block is never properly ended (via a non-empty unindented line), +# then the end of the block should be the last non-empty line. And subsequent +# empty lines should be preserved as-is. +def rst_literal_extra_blanks_at_end(): + """ + Do cool stuff:: + + + cool_stuff(1) + + + + """ + pass + + +# A literal block can contain many empty lines and it should not end the block +# if it continues. +def rst_literal_extra_blanks_in_snippet(): + """ + Do cool stuff:: + + cool_stuff(1) + + + cool_stuff(2) + + Done. + """ + pass + + +# This tests that a unindented line appearing after an indented line (but where +# the indent is still beyond the minimum) gets formatted properly. +def rst_literal_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( + ''' + hiya''' + ) + + Done. + """ + pass + + +# This checks that if the first line in a code snippet has been indented with +# tabs, then so long as its "indentation length" is considered bigger than the +# line with `::`, it is reformatted as code. +# +# (If your tabwidth is set to 4, then it looks like the code snippet +# isn't indented at all, which is perhaps counter-intuitive. Indeed, reST +# itself also seems to recognize this as a code block, although it appears +# under-specified.) +def rst_literal_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff:: + + cool_stuff(1) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Another test with tabs, except in this case, if your tabwidth is less than +# 8, than the code snippet actually looks like its indent is *less* than the +# opening line with a `::`. One might presume this means that the code snippet +# is not treated as a literal block and thus not reformatted, but since we +# assume all tabs have tabwidth=8 when computing indentation length, the code +# snippet is actually seen as being more indented than the opening `::` line. 
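# Editorial sketch (hypothetical helper, not part of the fixtures above): the
# comment here relies on an "indentation length" in which every tab counts as
# 8 columns when a snippet line is compared against the line introducing `::`.
def indent_width(line: str, tab_width: int = 8) -> int:
    width = 0
    for ch in line:
        if ch == "\t":
            width += tab_width
        elif ch == " ":
            width += 1
        else:
            break
    return width


# A single leading tab measures wider than the 4-space indent of the `::` line,
# so the tab-indented snippet line is still treated as part of the literal block.
assert indent_width("\tcool_stuff(1)") == 8
assert indent_width("    Do cool stuff::") == 4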
+# As with the above example, reST seems to behave the same way here. +def rst_literal_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff:: + + cool_stuff(1) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Tests that if two lines in a literal block are indented to the same level +# but by different means (tabs versus spaces), then we correctly recognize the +# block and format it. +def rst_literal_first_line_tab_second_line_spaces(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Tests that when two lines in a code snippet have weird and inconsistent +# indentation, the code still gets formatted so long as the indent is greater +# than the indent of the `::` line. +# +# In this case, the minimum indent is 5 spaces (from the second line) where as +# the first line has an indent of 8 spaces via a tab (by assuming tabwidth=8). +# The minimum indent is stripped from each code line. Since tabs aren't +# divisible, the entire tab is stripped, which means the first and second lines +# wind up with the same level of indentation. +# +# An alternative behavior here would be that the tab is replaced with 3 spaces +# instead of being stripped entirely. The code snippet itself would then have +# inconsistent indentation to the point of being invalid Python, and thus code +# formatting would be skipped. +# +# I decided on the former behavior because it seems a bit easier to implement, +# but we might want to switch to the alternative if cases like this show up in +# the real world. ---AG +def rst_literal_odd_indentation(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Tests that having a line with a lone `::` works as an introduction of a +# literal block. +def rst_literal_lone_colon(): + """ + Do cool stuff. + + :: + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_simple(): + """ + .. code-block:: python + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_case_insensitive(): + """ + .. cOdE-bLoCk:: python + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_sourcecode(): + """ + .. sourcecode:: python + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_options(): + """ + .. code-block:: python + :linenos: + :emphasize-lines: 2,3 + :name: blah blah + + cool_stuff(1) + cool_stuff(2) + cool_stuff(3) + cool_stuff(4) + + Done. + """ + pass + + +# In this case, since `pycon` isn't recognized as a Python code snippet, the +# docstring reformatter ignores it. But it then picks up the doctest and +# reformats it. +def rst_directive_doctest(): + """ + .. code-block:: pycon + + >>> cool_stuff(1) + + Done. + """ + pass + + +# This checks that if the first non-empty line after the start of a literal +# block is not indented more than the line containing the `::`, then it is not +# treated as a code snippet. +def rst_literal_skipped_first_line_not_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but inserts an indented line after the un-indented one. +# This should not cause the literal block to be resumed. +def rst_literal_skipped_first_line_not_indented_then_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. 
+ """ + pass + + +# This also checks that a code snippet is not reformatted when the indentation +# of the first line is not more than the line with `::`, but this uses tabs to +# make it a little more confounding. It relies on the fact that indentation +# length is computed by assuming a tabwidth equal to 8. reST also rejects this +# and doesn't treat it as a literal block. +def rst_literal_skipped_first_line_not_indented_tab(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the previous test, but adds a second line. +def rst_literal_skipped_first_line_not_indented_tab_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that a code block with a second line that is not properly indented gets +# skipped. A valid code block needs to have an empty line separating these. +# +# One trick here is that we need to make sure the Python code in the snippet is +# valid, otherwise it would be skipped because of invalid Python. +def rst_literal_skipped_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( ''' + hiya''' ) + + Done. + """ + pass + + +# In this test, we write what looks like a code-block, but it should be treated +# as invalid due to the missing `language` argument. +# +# It does still look like it could be a literal block according to the literal +# rules, but we currently consider the `.. ` prefix to indicate that it is not +# a literal block. +def rst_literal_skipped_not_directive(): + """ + .. code-block:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# In this test, we start a line with `.. `, which makes it look like it might +# be a directive. But instead continue it as if it was just some periods from +# the previous line, and then try to end it by starting a literal block. +# +# But because of the `.. ` in the beginning, we wind up not treating this as a +# code snippet. The reST render I was using to test things does actually treat +# this as a code block, so we may be out of conformance here. +def rst_literal_skipped_possible_false_negative(): + """ + This is a test. + .. This is a test:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# This tests that a doctest inside of a reST literal block doesn't get +# reformatted. It's plausible this isn't the right behavior, but it also seems +# like it might be the right behavior since it is a literal block. (The doctest +# makes the Python code invalid.) +def rst_literal_skipped_doctest(): + """ + Do cool stuff:: + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_literal_skipped_markdown(): + """ + Do cool stuff:: + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def rst_directive_skipped_not_indented(): + """ + .. code-block:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_skipped_wrong_language(): + """ + .. code-block:: rust + + cool_stuff( 1 ) + + Done. + """ + pass + + +# This gets skipped for the same reason that the doctest in a literal block +# gets skipped. +def rst_directive_skipped_doctest(): + """ + .. code-block:: python + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +############################################################################### +# Markdown CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# Markdown fenced code blocks. 
+# +# See: https://spec.commonmark.org/0.30/#fenced-code-blocks +############################################################################### + + +def markdown_simple(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +def markdown_simple_continued(): + """ + Do cool stuff. + + ```python + def cool_stuff(x): + print(f"hi {x}") + ``` + + Done. + """ + pass + + +# Tests that unlabeled Markdown fenced code blocks are assumed to be Python. +def markdown_unlabeled(): + """ + Do cool stuff. + + ``` + cool_stuff(1) + ``` + + Done. + """ + pass + + +# Tests that fenced code blocks using tildes work. +def markdown_tildes(): + """ + Do cool stuff. + + ~~~py + cool_stuff(1) + ~~~ + + Done. + """ + pass + + +# Tests that a longer closing fence is just fine and dandy. +def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + `````` + + Done. + """ + pass + + +# Tests that an invalid closing fence is treated as invalid. +# +# We embed it into a docstring so that the surrounding Python +# remains valid. +def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ''' + ```invalid + ''' + cool_stuff(2) + ``` + + Done. + """ + pass + + +# Tests that one can nest fenced code blocks by using different numbers of +# backticks. +def markdown_nested_fences(): + """ + Do cool stuff. + + `````` + do_something( + ''' + ``` + did i trick you? + ``` + ''' + ) + `````` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring. When it's only empty lines, those are passed into the formatter +# and thus stripped. +def markdown_unclosed_empty_lines(): + """ + Do cool stuff. + + ```py + cool_stuff(1)""" + pass + + +# Tests that we can end the block on the second to last line of the +# docstring. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + """ + pass + + +# Tests that an unclosed block with one extra line at the end is treated +# correctly. As per the CommonMark spec, an unclosed fenced code block contains +# everything following the opening fences. Since formatting the code snippet +# trims lines, the last empty line is removed here. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1)""" + pass + + +# Tests that we can end the block on the actual last line of the docstring. +def markdown_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ```""" + pass + + +# Tests that an unclosed block that ends on the last line of a docstring +# is handled correctly. +def markdown_unclosed_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1)""" + pass + + +def markdown_with_blank_lines(): + """ + Do cool stuff. + + ```py + def cool_stuff(x): + print(f"hi {x}") + + + def other_stuff(y): + print(y) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_first_line_tab_second_line_spaces(): + """ + Do cool stuff. 
+ + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_odd_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +# Extra blanks should be *not* be preserved (unlike reST) because they are part +# of the code snippet (per CommonMark spec), and thus get trimmed as part of +# code formatting. +def markdown_extra_blanks(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +# A block can contain many empty lines within it. +def markdown_extra_blanks_in_snippet(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + + + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_weird_closing(): + """ + Code block with weirdly placed closing fences. + + ```python + cool_stuff(1) + ``` + # The above fences look like it shouldn't close the block, but we + # allow it to. The fences below re-open a block (until the end of + # the docstring), but it's invalid Python and thus doesn't get + # reformatted. + a = 10 + ``` + + Now the code block is closed + """ + pass + + +def markdown_over_indented(): + """ + A docstring + over intended + ```python + print(5) + ``` + """ + pass + + +# This tests that we can have additional text after the language specifier. +def markdown_additional_info_string(): + """ + Do cool stuff. + + ```python tab="plugin.py" + cool_stuff(1) + ``` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring, even if it isn't valid Python. Since it isn't valid Python, +# reformatting fails and the entire thing is skipped. +def markdown_skipped_unclosed_non_python(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + + I forgot to close the code block, and this is definitely not + Python. So nothing here gets formatted. + """ + pass + + +# This has a Python snippet with a docstring that contains a closing fence. +# This splits the embedded docstring and makes the overall snippet invalid. +def markdown_skipped_accidental_closure(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ''' + ``` + ''' + ``` + + Done. + """ + pass + + +# When a line is unindented all the way out before the standard indent of the +# docstring, the code reformatting ends up interacting poorly with the standard +# docstring whitespace normalization logic. This is probably a bug, and we +# should probably treat the Markdown block as valid, but for now, we detect +# the unindented line and declare the block as invalid and thus do no code +# reformatting. +# +# FIXME: Fixing this (if we think it's a bug) probably requires refactoring the +# docstring whitespace normalization to be aware of code snippets. Or perhaps +# plausibly, to do normalization *after* code snippets have been formatted. +def markdown_skipped_unindented_completely(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This test is fallout from treating fenced code blocks with unindented lines +# as invalid. We probably should treat this as a valid block. Indeed, if we +# remove the logic that makes the `markdown_skipped_unindented_completely` test +# pass, then this code snippet will get reformatted correctly. +def markdown_skipped_unindented_somewhat(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This tests that if a Markdown block contains a line that has less of an +# indent than another line. +# +# There is some judgment involved in what the right behavior is here. 
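# Editorial sketch (hypothetical helper, not part of the fixtures above): the
# quantity this comment reasons about is the minimum indentation over the
# non-blank lines of the fenced block, which the "normalize" option discussed
# below would adjust to match the indent of the opening fence.
def min_block_indent(lines: list[str]) -> int:
    indents = [len(l) - len(l.lstrip(" ")) for l in lines if l.strip()]
    return min(indents, default=0)


# Two statements indented by different amounts: the block's minimum indent is 3.
assert min_block_indent(["    cool_stuff( 1 )", "   cool_stuff( 2 )"]) == 3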
We +# could "normalize" the indentation so that the minimum is the indent of the +# opening fence line. If we did that here, then the code snippet would become +# valid and format as Python. But at time of writing, we don't, which leads to +# inconsistent indentation and thus invalid Python. +def markdown_skipped_unindented_with_inconsistent_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_doctest(): + """ + Do cool stuff. + + ```py + >>> cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_literal(): + """ + Do cool stuff. + + ```py + And do this:: + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_directive(): + """ + Do cool stuff. + + ```py + .. code-block:: python + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass ``` ### Output 8 ``` -indent-style = tab -line-width = 88 -indent-width = 4 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Enabled -preview = Disabled +indent-style = tab +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Enabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -2780,7 +10988,7 @@ def doctest_simple_continued(): Do cool stuff. >>> def cool_stuff(x): - ... print(f"hi {x}") + ... print(f"hi {x}") hi 2 """ pass @@ -2817,11 +11025,32 @@ def doctest_last_line_continued(): Do cool stuff. >>> def cool_stuff(x): - ... print(f"hi {x}") + ... print(f"hi {x}") """ pass +# Test that a doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line(): + """ + Do cool stuff. + + >>> cool_stuff(x)""" + pass + + +# Test that a continued doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line_continued(): + """ + Do cool stuff. + + >>> cool_stuff(x) + ... more(y)""" + pass + + # Test that a doctest is correctly identified and formatted with a blank # continuation line. def doctest_blank_continued(): @@ -2829,9 +11058,9 @@ def doctest_blank_continued(): Do cool stuff. >>> def cool_stuff(x): - ... print(x) + ... print(x) ... - ... print(x) + ... print(x) """ pass @@ -2844,8 +11073,8 @@ def doctest_blank_end(): Do cool stuff. >>> def cool_stuff(x): - ... print(x) - ... print(x) + ... print(x) + ... print(x) """ pass @@ -2857,8 +11086,8 @@ def doctest_blank_end_then_some_text(): Do cool stuff. >>> def cool_stuff(x): - ... print(x) - ... print(x) + ... print(x) + ... print(x) And say something else. """ @@ -2930,11 +11159,11 @@ def doctest_nested_doctest_not_formatted(): Do cool stuff. >>> def nested(x): - ... """ - ... Do nested cool stuff. - ... >>> func_call( 5 ) - ... """ - ... pass + ... """ + ... Do nested cool stuff. + ... >>> func_call( 5 ) + ... """ + ... pass ''' pass @@ -2971,11 +11200,13 @@ def doctest_long_lines(): This won't get wrapped even though it exceeds our configured line width because it doesn't exceed the line width within this docstring. e.g, the `f` in `foo` is treated as the first column. - >>> foo, bar, quux = this_is_a_long_line(lion, giraffe, hippo, zeba, lemur, penguin, monkey) + >>> foo, bar, quux = this_is_a_long_line( + ... lion, giraffe, hippo, zeba, lemur, penguin, monkey + ... ) But this one is long enough to get wrapped. >>> foo, bar, quux = this_is_a_long_line( - ... lion, giraffe, hippo, zeba, lemur, penguin, monkey, spider, bear, leopard + ... 
lion, giraffe, hippo, zeba, lemur, penguin, monkey, spider, bear, leopard ... ) """ # This demostrates a normal line that will get wrapped but won't @@ -3031,7 +11262,7 @@ def doctest_skipped_partial_inconsistent_indent(): Do cool stuff. >>> def cool_stuff(x): - ... print(x) + ... print(x) ... print( f"hi {x}" ); hi 2 """ @@ -3081,6 +11312,3748 @@ def doctest_invalid_skipped_with_triple_double_in_single_quote_string(): >>> x = '\"\"\"' """ pass + + +############################################################################### +# reStructuredText CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# reStructuredText formatted code blocks. +# +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#literal-blocks +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-code-block +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#literal-blocks +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-30 +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-38 +############################################################################### + + +def rst_literal_simple(): + """ + Do cool stuff:: + + cool_stuff(1) + + Done. + """ + pass + + +def rst_literal_simple_continued(): + """ + Do cool stuff:: + + def cool_stuff(x): + print(f"hi {x}") + + Done. + """ + pass + + +# Tests that we can end the literal block on the second +# to last line of the docstring. +def rst_literal_second_to_last(): + """ + Do cool stuff:: + + cool_stuff(1) + """ + pass + + +# Tests that we can end the literal block on the actual +# last line of the docstring. +def rst_literal_actually_last(): + """ + Do cool stuff:: + + cool_stuff(1)""" + pass + + +def rst_literal_with_blank_lines(): + """ + Do cool stuff:: + + def cool_stuff(x): + print(f"hi {x}") + + + def other_stuff(y): + print(y) + + Done. + """ + pass + + +# Extra blanks should be preserved. +def rst_literal_extra_blanks(): + """ + Do cool stuff:: + + + + cool_stuff(1) + + + + Done. + """ + pass + + +# If a literal block is never properly ended (via a non-empty unindented line), +# then the end of the block should be the last non-empty line. And subsequent +# empty lines should be preserved as-is. +def rst_literal_extra_blanks_at_end(): + """ + Do cool stuff:: + + + cool_stuff(1) + + + + """ + pass + + +# A literal block can contain many empty lines and it should not end the block +# if it continues. +def rst_literal_extra_blanks_in_snippet(): + """ + Do cool stuff:: + + cool_stuff(1) + + + cool_stuff(2) + + Done. + """ + pass + + +# This tests that a unindented line appearing after an indented line (but where +# the indent is still beyond the minimum) gets formatted properly. +def rst_literal_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( + ''' + hiya''' + ) + + Done. + """ + pass + + +# This checks that if the first line in a code snippet has been indented with +# tabs, then so long as its "indentation length" is considered bigger than the +# line with `::`, it is reformatted as code. +# +# (If your tabwidth is set to 4, then it looks like the code snippet +# isn't indented at all, which is perhaps counter-intuitive. Indeed, reST +# itself also seems to recognize this as a code block, although it appears +# under-specified.) +def rst_literal_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff:: + + cool_stuff(1) + + Done. 
+ """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Another test with tabs, except in this case, if your tabwidth is less than +# 8, than the code snippet actually looks like its indent is *less* than the +# opening line with a `::`. One might presume this means that the code snippet +# is not treated as a literal block and thus not reformatted, but since we +# assume all tabs have tabwidth=8 when computing indentation length, the code +# snippet is actually seen as being more indented than the opening `::` line. +# As with the above example, reST seems to behave the same way here. +def rst_literal_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff:: + + cool_stuff(1) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Tests that if two lines in a literal block are indented to the same level +# but by different means (tabs versus spaces), then we correctly recognize the +# block and format it. +def rst_literal_first_line_tab_second_line_spaces(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Tests that when two lines in a code snippet have weird and inconsistent +# indentation, the code still gets formatted so long as the indent is greater +# than the indent of the `::` line. +# +# In this case, the minimum indent is 5 spaces (from the second line) where as +# the first line has an indent of 8 spaces via a tab (by assuming tabwidth=8). +# The minimum indent is stripped from each code line. Since tabs aren't +# divisible, the entire tab is stripped, which means the first and second lines +# wind up with the same level of indentation. +# +# An alternative behavior here would be that the tab is replaced with 3 spaces +# instead of being stripped entirely. The code snippet itself would then have +# inconsistent indentation to the point of being invalid Python, and thus code +# formatting would be skipped. +# +# I decided on the former behavior because it seems a bit easier to implement, +# but we might want to switch to the alternative if cases like this show up in +# the real world. ---AG +def rst_literal_odd_indentation(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Tests that having a line with a lone `::` works as an introduction of a +# literal block. +def rst_literal_lone_colon(): + """ + Do cool stuff. + + :: + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_simple(): + """ + .. code-block:: python + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_case_insensitive(): + """ + .. cOdE-bLoCk:: python + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_sourcecode(): + """ + .. sourcecode:: python + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_options(): + """ + .. code-block:: python + :linenos: + :emphasize-lines: 2,3 + :name: blah blah + + cool_stuff(1) + cool_stuff(2) + cool_stuff(3) + cool_stuff(4) + + Done. + """ + pass + + +# In this case, since `pycon` isn't recognized as a Python code snippet, the +# docstring reformatter ignores it. But it then picks up the doctest and +# reformats it. +def rst_directive_doctest(): + """ + .. code-block:: pycon + + >>> cool_stuff(1) + + Done. 
+ """ + pass + + +# This checks that if the first non-empty line after the start of a literal +# block is not indented more than the line containing the `::`, then it is not +# treated as a code snippet. +def rst_literal_skipped_first_line_not_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but inserts an indented line after the un-indented one. +# This should not cause the literal block to be resumed. +def rst_literal_skipped_first_line_not_indented_then_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# This also checks that a code snippet is not reformatted when the indentation +# of the first line is not more than the line with `::`, but this uses tabs to +# make it a little more confounding. It relies on the fact that indentation +# length is computed by assuming a tabwidth equal to 8. reST also rejects this +# and doesn't treat it as a literal block. +def rst_literal_skipped_first_line_not_indented_tab(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the previous test, but adds a second line. +def rst_literal_skipped_first_line_not_indented_tab_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that a code block with a second line that is not properly indented gets +# skipped. A valid code block needs to have an empty line separating these. +# +# One trick here is that we need to make sure the Python code in the snippet is +# valid, otherwise it would be skipped because of invalid Python. +def rst_literal_skipped_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( ''' + hiya''' ) + + Done. + """ + pass + + +# In this test, we write what looks like a code-block, but it should be treated +# as invalid due to the missing `language` argument. +# +# It does still look like it could be a literal block according to the literal +# rules, but we currently consider the `.. ` prefix to indicate that it is not +# a literal block. +def rst_literal_skipped_not_directive(): + """ + .. code-block:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# In this test, we start a line with `.. `, which makes it look like it might +# be a directive. But instead continue it as if it was just some periods from +# the previous line, and then try to end it by starting a literal block. +# +# But because of the `.. ` in the beginning, we wind up not treating this as a +# code snippet. The reST render I was using to test things does actually treat +# this as a code block, so we may be out of conformance here. +def rst_literal_skipped_possible_false_negative(): + """ + This is a test. + .. This is a test:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# This tests that a doctest inside of a reST literal block doesn't get +# reformatted. It's plausible this isn't the right behavior, but it also seems +# like it might be the right behavior since it is a literal block. (The doctest +# makes the Python code invalid.) +def rst_literal_skipped_doctest(): + """ + Do cool stuff:: + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_literal_skipped_markdown(): + """ + Do cool stuff:: + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def rst_directive_skipped_not_indented(): + """ + .. code-block:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_skipped_wrong_language(): + """ + .. code-block:: rust + + cool_stuff( 1 ) + + Done. 
+ """ + pass + + +# This gets skipped for the same reason that the doctest in a literal block +# gets skipped. +def rst_directive_skipped_doctest(): + """ + .. code-block:: python + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +############################################################################### +# Markdown CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# Markdown fenced code blocks. +# +# See: https://spec.commonmark.org/0.30/#fenced-code-blocks +############################################################################### + + +def markdown_simple(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +def markdown_simple_continued(): + """ + Do cool stuff. + + ```python + def cool_stuff(x): + print(f"hi {x}") + ``` + + Done. + """ + pass + + +# Tests that unlabeled Markdown fenced code blocks are assumed to be Python. +def markdown_unlabeled(): + """ + Do cool stuff. + + ``` + cool_stuff(1) + ``` + + Done. + """ + pass + + +# Tests that fenced code blocks using tildes work. +def markdown_tildes(): + """ + Do cool stuff. + + ~~~py + cool_stuff(1) + ~~~ + + Done. + """ + pass + + +# Tests that a longer closing fence is just fine and dandy. +def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + `````` + + Done. + """ + pass + + +# Tests that an invalid closing fence is treated as invalid. +# +# We embed it into a docstring so that the surrounding Python +# remains valid. +def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ''' + ```invalid + ''' + cool_stuff(2) + ``` + + Done. + """ + pass + + +# Tests that one can nest fenced code blocks by using different numbers of +# backticks. +def markdown_nested_fences(): + """ + Do cool stuff. + + `````` + do_something( + ''' + ``` + did i trick you? + ``` + ''' + ) + `````` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring. When it's only empty lines, those are passed into the formatter +# and thus stripped. +def markdown_unclosed_empty_lines(): + """ + Do cool stuff. + + ```py + cool_stuff(1)""" + pass + + +# Tests that we can end the block on the second to last line of the +# docstring. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + """ + pass + + +# Tests that an unclosed block with one extra line at the end is treated +# correctly. As per the CommonMark spec, an unclosed fenced code block contains +# everything following the opening fences. Since formatting the code snippet +# trims lines, the last empty line is removed here. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1)""" + pass + + +# Tests that we can end the block on the actual last line of the docstring. +def markdown_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ```""" + pass + + +# Tests that an unclosed block that ends on the last line of a docstring +# is handled correctly. +def markdown_unclosed_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1)""" + pass + + +def markdown_with_blank_lines(): + """ + Do cool stuff. + + ```py + def cool_stuff(x): + print(f"hi {x}") + + + def other_stuff(y): + print(y) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff. 
+ + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_first_line_tab_second_line_spaces(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_odd_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +# Extra blanks should be *not* be preserved (unlike reST) because they are part +# of the code snippet (per CommonMark spec), and thus get trimmed as part of +# code formatting. +def markdown_extra_blanks(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +# A block can contain many empty lines within it. +def markdown_extra_blanks_in_snippet(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + + + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_weird_closing(): + """ + Code block with weirdly placed closing fences. + + ```python + cool_stuff(1) + ``` + # The above fences look like it shouldn't close the block, but we + # allow it to. The fences below re-open a block (until the end of + # the docstring), but it's invalid Python and thus doesn't get + # reformatted. + a = 10 + ``` + + Now the code block is closed + """ + pass + + +def markdown_over_indented(): + """ + A docstring + over intended + ```python + print(5) + ``` + """ + pass + + +# This tests that we can have additional text after the language specifier. +def markdown_additional_info_string(): + """ + Do cool stuff. + + ```python tab="plugin.py" + cool_stuff(1) + ``` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring, even if it isn't valid Python. Since it isn't valid Python, +# reformatting fails and the entire thing is skipped. +def markdown_skipped_unclosed_non_python(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + + I forgot to close the code block, and this is definitely not + Python. So nothing here gets formatted. + """ + pass + + +# This has a Python snippet with a docstring that contains a closing fence. +# This splits the embedded docstring and makes the overall snippet invalid. +def markdown_skipped_accidental_closure(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ''' + ``` + ''' + ``` + + Done. + """ + pass + + +# When a line is unindented all the way out before the standard indent of the +# docstring, the code reformatting ends up interacting poorly with the standard +# docstring whitespace normalization logic. This is probably a bug, and we +# should probably treat the Markdown block as valid, but for now, we detect +# the unindented line and declare the block as invalid and thus do no code +# reformatting. +# +# FIXME: Fixing this (if we think it's a bug) probably requires refactoring the +# docstring whitespace normalization to be aware of code snippets. Or perhaps +# plausibly, to do normalization *after* code snippets have been formatted. +def markdown_skipped_unindented_completely(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This test is fallout from treating fenced code blocks with unindented lines +# as invalid. We probably should treat this as a valid block. 
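# Editorial sketch (hypothetical helper, not part of the fixtures above): the
# behavior these "skipped_unindented" fixtures exercise is, roughly, that a
# fenced block containing a non-blank line indented less than the opening fence
# is declared invalid and left unformatted. The exact threshold the formatter
# uses may differ; this only illustrates the shape of the check.
def block_has_unindented_line(lines: list[str], fence_indent: int) -> bool:
    return any(
        line.strip() and (len(line) - len(line.lstrip(" "))) < fence_indent
        for line in lines
    )


# The second line drops below the fence's 4-space indent, so the block is skipped.
assert block_has_unindented_line(["    cool_stuff( 1 )", "cool_stuff( 2 )"], fence_indent=4)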
Indeed, if we +# remove the logic that makes the `markdown_skipped_unindented_completely` test +# pass, then this code snippet will get reformatted correctly. +def markdown_skipped_unindented_somewhat(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This tests that if a Markdown block contains a line that has less of an +# indent than another line. +# +# There is some judgment involved in what the right behavior is here. We +# could "normalize" the indentation so that the minimum is the indent of the +# opening fence line. If we did that here, then the code snippet would become +# valid and format as Python. But at time of writing, we don't, which leads to +# inconsistent indentation and thus invalid Python. +def markdown_skipped_unindented_with_inconsistent_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_doctest(): + """ + Do cool stuff. + + ```py + >>> cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_literal(): + """ + Do cool stuff. + + ```py + And do this:: + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_directive(): + """ + Do cool stuff. + + ```py + .. code-block:: python + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass +``` + + +### Output 9 +``` +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Enabled +docstring-code-line-width = 60 +preview = Disabled +``` + +```python +############################################################################### +# DOCTEST CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# Python's "doctest" format. +# +# See: https://docs.python.org/3/library/doctest.html +############################################################################### + +# The simplest doctest to ensure basic formatting works. +def doctest_simple(): + """ + Do cool stuff. + + >>> cool_stuff(1) + 2 + """ + pass + + +# Another simple test, but one where the Python code +# extends over multiple lines. +def doctest_simple_continued(): + """ + Do cool stuff. + + >>> def cool_stuff(x): + ... print(f"hi {x}") + hi 2 + """ + pass + + +# Test that we support multiple directly adjacent +# doctests. +def doctest_adjacent(): + """ + Do cool stuff. + + >>> cool_stuff(x) + >>> cool_stuff(y) + 2 + """ + pass + + +# Test that a doctest on the last non-whitespace line of a docstring +# reformats correctly. +def doctest_last_line(): + """ + Do cool stuff. + + >>> cool_stuff(x) + """ + pass + + +# Test that a doctest that continues to the last non-whitespace line of +# a docstring reformats correctly. +def doctest_last_line_continued(): + """ + Do cool stuff. + + >>> def cool_stuff(x): + ... print(f"hi {x}") + """ + pass + + +# Test that a doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line(): + """ + Do cool stuff. + + >>> cool_stuff(x)""" + pass + + +# Test that a continued doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line_continued(): + """ + Do cool stuff. + + >>> cool_stuff(x) + ... more(y)""" + pass + + +# Test that a doctest is correctly identified and formatted with a blank +# continuation line. +def doctest_blank_continued(): + """ + Do cool stuff. + + >>> def cool_stuff(x): + ... print(x) + ... + ... 
print(x) + """ + pass + + +# Tests that a blank PS2 line at the end of a doctest can get dropped. +# It is treated as part of the Python snippet which will trim the +# trailing whitespace. +def doctest_blank_end(): + """ + Do cool stuff. + + >>> def cool_stuff(x): + ... print(x) + ... print(x) + """ + pass + + +# Tests that a blank PS2 line at the end of a doctest can get dropped +# even when there is text following it. +def doctest_blank_end_then_some_text(): + """ + Do cool stuff. + + >>> def cool_stuff(x): + ... print(x) + ... print(x) + + And say something else. + """ + pass + + +# Test that a doctest containing a triple quoted string gets formatted +# correctly and doesn't result in invalid syntax. +def doctest_with_triple_single(): + """ + Do cool stuff. + + >>> x = '''tricksy''' + """ + pass + + +# Test that a doctest containing a triple quoted f-string gets +# formatted correctly and doesn't result in invalid syntax. +def doctest_with_triple_single(): + """ + Do cool stuff. + + >>> x = f'''tricksy''' + """ + pass + + +# Another nested multi-line string case, but with triple escaped double +# quotes inside a triple single quoted string. +def doctest_with_triple_escaped_double(): + """ + Do cool stuff. + + >>> x = '''\"\"\"''' + """ + pass + + +# Tests that inverting the triple quoting works as expected. +def doctest_with_triple_inverted(): + ''' + Do cool stuff. + + >>> x = """tricksy""" + ''' + pass + + +# Tests that inverting the triple quoting with an f-string works as +# expected. +def doctest_with_triple_inverted_fstring(): + ''' + Do cool stuff. + + >>> x = f"""tricksy""" + ''' + pass + + +# Tests nested doctests are ignored. That is, we don't format doctests +# recursively. We only recognize "top level" doctests. +# +# This restriction primarily exists to avoid needing to deal with +# nesting quotes. It also seems like a generally sensible restriction, +# although it could be lifted if necessary I believe. +def doctest_nested_doctest_not_formatted(): + ''' + Do cool stuff. + + >>> def nested(x): + ... """ + ... Do nested cool stuff. + ... >>> func_call( 5 ) + ... """ + ... pass + ''' + pass + + +# Tests that the starting column does not matter. +def doctest_varying_start_column(): + """ + Do cool stuff. + + >>> assert "Easy!" + >>> import math + >>> math.floor(1.9) + 1 + """ + pass + + +# Tests that long lines get wrapped... appropriately. +# +# The docstring code formatter uses the same line width settings as for +# formatting other code. This means that a line in the docstring can +# actually extend past the configured line limit. +# +# It's not quite clear whether this is desirable or not. We could in +# theory compute the intendation length of a code snippet and then +# adjust the line-width setting on a recursive call to the formatter. +# But there are assuredly pathological cases to consider. Another path +# would be to expose another formatter option for controlling the +# line-width of code snippets independently. +def doctest_long_lines(): + """ + Do cool stuff. + + This won't get wrapped even though it exceeds our configured + line width because it doesn't exceed the line width within this + docstring. e.g, the `f` in `foo` is treated as the first column. + >>> foo, bar, quux = this_is_a_long_line( + ... lion, giraffe, hippo, zeba, lemur, penguin, monkey + ... ) + + But this one is long enough to get wrapped. + >>> foo, bar, quux = this_is_a_long_line( + ... lion, + ... giraffe, + ... hippo, + ... zeba, + ... lemur, + ... penguin, + ... monkey, + ... spider, + ... 
bear, + ... leopard, + ... ) + """ + # This demostrates a normal line that will get wrapped but won't + # get wrapped in the docstring above because of how the line-width + # setting gets reset at the first column in each code snippet. + foo, bar, quux = this_is_a_long_line( + lion, giraffe, hippo, zeba, lemur, penguin, monkey + ) + + +# Checks that a simple but invalid doctest gets skipped. +def doctest_skipped_simple(): + """ + Do cool stuff. + + >>> cool-stuff( x ): + 2 + """ + pass + + +# Checks that a simple doctest that is continued over multiple lines, +# but is invalid, gets skipped. +def doctest_skipped_simple_continued(): + """ + Do cool stuff. + + >>> def cool-stuff( x ): + ... print( f"hi {x}" ); + 2 + """ + pass + + +# Checks that a doctest with improper indentation gets skipped. +def doctest_skipped_inconsistent_indent(): + """ + Do cool stuff. + + >>> def cool_stuff( x ): + ... print( f"hi {x}" ); + hi 2 + """ + pass + + +# Checks that a doctest with some proper indentation and some improper +# indentation is "partially" formatted. That is, the part that appears +# before the inconsistent indentation is formatted. This requires that +# the part before it is valid Python. +def doctest_skipped_partial_inconsistent_indent(): + """ + Do cool stuff. + + >>> def cool_stuff(x): + ... print(x) + ... print( f"hi {x}" ); + hi 2 + """ + pass + + +# Checks that a doctest with improper triple single quoted string gets +# skipped. That is, the code snippet is itself invalid Python, so it is +# left as is. +def doctest_skipped_triple_incorrect(): + """ + Do cool stuff. + + >>> foo( x ) + ... '''tri'''cksy''' + """ + pass + + +# Tests that a doctest on a single line is skipped. +def doctest_skipped_one_line(): + ">>> foo( x )" + pass + + +# f-strings are not considered docstrings[1], so any doctests +# inside of them should not be formatted. +# +# [1]: https://docs.python.org/3/reference/lexical_analysis.html#formatted-string-literals +def doctest_skipped_fstring(): + f""" + Do cool stuff. + + >>> cool_stuff( 1 ) + 2 + """ + pass + + +# Test that a doctest containing a triple quoted string at least +# does not result in invalid Python code. Ideally this would format +# correctly, but at time of writing it does not. +def doctest_invalid_skipped_with_triple_double_in_single_quote_string(): + """ + Do cool stuff. + + >>> x = '\"\"\"' + """ + pass + + +############################################################################### +# reStructuredText CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# reStructuredText formatted code blocks. +# +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#literal-blocks +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-code-block +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#literal-blocks +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-30 +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-38 +############################################################################### + + +def rst_literal_simple(): + """ + Do cool stuff:: + + cool_stuff(1) + + Done. + """ + pass + + +def rst_literal_simple_continued(): + """ + Do cool stuff:: + + def cool_stuff(x): + print(f"hi {x}") + + Done. + """ + pass + + +# Tests that we can end the literal block on the second +# to last line of the docstring. 
+def rst_literal_second_to_last(): + """ + Do cool stuff:: + + cool_stuff(1) + """ + pass + + +# Tests that we can end the literal block on the actual +# last line of the docstring. +def rst_literal_actually_last(): + """ + Do cool stuff:: + + cool_stuff(1)""" + pass + + +def rst_literal_with_blank_lines(): + """ + Do cool stuff:: + + def cool_stuff(x): + print(f"hi {x}") + + + def other_stuff(y): + print(y) + + Done. + """ + pass + + +# Extra blanks should be preserved. +def rst_literal_extra_blanks(): + """ + Do cool stuff:: + + + + cool_stuff(1) + + + + Done. + """ + pass + + +# If a literal block is never properly ended (via a non-empty unindented line), +# then the end of the block should be the last non-empty line. And subsequent +# empty lines should be preserved as-is. +def rst_literal_extra_blanks_at_end(): + """ + Do cool stuff:: + + + cool_stuff(1) + + + + """ + pass + + +# A literal block can contain many empty lines and it should not end the block +# if it continues. +def rst_literal_extra_blanks_in_snippet(): + """ + Do cool stuff:: + + cool_stuff(1) + + + cool_stuff(2) + + Done. + """ + pass + + +# This tests that a unindented line appearing after an indented line (but where +# the indent is still beyond the minimum) gets formatted properly. +def rst_literal_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( + ''' + hiya''' + ) + + Done. + """ + pass + + +# This checks that if the first line in a code snippet has been indented with +# tabs, then so long as its "indentation length" is considered bigger than the +# line with `::`, it is reformatted as code. +# +# (If your tabwidth is set to 4, then it looks like the code snippet +# isn't indented at all, which is perhaps counter-intuitive. Indeed, reST +# itself also seems to recognize this as a code block, although it appears +# under-specified.) +def rst_literal_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff:: + + cool_stuff(1) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Another test with tabs, except in this case, if your tabwidth is less than +# 8, than the code snippet actually looks like its indent is *less* than the +# opening line with a `::`. One might presume this means that the code snippet +# is not treated as a literal block and thus not reformatted, but since we +# assume all tabs have tabwidth=8 when computing indentation length, the code +# snippet is actually seen as being more indented than the opening `::` line. +# As with the above example, reST seems to behave the same way here. +def rst_literal_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff:: + + cool_stuff(1) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Tests that if two lines in a literal block are indented to the same level +# but by different means (tabs versus spaces), then we correctly recognize the +# block and format it. +def rst_literal_first_line_tab_second_line_spaces(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. 
+ """ + pass + + +# Tests that when two lines in a code snippet have weird and inconsistent +# indentation, the code still gets formatted so long as the indent is greater +# than the indent of the `::` line. +# +# In this case, the minimum indent is 5 spaces (from the second line) where as +# the first line has an indent of 8 spaces via a tab (by assuming tabwidth=8). +# The minimum indent is stripped from each code line. Since tabs aren't +# divisible, the entire tab is stripped, which means the first and second lines +# wind up with the same level of indentation. +# +# An alternative behavior here would be that the tab is replaced with 3 spaces +# instead of being stripped entirely. The code snippet itself would then have +# inconsistent indentation to the point of being invalid Python, and thus code +# formatting would be skipped. +# +# I decided on the former behavior because it seems a bit easier to implement, +# but we might want to switch to the alternative if cases like this show up in +# the real world. ---AG +def rst_literal_odd_indentation(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Tests that having a line with a lone `::` works as an introduction of a +# literal block. +def rst_literal_lone_colon(): + """ + Do cool stuff. + + :: + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_simple(): + """ + .. code-block:: python + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_case_insensitive(): + """ + .. cOdE-bLoCk:: python + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_sourcecode(): + """ + .. sourcecode:: python + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_options(): + """ + .. code-block:: python + :linenos: + :emphasize-lines: 2,3 + :name: blah blah + + cool_stuff(1) + cool_stuff(2) + cool_stuff(3) + cool_stuff(4) + + Done. + """ + pass + + +# In this case, since `pycon` isn't recognized as a Python code snippet, the +# docstring reformatter ignores it. But it then picks up the doctest and +# reformats it. +def rst_directive_doctest(): + """ + .. code-block:: pycon + + >>> cool_stuff(1) + + Done. + """ + pass + + +# This checks that if the first non-empty line after the start of a literal +# block is not indented more than the line containing the `::`, then it is not +# treated as a code snippet. +def rst_literal_skipped_first_line_not_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but inserts an indented line after the un-indented one. +# This should not cause the literal block to be resumed. +def rst_literal_skipped_first_line_not_indented_then_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# This also checks that a code snippet is not reformatted when the indentation +# of the first line is not more than the line with `::`, but this uses tabs to +# make it a little more confounding. It relies on the fact that indentation +# length is computed by assuming a tabwidth equal to 8. reST also rejects this +# and doesn't treat it as a literal block. +def rst_literal_skipped_first_line_not_indented_tab(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the previous test, but adds a second line. +def rst_literal_skipped_first_line_not_indented_tab_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. 
+ """ + pass + + +# Tests that a code block with a second line that is not properly indented gets +# skipped. A valid code block needs to have an empty line separating these. +# +# One trick here is that we need to make sure the Python code in the snippet is +# valid, otherwise it would be skipped because of invalid Python. +def rst_literal_skipped_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( ''' + hiya''' ) + + Done. + """ + pass + + +# In this test, we write what looks like a code-block, but it should be treated +# as invalid due to the missing `language` argument. +# +# It does still look like it could be a literal block according to the literal +# rules, but we currently consider the `.. ` prefix to indicate that it is not +# a literal block. +def rst_literal_skipped_not_directive(): + """ + .. code-block:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# In this test, we start a line with `.. `, which makes it look like it might +# be a directive. But instead continue it as if it was just some periods from +# the previous line, and then try to end it by starting a literal block. +# +# But because of the `.. ` in the beginning, we wind up not treating this as a +# code snippet. The reST render I was using to test things does actually treat +# this as a code block, so we may be out of conformance here. +def rst_literal_skipped_possible_false_negative(): + """ + This is a test. + .. This is a test:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# This tests that a doctest inside of a reST literal block doesn't get +# reformatted. It's plausible this isn't the right behavior, but it also seems +# like it might be the right behavior since it is a literal block. (The doctest +# makes the Python code invalid.) +def rst_literal_skipped_doctest(): + """ + Do cool stuff:: + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_literal_skipped_markdown(): + """ + Do cool stuff:: + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def rst_directive_skipped_not_indented(): + """ + .. code-block:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_skipped_wrong_language(): + """ + .. code-block:: rust + + cool_stuff( 1 ) + + Done. + """ + pass + + +# This gets skipped for the same reason that the doctest in a literal block +# gets skipped. +def rst_directive_skipped_doctest(): + """ + .. code-block:: python + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +############################################################################### +# Markdown CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# Markdown fenced code blocks. +# +# See: https://spec.commonmark.org/0.30/#fenced-code-blocks +############################################################################### + + +def markdown_simple(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +def markdown_simple_continued(): + """ + Do cool stuff. + + ```python + def cool_stuff(x): + print(f"hi {x}") + ``` + + Done. + """ + pass + + +# Tests that unlabeled Markdown fenced code blocks are assumed to be Python. +def markdown_unlabeled(): + """ + Do cool stuff. + + ``` + cool_stuff(1) + ``` + + Done. + """ + pass + + +# Tests that fenced code blocks using tildes work. +def markdown_tildes(): + """ + Do cool stuff. + + ~~~py + cool_stuff(1) + ~~~ + + Done. + """ + pass + + +# Tests that a longer closing fence is just fine and dandy. +def markdown_longer_closing_fence(): + """ + Do cool stuff. 
+ + ```py + cool_stuff(1) + `````` + + Done. + """ + pass + + +# Tests that an invalid closing fence is treated as invalid. +# +# We embed it into a docstring so that the surrounding Python +# remains valid. +def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ''' + ```invalid + ''' + cool_stuff(2) + ``` + + Done. + """ + pass + + +# Tests that one can nest fenced code blocks by using different numbers of +# backticks. +def markdown_nested_fences(): + """ + Do cool stuff. + + `````` + do_something( + ''' + ``` + did i trick you? + ``` + ''' + ) + `````` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring. When it's only empty lines, those are passed into the formatter +# and thus stripped. +def markdown_unclosed_empty_lines(): + """ + Do cool stuff. + + ```py + cool_stuff(1)""" + pass + + +# Tests that we can end the block on the second to last line of the +# docstring. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + """ + pass + + +# Tests that an unclosed block with one extra line at the end is treated +# correctly. As per the CommonMark spec, an unclosed fenced code block contains +# everything following the opening fences. Since formatting the code snippet +# trims lines, the last empty line is removed here. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1)""" + pass + + +# Tests that we can end the block on the actual last line of the docstring. +def markdown_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ```""" + pass + + +# Tests that an unclosed block that ends on the last line of a docstring +# is handled correctly. +def markdown_unclosed_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1)""" + pass + + +def markdown_with_blank_lines(): + """ + Do cool stuff. + + ```py + def cool_stuff(x): + print(f"hi {x}") + + + def other_stuff(y): + print(y) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_first_line_tab_second_line_spaces(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_odd_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +# Extra blanks should be *not* be preserved (unlike reST) because they are part +# of the code snippet (per CommonMark spec), and thus get trimmed as part of +# code formatting. +def markdown_extra_blanks(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +# A block can contain many empty lines within it. +def markdown_extra_blanks_in_snippet(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + + + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_weird_closing(): + """ + Code block with weirdly placed closing fences. 
+ + ```python + cool_stuff(1) + ``` + # The above fences look like it shouldn't close the block, but we + # allow it to. The fences below re-open a block (until the end of + # the docstring), but it's invalid Python and thus doesn't get + # reformatted. + a = 10 + ``` + + Now the code block is closed + """ + pass + + +def markdown_over_indented(): + """ + A docstring + over intended + ```python + print(5) + ``` + """ + pass + + +# This tests that we can have additional text after the language specifier. +def markdown_additional_info_string(): + """ + Do cool stuff. + + ```python tab="plugin.py" + cool_stuff(1) + ``` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring, even if it isn't valid Python. Since it isn't valid Python, +# reformatting fails and the entire thing is skipped. +def markdown_skipped_unclosed_non_python(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + + I forgot to close the code block, and this is definitely not + Python. So nothing here gets formatted. + """ + pass + + +# This has a Python snippet with a docstring that contains a closing fence. +# This splits the embedded docstring and makes the overall snippet invalid. +def markdown_skipped_accidental_closure(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ''' + ``` + ''' + ``` + + Done. + """ + pass + + +# When a line is unindented all the way out before the standard indent of the +# docstring, the code reformatting ends up interacting poorly with the standard +# docstring whitespace normalization logic. This is probably a bug, and we +# should probably treat the Markdown block as valid, but for now, we detect +# the unindented line and declare the block as invalid and thus do no code +# reformatting. +# +# FIXME: Fixing this (if we think it's a bug) probably requires refactoring the +# docstring whitespace normalization to be aware of code snippets. Or perhaps +# plausibly, to do normalization *after* code snippets have been formatted. +def markdown_skipped_unindented_completely(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This test is fallout from treating fenced code blocks with unindented lines +# as invalid. We probably should treat this as a valid block. Indeed, if we +# remove the logic that makes the `markdown_skipped_unindented_completely` test +# pass, then this code snippet will get reformatted correctly. +def markdown_skipped_unindented_somewhat(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This tests that if a Markdown block contains a line that has less of an +# indent than another line. +# +# There is some judgment involved in what the right behavior is here. We +# could "normalize" the indentation so that the minimum is the indent of the +# opening fence line. If we did that here, then the code snippet would become +# valid and format as Python. But at time of writing, we don't, which leads to +# inconsistent indentation and thus invalid Python. +def markdown_skipped_unindented_with_inconsistent_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_doctest(): + """ + Do cool stuff. + + ```py + >>> cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_literal(): + """ + Do cool stuff. + + ```py + And do this:: + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_directive(): + """ + Do cool stuff. 
+ + ```py + .. code-block:: python + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass +``` + + +### Output 10 +``` +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Enabled +docstring-code-line-width = "dynamic" +preview = Disabled +``` + +```python +############################################################################### +# DOCTEST CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# Python's "doctest" format. +# +# See: https://docs.python.org/3/library/doctest.html +############################################################################### + +# The simplest doctest to ensure basic formatting works. +def doctest_simple(): + """ + Do cool stuff. + + >>> cool_stuff(1) + 2 + """ + pass + + +# Another simple test, but one where the Python code +# extends over multiple lines. +def doctest_simple_continued(): + """ + Do cool stuff. + + >>> def cool_stuff(x): + ... print(f"hi {x}") + hi 2 + """ + pass + + +# Test that we support multiple directly adjacent +# doctests. +def doctest_adjacent(): + """ + Do cool stuff. + + >>> cool_stuff(x) + >>> cool_stuff(y) + 2 + """ + pass + + +# Test that a doctest on the last non-whitespace line of a docstring +# reformats correctly. +def doctest_last_line(): + """ + Do cool stuff. + + >>> cool_stuff(x) + """ + pass + + +# Test that a doctest that continues to the last non-whitespace line of +# a docstring reformats correctly. +def doctest_last_line_continued(): + """ + Do cool stuff. + + >>> def cool_stuff(x): + ... print(f"hi {x}") + """ + pass + + +# Test that a doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line(): + """ + Do cool stuff. + + >>> cool_stuff(x)""" + pass + + +# Test that a continued doctest on the real last line of a docstring reformats +# correctly. +def doctest_really_last_line_continued(): + """ + Do cool stuff. + + >>> cool_stuff(x) + ... more(y)""" + pass + + +# Test that a doctest is correctly identified and formatted with a blank +# continuation line. +def doctest_blank_continued(): + """ + Do cool stuff. + + >>> def cool_stuff(x): + ... print(x) + ... + ... print(x) + """ + pass + + +# Tests that a blank PS2 line at the end of a doctest can get dropped. +# It is treated as part of the Python snippet which will trim the +# trailing whitespace. +def doctest_blank_end(): + """ + Do cool stuff. + + >>> def cool_stuff(x): + ... print(x) + ... print(x) + """ + pass + + +# Tests that a blank PS2 line at the end of a doctest can get dropped +# even when there is text following it. +def doctest_blank_end_then_some_text(): + """ + Do cool stuff. + + >>> def cool_stuff(x): + ... print(x) + ... print(x) + + And say something else. + """ + pass + + +# Test that a doctest containing a triple quoted string gets formatted +# correctly and doesn't result in invalid syntax. +def doctest_with_triple_single(): + """ + Do cool stuff. + + >>> x = '''tricksy''' + """ + pass + + +# Test that a doctest containing a triple quoted f-string gets +# formatted correctly and doesn't result in invalid syntax. +def doctest_with_triple_single(): + """ + Do cool stuff. + + >>> x = f'''tricksy''' + """ + pass + + +# Another nested multi-line string case, but with triple escaped double +# quotes inside a triple single quoted string. +def doctest_with_triple_escaped_double(): + """ + Do cool stuff. 
+ + >>> x = '''\"\"\"''' + """ + pass + + +# Tests that inverting the triple quoting works as expected. +def doctest_with_triple_inverted(): + ''' + Do cool stuff. + + >>> x = """tricksy""" + ''' + pass + + +# Tests that inverting the triple quoting with an f-string works as +# expected. +def doctest_with_triple_inverted_fstring(): + ''' + Do cool stuff. + + >>> x = f"""tricksy""" + ''' + pass + + +# Tests nested doctests are ignored. That is, we don't format doctests +# recursively. We only recognize "top level" doctests. +# +# This restriction primarily exists to avoid needing to deal with +# nesting quotes. It also seems like a generally sensible restriction, +# although it could be lifted if necessary I believe. +def doctest_nested_doctest_not_formatted(): + ''' + Do cool stuff. + + >>> def nested(x): + ... """ + ... Do nested cool stuff. + ... >>> func_call( 5 ) + ... """ + ... pass + ''' + pass + + +# Tests that the starting column does not matter. +def doctest_varying_start_column(): + """ + Do cool stuff. + + >>> assert "Easy!" + >>> import math + >>> math.floor(1.9) + 1 + """ + pass + + +# Tests that long lines get wrapped... appropriately. +# +# The docstring code formatter uses the same line width settings as for +# formatting other code. This means that a line in the docstring can +# actually extend past the configured line limit. +# +# It's not quite clear whether this is desirable or not. We could in +# theory compute the intendation length of a code snippet and then +# adjust the line-width setting on a recursive call to the formatter. +# But there are assuredly pathological cases to consider. Another path +# would be to expose another formatter option for controlling the +# line-width of code snippets independently. +def doctest_long_lines(): + """ + Do cool stuff. + + This won't get wrapped even though it exceeds our configured + line width because it doesn't exceed the line width within this + docstring. e.g, the `f` in `foo` is treated as the first column. + >>> foo, bar, quux = this_is_a_long_line( + ... lion, giraffe, hippo, zeba, lemur, penguin, monkey + ... ) + + But this one is long enough to get wrapped. + >>> foo, bar, quux = this_is_a_long_line( + ... lion, giraffe, hippo, zeba, lemur, penguin, monkey, spider, bear, leopard + ... ) + """ + # This demostrates a normal line that will get wrapped but won't + # get wrapped in the docstring above because of how the line-width + # setting gets reset at the first column in each code snippet. + foo, bar, quux = this_is_a_long_line( + lion, giraffe, hippo, zeba, lemur, penguin, monkey + ) + + +# Checks that a simple but invalid doctest gets skipped. +def doctest_skipped_simple(): + """ + Do cool stuff. + + >>> cool-stuff( x ): + 2 + """ + pass + + +# Checks that a simple doctest that is continued over multiple lines, +# but is invalid, gets skipped. +def doctest_skipped_simple_continued(): + """ + Do cool stuff. + + >>> def cool-stuff( x ): + ... print( f"hi {x}" ); + 2 + """ + pass + + +# Checks that a doctest with improper indentation gets skipped. +def doctest_skipped_inconsistent_indent(): + """ + Do cool stuff. + + >>> def cool_stuff( x ): + ... print( f"hi {x}" ); + hi 2 + """ + pass + + +# Checks that a doctest with some proper indentation and some improper +# indentation is "partially" formatted. That is, the part that appears +# before the inconsistent indentation is formatted. This requires that +# the part before it is valid Python. +def doctest_skipped_partial_inconsistent_indent(): + """ + Do cool stuff. 
+ + >>> def cool_stuff(x): + ... print(x) + ... print( f"hi {x}" ); + hi 2 + """ + pass + + +# Checks that a doctest with improper triple single quoted string gets +# skipped. That is, the code snippet is itself invalid Python, so it is +# left as is. +def doctest_skipped_triple_incorrect(): + """ + Do cool stuff. + + >>> foo( x ) + ... '''tri'''cksy''' + """ + pass + + +# Tests that a doctest on a single line is skipped. +def doctest_skipped_one_line(): + ">>> foo( x )" + pass + + +# f-strings are not considered docstrings[1], so any doctests +# inside of them should not be formatted. +# +# [1]: https://docs.python.org/3/reference/lexical_analysis.html#formatted-string-literals +def doctest_skipped_fstring(): + f""" + Do cool stuff. + + >>> cool_stuff( 1 ) + 2 + """ + pass + + +# Test that a doctest containing a triple quoted string at least +# does not result in invalid Python code. Ideally this would format +# correctly, but at time of writing it does not. +def doctest_invalid_skipped_with_triple_double_in_single_quote_string(): + """ + Do cool stuff. + + >>> x = '\"\"\"' + """ + pass + + +############################################################################### +# reStructuredText CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# reStructuredText formatted code blocks. +# +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#literal-blocks +# See: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-code-block +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#literal-blocks +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-30 +# See: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#toc-entry-38 +############################################################################### + + +def rst_literal_simple(): + """ + Do cool stuff:: + + cool_stuff(1) + + Done. + """ + pass + + +def rst_literal_simple_continued(): + """ + Do cool stuff:: + + def cool_stuff(x): + print(f"hi {x}") + + Done. + """ + pass + + +# Tests that we can end the literal block on the second +# to last line of the docstring. +def rst_literal_second_to_last(): + """ + Do cool stuff:: + + cool_stuff(1) + """ + pass + + +# Tests that we can end the literal block on the actual +# last line of the docstring. +def rst_literal_actually_last(): + """ + Do cool stuff:: + + cool_stuff(1)""" + pass + + +def rst_literal_with_blank_lines(): + """ + Do cool stuff:: + + def cool_stuff(x): + print(f"hi {x}") + + + def other_stuff(y): + print(y) + + Done. + """ + pass + + +# Extra blanks should be preserved. +def rst_literal_extra_blanks(): + """ + Do cool stuff:: + + + + cool_stuff(1) + + + + Done. + """ + pass + + +# If a literal block is never properly ended (via a non-empty unindented line), +# then the end of the block should be the last non-empty line. And subsequent +# empty lines should be preserved as-is. +def rst_literal_extra_blanks_at_end(): + """ + Do cool stuff:: + + + cool_stuff(1) + + + + """ + pass + + +# A literal block can contain many empty lines and it should not end the block +# if it continues. +def rst_literal_extra_blanks_in_snippet(): + """ + Do cool stuff:: + + cool_stuff(1) + + + cool_stuff(2) + + Done. + """ + pass + + +# This tests that a unindented line appearing after an indented line (but where +# the indent is still beyond the minimum) gets formatted properly. 
+def rst_literal_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( + ''' + hiya''' + ) + + Done. + """ + pass + + +# This checks that if the first line in a code snippet has been indented with +# tabs, then so long as its "indentation length" is considered bigger than the +# line with `::`, it is reformatted as code. +# +# (If your tabwidth is set to 4, then it looks like the code snippet +# isn't indented at all, which is perhaps counter-intuitive. Indeed, reST +# itself also seems to recognize this as a code block, although it appears +# under-specified.) +def rst_literal_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff:: + + cool_stuff(1) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Another test with tabs, except in this case, if your tabwidth is less than +# 8, than the code snippet actually looks like its indent is *less* than the +# opening line with a `::`. One might presume this means that the code snippet +# is not treated as a literal block and thus not reformatted, but since we +# assume all tabs have tabwidth=8 when computing indentation length, the code +# snippet is actually seen as being more indented than the opening `::` line. +# As with the above example, reST seems to behave the same way here. +def rst_literal_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff:: + + cool_stuff(1) + + Done. + """ + pass + + +# Like the test above, but with multiple lines. +def rst_literal_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Tests that if two lines in a literal block are indented to the same level +# but by different means (tabs versus spaces), then we correctly recognize the +# block and format it. +def rst_literal_first_line_tab_second_line_spaces(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Tests that when two lines in a code snippet have weird and inconsistent +# indentation, the code still gets formatted so long as the indent is greater +# than the indent of the `::` line. +# +# In this case, the minimum indent is 5 spaces (from the second line) where as +# the first line has an indent of 8 spaces via a tab (by assuming tabwidth=8). +# The minimum indent is stripped from each code line. Since tabs aren't +# divisible, the entire tab is stripped, which means the first and second lines +# wind up with the same level of indentation. +# +# An alternative behavior here would be that the tab is replaced with 3 spaces +# instead of being stripped entirely. The code snippet itself would then have +# inconsistent indentation to the point of being invalid Python, and thus code +# formatting would be skipped. +# +# I decided on the former behavior because it seems a bit easier to implement, +# but we might want to switch to the alternative if cases like this show up in +# the real world. ---AG +def rst_literal_odd_indentation(): + """ + Do cool stuff:: + + cool_stuff(1) + cool_stuff(2) + + Done. + """ + pass + + +# Tests that having a line with a lone `::` works as an introduction of a +# literal block. +def rst_literal_lone_colon(): + """ + Do cool stuff. + + :: + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_simple(): + """ + .. code-block:: python + + cool_stuff(1) + + Done. 
+ """ + pass + + +def rst_directive_case_insensitive(): + """ + .. cOdE-bLoCk:: python + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_sourcecode(): + """ + .. sourcecode:: python + + cool_stuff(1) + + Done. + """ + pass + + +def rst_directive_options(): + """ + .. code-block:: python + :linenos: + :emphasize-lines: 2,3 + :name: blah blah + + cool_stuff(1) + cool_stuff(2) + cool_stuff(3) + cool_stuff(4) + + Done. + """ + pass + + +# In this case, since `pycon` isn't recognized as a Python code snippet, the +# docstring reformatter ignores it. But it then picks up the doctest and +# reformats it. +def rst_directive_doctest(): + """ + .. code-block:: pycon + + >>> cool_stuff(1) + + Done. + """ + pass + + +# This checks that if the first non-empty line after the start of a literal +# block is not indented more than the line containing the `::`, then it is not +# treated as a code snippet. +def rst_literal_skipped_first_line_not_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the test above, but inserts an indented line after the un-indented one. +# This should not cause the literal block to be resumed. +def rst_literal_skipped_first_line_not_indented_then_indented(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# This also checks that a code snippet is not reformatted when the indentation +# of the first line is not more than the line with `::`, but this uses tabs to +# make it a little more confounding. It relies on the fact that indentation +# length is computed by assuming a tabwidth equal to 8. reST also rejects this +# and doesn't treat it as a literal block. +def rst_literal_skipped_first_line_not_indented_tab(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# Like the previous test, but adds a second line. +def rst_literal_skipped_first_line_not_indented_tab_multiple(): + """ + Do cool stuff:: + + cool_stuff( 1 ) + cool_stuff( 2 ) + + Done. + """ + pass + + +# Tests that a code block with a second line that is not properly indented gets +# skipped. A valid code block needs to have an empty line separating these. +# +# One trick here is that we need to make sure the Python code in the snippet is +# valid, otherwise it would be skipped because of invalid Python. +def rst_literal_skipped_subsequent_line_not_indented(): + """ + Do cool stuff:: + + if True: + cool_stuff( ''' + hiya''' ) + + Done. + """ + pass + + +# In this test, we write what looks like a code-block, but it should be treated +# as invalid due to the missing `language` argument. +# +# It does still look like it could be a literal block according to the literal +# rules, but we currently consider the `.. ` prefix to indicate that it is not +# a literal block. +def rst_literal_skipped_not_directive(): + """ + .. code-block:: + + cool_stuff( 1 ) + + Done. + """ + pass + + +# In this test, we start a line with `.. `, which makes it look like it might +# be a directive. But instead continue it as if it was just some periods from +# the previous line, and then try to end it by starting a literal block. +# +# But because of the `.. ` in the beginning, we wind up not treating this as a +# code snippet. The reST render I was using to test things does actually treat +# this as a code block, so we may be out of conformance here. +def rst_literal_skipped_possible_false_negative(): + """ + This is a test. + .. This is a test:: + + cool_stuff( 1 ) + + Done. 
+ """ + pass + + +# This tests that a doctest inside of a reST literal block doesn't get +# reformatted. It's plausible this isn't the right behavior, but it also seems +# like it might be the right behavior since it is a literal block. (The doctest +# makes the Python code invalid.) +def rst_literal_skipped_doctest(): + """ + Do cool stuff:: + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_literal_skipped_markdown(): + """ + Do cool stuff:: + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def rst_directive_skipped_not_indented(): + """ + .. code-block:: python + + cool_stuff( 1 ) + + Done. + """ + pass + + +def rst_directive_skipped_wrong_language(): + """ + .. code-block:: rust + + cool_stuff( 1 ) + + Done. + """ + pass + + +# This gets skipped for the same reason that the doctest in a literal block +# gets skipped. +def rst_directive_skipped_doctest(): + """ + .. code-block:: python + + >>> cool_stuff( 1 ) + + Done. + """ + pass + + +############################################################################### +# Markdown CODE EXAMPLES +# +# This section shows examples of docstrings that contain code snippets in +# Markdown fenced code blocks. +# +# See: https://spec.commonmark.org/0.30/#fenced-code-blocks +############################################################################### + + +def markdown_simple(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +def markdown_simple_continued(): + """ + Do cool stuff. + + ```python + def cool_stuff(x): + print(f"hi {x}") + ``` + + Done. + """ + pass + + +# Tests that unlabeled Markdown fenced code blocks are assumed to be Python. +def markdown_unlabeled(): + """ + Do cool stuff. + + ``` + cool_stuff(1) + ``` + + Done. + """ + pass + + +# Tests that fenced code blocks using tildes work. +def markdown_tildes(): + """ + Do cool stuff. + + ~~~py + cool_stuff(1) + ~~~ + + Done. + """ + pass + + +# Tests that a longer closing fence is just fine and dandy. +def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + `````` + + Done. + """ + pass + + +# Tests that an invalid closing fence is treated as invalid. +# +# We embed it into a docstring so that the surrounding Python +# remains valid. +def markdown_longer_closing_fence(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ''' + ```invalid + ''' + cool_stuff(2) + ``` + + Done. + """ + pass + + +# Tests that one can nest fenced code blocks by using different numbers of +# backticks. +def markdown_nested_fences(): + """ + Do cool stuff. + + `````` + do_something( + ''' + ``` + did i trick you? + ``` + ''' + ) + `````` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring. When it's only empty lines, those are passed into the formatter +# and thus stripped. +def markdown_unclosed_empty_lines(): + """ + Do cool stuff. + + ```py + cool_stuff(1)""" + pass + + +# Tests that we can end the block on the second to last line of the +# docstring. +def markdown_second_to_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + """ + pass + + +# Tests that an unclosed block with one extra line at the end is treated +# correctly. As per the CommonMark spec, an unclosed fenced code block contains +# everything following the opening fences. Since formatting the code snippet +# trims lines, the last empty line is removed here. +def markdown_second_to_last(): + """ + Do cool stuff. 
+ + ```py + cool_stuff(1)""" + pass + + +# Tests that we can end the block on the actual last line of the docstring. +def markdown_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ```""" + pass + + +# Tests that an unclosed block that ends on the last line of a docstring +# is handled correctly. +def markdown_unclosed_actually_last(): + """ + Do cool stuff. + + ```py + cool_stuff(1)""" + pass + + +def markdown_with_blank_lines(): + """ + Do cool stuff. + + ```py + def cool_stuff(x): + print(f"hi {x}") + + + def other_stuff(y): + print(y) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_4spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +def markdown_first_line_indent_uses_tabs_8spaces_multiple(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_first_line_tab_second_line_spaces(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_odd_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + cool_stuff(2) + ``` + + Done. + """ + pass + + +# Extra blanks should be *not* be preserved (unlike reST) because they are part +# of the code snippet (per CommonMark spec), and thus get trimmed as part of +# code formatting. +def markdown_extra_blanks(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + ``` + + Done. + """ + pass + + +# A block can contain many empty lines within it. +def markdown_extra_blanks_in_snippet(): + """ + Do cool stuff. + + ```py + cool_stuff(1) + + + cool_stuff(2) + ``` + + Done. + """ + pass + + +def markdown_weird_closing(): + """ + Code block with weirdly placed closing fences. + + ```python + cool_stuff(1) + ``` + # The above fences look like it shouldn't close the block, but we + # allow it to. The fences below re-open a block (until the end of + # the docstring), but it's invalid Python and thus doesn't get + # reformatted. + a = 10 + ``` + + Now the code block is closed + """ + pass + + +def markdown_over_indented(): + """ + A docstring + over intended + ```python + print(5) + ``` + """ + pass + + +# This tests that we can have additional text after the language specifier. +def markdown_additional_info_string(): + """ + Do cool stuff. + + ```python tab="plugin.py" + cool_stuff(1) + ``` + + Done. + """ + pass + + +# Tests that an unclosed block gobbles up everything remaining in the +# docstring, even if it isn't valid Python. Since it isn't valid Python, +# reformatting fails and the entire thing is skipped. +def markdown_skipped_unclosed_non_python(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + + I forgot to close the code block, and this is definitely not + Python. So nothing here gets formatted. + """ + pass + + +# This has a Python snippet with a docstring that contains a closing fence. +# This splits the embedded docstring and makes the overall snippet invalid. +def markdown_skipped_accidental_closure(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ''' + ``` + ''' + ``` + + Done. 
+ """ + pass + + +# When a line is unindented all the way out before the standard indent of the +# docstring, the code reformatting ends up interacting poorly with the standard +# docstring whitespace normalization logic. This is probably a bug, and we +# should probably treat the Markdown block as valid, but for now, we detect +# the unindented line and declare the block as invalid and thus do no code +# reformatting. +# +# FIXME: Fixing this (if we think it's a bug) probably requires refactoring the +# docstring whitespace normalization to be aware of code snippets. Or perhaps +# plausibly, to do normalization *after* code snippets have been formatted. +def markdown_skipped_unindented_completely(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This test is fallout from treating fenced code blocks with unindented lines +# as invalid. We probably should treat this as a valid block. Indeed, if we +# remove the logic that makes the `markdown_skipped_unindented_completely` test +# pass, then this code snippet will get reformatted correctly. +def markdown_skipped_unindented_somewhat(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +# This tests that if a Markdown block contains a line that has less of an +# indent than another line. +# +# There is some judgment involved in what the right behavior is here. We +# could "normalize" the indentation so that the minimum is the indent of the +# opening fence line. If we did that here, then the code snippet would become +# valid and format as Python. But at time of writing, we don't, which leads to +# inconsistent indentation and thus invalid Python. +def markdown_skipped_unindented_with_inconsistent_indentation(): + """ + Do cool stuff. + + ```py + cool_stuff( 1 ) + cool_stuff( 2 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_doctest(): + """ + Do cool stuff. + + ```py + >>> cool_stuff( 1 ) + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_literal(): + """ + Do cool stuff. + + ```py + And do this:: + + cool_stuff( 1 ) + + ``` + + Done. + """ + pass + + +def markdown_skipped_rst_directive(): + """ + Do cool stuff. + + ```py + .. code-block:: python + + cool_stuff( 1 ) + + ``` + + Done. 
+ """ + pass ``` diff --git a/crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap index 7391fb6938..9f7fd5b9ac 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_crlf.py.snap @@ -18,14 +18,15 @@ def doctest_line_ending(): ## Outputs ### Output 1 ``` -indent-style = space -line-width = 88 -indent-width = 4 -quote-style = Double -line-ending = CarriageReturnLineFeed -magic-trailing-comma = Respect -docstring-code = Enabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = CarriageReturnLineFeed +magic-trailing-comma = Respect +docstring-code = Enabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python diff --git a/crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_dynamic_line_width.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_dynamic_line_width.py.snap new file mode 100644 index 0000000000..1bab5f433b --- /dev/null +++ b/crates/ruff_python_formatter/tests/snapshots/format@docstring_code_examples_dynamic_line_width.py.snap @@ -0,0 +1,1875 @@ +--- +source: crates/ruff_python_formatter/tests/fixtures.rs +input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/docstring_code_examples_dynamic_line_width.py +--- +## Input +```python +def simple(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz(Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5): + def abcdefghijklmnopqrstuvwxyz(self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + self.x = doit( 5 ) + ``` + + Done. + """ + pass + + +# Like simple, but we double everything up to ensure the indent level is +# tracked correctly. +def repeated(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz(Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5): + def abcdefghijklmnopqrstuvwxyz(self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + self.x = doit( 5 ) + + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + self.x = doit( 5 ) + + def abcdefghijklmnopqrstuvwxyz(self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. 
+ print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + self.x = doit( 5 ) + + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + self.x = doit( 5 ) + + + class Abcdefghijklmopqrstuvwxyz(Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5): + def abcdefghijklmnopqrstuvwxyz(self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + self.x = doit( 5 ) + + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + self.x = doit( 5 ) + + def abcdefghijklmnopqrstuvwxyz(self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + self.x = doit( 5 ) + + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + self.x = doit( 5 ) + ``` + + Done. + """ + pass + + +# Like simple, but we make one line exactly one character longer than the limit +# (for 4-space indents) and make sure it gets wrapped. +def barely_exceeds_limit(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz(Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5): + def abcdefghijklmnopqrstuvwxyz(self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is 89 columns, which is one + # more than the limit. Therefore, it should get wrapped for + # indent_width >= 4. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a5678) + return 5 + self.x = doit( 5 ) + ``` + + Done. + """ + pass + + +# This tests that if the code block is unindented, that it gets indented and +# the dynamic line width setting is applied correctly. +def unindented(): + """ + First line. + +```py +class Abcdefghijklmopqrstuvwxyz(Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5): + def abcdefghijklmnopqrstuvwxyz(self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. 
+ print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + self.x = doit( 5 ) +``` + + Done. + """ + pass + + +# Like unindented, but contains a `print` line where it just barely exceeds the +# globally configured line width *after* its indentation has been corrected. +def unindented_barely_exceeds_limit(): + """ + First line. + +```py +class Abcdefghijklmopqrstuvwxyz(Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5): + def abcdefghijklmnopqrstuvwxyz(self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + def abcdefghijklmnopqrstuvwxyz(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4): + # For 4 space indents, this is 89 columns, which is one + # more than the limit. Therefore, it should get wrapped for + # indent_width >= 4. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a5678) + return 5 + self.x = doit( 5 ) +``` + + Done. + """ + pass + + +# See: https://github.com/astral-sh/ruff/issues/9126 +def doctest_extra_indent1(): + """ + Docstring example containing a class. + + Examples + -------- + >>> @pl.api.register_dataframe_namespace("split") + ... class SplitFrame: + ... def __init__(self, df: pl.DataFrame): + ... self._df = df + ... + ... def by_first_letter_of_column_values(self, col: str) -> list[pl.DataFrame]: + ... return [ + ... self._df.filter(pl.col(col).str.starts_with(c)) + ... for c in sorted( + ... set(df.select(pl.col(col).str.slice(0, 1)).to_series()) + ... ) + ... ] + """ + + +# See: https://github.com/astral-sh/ruff/issues/9126 +class DoctestExtraIndent2: + def example2(): + """ + Regular docstring of class method. + + Examples + -------- + >>> df = pl.DataFrame( + ... {"foo": [1, 2, 3], "bar": [6, 7, 8], "ham": ["a", "b", "c"]} + ... ) + """ + + +# See: https://github.com/astral-sh/ruff/issues/9126 +def doctest_extra_indent3(): + """ + Pragma comment. + + Examples + -------- + >>> af1, af2, af3 = pl.align_frames( + ... df1, df2, df3, on="dt" + ... ) # doctest: +IGNORE_RESULT + """ +``` + +## Outputs +### Output 1 +``` +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Enabled +docstring-code-line-width = "dynamic" +preview = Disabled +``` + +```python +def simple(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + ``` + + Done. + """ + pass + + +# Like simple, but we double everything up to ensure the indent level is +# tracked correctly. +def repeated(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. 
+ print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + + + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + ``` + + Done. + """ + pass + + +# Like simple, but we make one line exactly one character longer than the limit +# (for 4-space indents) and make sure it gets wrapped. +def barely_exceeds_limit(): + """ + First line. 
+ + ```py + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is 89 columns, which is one + # more than the limit. Therefore, it should get wrapped for + # indent_width >= 4. + print( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a5678 + ) + return 5 + + self.x = doit(5) + ``` + + Done. + """ + pass + + +# This tests that if the code block is unindented, that it gets indented and +# the dynamic line width setting is applied correctly. +def unindented(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + ``` + + Done. + """ + pass + + +# Like unindented, but contains a `print` line where it just barely exceeds the +# globally configured line width *after* its indentation has been corrected. +def unindented_barely_exceeds_limit(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is 89 columns, which is one + # more than the limit. Therefore, it should get wrapped for + # indent_width >= 4. + print( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a5678 + ) + return 5 + + self.x = doit(5) + ``` + + Done. + """ + pass + + +# See: https://github.com/astral-sh/ruff/issues/9126 +def doctest_extra_indent1(): + """ + Docstring example containing a class. + + Examples + -------- + >>> @pl.api.register_dataframe_namespace("split") + ... class SplitFrame: + ... def __init__(self, df: pl.DataFrame): + ... self._df = df + ... + ... def by_first_letter_of_column_values(self, col: str) -> list[pl.DataFrame]: + ... return [ + ... self._df.filter(pl.col(col).str.starts_with(c)) + ... for c in sorted( + ... set(df.select(pl.col(col).str.slice(0, 1)).to_series()) + ... ) + ... ] + """ + + +# See: https://github.com/astral-sh/ruff/issues/9126 +class DoctestExtraIndent2: + def example2(): + """ + Regular docstring of class method. + + Examples + -------- + >>> df = pl.DataFrame( + ... {"foo": [1, 2, 3], "bar": [6, 7, 8], "ham": ["a", "b", "c"]} + ... ) + """ + + +# See: https://github.com/astral-sh/ruff/issues/9126 +def doctest_extra_indent3(): + """ + Pragma comment. + + Examples + -------- + >>> af1, af2, af3 = pl.align_frames( + ... df1, df2, df3, on="dt" + ... 
) # doctest: +IGNORE_RESULT + """ +``` + + +### Output 2 +``` +indent-style = space +line-width = 88 +indent-width = 2 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Enabled +docstring-code-line-width = "dynamic" +preview = Disabled +``` + +```python +def simple(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + ``` + + Done. + """ + pass + + +# Like simple, but we double everything up to ensure the indent level is +# tracked correctly. +def repeated(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + + + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. 
So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + ``` + + Done. + """ + pass + + +# Like simple, but we make one line exactly one character longer than the limit +# (for 4-space indents) and make sure it gets wrapped. +def barely_exceeds_limit(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is 89 columns, which is one + # more than the limit. Therefore, it should get wrapped for + # indent_width >= 4. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a5678) + return 5 + + self.x = doit(5) + ``` + + Done. + """ + pass + + +# This tests that if the code block is unindented, that it gets indented and +# the dynamic line width setting is applied correctly. +def unindented(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + ``` + + Done. + """ + pass + + +# Like unindented, but contains a `print` line where it just barely exceeds the +# globally configured line width *after* its indentation has been corrected. +def unindented_barely_exceeds_limit(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is 89 columns, which is one + # more than the limit. Therefore, it should get wrapped for + # indent_width >= 4. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a5678) + return 5 + + self.x = doit(5) + ``` + + Done. + """ + pass + + +# See: https://github.com/astral-sh/ruff/issues/9126 +def doctest_extra_indent1(): + """ + Docstring example containing a class. 
+ + Examples + -------- + >>> @pl.api.register_dataframe_namespace("split") + ... class SplitFrame: + ... def __init__(self, df: pl.DataFrame): + ... self._df = df + ... + ... def by_first_letter_of_column_values(self, col: str) -> list[pl.DataFrame]: + ... return [ + ... self._df.filter(pl.col(col).str.starts_with(c)) + ... for c in sorted(set(df.select(pl.col(col).str.slice(0, 1)).to_series())) + ... ] + """ + + +# See: https://github.com/astral-sh/ruff/issues/9126 +class DoctestExtraIndent2: + def example2(): + """ + Regular docstring of class method. + + Examples + -------- + >>> df = pl.DataFrame({"foo": [1, 2, 3], "bar": [6, 7, 8], "ham": ["a", "b", "c"]}) + """ + + +# See: https://github.com/astral-sh/ruff/issues/9126 +def doctest_extra_indent3(): + """ + Pragma comment. + + Examples + -------- + >>> af1, af2, af3 = pl.align_frames(df1, df2, df3, on="dt") # doctest: +IGNORE_RESULT + """ +``` + + +### Output 3 +``` +indent-style = tab +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Enabled +docstring-code-line-width = "dynamic" +preview = Disabled +``` + +```python +def simple(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + ``` + + Done. + """ + pass + + +# Like simple, but we double everything up to ensure the indent level is +# tracked correctly. +def repeated(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. 
+ print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + + + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + ``` + + Done. + """ + pass + + +# Like simple, but we make one line exactly one character longer than the limit +# (for 4-space indents) and make sure it gets wrapped. +def barely_exceeds_limit(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is 89 columns, which is one + # more than the limit. Therefore, it should get wrapped for + # indent_width >= 4. + print( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a5678 + ) + return 5 + + self.x = doit(5) + ``` + + Done. + """ + pass + + +# This tests that if the code block is unindented, that it gets indented and +# the dynamic line width setting is applied correctly. +def unindented(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print(abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a567) + return 5 + + self.x = doit(5) + ``` + + Done. 
+ """ + pass + + +# Like unindented, but contains a `print` line where it just barely exceeds the +# globally configured line width *after* its indentation has been corrected. +def unindented_barely_exceeds_limit(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + def abcdefghijklmnopqrstuvwxyz( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4 + ): + # For 4 space indents, this is 89 columns, which is one + # more than the limit. Therefore, it should get wrapped for + # indent_width >= 4. + print( + abc, ddef, ghi, jkl, mno, pqr, stu, vwx, yz, a1, a2, a3, a4, a5678 + ) + return 5 + + self.x = doit(5) + ``` + + Done. + """ + pass + + +# See: https://github.com/astral-sh/ruff/issues/9126 +def doctest_extra_indent1(): + """ + Docstring example containing a class. + + Examples + -------- + >>> @pl.api.register_dataframe_namespace("split") + ... class SplitFrame: + ... def __init__(self, df: pl.DataFrame): + ... self._df = df + ... + ... def by_first_letter_of_column_values(self, col: str) -> list[pl.DataFrame]: + ... return [ + ... self._df.filter(pl.col(col).str.starts_with(c)) + ... for c in sorted( + ... set(df.select(pl.col(col).str.slice(0, 1)).to_series()) + ... ) + ... ] + """ + + +# See: https://github.com/astral-sh/ruff/issues/9126 +class DoctestExtraIndent2: + def example2(): + """ + Regular docstring of class method. + + Examples + -------- + >>> df = pl.DataFrame( + ... {"foo": [1, 2, 3], "bar": [6, 7, 8], "ham": ["a", "b", "c"]} + ... ) + """ + + +# See: https://github.com/astral-sh/ruff/issues/9126 +def doctest_extra_indent3(): + """ + Pragma comment. + + Examples + -------- + >>> af1, af2, af3 = pl.align_frames( + ... df1, df2, df3, on="dt" + ... ) # doctest: +IGNORE_RESULT + """ +``` + + +### Output 4 +``` +indent-style = tab +line-width = 88 +indent-width = 8 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Enabled +docstring-code-line-width = "dynamic" +preview = Disabled +``` + +```python +def simple(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + ): + def abcdefghijklmnopqrstuvwxyz( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + a567, + ) + return 5 + + self.x = doit(5) + ``` + + Done. + """ + pass + + +# Like simple, but we double everything up to ensure the indent level is +# tracked correctly. +def repeated(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + ): + def abcdefghijklmnopqrstuvwxyz( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. 
+ print( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + a567, + ) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + a567, + ) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + self, + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + ): + def abcdefghijklmnopqrstuvwxyz( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + a567, + ) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + a567, + ) + return 5 + + self.x = doit(5) + + + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + ): + def abcdefghijklmnopqrstuvwxyz( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + a567, + ) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + a567, + ) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + self, + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + ): + def abcdefghijklmnopqrstuvwxyz( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + a567, + ) + return 5 + + self.x = doit(5) + + def abcdefghijklmnopqrstuvwxyz( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + a567, + ) + return 5 + + self.x = doit(5) + ``` + + Done. 
+ """ + pass + + +# Like simple, but we make one line exactly one character longer than the limit +# (for 4-space indents) and make sure it gets wrapped. +def barely_exceeds_limit(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + ): + def abcdefghijklmnopqrstuvwxyz( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + ): + # For 4 space indents, this is 89 columns, which is one + # more than the limit. Therefore, it should get wrapped for + # indent_width >= 4. + print( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + a5678, + ) + return 5 + + self.x = doit(5) + ``` + + Done. + """ + pass + + +# This tests that if the code block is unindented, that it gets indented and +# the dynamic line width setting is applied correctly. +def unindented(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + ): + def abcdefghijklmnopqrstuvwxyz( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + ): + # For 4 space indents, this is just one character shy of + # tripping the default line width of 88. So it should not be + # wrapped. + print( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + a567, + ) + return 5 + + self.x = doit(5) + ``` + + Done. + """ + pass + + +# Like unindented, but contains a `print` line where it just barely exceeds the +# globally configured line width *after* its indentation has been corrected. +def unindented_barely_exceeds_limit(): + """ + First line. + + ```py + class Abcdefghijklmopqrstuvwxyz( + Abc, Def, Ghi, Jkl, Mno, Pqr, Stu, Vwx, Yz, A1, A2, A3, A4, A5 + ): + def abcdefghijklmnopqrstuvwxyz( + self, + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + ): + def abcdefghijklmnopqrstuvwxyz( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + ): + # For 4 space indents, this is 89 columns, which is one + # more than the limit. Therefore, it should get wrapped for + # indent_width >= 4. + print( + abc, + ddef, + ghi, + jkl, + mno, + pqr, + stu, + vwx, + yz, + a1, + a2, + a3, + a4, + a5678, + ) + return 5 + + self.x = doit(5) + ``` + + Done. + """ + pass + + +# See: https://github.com/astral-sh/ruff/issues/9126 +def doctest_extra_indent1(): + """ + Docstring example containing a class. + + Examples + -------- + >>> @pl.api.register_dataframe_namespace("split") + ... class SplitFrame: + ... def __init__(self, df: pl.DataFrame): + ... self._df = df + ... + ... def by_first_letter_of_column_values( + ... self, col: str + ... ) -> list[pl.DataFrame]: + ... return [ + ... self._df.filter(pl.col(col).str.starts_with(c)) + ... for c in sorted( + ... set( + ... df.select( + ... pl.col(col).str.slice(0, 1) + ... ).to_series() + ... ) + ... ) + ... ] + """ + + +# See: https://github.com/astral-sh/ruff/issues/9126 +class DoctestExtraIndent2: + def example2(): + """ + Regular docstring of class method. + + Examples + -------- + >>> df = pl.DataFrame( + ... {"foo": [1, 2, 3], "bar": [6, 7, 8], "ham": ["a", "b", "c"]} + ... 
) + """ + + +# See: https://github.com/astral-sh/ruff/issues/9126 +def doctest_extra_indent3(): + """ + Pragma comment. + + Examples + -------- + >>> af1, af2, af3 = pl.align_frames( + ... df1, df2, df3, on="dt" + ... ) # doctest: +IGNORE_RESULT + """ +``` + + + diff --git a/crates/ruff_python_formatter/tests/snapshots/format@expression__binary.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@expression__binary.py.snap index e245378863..1d59b5b13a 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@expression__binary.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@expression__binary.py.snap @@ -325,6 +325,18 @@ expected_content = ( ) ) +# Skip FString content when determining whether to omit optional parentheses or not.0 +# The below expression should be parenthesized because it ends with an fstring and starts with a name. +# (Call expressions at the beginning don't count as parenthesized because they don't start with parens). +assert ( + format.format_event(spec) + == f'Event("_remove_cookie", {{key:`testkey`,options:{json.dumps(options)}}})' +) +# Avoid parentheses for this example because it starts with a tuple expression. +assert ( + (spec, format) + == f'Event("_remove_cookie", {{key:`testkey`,options:{json.dumps(options)}}})' +) rowuses = [(1 << j) | # column ordinal (1 << (n + i-j + n-1)) | # NW-SE ordinal @@ -790,6 +802,18 @@ expected_content = ( ) ) +# Skip FString content when determining whether to omit optional parentheses or not.0 +# The below expression should be parenthesized because it ends with an fstring and starts with a name. +# (Call expressions at the beginning don't count as parenthesized because they don't start with parens). +assert ( + format.format_event(spec) + == f'Event("_remove_cookie", {{key:`testkey`,options:{json.dumps(options)}}})' +) +# Avoid parentheses for this example because it starts with a tuple expression. 
+assert ( + spec, + format, +) == f'Event("_remove_cookie", {{key:`testkey`,options:{json.dumps(options)}}})' rowuses = [ (1 << j) # column ordinal diff --git a/crates/ruff_python_formatter/tests/snapshots/format@expression__bytes.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@expression__bytes.py.snap index ba906f5a34..b741654c94 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@expression__bytes.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@expression__bytes.py.snap @@ -129,14 +129,15 @@ test_particular = [ ## Outputs ### Output 1 ``` -indent-style = space -line-width = 88 -indent-width = 4 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -280,14 +281,15 @@ test_particular = [ ### Output 2 ``` -indent-style = space -line-width = 88 -indent-width = 4 -quote-style = Single -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Single +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python diff --git a/crates/ruff_python_formatter/tests/snapshots/format@expression__optional_parentheses_comments.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@expression__optional_parentheses_comments.py.snap index 4279cd03f2..23c95ef3a2 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@expression__optional_parentheses_comments.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@expression__optional_parentheses_comments.py.snap @@ -421,4 +421,44 @@ def test6(): ``` +## Preview changes +```diff +--- Stable ++++ Preview +@@ -72,13 +72,13 @@ + ## Breaking left + + # Should break `[a]` first +-____[ +- a +-] = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvv # c ++____[a] = ( ++ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvv # c ++) + +-____[ +- a +-] = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvv # cc ++____[a] = ( ++ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvv # cc ++) + + ( + # some weird comments +@@ -136,9 +136,9 @@ + # 89 characters parenthesized (collapse) + ____a: a = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvv # c + +-_a: a[ +- b +-] = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvv # c ++_a: a[b] = ( ++ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvv # c ++) + + ## Augmented Assign + +``` + + diff --git a/crates/ruff_python_formatter/tests/snapshots/format@expression__string.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@expression__string.py.snap index af7a9ecb62..306a10a49f 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@expression__string.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@expression__string.py.snap @@ -144,14 +144,15 @@ trailing_preferred_quote_texts = [''' "''', ''' ""''', ''' """''', ''' """"'''] ## Outputs ### Output 1 ``` -indent-style = space -line-width = 88 -indent-width = 4 
-quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -319,14 +320,15 @@ trailing_preferred_quote_texts = [''' "''', ''' ""''', ''' """''', ''' """"'''] ### Output 2 ``` -indent-style = space -line-width = 88 -indent-width = 4 -quote-style = Single -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Single +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python diff --git a/crates/ruff_python_formatter/tests/snapshots/format@fmt_on_off__fmt_off_docstring.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@fmt_on_off__fmt_off_docstring.py.snap index cd2da8896b..2c5ce6935f 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@fmt_on_off__fmt_off_docstring.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@fmt_on_off__fmt_off_docstring.py.snap @@ -28,14 +28,15 @@ def test(): ## Outputs ### Output 1 ``` -indent-style = space -line-width = 88 -indent-width = 4 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -63,14 +64,15 @@ def test(): ### Output 2 ``` -indent-style = space -line-width = 88 -indent-width = 2 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 2 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python diff --git a/crates/ruff_python_formatter/tests/snapshots/format@fmt_on_off__indent.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@fmt_on_off__indent.py.snap index 12a58faa13..f1db6d7a8b 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@fmt_on_off__indent.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@fmt_on_off__indent.py.snap @@ -9,14 +9,15 @@ input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/fmt_on_off ## Outputs ### Output 1 ``` -indent-style = space -line-width = 88 -indent-width = 4 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -25,14 +26,15 @@ preview = Disabled ### Output 2 ``` -indent-style = space -line-width = 88 -indent-width = 1 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 1 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = 
Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -41,14 +43,15 @@ preview = Disabled ### Output 3 ``` -indent-style = tab -line-width = 88 -indent-width = 4 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = tab +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python diff --git a/crates/ruff_python_formatter/tests/snapshots/format@fmt_on_off__mixed_space_and_tab.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@fmt_on_off__mixed_space_and_tab.py.snap index 54e970077c..7ff04c9571 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@fmt_on_off__mixed_space_and_tab.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@fmt_on_off__mixed_space_and_tab.py.snap @@ -24,14 +24,15 @@ not_fixed ## Outputs ### Output 1 ``` -indent-style = space -line-width = 88 -indent-width = 4 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -56,14 +57,15 @@ not_fixed ### Output 2 ``` -indent-style = space -line-width = 88 -indent-width = 2 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 2 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -88,14 +90,15 @@ not_fixed ### Output 3 ``` -indent-style = tab -line-width = 88 -indent-width = 4 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = tab +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python diff --git a/crates/ruff_python_formatter/tests/snapshots/format@preview.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@preview.py.snap index c3504c47f8..888c6f938a 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@preview.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@preview.py.snap @@ -75,14 +75,15 @@ def f(): ## Outputs ### Output 1 ``` -indent-style = space -line-width = 88 -indent-width = 4 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -156,14 +157,15 @@ def f(): ### Output 2 ``` -indent-style = space -line-width = 88 -indent-width = 4 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Enabled +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed 
+magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Enabled ``` ```python @@ -196,23 +198,22 @@ def reference_docstring_newlines(): class RemoveNewlineBeforeClassDocstring: - """Black's `Preview.no_blank_line_before_class_docstring`""" def f(): """Black's `Preview.prefer_splitting_right_hand_side_of_assignments`""" - aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa[ - bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb - ] = cccccccc.ccccccccccccc.cccccccc + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa[bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb] = ( + cccccccc.ccccccccccccc.cccccccc + ) - aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa[ - bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb - ] = cccccccc.ccccccccccccc().cccccccc + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa[bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb] = ( + cccccccc.ccccccccccccc().cccccccc + ) - aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa[ - bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb - ] = cccccccc.ccccccccccccc(d).cccccccc + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa[bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb] = ( + cccccccc.ccccccccccccc(d).cccccccc + ) aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa[bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb] = ( cccccccc.ccccccccccccc(d).cccccccc + e @@ -226,12 +227,12 @@ def f(): + eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee ) - self._cache: dict[ - DependencyCacheKey, list[list[DependencyPackage]] - ] = collections.defaultdict(list) - self._cached_dependencies_by_level: dict[ - int, list[DependencyCacheKey] - ] = collections.defaultdict(list) + self._cache: dict[DependencyCacheKey, list[list[DependencyPackage]]] = ( + collections.defaultdict(list) + ) + self._cached_dependencies_by_level: dict[int, list[DependencyCacheKey]] = ( + collections.defaultdict(list) + ) ``` diff --git a/crates/ruff_python_formatter/tests/snapshots/format@quote_style.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@quote_style.py.snap new file mode 100644 index 0000000000..ed5c09022e --- /dev/null +++ b/crates/ruff_python_formatter/tests/snapshots/format@quote_style.py.snap @@ -0,0 +1,273 @@ +--- +source: crates/ruff_python_formatter/tests/fixtures.rs +input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/quote_style.py +--- +## Input +```python +'single' +"double" +r'r single' +r"r double" +f'f single' +f"f double" +fr'fr single' +fr"fr double" +rf'rf single' +rf"rf double" +b'b single' +b"b double" +rb'rb single' +rb"rb double" +br'br single' +br"br double" + +'''single triple''' +"""double triple""" +r'''r single triple''' +r"""r double triple""" +f'''f single triple''' +f"""f double triple""" +fr'''fr single triple''' +fr"""fr double triple""" +rf'''rf single triple''' +rf"""rf double triple""" +b'''b single triple''' +b"""b double triple""" +rb'''rb single triple''' +rb"""rb double triple""" +br'''br single triple''' +br"""br double triple""" + +'single1' 'single2' +'single1' "double2" +"double1" 'single2' +"double1" "double2" + +def docstring_single_triple(): + '''single triple''' + +def docstring_double_triple(): + """double triple""" + +def docstring_double(): + "double triple" + +def docstring_single(): + 'single' +``` + +## Outputs +### Output 1 +``` +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Single +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled +``` + +```python +'single' +'double' +r'r single' +r'r double' +f'f single' +f'f double' +rf'fr single' +rf'fr double' +rf'rf single' +rf'rf double' +b'b single' +b'b double' +rb'rb single' +rb'rb double' +rb'br 
single' +rb'br double' + +"""single triple""" +"""double triple""" +r"""r single triple""" +r"""r double triple""" +f"""f single triple""" +f"""f double triple""" +rf"""fr single triple""" +rf"""fr double triple""" +rf"""rf single triple""" +rf"""rf double triple""" +b"""b single triple""" +b"""b double triple""" +rb"""rb single triple""" +rb"""rb double triple""" +rb"""br single triple""" +rb"""br double triple""" + +'single1' 'single2' +'single1' 'double2' +'double1' 'single2' +'double1' 'double2' + + +def docstring_single_triple(): + """single triple""" + + +def docstring_double_triple(): + """double triple""" + + +def docstring_double(): + "double triple" + + +def docstring_single(): + "single" +``` + + +### Output 2 +``` +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled +``` + +```python +"single" +"double" +r"r single" +r"r double" +f"f single" +f"f double" +rf"fr single" +rf"fr double" +rf"rf single" +rf"rf double" +b"b single" +b"b double" +rb"rb single" +rb"rb double" +rb"br single" +rb"br double" + +"""single triple""" +"""double triple""" +r"""r single triple""" +r"""r double triple""" +f"""f single triple""" +f"""f double triple""" +rf"""fr single triple""" +rf"""fr double triple""" +rf"""rf single triple""" +rf"""rf double triple""" +b"""b single triple""" +b"""b double triple""" +rb"""rb single triple""" +rb"""rb double triple""" +rb"""br single triple""" +rb"""br double triple""" + +"single1" "single2" +"single1" "double2" +"double1" "single2" +"double1" "double2" + + +def docstring_single_triple(): + """single triple""" + + +def docstring_double_triple(): + """double triple""" + + +def docstring_double(): + "double triple" + + +def docstring_single(): + "single" +``` + + +### Output 3 +``` +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Preserve +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled +``` + +```python +'single' +"double" +r'r single' +r"r double" +f'f single' +f"f double" +rf'fr single' +rf"fr double" +rf'rf single' +rf"rf double" +b'b single' +b"b double" +rb'rb single' +rb"rb double" +rb'br single' +rb"br double" + +"""single triple""" +"""double triple""" +r"""r single triple""" +r"""r double triple""" +f"""f single triple""" +f"""f double triple""" +rf"""fr single triple""" +rf"""fr double triple""" +rf"""rf single triple""" +rf"""rf double triple""" +b"""b single triple""" +b"""b double triple""" +rb"""rb single triple""" +rb"""rb double triple""" +rb"""br single triple""" +rb"""br double triple""" + +'single1' 'single2' +'single1' "double2" +"double1" 'single2' +"double1" "double2" + + +def docstring_single_triple(): + """single triple""" + + +def docstring_double_triple(): + """double triple""" + + +def docstring_double(): + "double triple" + + +def docstring_single(): + "single" +``` + + + diff --git a/crates/ruff_python_formatter/tests/snapshots/format@skip_magic_trailing_comma.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@skip_magic_trailing_comma.py.snap index 4add8537b1..83b67689f4 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@skip_magic_trailing_comma.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@skip_magic_trailing_comma.py.snap @@ -42,14 +42,15 @@ with (a,): # magic trailing comma ## Outputs ### Output 1 ``` 
-indent-style = space -line-width = 88 -indent-width = 4 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -97,14 +98,15 @@ with ( ### Output 2 ``` -indent-style = space -line-width = 88 -indent-width = 4 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Ignore -docstring-code = Disabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Ignore +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python diff --git a/crates/ruff_python_formatter/tests/snapshots/format@statement__ann_assign.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@statement__ann_assign.py.snap index 69490f3caf..191b6d141b 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@statement__ann_assign.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@statement__ann_assign.py.snap @@ -67,4 +67,24 @@ class DefaultRunner: ``` +## Preview changes +```diff +--- Stable ++++ Preview +@@ -7,9 +7,9 @@ + Bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb() + ) + +-bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb: ( +- Bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb +-) = Bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb() ++bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb: Bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb = ( ++ Bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb() ++) + + JSONSerializable: TypeAlias = ( + "str | int | float | bool | None | list | tuple | JSONMapping" +``` + + diff --git a/crates/ruff_python_formatter/tests/snapshots/format@statement__assign.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@statement__assign.py.snap index 075824ab64..b78a768777 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@statement__assign.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@statement__assign.py.snap @@ -73,6 +73,15 @@ def main() -> None: db_request.POST["name"] ) )[0] + + +c = b[dddddd, aaaaaa] = ( + a[ + aaaaaaa, + bbbbbbbbbbbbbbbbbbb + ] + # comment +) = xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx ``` ## Output @@ -151,6 +160,48 @@ def main() -> None: db_request.POST["name"] ) )[0] + + +c = b[dddddd, aaaaaa] = ( + a[aaaaaaa, bbbbbbbbbbbbbbbbbbb] + # comment +) = xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx +``` + + +## Preview changes +```diff +--- Stable ++++ Preview +@@ -1,7 +1,5 @@ + # break left hand side +-a1akjdshflkjahdslkfjlasfdahjlfds = ( +- bakjdshflkjahdslkfjlasfdahjlfds +-) = ( ++a1akjdshflkjahdslkfjlasfdahjlfds = bakjdshflkjahdslkfjlasfdahjlfds = ( + cakjdshflkjahdslkfjlasfdahjlfds + ) = kjaödkjaföjfahlfdalfhaöfaöfhaöfha = fkjaödkjaföjfahlfdalfhaöfaöfhaöfha = g = 3 + +@@ -9,15 +7,13 @@ + a2 = b2 = 2 + + # Break the last element +-a = ( +- asdf +-) = ( ++a = asdf = ( + fjhalsdljfalflaflapamsakjsdhflakjdslfjhalsdljfalflaflapamsakjsdhflakjdslfjhalsdljfal + ) = 1 + +-aa = [ +- bakjdshflkjahdslkfjlasfdahjlfds +-] = dddd = ddd = fkjaödkjaföjfahlfdalfhaöfaöfhaöfha = g = [3] ++aa = [bakjdshflkjahdslkfjlasfdahjlfds] = dddd = ddd = ( ++ fkjaödkjaföjfahlfdalfhaöfaöfhaöfha ++) = g = [3] + + aa = [] = dddd = ddd = fkjaödkjaföjfahlfdalfhaöfaöfhaöfha = g = [3] + ``` diff --git 
a/crates/ruff_python_formatter/tests/snapshots/format@statement__assignment_split_value_first.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@statement__assignment_split_value_first.py.snap new file mode 100644 index 0000000000..38e2e31a44 --- /dev/null +++ b/crates/ruff_python_formatter/tests/snapshots/format@statement__assignment_split_value_first.py.snap @@ -0,0 +1,457 @@ +--- +source: crates/ruff_python_formatter/tests/fixtures.rs +input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/assignment_split_value_first.py +--- +## Input +```python +####### +# Unsplittable target and value + +# Only parenthesize the value if it makes it fit, otherwise avoid parentheses. +b = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvvee + +bbbbbbbbbbbbbbbb = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvv + +# Avoid parenthesizing the value even if the target exceeds the configured width +bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb = bbb + + +############ +# Splittable targets + +# Does not double-parenthesize tuples +( + first_item, + second_item, +) = some_looooooooong_module.some_loooooog_function_name( + first_argument, second_argument, third_argument +) + + +# Preserve parentheses around the first target +( + req["ticket"]["steps"]["step"][0]["tasks"]["task"]["fields"]["field"][ + "access_request" + ]["destinations"]["destination"][0]["ip_address"] +) = dst + +# Augmented assignment +req["ticket"]["steps"]["step"][0]["tasks"]["task"]["fields"]["field"][ + "access_request" +] += dst + +# Always parenthesize the value if it avoids splitting the target, regardless of the value's width. +_a: a[aaaa] = ( + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvv +) + +##### +# Avoid parenthesizing the value if the expression right before the `=` splits to avoid an unnecessary pair of parentheses + +# The type annotation is guaranteed to split because it is too long. +_a: a[ + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvv +] = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvv + +# The target is too long +( + aaaaaaaaaaa, + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb, +) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvv + +# The target splits because of a magic trailing comma +( + a, + b, +) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvvvvv + +# The targets split because of a comment +( + # leading + a +) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvvvvv + +( + a + # trailing +) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvvvvv + +( + a, # nested + b +) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvvvvv + +####### +# Multi targets + +# Black always parenthesizes the right if using multiple targets regardless if the parenthesized value exceeds the +# the configured line width or not +aaaa = bbbbbbbbbbbbbbbb = ( + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvvee +) + +# Black does parenthesize the target if the target itself exceeds the line width and only parenthesizes +# the values if it makes it fit. +# The second target is too long to ever fit into the configured line width. 
+aaaa = ( + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbdddd +) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvvee + +# Does also apply for other multi target assignments, as soon as a single target exceeds the configured +# width +aaaaaa = a["aaa"] = bbbbb[aa, bbb, cccc] = dddddddddd = eeeeee = ( + fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + + +###################### +# Call expressions: +# For unsplittable targets: Parenthesize the call expression if it makes it fit. +# +# For splittable targets: +# Only parenthesize a call expression if the parens of the call don't fit on the same line +# as the target. Don't parenthesize the call expression if the target (or annotation) right before +# splits. + +# Don't parenthesize the function call if the left is unsplittable. +this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = a.b.function( + arg1, arg2, arg3 +) +this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = function( + [1, 2, 3], arg1, [1, 2, 3], arg2, [1, 2, 3], arg3 +) +this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = function( + [1, 2, 3], + arg1, + [1, 2, 3], + arg2, + [1, 2, 3], + arg3, + dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd, + eeeeeeeeeeeeee, +) + +this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = ( + function() +) +this_is_a_ridiculously_long_name_and_nobodyddddddddddddddddddddddddddddddd = ( + a.b.function(arg1, arg2, arg3) +) +this_is_a_ridiculously_long_name_and_nobodyddddddddddddddddddddddddddddddd = function() +this_is_a_ridiculously_long_name_and_nobodyddddddddddddddddddddddddddddddd = function( + [1, 2, 3], arg1, [1, 2, 3], arg2, [1, 2, 3], arg3 +) +this_is_a_ridiculously_long_name_and_nobodyddddddddddddddddddddddddddddddd = function( + [1, 2, 3], + arg1, + [1, 2, 3], + arg2, + [1, 2, 3], + arg3, + dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd, + eeeeeeeeeeeeee, +) + +####### Fluent call expressions +# Uses the regular `Multiline` layout where the entire `value` gets parenthesized +# if it doesn't fit on the line. +this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use = ( + function().b().c([1, 2, 3], arg1, [1, 2, 3], arg2, [1, 2, 3], arg3) +) + + +####### +# Test comment inlining +value.__dict__[key] = ( + "test" # set some Thrift field to non-None in the struct aa bb cc dd ee +) +value.__dict__.keye = ( + "test" # set some Thrift field to non-None in the struct aa bb cc dd ee +) +value.__dict__.keye = ( + "test" # set some Thrift field to non-None in the struct aa bb cc dd ee +) + + +# Don't parenthesize the value because the target's trailing comma forces it to split. +a[ + aaaaaaa, + b, +] = cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc # comment + +# Parenthesize the value, but don't duplicate the comment. +a[aaaaaaa, b] = ( + cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc # comment +) + +# Format both as flat, but don't loos the comment. 
+a[aaaaaaa, b] = bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb # comment + +####################################################### +# Test the case where a parenthesized value now fits: +a[ + aaaaaaa, + b +] = ( + cccccccc # comment +) + +# Splits the target but not the value because of the magic trailing comma. +a[ + aaaaaaa, + b, +] = ( + cccccccc # comment +) + +# Splits the second target because of the comment and the first target because of the trailing comma. +a[ + aaaaaaa, + b, +] = ( + # leading comment + b +) = ( + cccccccc # comment +) + + +######## +# Type Alias Statement +type A[str, int, number] = VeryLongTypeNameThatShouldBreakFirstToTheRightBeforeSplitngtin + +type A[VeryLongTypeNameThatShouldBreakFirstToTheRightBeforeSplitngtinthatExceedsTheWidth] = str + +``` + +## Outputs +### Output 1 +``` +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Enabled +``` + +```python +####### +# Unsplittable target and value + +# Only parenthesize the value if it makes it fit, otherwise avoid parentheses. +b = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvvee + +bbbbbbbbbbbbbbbb = ( + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvv +) + +# Avoid parenthesizing the value even if the target exceeds the configured width +bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb = bbb + + +############ +# Splittable targets + +# Does not double-parenthesize tuples +( + first_item, + second_item, +) = some_looooooooong_module.some_loooooog_function_name( + first_argument, second_argument, third_argument +) + + +# Preserve parentheses around the first target +( + req["ticket"]["steps"]["step"][0]["tasks"]["task"]["fields"]["field"][ + "access_request" + ]["destinations"]["destination"][0]["ip_address"] +) = dst + +# Augmented assignment +req["ticket"]["steps"]["step"][0]["tasks"]["task"]["fields"]["field"][ + "access_request" +] += dst + +# Always parenthesize the value if it avoids splitting the target, regardless of the value's width. +_a: a[aaaa] = ( + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvv +) + +##### +# Avoid parenthesizing the value if the expression right before the `=` splits to avoid an unnecessary pair of parentheses + +# The type annotation is guaranteed to split because it is too long. 
+_a: a[ + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvv +] = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvv + +# The target is too long +( + aaaaaaaaaaa, + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb, +) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvv + +# The target splits because of a magic trailing comma +( + a, + b, +) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvvvvv + +# The targets split because of a comment +( + # leading + a +) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvvvvv + +( + a + # trailing +) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvvvvv + +( + a, # nested + b, +) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvvvvv + +####### +# Multi targets + +# Black always parenthesizes the right if using multiple targets regardless if the parenthesized value exceeds the +# the configured line width or not +aaaa = bbbbbbbbbbbbbbbb = ( + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvvee +) + +# Black does parenthesize the target if the target itself exceeds the line width and only parenthesizes +# the values if it makes it fit. +# The second target is too long to ever fit into the configured line width. +aaaa = ( + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbdddd +) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbvvvvvvvvvvvvvvvvvee + +# Does also apply for other multi target assignments, as soon as a single target exceeds the configured +# width +aaaaaa = a["aaa"] = bbbbb[aa, bbb, cccc] = dddddddddd = eeeeee = ( + fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff +) = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + + +###################### +# Call expressions: +# For unsplittable targets: Parenthesize the call expression if it makes it fit. +# +# For splittable targets: +# Only parenthesize a call expression if the parens of the call don't fit on the same line +# as the target. Don't parenthesize the call expression if the target (or annotation) right before +# splits. + +# Don't parenthesize the function call if the left is unsplittable. 
+this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = a.b.function( + arg1, arg2, arg3 +) +this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = function( + [1, 2, 3], arg1, [1, 2, 3], arg2, [1, 2, 3], arg3 +) +this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = function( + [1, 2, 3], + arg1, + [1, 2, 3], + arg2, + [1, 2, 3], + arg3, + dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd, + eeeeeeeeeeeeee, +) + +this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use_one_like_it = function() +this_is_a_ridiculously_long_name_and_nobodyddddddddddddddddddddddddddddddd = ( + a.b.function(arg1, arg2, arg3) +) +this_is_a_ridiculously_long_name_and_nobodyddddddddddddddddddddddddddddddd = function() +this_is_a_ridiculously_long_name_and_nobodyddddddddddddddddddddddddddddddd = function( + [1, 2, 3], arg1, [1, 2, 3], arg2, [1, 2, 3], arg3 +) +this_is_a_ridiculously_long_name_and_nobodyddddddddddddddddddddddddddddddd = function( + [1, 2, 3], + arg1, + [1, 2, 3], + arg2, + [1, 2, 3], + arg3, + dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd, + eeeeeeeeeeeeee, +) + +####### Fluent call expressions +# Uses the regular `Multiline` layout where the entire `value` gets parenthesized +# if it doesn't fit on the line. +this_is_a_ridiculously_long_name_and_nobody_in_their_right_mind_would_use = ( + function().b().c([1, 2, 3], arg1, [1, 2, 3], arg2, [1, 2, 3], arg3) +) + + +####### +# Test comment inlining +value.__dict__[key] = ( + "test" # set some Thrift field to non-None in the struct aa bb cc dd ee +) +value.__dict__.keye = ( + "test" # set some Thrift field to non-None in the struct aa bb cc dd ee +) +value.__dict__.keye = ( + "test" # set some Thrift field to non-None in the struct aa bb cc dd ee +) + + +# Don't parenthesize the value because the target's trailing comma forces it to split. +a[ + aaaaaaa, + b, +] = cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc # comment + +# Parenthesize the value, but don't duplicate the comment. +a[aaaaaaa, b] = ( + cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc # comment +) + +# Format both as flat, but don't loos the comment. +a[aaaaaaa, b] = bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb # comment + +####################################################### +# Test the case where a parenthesized value now fits: +a[aaaaaaa, b] = cccccccc # comment + +# Splits the target but not the value because of the magic trailing comma. +a[ + aaaaaaa, + b, +] = cccccccc # comment + +# Splits the second target because of the comment and the first target because of the trailing comma. 
+a[ + aaaaaaa, + b, +] = ( + # leading comment + b +) = cccccccc # comment + + +######## +# Type Alias Statement +type A[str, int, number] = ( + VeryLongTypeNameThatShouldBreakFirstToTheRightBeforeSplitngtin +) + +type A[ + VeryLongTypeNameThatShouldBreakFirstToTheRightBeforeSplitngtinthatExceedsTheWidth +] = str +``` + + + diff --git a/crates/ruff_python_formatter/tests/snapshots/format@statement__class_definition.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@statement__class_definition.py.snap index de82b7126c..eec7f540f7 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@statement__class_definition.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@statement__class_definition.py.snap @@ -513,7 +513,30 @@ class QuerySet(AltersData): class Test( -@@ -159,20 +158,17 @@ +@@ -94,7 +93,6 @@ + + + class Test: +- + """Docstring""" + + +@@ -111,14 +109,12 @@ + + + class Test: +- + """Docstring""" + + x = 1 + + + class Test: +- + """Docstring""" + + # comment +@@ -159,20 +155,17 @@ @dataclass # Copied from transformers.models.clip.modeling_clip.CLIPOutput with CLIP->AltCLIP diff --git a/crates/ruff_python_formatter/tests/snapshots/format@statement__ellipsis.pyi.snap b/crates/ruff_python_formatter/tests/snapshots/format@statement__ellipsis.pyi.snap index 756fbff9f8..89bab19aef 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@statement__ellipsis.pyi.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@statement__ellipsis.pyi.snap @@ -74,6 +74,10 @@ with True: with True: ... # comment +with True: + ... + # comment + match x: case 1: ... @@ -105,7 +109,8 @@ try: except: ... # comment finally: - ... # comment``` + ... # comment +``` ## Output ```python @@ -163,6 +168,10 @@ with True: with True: ... # comment +with True: + ... + # comment + match x: case 1: ... 
case 2: diff --git a/crates/ruff_python_formatter/tests/snapshots/format@statement__function.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@statement__function.py.snap index 7a8e97566e..4c040409d7 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@statement__function.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@statement__function.py.snap @@ -416,6 +416,16 @@ def default_arg_comments2(# # ): print(x) + +def function_with_one_argument_and_a_positional_separator( + argument: str, / +) -> ReallyReallyReallyReallyReallyReallyReallyReallyLongName: + pass + +def function_with_one_argument_and_a_keyword_separator( + *, argument: str +) -> ReallyReallyReallyReallyReallyReallyReallyReallyLongName: + pass ``` ## Output @@ -993,6 +1003,18 @@ def default_arg_comments2( # # ): print(x) + + +def function_with_one_argument_and_a_positional_separator( + argument: str, / +) -> ReallyReallyReallyReallyReallyReallyReallyReallyLongName: + pass + + +def function_with_one_argument_and_a_keyword_separator( + *, argument: str +) -> ReallyReallyReallyReallyReallyReallyReallyReallyLongName: + pass ``` diff --git a/crates/ruff_python_formatter/tests/snapshots/format@statement__match.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@statement__match.py.snap index 01e0715610..dad7a3c526 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@statement__match.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@statement__match.py.snap @@ -584,6 +584,11 @@ match n % 3, n % 5: print("Buzz") case _: print(n) + +# Unparenthesized tuples +match x: + case Child(aaaaaaaaa, bbbbbbbbbbbbbbb, cccccc), Doc(aaaaa, bbbbbbbbbb, ddddddddddddd): + pass ``` ## Output @@ -1210,6 +1215,13 @@ match n % 3, n % 5: print("Buzz") case _: print(n) + +# Unparenthesized tuples +match x: + case Child(aaaaaaaaa, bbbbbbbbbbbbbbb, cccccc), Doc( + aaaaa, bbbbbbbbbb, ddddddddddddd + ): + pass ``` diff --git a/crates/ruff_python_formatter/tests/snapshots/format@statement__type_alias.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@statement__type_alias.py.snap index ccc44b8db9..6320591b80 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@statement__type_alias.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@statement__type_alias.py.snap @@ -22,10 +22,12 @@ type Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx type Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx[Aaaaaaaaaaaaaaaaaaaaaaaaaaaa] = int type Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx[Aaaaaaaaaaaaaaaaaaaaaaaaaaaa, Bbbbbbbbbbbbb] = int type Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx = Tttttttttttttttttttttttttttttttttttttttttttttttttttttttt +type Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx = Tttttttttttttttttttttttttttttttttttttttttttttttttttttttt # with comment # long value type X = Ttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttt type X = Aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa | Bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb | Ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc +type XXXXXXXXXXXXX = 
Tttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttt # with comment # soft keyword as alias name type type = int @@ -127,6 +129,9 @@ type Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx Bbbbbbbbbbbbb, ] = int type Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx = Tttttttttttttttttttttttttttttttttttttttttttttttttttttttt +type Xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx = ( + Tttttttttttttttttttttttttttttttttttttttttttttttttttttttt # with comment +) # long value type X = Ttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttt @@ -135,6 +140,9 @@ type X = ( | Bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb | Ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc ) +type XXXXXXXXXXXXX = ( + Tttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttt # with comment +) # soft keyword as alias name type type = int diff --git a/crates/ruff_python_formatter/tests/snapshots/format@statement__with.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@statement__with.py.snap index 7b2280c6ca..62018dfe4e 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@statement__with.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@statement__with.py.snap @@ -309,6 +309,10 @@ if True: if True: with anyio.CancelScope(shield=True) if get_running_loop() else contextlib.nullcontext(): pass + + +with Child(aaaaaaaaa, bbbbbbbbbbbbbbb, cccccc), Document(aaaaa, bbbbbbbbbb, ddddddddddddd): + pass ``` ## Output @@ -640,6 +644,12 @@ if True: shield=True ) if get_running_loop() else contextlib.nullcontext(): pass + + +with Child(aaaaaaaaa, bbbbbbbbbbbbbbb, cccccc), Document( + aaaaa, bbbbbbbbbb, ddddddddddddd +): + pass ``` diff --git a/crates/ruff_python_formatter/tests/snapshots/format@tab_width.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@tab_width.py.snap index d57334c7ca..ea85babc1b 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@tab_width.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@tab_width.py.snap @@ -17,14 +17,15 @@ input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/tab_width. 
## Outputs ### Output 1 ``` -indent-style = space -line-width = 88 -indent-width = 2 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 2 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -41,14 +42,15 @@ preview = Disabled ### Output 2 ``` -indent-style = space -line-width = 88 -indent-width = 4 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 4 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python @@ -68,14 +70,15 @@ preview = Disabled ### Output 3 ``` -indent-style = space -line-width = 88 -indent-width = 8 -quote-style = Double -line-ending = LineFeed -magic-trailing-comma = Respect -docstring-code = Disabled -preview = Disabled +indent-style = space +line-width = 88 +indent-width = 8 +quote-style = Double +line-ending = LineFeed +magic-trailing-comma = Respect +docstring-code = Disabled +docstring-code-line-width = "dynamic" +preview = Disabled ``` ```python diff --git a/crates/ruff_python_parser/src/invalid.rs b/crates/ruff_python_parser/src/invalid.rs index 8424567113..91f81bd5ea 100644 --- a/crates/ruff_python_parser/src/invalid.rs +++ b/crates/ruff_python_parser/src/invalid.rs @@ -59,7 +59,6 @@ pub(crate) fn assignment_target(target: &Expr) -> Result<(), LexicalError> { YieldFrom(ref e) => Err(err(e.range.start())), Compare(ref e) => Err(err(e.range.start())), Call(ref e) => Err(err(e.range.start())), - FormattedValue(ref e) => Err(err(e.range.start())), // FString is recursive, but all its forms are invalid as an // assignment target, so we can reject it without exploring it. FString(ref e) => Err(err(e.range.start())), diff --git a/crates/ruff_python_parser/src/lexer/cursor.rs b/crates/ruff_python_parser/src/lexer/cursor.rs index 91c7d30c53..26f3bb8a5b 100644 --- a/crates/ruff_python_parser/src/lexer/cursor.rs +++ b/crates/ruff_python_parser/src/lexer/cursor.rs @@ -120,6 +120,7 @@ impl<'a> Cursor<'a> { } /// Eats symbols while predicate returns true or until the end of file is reached. + #[inline] pub(super) fn eat_while(&mut self, mut predicate: impl FnMut(char) -> bool) { // It was tried making optimized version of this for eg. line comments, but // LLVM can inline all of this and compile it down to fast iteration over bytes. 
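
The `#[inline]` hint added to `Cursor::eat_while` above follows the rationale already stated in the surrounding comment: once the function is inlined at each call site, LLVM can specialize the loop for the concrete closure and compile it down to a tight scan over the input. The following is a minimal sketch of that pattern, not Ruff's actual lexer cursor — the struct, field, and `main` usage are illustrative assumptions only.

```rust
/// Minimal illustration of the `eat_while` pattern; the real lexer cursor
/// tracks more state (offsets, lookahead), so treat this as a sketch only.
struct Cursor<'a> {
    chars: std::str::Chars<'a>,
}

impl<'a> Cursor<'a> {
    fn new(source: &'a str) -> Self {
        Self { chars: source.chars() }
    }

    /// Consume characters while `predicate` holds; `#[inline]` lets the
    /// optimizer specialize this loop for each concrete closure.
    #[inline]
    fn eat_while(&mut self, mut predicate: impl FnMut(char) -> bool) {
        while let Some(c) = self.chars.clone().next() {
            if !predicate(c) {
                break;
            }
            self.chars.next();
        }
    }
}

fn main() {
    let mut cursor = Cursor::new("   # comment\nnext_token");
    cursor.eat_while(|c| c.is_whitespace()); // skip leading whitespace
    assert!(cursor.chars.as_str().starts_with('#'));
}
```
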
diff --git a/crates/ruff_python_parser/src/parser.rs b/crates/ruff_python_parser/src/parser.rs index 963d3a3306..26603ca3dd 100644 --- a/crates/ruff_python_parser/src/parser.rs +++ b/crates/ruff_python_parser/src/parser.rs @@ -16,7 +16,7 @@ use std::{fmt, iter}; use itertools::Itertools; pub(super) use lalrpop_util::ParseError as LalrpopError; -use ruff_text_size::{TextRange, TextSize}; +use ruff_text_size::{Ranged, TextRange, TextSize}; use crate::lexer::{lex, lex_starts_at, Spanned}; use crate::{ @@ -25,8 +25,14 @@ use crate::{ token::Tok, Mode, }; -use ruff_python_ast as ast; -use ruff_python_ast::{Mod, ModModule, Suite}; +use ruff_python_ast::{ + Expr, ExprAttribute, ExprAwait, ExprBinOp, ExprBoolOp, ExprBooleanLiteral, ExprBytesLiteral, + ExprCall, ExprCompare, ExprDict, ExprDictComp, ExprEllipsisLiteral, ExprFString, + ExprGeneratorExp, ExprIfExp, ExprIpyEscapeCommand, ExprLambda, ExprList, ExprListComp, + ExprName, ExprNamedExpr, ExprNoneLiteral, ExprNumberLiteral, ExprSet, ExprSetComp, ExprSlice, + ExprStarred, ExprStringLiteral, ExprSubscript, ExprTuple, ExprUnaryOp, ExprYield, + ExprYieldFrom, Mod, ModModule, Suite, +}; /// Parse a full Python program usually consisting of multiple lines. /// @@ -76,7 +82,7 @@ pub fn parse_suite(source: &str, source_path: &str) -> Result /// assert!(expr.is_ok()); /// /// ``` -pub fn parse_expression(source: &str, source_path: &str) -> Result { +pub fn parse_expression(source: &str, source_path: &str) -> Result { let lexer = lex(source, Mode::Expression); match parse_tokens(lexer, source, Mode::Expression, source_path)? { Mod::Expression(expression) => Ok(*expression.body), @@ -105,7 +111,7 @@ pub fn parse_expression_starts_at( source: &str, source_path: &str, offset: TextSize, -) -> Result { +) -> Result { let lexer = lex_starts_at(source, Mode::Module, offset); match parse_tokens(lexer, source, Mode::Expression, source_path)? { Mod::Expression(expression) => Ok(*expression.body), @@ -418,6 +424,209 @@ impl ParseErrorType { } } +/// An expression that may be parenthesized. +#[derive(Clone, Debug)] +pub(super) struct ParenthesizedExpr { + /// The range of the expression, including any parentheses. + pub(super) range: TextRange, + /// The underlying expression. + pub(super) expr: Expr, +} + +impl ParenthesizedExpr { + /// Returns `true` if the expression is parenthesized. 
+ pub(super) fn is_parenthesized(&self) -> bool { + self.range.start() != self.expr.range().start() + } +} + +impl Ranged for ParenthesizedExpr { + fn range(&self) -> TextRange { + self.range + } +} +impl From for ParenthesizedExpr { + fn from(expr: Expr) -> Self { + ParenthesizedExpr { + range: expr.range(), + expr, + } + } +} +impl From for Expr { + fn from(parenthesized_expr: ParenthesizedExpr) -> Self { + parenthesized_expr.expr + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprIpyEscapeCommand) -> Self { + Expr::IpyEscapeCommand(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprBoolOp) -> Self { + Expr::BoolOp(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprNamedExpr) -> Self { + Expr::NamedExpr(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprBinOp) -> Self { + Expr::BinOp(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprUnaryOp) -> Self { + Expr::UnaryOp(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprLambda) -> Self { + Expr::Lambda(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprIfExp) -> Self { + Expr::IfExp(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprDict) -> Self { + Expr::Dict(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprSet) -> Self { + Expr::Set(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprListComp) -> Self { + Expr::ListComp(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprSetComp) -> Self { + Expr::SetComp(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprDictComp) -> Self { + Expr::DictComp(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprGeneratorExp) -> Self { + Expr::GeneratorExp(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprAwait) -> Self { + Expr::Await(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprYield) -> Self { + Expr::Yield(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprYieldFrom) -> Self { + Expr::YieldFrom(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprCompare) -> Self { + Expr::Compare(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprCall) -> Self { + Expr::Call(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprFString) -> Self { + Expr::FString(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprStringLiteral) -> Self { + Expr::StringLiteral(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprBytesLiteral) -> Self { + Expr::BytesLiteral(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprNumberLiteral) -> Self { + Expr::NumberLiteral(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprBooleanLiteral) -> Self { + Expr::BooleanLiteral(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprNoneLiteral) -> Self { + Expr::NoneLiteral(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprEllipsisLiteral) -> Self { + Expr::EllipsisLiteral(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprAttribute) -> Self { + Expr::Attribute(payload).into() + } +} +impl From for 
ParenthesizedExpr { + fn from(payload: ExprSubscript) -> Self { + Expr::Subscript(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprStarred) -> Self { + Expr::Starred(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprName) -> Self { + Expr::Name(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprList) -> Self { + Expr::List(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprTuple) -> Self { + Expr::Tuple(payload).into() + } +} +impl From for ParenthesizedExpr { + fn from(payload: ExprSlice) -> Self { + Expr::Slice(payload).into() + } +} + +#[cfg(target_pointer_width = "64")] +mod size_assertions { + use crate::parser::ParenthesizedExpr; + use static_assertions::assert_eq_size; + + assert_eq_size!(ParenthesizedExpr, [u8; 88]); +} + #[cfg(test)] mod tests { use insta::assert_debug_snapshot; diff --git a/crates/ruff_python_parser/src/python.lalrpop b/crates/ruff_python_parser/src/python.lalrpop index 98ac90fc73..aa87fcd72d 100644 --- a/crates/ruff_python_parser/src/python.lalrpop +++ b/crates/ruff_python_parser/src/python.lalrpop @@ -11,7 +11,7 @@ use crate::{ lexer::{LexicalError, LexicalErrorType}, function::{ArgumentList, parse_arguments, validate_pos_params, validate_arguments}, context::set_context, - string::{StringType, concatenated_strings, parse_fstring_middle, parse_string_literal}, + string::{StringType, concatenated_strings, parse_fstring_literal_element, parse_string_literal}, token::{self, StringKind}, invalid, }; @@ -156,33 +156,33 @@ ExpressionStatement: ast::Stmt = { }, }; -AssignSuffix: ast::ParenthesizedExpr = { +AssignSuffix: crate::parser::ParenthesizedExpr = { "=" => e, "=" => e }; -TestListOrYieldExpr: ast::ParenthesizedExpr = { +TestListOrYieldExpr: crate::parser::ParenthesizedExpr = { TestList, YieldExpr } #[inline] -TestOrStarExprList: ast::ParenthesizedExpr = { +TestOrStarExprList: crate::parser::ParenthesizedExpr = { // as far as I can tell, these were the same TestList }; -TestOrStarExpr: ast::ParenthesizedExpr = { +TestOrStarExpr: crate::parser::ParenthesizedExpr = { Test<"all">, StarExpr, }; -NamedOrStarExpr: ast::ParenthesizedExpr = { +NamedOrStarExpr: crate::parser::ParenthesizedExpr = { NamedExpression, StarExpr, }; -TestOrStarNamedExpr: ast::ParenthesizedExpr = { +TestOrStarNamedExpr: crate::parser::ParenthesizedExpr = { NamedExpressionTest, StarExpr, }; @@ -345,7 +345,7 @@ IpyEscapeCommandStatement: ast::Stmt = { } } -IpyEscapeCommandExpr: ast::ParenthesizedExpr = { +IpyEscapeCommandExpr: crate::parser::ParenthesizedExpr = { =>? { if mode == Mode::Ipython { // This should never occur as the lexer won't allow it. 
@@ -630,13 +630,13 @@ StarPattern: ast::Pattern = { }.into(), } -NumberAtom: ast::ParenthesizedExpr = { +NumberAtom: crate::parser::ParenthesizedExpr = { => ast::Expr::NumberLiteral( ast::ExprNumberLiteral { value, range: (location..end_location).into() } ).into(), } -NumberExpr: ast::ParenthesizedExpr = { +NumberExpr: crate::parser::ParenthesizedExpr = { NumberAtom, "-" => ast::Expr::UnaryOp( ast::ExprUnaryOp { @@ -647,7 +647,7 @@ NumberExpr: ast::ParenthesizedExpr = { ).into(), } -AddOpExpr: ast::ParenthesizedExpr = { +AddOpExpr: crate::parser::ParenthesizedExpr = { => ast::ExprBinOp { left: Box::new(left.into()), op, @@ -1316,7 +1316,7 @@ Decorator: ast::Decorator = { }, }; -YieldExpr: ast::ParenthesizedExpr = { +YieldExpr: crate::parser::ParenthesizedExpr = { "yield" => ast::ExprYield { value: value.map(ast::Expr::from).map(Box::new), range: (location..end_location).into(), @@ -1327,7 +1327,7 @@ YieldExpr: ast::ParenthesizedExpr = { }.into(), }; -Test: ast::ParenthesizedExpr = { +Test: crate::parser::ParenthesizedExpr = { > "if" > "else" > => ast::ExprIfExp { test: Box::new(test.into()), body: Box::new(body.into()), @@ -1338,12 +1338,12 @@ Test: ast::ParenthesizedExpr = { LambdaDef, }; -NamedExpressionTest: ast::ParenthesizedExpr = { +NamedExpressionTest: crate::parser::ParenthesizedExpr = { NamedExpression, Test<"all">, } -NamedExpressionName: ast::ParenthesizedExpr = { +NamedExpressionName: crate::parser::ParenthesizedExpr = { => ast::ExprName { id: id.into(), ctx: ast::ExprContext::Store, @@ -1351,7 +1351,7 @@ NamedExpressionName: ast::ParenthesizedExpr = { }.into(), } -NamedExpression: ast::ParenthesizedExpr = { +NamedExpression: crate::parser::ParenthesizedExpr = { ":=" > => { ast::ExprNamedExpr { target: Box::new(target.into()), @@ -1361,7 +1361,7 @@ NamedExpression: ast::ParenthesizedExpr = { }, }; -LambdaDef: ast::ParenthesizedExpr = { +LambdaDef: crate::parser::ParenthesizedExpr = { "lambda" ?> ":" > =>? 
{ if fstring_middle.is_some() { return Err(LexicalError { @@ -1379,7 +1379,7 @@ LambdaDef: ast::ParenthesizedExpr = { } } -OrTest: ast::ParenthesizedExpr = { +OrTest: crate::parser::ParenthesizedExpr = { > "or")+> > => { let values = values.into_iter().chain(std::iter::once(last)).map(ast::Expr::from).collect(); ast::ExprBoolOp { op: ast::BoolOp::Or, values, range: (location..end_location).into() }.into() @@ -1387,7 +1387,7 @@ OrTest: ast::ParenthesizedExpr = { AndTest, }; -AndTest: ast::ParenthesizedExpr = { +AndTest: crate::parser::ParenthesizedExpr = { > "and")+> > => { let values = values.into_iter().chain(std::iter::once(last)).map(ast::Expr::from).collect(); ast::ExprBoolOp { op: ast::BoolOp::And, values, range: (location..end_location).into() }.into() @@ -1395,7 +1395,7 @@ AndTest: ast::ParenthesizedExpr = { NotTest, }; -NotTest: ast::ParenthesizedExpr = { +NotTest: crate::parser::ParenthesizedExpr = { "not" > => ast::ExprUnaryOp { operand: Box::new(operand.into()), op: ast::UnaryOp::Not, @@ -1404,7 +1404,7 @@ NotTest: ast::ParenthesizedExpr = { Comparison, }; -Comparison: ast::ParenthesizedExpr = { +Comparison: crate::parser::ParenthesizedExpr = { > )+> => { let (ops, comparators) = comparisons.into_iter().map(|(op, comparator)| (op, ast::Expr::from(comparator))).unzip(); ast::ExprCompare { left: Box::new(left.into()), ops, comparators, range: (location..end_location).into() }.into() @@ -1425,7 +1425,7 @@ CompOp: ast::CmpOp = { "is" "not" => ast::CmpOp::IsNot, }; -Expression: ast::ParenthesizedExpr = { +Expression: crate::parser::ParenthesizedExpr = { > "|" > => ast::ExprBinOp { left: Box::new(left.into()), op: ast::Operator::BitOr, @@ -1435,7 +1435,7 @@ Expression: ast::ParenthesizedExpr = { XorExpression, }; -XorExpression: ast::ParenthesizedExpr = { +XorExpression: crate::parser::ParenthesizedExpr = { > "^" > => ast::ExprBinOp { left: Box::new(left.into()), op: ast::Operator::BitXor, @@ -1445,7 +1445,7 @@ XorExpression: ast::ParenthesizedExpr = { AndExpression, }; -AndExpression: ast::ParenthesizedExpr = { +AndExpression: crate::parser::ParenthesizedExpr = { > "&" > => ast::ExprBinOp { left: Box::new(left.into()), op: ast::Operator::BitAnd, @@ -1455,7 +1455,7 @@ AndExpression: ast::ParenthesizedExpr = { ShiftExpression, }; -ShiftExpression: ast::ParenthesizedExpr = { +ShiftExpression: crate::parser::ParenthesizedExpr = { > > => ast::ExprBinOp { left: Box::new(left.into()), op, @@ -1470,7 +1470,7 @@ ShiftOp: ast::Operator = { ">>" => ast::Operator::RShift, }; -ArithmeticExpression: ast::ParenthesizedExpr = { +ArithmeticExpression: crate::parser::ParenthesizedExpr = { > > => ast::ExprBinOp { left: Box::new(left.into()), op, @@ -1485,7 +1485,7 @@ AddOp: ast::Operator = { "-" => ast::Operator::Sub, }; -Term: ast::ParenthesizedExpr = { +Term: crate::parser::ParenthesizedExpr = { > > => ast::ExprBinOp { left: Box::new(left.into()), op, @@ -1503,7 +1503,7 @@ MulOp: ast::Operator = { "@" => ast::Operator::MatMult, }; -Factor: ast::ParenthesizedExpr = { +Factor: crate::parser::ParenthesizedExpr = { > => ast::ExprUnaryOp { operand: Box::new(operand.into()), op, @@ -1518,7 +1518,7 @@ UnaryOp: ast::UnaryOp = { "~" => ast::UnaryOp::Invert, }; -Power: ast::ParenthesizedExpr = { +Power: crate::parser::ParenthesizedExpr = { > "**" > => ast::ExprBinOp { left: Box::new(left.into()), op: ast::Operator::Pow, @@ -1528,14 +1528,14 @@ Power: ast::ParenthesizedExpr = { AtomExpr, }; -AtomExpr: ast::ParenthesizedExpr = { +AtomExpr: crate::parser::ParenthesizedExpr = { "await" > => { ast::ExprAwait { 
value: Box::new(value.into()), range: (location..end_location).into() }.into() }, AtomExpr2, } -AtomExpr2: ast::ParenthesizedExpr = { +AtomExpr2: crate::parser::ParenthesizedExpr = { Atom, > => ast::ExprCall { func: Box::new(func.into()), @@ -1556,7 +1556,7 @@ AtomExpr2: ast::ParenthesizedExpr = { }.into(), }; -SubscriptList: ast::ParenthesizedExpr = { +SubscriptList: crate::parser::ParenthesizedExpr = { Subscript, "," => { ast::ExprTuple { @@ -1575,7 +1575,7 @@ SubscriptList: ast::ParenthesizedExpr = { } }; -Subscript: ast::ParenthesizedExpr = { +Subscript: crate::parser::ParenthesizedExpr = { TestOrStarNamedExpr, ?> ":" ?> => { let lower = lower.map(ast::Expr::from).map(Box::new); @@ -1587,7 +1587,7 @@ Subscript: ast::ParenthesizedExpr = { } }; -SliceOp: Option = { +SliceOp: Option = { ":" ?> => e, } @@ -1611,23 +1611,23 @@ StringLiteral: StringType = { }; FStringExpr: StringType = { - FStringStart FStringEnd => { + FStringStart FStringEnd => { StringType::FString(ast::FString { - values, + elements, range: (location..end_location).into() }) } }; -FStringMiddlePattern: ast::Expr = { +FStringMiddlePattern: ast::FStringElement = { FStringReplacementField, =>? { let (source, is_raw) = fstring_middle; - Ok(parse_fstring_middle(&source, is_raw, (location..end_location).into())?) + Ok(parse_fstring_literal_element(&source, is_raw, (location..end_location).into())?) } }; -FStringReplacementField: ast::Expr = { +FStringReplacementField: ast::FStringElement = { "{" "}" =>? { if value.expr.is_lambda_expr() && !value.is_parenthesized() { return Err(LexicalError { @@ -1651,30 +1651,27 @@ FStringReplacementField: ast::Expr = { } }); Ok( - ast::ExprFormattedValue { - value: Box::new(value.into()), + ast::FStringElement::Expression(ast::FStringExpressionElement { + expression: Box::new(value.into()), debug_text, conversion: conversion.map_or(ast::ConversionFlag::None, |(_, conversion_flag)| { conversion_flag }), format_spec: format_spec.map(Box::new), range: (location..end_location).into(), - } - .into() + }) ) } }; -FStringFormatSpecSuffix: ast::Expr = { +FStringFormatSpecSuffix: ast::FStringFormatSpec = { ":" => format_spec }; -FStringFormatSpec: ast::Expr = { - => { - ast::FString { - values, - range: (location..end_location).into() - }.into() +FStringFormatSpec: ast::FStringFormatSpec = { + => ast::FStringFormatSpec { + elements, + range: (location..end_location).into(), }, }; @@ -1693,7 +1690,7 @@ FStringConversion: (TextSize, ast::ConversionFlag) = { } }; -Atom: ast::ParenthesizedExpr = { +Atom: crate::parser::ParenthesizedExpr = { => expr.into(), => ast::ExprNumberLiteral { value, @@ -1713,7 +1710,7 @@ Atom: ast::ParenthesizedExpr = { }, "(" >> ")" if Goal != "no-withitems" => { if elts.len() == 1 && trailing_comma.is_none() { - ast::ParenthesizedExpr { + crate::parser::ParenthesizedExpr { expr: elts.into_iter().next().unwrap().into(), range: (location..end_location).into(), } @@ -1730,7 +1727,7 @@ Atom: ast::ParenthesizedExpr = { location: mid.start(), })?; } - Ok(ast::ParenthesizedExpr { + Ok(crate::parser::ParenthesizedExpr { expr: mid.into(), range: (location..end_location).into(), }) @@ -1744,7 +1741,7 @@ Atom: ast::ParenthesizedExpr = { ctx: ast::ExprContext::Load, range: (location..end_location).into(), }.into(), - "(" ")" => ast::ParenthesizedExpr { + "(" ")" => crate::parser::ParenthesizedExpr { expr: e.into(), range: (location..end_location).into(), }, @@ -1793,37 +1790,37 @@ Atom: ast::ParenthesizedExpr = { "..." 
=> ast::ExprEllipsisLiteral { range: (location..end_location).into() }.into(), }; -ListLiteralValues: Vec = { +ListLiteralValues: Vec = { > ","? => e, }; -DictLiteralValues: Vec<(Option>, ast::ParenthesizedExpr)> = { +DictLiteralValues: Vec<(Option>, crate::parser::ParenthesizedExpr)> = { > ","? => elements, }; -DictEntry: (ast::ParenthesizedExpr, ast::ParenthesizedExpr) = { +DictEntry: (crate::parser::ParenthesizedExpr, crate::parser::ParenthesizedExpr) = { > ":" > => (e1, e2), }; -DictElement: (Option>, ast::ParenthesizedExpr) = { +DictElement: (Option>, crate::parser::ParenthesizedExpr) = { => (Some(Box::new(e.0)), e.1), "**" > => (None, e), }; -SetLiteralValues: Vec = { +SetLiteralValues: Vec = { > ","? => e1 }; -ExpressionOrStarExpression: ast::ParenthesizedExpr = { +ExpressionOrStarExpression: crate::parser::ParenthesizedExpr = { Expression<"all">, StarExpr }; -ExpressionList: ast::ParenthesizedExpr = { +ExpressionList: crate::parser::ParenthesizedExpr = { GenericList }; -ExpressionList2: Vec = { +ExpressionList2: Vec = { > ","? => elements, }; @@ -1832,14 +1829,14 @@ ExpressionList2: Vec = { // - a single expression // - a single expression followed by a trailing comma #[inline] -TestList: ast::ParenthesizedExpr = { +TestList: crate::parser::ParenthesizedExpr = { GenericList }; -GenericList: ast::ParenthesizedExpr = { +GenericList: crate::parser::ParenthesizedExpr = { > => { if elts.len() == 1 && trailing_comma.is_none() { - ast::ParenthesizedExpr { + crate::parser::ParenthesizedExpr { expr: elts.into_iter().next().unwrap().into(), range: (location..end_location).into(), } @@ -1851,7 +1848,7 @@ GenericList: ast::ParenthesizedExpr = { } // Test -StarExpr: ast::ParenthesizedExpr = { +StarExpr: crate::parser::ParenthesizedExpr = { "*" > => ast::ExprStarred { value: Box::new(value.into()), ctx: ast::ExprContext::Load, @@ -1876,8 +1873,8 @@ SingleForComprehension: ast::Comprehension = { } }; -ExpressionNoCond: ast::ParenthesizedExpr = OrTest<"all">; -ComprehensionIf: ast::ParenthesizedExpr = "if" => c; +ExpressionNoCond: crate::parser::ParenthesizedExpr = OrTest<"all">; +ComprehensionIf: crate::parser::ParenthesizedExpr = "if" => c; Arguments: ast::Arguments = { "(" > ")" =>? 
{ diff --git a/crates/ruff_python_parser/src/python.rs b/crates/ruff_python_parser/src/python.rs index 3b2174bc69..5771d7099f 100644 --- a/crates/ruff_python_parser/src/python.rs +++ b/crates/ruff_python_parser/src/python.rs @@ -1,5 +1,5 @@ // auto-generated: "lalrpop 0.20.0" -// sha3: c7c0b9368fa05f7d2fc1d06a665ff4232555f276a1d9569afdbc86d0905b3a2a +// sha3: 031689e389556292d9dbd8a1b1ff8ca29bac76d83f1b345630481d620b89e1c2 use ruff_text_size::{Ranged, TextLen, TextRange, TextSize}; use ruff_python_ast::{self as ast, Int, IpyEscapeKind}; use crate::{ @@ -8,7 +8,7 @@ use crate::{ lexer::{LexicalError, LexicalErrorType}, function::{ArgumentList, parse_arguments, validate_pos_params, validate_arguments}, context::set_context, - string::{StringType, concatenated_strings, parse_fstring_middle, parse_string_literal}, + string::{StringType, concatenated_strings, parse_fstring_literal_element, parse_string_literal}, token::{self, StringKind}, invalid, }; @@ -32,7 +32,7 @@ mod __parse__Top { lexer::{LexicalError, LexicalErrorType}, function::{ArgumentList, parse_arguments, validate_pos_params, validate_arguments}, context::set_context, - string::{StringType, concatenated_strings, parse_fstring_middle, parse_string_literal}, + string::{StringType, concatenated_strings, parse_fstring_literal_element, parse_string_literal}, token::{self, StringKind}, invalid, }; @@ -62,9 +62,9 @@ mod __parse__Top { Variant12(alloc::vec::Vec), Variant13((Option>, Vec, Option>)), Variant14(core::option::Option<(Option>, Vec, Option>)>), - Variant15(ast::ParenthesizedExpr), - Variant16(core::option::Option), - Variant17(alloc::vec::Vec), + Variant15(crate::parser::ParenthesizedExpr), + Variant16(core::option::Option), + Variant17(alloc::vec::Vec), Variant18(ast::WithItem), Variant19(alloc::vec::Vec), Variant20((token::Tok, ast::Identifier)), @@ -74,23 +74,23 @@ mod __parse__Top { Variant24(core::option::Option), Variant25(ast::Suite), Variant26(core::option::Option), - Variant27((TextSize, ast::ParenthesizedExpr, ast::Suite)), - Variant28(alloc::vec::Vec<(TextSize, ast::ParenthesizedExpr, ast::Suite)>), + Variant27((TextSize, crate::parser::ParenthesizedExpr, ast::Suite)), + Variant28(alloc::vec::Vec<(TextSize, crate::parser::ParenthesizedExpr, ast::Suite)>), Variant29((TextSize, ast::Suite)), Variant30(core::option::Option<(TextSize, ast::Suite)>), Variant31((Option<(TextSize, TextSize, Option)>, ast::Expr)), Variant32(alloc::vec::Vec<(Option<(TextSize, TextSize, Option)>, ast::Expr)>), - Variant33(Vec), - Variant34(core::option::Option>), + Variant33(Vec), + Variant34(core::option::Option>), Variant35(ast::Pattern), Variant36(alloc::vec::Vec), Variant37(ast::Stmt), Variant38(alloc::vec::Vec), - Variant39((ast::ParenthesizedExpr, ast::Identifier)), + Variant39((crate::parser::ParenthesizedExpr, ast::Identifier)), Variant40(Vec), Variant41(core::option::Option>), - Variant42((ast::CmpOp, ast::ParenthesizedExpr)), - Variant43(alloc::vec::Vec<(ast::CmpOp, ast::ParenthesizedExpr)>), + Variant42((ast::CmpOp, crate::parser::ParenthesizedExpr)), + Variant43(alloc::vec::Vec<(ast::CmpOp, crate::parser::ParenthesizedExpr)>), Variant44(ast::Expr), Variant45(core::option::Option), Variant46(ast::Parameters), @@ -106,10 +106,10 @@ mod __parse__Top { Variant56(ast::CmpOp), Variant57(ast::Decorator), Variant58(alloc::vec::Vec), - Variant59((Option>, ast::ParenthesizedExpr)), - Variant60((ast::ParenthesizedExpr, ast::ParenthesizedExpr)), - Variant61(Vec<(Option>, ast::ParenthesizedExpr)>), - Variant62(core::option::Option>, 
ast::ParenthesizedExpr)>>), + Variant59((Option>, crate::parser::ParenthesizedExpr)), + Variant60((crate::parser::ParenthesizedExpr, crate::parser::ParenthesizedExpr)), + Variant61(Vec<(Option>, crate::parser::ParenthesizedExpr)>), + Variant62(core::option::Option>, crate::parser::ParenthesizedExpr)>>), Variant63(ast::Parameter), Variant64(core::option::Option), Variant65(ast::ExceptHandler), @@ -117,38 +117,41 @@ mod __parse__Top { Variant67((TextSize, ast::ConversionFlag)), Variant68(core::option::Option<(TextSize, ast::ConversionFlag)>), Variant69(StringType), - Variant70(alloc::vec::Vec), - Variant71(core::option::Option<(Option<(TextSize, TextSize, Option)>, ast::Expr)>), - Variant72(ast::Alias), - Variant73(Vec), - Variant74(u32), - Variant75(alloc::vec::Vec), - Variant76((Option, Option)), - Variant77(ast::MatchCase), - Variant78(alloc::vec::Vec), - Variant79(ast::PatternKeyword), - Variant80((ast::Expr, ast::Pattern)), - Variant81(ast::Number), - Variant82(Vec), - Variant83(Vec), - Variant84(Vec<(ast::Expr, ast::Pattern)>), - Variant85(Vec), - Variant86(Vec), - Variant87((Vec, Vec)), - Variant88(core::option::Option), - Variant89(ast::PatternArguments), - Variant90(ast::Comprehension), - Variant91(alloc::vec::Vec), - Variant92(Option), - Variant93(core::option::Option>), - Variant94(Vec), - Variant95(ast::Mod), - Variant96(Vec), - Variant97(ast::TypeParam), - Variant98(ast::TypeParams), - Variant99(core::option::Option), - Variant100(ast::UnaryOp), - Variant101(core::option::Option<(String, bool)>), + Variant70(ast::FStringFormatSpec), + Variant71(core::option::Option), + Variant72(ast::FStringElement), + Variant73(alloc::vec::Vec), + Variant74(core::option::Option<(Option<(TextSize, TextSize, Option)>, ast::Expr)>), + Variant75(ast::Alias), + Variant76(Vec), + Variant77(u32), + Variant78(alloc::vec::Vec), + Variant79((Option, Option)), + Variant80(ast::MatchCase), + Variant81(alloc::vec::Vec), + Variant82(ast::PatternKeyword), + Variant83((ast::Expr, ast::Pattern)), + Variant84(ast::Number), + Variant85(Vec), + Variant86(Vec), + Variant87(Vec<(ast::Expr, ast::Pattern)>), + Variant88(Vec), + Variant89(Vec), + Variant90((Vec, Vec)), + Variant91(core::option::Option), + Variant92(ast::PatternArguments), + Variant93(ast::Comprehension), + Variant94(alloc::vec::Vec), + Variant95(Option), + Variant96(core::option::Option>), + Variant97(Vec), + Variant98(ast::Mod), + Variant99(Vec), + Variant100(ast::TypeParam), + Variant101(ast::TypeParams), + Variant102(core::option::Option), + Variant103(ast::UnaryOp), + Variant104(core::option::Option<(String, bool)>), } const __ACTION: &[i16] = &[ // State 0 @@ -13866,7 +13869,7 @@ mod __parse__Top { Ok(v) => v, Err(e) => return Some(Err(e)), }; - __symbols.push((__start, __Symbol::Variant44(__nt), __end)); + __symbols.push((__start, __Symbol::Variant72(__nt), __end)); (1, 144) } 374 => { @@ -13885,7 +13888,7 @@ mod __parse__Top { // FStringReplacementField = "{", TestListOrYieldExpr, "=", FStringConversion, FStringFormatSpecSuffix, "}" => ActionFn(1581); assert!(__symbols.len() >= 6); let __sym5 = __pop_Variant0(__symbols); - let __sym4 = __pop_Variant44(__symbols); + let __sym4 = __pop_Variant70(__symbols); let __sym3 = __pop_Variant67(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant15(__symbols); @@ -13896,7 +13899,7 @@ mod __parse__Top { Ok(v) => v, Err(e) => return Some(Err(e)), }; - __symbols.push((__start, __Symbol::Variant44(__nt), __end)); + __symbols.push((__start, __Symbol::Variant72(__nt), __end)); (6, 147) 
} 379 => { @@ -13913,14 +13916,14 @@ mod __parse__Top { Ok(v) => v, Err(e) => return Some(Err(e)), }; - __symbols.push((__start, __Symbol::Variant44(__nt), __end)); + __symbols.push((__start, __Symbol::Variant72(__nt), __end)); (5, 147) } 380 => { // FStringReplacementField = "{", TestListOrYieldExpr, "=", FStringFormatSpecSuffix, "}" => ActionFn(1583); assert!(__symbols.len() >= 5); let __sym4 = __pop_Variant0(__symbols); - let __sym3 = __pop_Variant44(__symbols); + let __sym3 = __pop_Variant70(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant15(__symbols); let __sym0 = __pop_Variant0(__symbols); @@ -13930,7 +13933,7 @@ mod __parse__Top { Ok(v) => v, Err(e) => return Some(Err(e)), }; - __symbols.push((__start, __Symbol::Variant44(__nt), __end)); + __symbols.push((__start, __Symbol::Variant72(__nt), __end)); (5, 147) } 381 => { @@ -13946,14 +13949,14 @@ mod __parse__Top { Ok(v) => v, Err(e) => return Some(Err(e)), }; - __symbols.push((__start, __Symbol::Variant44(__nt), __end)); + __symbols.push((__start, __Symbol::Variant72(__nt), __end)); (4, 147) } 382 => { // FStringReplacementField = "{", TestListOrYieldExpr, FStringConversion, FStringFormatSpecSuffix, "}" => ActionFn(1585); assert!(__symbols.len() >= 5); let __sym4 = __pop_Variant0(__symbols); - let __sym3 = __pop_Variant44(__symbols); + let __sym3 = __pop_Variant70(__symbols); let __sym2 = __pop_Variant67(__symbols); let __sym1 = __pop_Variant15(__symbols); let __sym0 = __pop_Variant0(__symbols); @@ -13963,7 +13966,7 @@ mod __parse__Top { Ok(v) => v, Err(e) => return Some(Err(e)), }; - __symbols.push((__start, __Symbol::Variant44(__nt), __end)); + __symbols.push((__start, __Symbol::Variant72(__nt), __end)); (5, 147) } 383 => { @@ -13979,14 +13982,14 @@ mod __parse__Top { Ok(v) => v, Err(e) => return Some(Err(e)), }; - __symbols.push((__start, __Symbol::Variant44(__nt), __end)); + __symbols.push((__start, __Symbol::Variant72(__nt), __end)); (4, 147) } 384 => { // FStringReplacementField = "{", TestListOrYieldExpr, FStringFormatSpecSuffix, "}" => ActionFn(1587); assert!(__symbols.len() >= 4); let __sym3 = __pop_Variant0(__symbols); - let __sym2 = __pop_Variant44(__symbols); + let __sym2 = __pop_Variant70(__symbols); let __sym1 = __pop_Variant15(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; @@ -13995,7 +13998,7 @@ mod __parse__Top { Ok(v) => v, Err(e) => return Some(Err(e)), }; - __symbols.push((__start, __Symbol::Variant44(__nt), __end)); + __symbols.push((__start, __Symbol::Variant72(__nt), __end)); (4, 147) } 385 => { @@ -14010,7 +14013,7 @@ mod __parse__Top { Ok(v) => v, Err(e) => return Some(Err(e)), }; - __symbols.push((__start, __Symbol::Variant44(__nt), __end)); + __symbols.push((__start, __Symbol::Variant72(__nt), __end)); (3, 147) } 386 => { @@ -14360,7 +14363,7 @@ mod __parse__Top { } 474 => { // LiteralPattern = TwoOrMore => ActionFn(1354); - let __sym0 = __pop_Variant96(__symbols); + let __sym0 = __pop_Variant99(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = match super::__action1354::<>(source_code, mode, __sym0) { @@ -14670,7 +14673,7 @@ mod __parse__Top { let __sym3 = __pop_Variant63(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1607::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6) { @@ -14691,7 +14694,7 
@@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym8.2; let __nt = match super::__action1608::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8) { @@ -14713,7 +14716,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym9.2; let __nt = match super::__action1609::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8, __sym9) { @@ -14731,7 +14734,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym5.2; let __nt = match super::__action1610::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5) { @@ -14751,7 +14754,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym7.2; let __nt = match super::__action1611::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7) { @@ -14772,7 +14775,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym8.2; let __nt = match super::__action1612::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8) { @@ -14792,7 +14795,7 @@ mod __parse__Top { let __sym3 = __pop_Variant63(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym7.2; let __nt = match super::__action1613::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7) { @@ -14814,7 +14817,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym9.2; let __nt = match super::__action1614::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8, __sym9) { @@ -14837,7 +14840,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym10.2; let __nt = match super::__action1615::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8, __sym9, __sym10) { @@ -14856,7 +14859,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = 
__pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1616::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6) { @@ -14877,7 +14880,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym8.2; let __nt = match super::__action1617::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8) { @@ -14899,7 +14902,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym9.2; let __nt = match super::__action1618::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8, __sym9) { @@ -14916,7 +14919,7 @@ mod __parse__Top { let __sym3 = __pop_Variant63(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym4.2; let __nt = match super::__action1619::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4) { @@ -14935,7 +14938,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1620::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6) { @@ -14955,7 +14958,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym7.2; let __nt = match super::__action1621::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7) { @@ -14971,7 +14974,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym3.2; let __nt = match super::__action1622::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3) { @@ -14989,7 +14992,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym5.2; let __nt = match super::__action1623::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5) { @@ -15008,7 +15011,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1624::<>(source_code, mode, __sym0, __sym1, __sym2, 
__sym3, __sym4, __sym5, __sym6) { @@ -15026,7 +15029,7 @@ mod __parse__Top { let __sym3 = __pop_Variant63(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym5.2; let __nt = match super::__action1625::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5) { @@ -15046,7 +15049,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym7.2; let __nt = match super::__action1626::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7) { @@ -15067,7 +15070,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym8.2; let __nt = match super::__action1627::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8) { @@ -15084,7 +15087,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym4.2; let __nt = match super::__action1628::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4) { @@ -15103,7 +15106,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1629::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6) { @@ -15123,7 +15126,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym7.2; let __nt = match super::__action1630::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7) { @@ -15137,7 +15140,7 @@ mod __parse__Top { // ParameterList = OneOrMore>, "," => ActionFn(1631); assert!(__symbols.len() >= 2); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym1.2; let __nt = match super::__action1631::<>(source_code, mode, __sym0, __sym1) { @@ -15153,7 +15156,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym3.2; let __nt = match super::__action1632::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3) { @@ -15170,7 +15173,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = 
__sym0.0; let __end = __sym4.2; let __nt = match super::__action1633::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4) { @@ -15188,7 +15191,7 @@ mod __parse__Top { let __sym3 = __pop_Variant63(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym5.2; let __nt = match super::__action1634::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5) { @@ -15208,7 +15211,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym7.2; let __nt = match super::__action1635::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7) { @@ -15229,7 +15232,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym8.2; let __nt = match super::__action1636::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8) { @@ -15246,7 +15249,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym4.2; let __nt = match super::__action1637::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4) { @@ -15265,7 +15268,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1638::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6) { @@ -15285,7 +15288,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym7.2; let __nt = match super::__action1639::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7) { @@ -15304,7 +15307,7 @@ mod __parse__Top { let __sym3 = __pop_Variant63(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1640::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6) { @@ -15325,7 +15328,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym8.2; let __nt = match super::__action1641::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8) { @@ -15347,7 +15350,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let 
__sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym9.2; let __nt = match super::__action1642::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8, __sym9) { @@ -15365,7 +15368,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym5.2; let __nt = match super::__action1643::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5) { @@ -15385,7 +15388,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym7.2; let __nt = match super::__action1644::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7) { @@ -15406,7 +15409,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym8.2; let __nt = match super::__action1645::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8) { @@ -15422,7 +15425,7 @@ mod __parse__Top { let __sym3 = __pop_Variant63(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym3.2; let __nt = match super::__action1646::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3) { @@ -15440,7 +15443,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym5.2; let __nt = match super::__action1647::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5) { @@ -15459,7 +15462,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1648::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6) { @@ -15474,7 +15477,7 @@ mod __parse__Top { assert!(__symbols.len() >= 3); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym2.2; let __nt = match super::__action1649::<>(source_code, mode, __sym0, __sym1, __sym2) { @@ -15491,7 +15494,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym4.2; let __nt = match super::__action1650::<>(source_code, mode, __sym0, 
__sym1, __sym2, __sym3, __sym4) { @@ -15509,7 +15512,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym5.2; let __nt = match super::__action1651::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5) { @@ -15526,7 +15529,7 @@ mod __parse__Top { let __sym3 = __pop_Variant63(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym4.2; let __nt = match super::__action1652::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4) { @@ -15545,7 +15548,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1653::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6) { @@ -15565,7 +15568,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym7.2; let __nt = match super::__action1654::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7) { @@ -15581,7 +15584,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym3.2; let __nt = match super::__action1655::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3) { @@ -15599,7 +15602,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym5.2; let __nt = match super::__action1656::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5) { @@ -15618,7 +15621,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1657::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6) { @@ -15630,7 +15633,7 @@ mod __parse__Top { } 623 => { // ParameterList = OneOrMore> => ActionFn(1658); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = match super::__action1658::<>(source_code, mode, __sym0) { @@ -15645,7 +15648,7 @@ mod __parse__Top { assert!(__symbols.len() >= 3); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym2.2; let __nt = match super::__action1659::<>(source_code, mode, __sym0, __sym1, 
__sym2) { @@ -15661,7 +15664,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym3.2; let __nt = match super::__action1660::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3) { @@ -15677,7 +15680,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant9(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym3.2; let __nt = match super::__action1661::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3) { @@ -15695,7 +15698,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym5.2; let __nt = match super::__action1662::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5) { @@ -15714,7 +15717,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1663::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6) { @@ -15729,7 +15732,7 @@ mod __parse__Top { assert!(__symbols.len() >= 3); let __sym2 = __pop_Variant9(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym2.2; let __nt = match super::__action1664::<>(source_code, mode, __sym0, __sym1, __sym2) { @@ -15746,7 +15749,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym4.2; let __nt = match super::__action1665::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4) { @@ -15764,7 +15767,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym5.2; let __nt = match super::__action1666::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5) { @@ -16036,7 +16039,7 @@ mod __parse__Top { let __sym3 = __pop_Variant63(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1667::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6) { @@ -16057,7 +16060,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym8.2; let __nt = match super::__action1668::<>(source_code, mode, 
__sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8) { @@ -16079,7 +16082,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym9.2; let __nt = match super::__action1669::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8, __sym9) { @@ -16097,7 +16100,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym5.2; let __nt = match super::__action1670::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5) { @@ -16117,7 +16120,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym7.2; let __nt = match super::__action1671::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7) { @@ -16138,7 +16141,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym8.2; let __nt = match super::__action1672::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8) { @@ -16158,7 +16161,7 @@ mod __parse__Top { let __sym3 = __pop_Variant63(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym7.2; let __nt = match super::__action1673::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7) { @@ -16180,7 +16183,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym9.2; let __nt = match super::__action1674::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8, __sym9) { @@ -16203,7 +16206,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym10.2; let __nt = match super::__action1675::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8, __sym9, __sym10) { @@ -16222,7 +16225,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1676::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6) { @@ -16243,7 +16246,7 @@ mod __parse__Top { let __sym3 = 
__pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym8.2; let __nt = match super::__action1677::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8) { @@ -16265,7 +16268,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym9.2; let __nt = match super::__action1678::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8, __sym9) { @@ -16282,7 +16285,7 @@ mod __parse__Top { let __sym3 = __pop_Variant63(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym4.2; let __nt = match super::__action1679::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4) { @@ -16301,7 +16304,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1680::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6) { @@ -16321,7 +16324,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym7.2; let __nt = match super::__action1681::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7) { @@ -16337,7 +16340,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym3.2; let __nt = match super::__action1682::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3) { @@ -16355,7 +16358,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym5.2; let __nt = match super::__action1683::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5) { @@ -16374,7 +16377,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1684::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6) { @@ -16392,7 +16395,7 @@ mod __parse__Top { let __sym3 = __pop_Variant63(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym5.2; let 
__nt = match super::__action1685::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5) { @@ -16412,7 +16415,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym7.2; let __nt = match super::__action1686::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7) { @@ -16433,7 +16436,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym8.2; let __nt = match super::__action1687::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8) { @@ -16450,7 +16453,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym4.2; let __nt = match super::__action1688::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4) { @@ -16469,7 +16472,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1689::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6) { @@ -16489,7 +16492,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym7.2; let __nt = match super::__action1690::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7) { @@ -16503,7 +16506,7 @@ mod __parse__Top { // ParameterList = OneOrMore>, "," => ActionFn(1691); assert!(__symbols.len() >= 2); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym1.2; let __nt = match super::__action1691::<>(source_code, mode, __sym0, __sym1) { @@ -16519,7 +16522,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym3.2; let __nt = match super::__action1692::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3) { @@ -16536,7 +16539,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym4.2; let __nt = match super::__action1693::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4) { @@ -16554,7 +16557,7 @@ mod __parse__Top { let __sym3 = __pop_Variant63(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = 
__pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym5.2; let __nt = match super::__action1694::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5) { @@ -16574,7 +16577,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym7.2; let __nt = match super::__action1695::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7) { @@ -16595,7 +16598,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym8.2; let __nt = match super::__action1696::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8) { @@ -16612,7 +16615,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym4.2; let __nt = match super::__action1697::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4) { @@ -16631,7 +16634,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1698::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6) { @@ -16651,7 +16654,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym7.2; let __nt = match super::__action1699::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7) { @@ -16670,7 +16673,7 @@ mod __parse__Top { let __sym3 = __pop_Variant63(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1700::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6) { @@ -16691,7 +16694,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym8.2; let __nt = match super::__action1701::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8) { @@ -16713,7 +16716,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym9.2; let __nt = match super::__action1702::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, 
__sym5, __sym6, __sym7, __sym8, __sym9) { @@ -16731,7 +16734,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym5.2; let __nt = match super::__action1703::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5) { @@ -16751,7 +16754,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym7.2; let __nt = match super::__action1704::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7) { @@ -16772,7 +16775,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym8.2; let __nt = match super::__action1705::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7, __sym8) { @@ -16788,7 +16791,7 @@ mod __parse__Top { let __sym3 = __pop_Variant63(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym3.2; let __nt = match super::__action1706::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3) { @@ -16806,7 +16809,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym5.2; let __nt = match super::__action1707::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5) { @@ -16825,7 +16828,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1708::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6) { @@ -16840,7 +16843,7 @@ mod __parse__Top { assert!(__symbols.len() >= 3); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym2.2; let __nt = match super::__action1709::<>(source_code, mode, __sym0, __sym1, __sym2) { @@ -16857,7 +16860,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym4.2; let __nt = match super::__action1710::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4) { @@ -16875,7 +16878,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let 
__end = __sym5.2; let __nt = match super::__action1711::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5) { @@ -16892,7 +16895,7 @@ mod __parse__Top { let __sym3 = __pop_Variant63(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym4.2; let __nt = match super::__action1712::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4) { @@ -16911,7 +16914,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1713::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6) { @@ -16931,7 +16934,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym7.2; let __nt = match super::__action1714::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6, __sym7) { @@ -16947,7 +16950,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym3.2; let __nt = match super::__action1715::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3) { @@ -16965,7 +16968,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym5.2; let __nt = match super::__action1716::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5) { @@ -16984,7 +16987,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1717::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6) { @@ -16996,7 +16999,7 @@ mod __parse__Top { } 701 => { // ParameterList = OneOrMore> => ActionFn(1718); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = match super::__action1718::<>(source_code, mode, __sym0) { @@ -17011,7 +17014,7 @@ mod __parse__Top { assert!(__symbols.len() >= 3); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym2.2; let __nt = match super::__action1719::<>(source_code, mode, __sym0, __sym1, __sym2) { @@ -17027,7 +17030,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym3.2; let __nt = 
match super::__action1720::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3) { @@ -17043,7 +17046,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant9(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym3.2; let __nt = match super::__action1721::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3) { @@ -17061,7 +17064,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym5.2; let __nt = match super::__action1722::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5) { @@ -17080,7 +17083,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym6.2; let __nt = match super::__action1723::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5, __sym6) { @@ -17095,7 +17098,7 @@ mod __parse__Top { assert!(__symbols.len() >= 3); let __sym2 = __pop_Variant9(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym2.2; let __nt = match super::__action1724::<>(source_code, mode, __sym0, __sym1, __sym2) { @@ -17112,7 +17115,7 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym4.2; let __nt = match super::__action1725::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4) { @@ -17130,7 +17133,7 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym5.2; let __nt = match super::__action1726::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5) { @@ -17932,7 +17935,7 @@ mod __parse__Top { } 836 => { // String = TwoOrMore => ActionFn(1493); - let __sym0 = __pop_Variant96(__symbols); + let __sym0 = __pop_Variant99(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = match super::__action1493::<>(source_code, mode, __sym0) { @@ -18295,7 +18298,7 @@ mod __parse__Top { } 951 => { // __Top = Top => ActionFn(0); - let __sym0 = __pop_Variant95(__symbols); + let __sym0 = __pop_Variant98(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action0::<>(source_code, mode, __sym0); @@ -18353,20 +18356,20 @@ mod __parse__Top { fn __pop_Variant59< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> - ) -> (TextSize, (Option>, ast::ParenthesizedExpr), TextSize) + ) -> (TextSize, (Option>, crate::parser::ParenthesizedExpr), TextSize) { match __symbols.pop() { Some((__l, __Symbol::Variant59(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } - fn __pop_Variant76< + fn __pop_Variant79< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> 
(TextSize, (Option, Option), TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant76(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant79(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } @@ -18400,16 +18403,6 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } - fn __pop_Variant27< - >( - __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> - ) -> (TextSize, (TextSize, ast::ParenthesizedExpr, ast::Suite), TextSize) - { - match __symbols.pop() { - Some((__l, __Symbol::Variant27(__v), __r)) => (__l, __v, __r), - _ => __symbol_type_mismatch() - } - } fn __pop_Variant29< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> @@ -18420,40 +18413,50 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } - fn __pop_Variant87< + fn __pop_Variant27< + >( + __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> + ) -> (TextSize, (TextSize, crate::parser::ParenthesizedExpr, ast::Suite), TextSize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant27(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant90< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, (Vec, Vec), TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant87(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant90(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } fn __pop_Variant42< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> - ) -> (TextSize, (ast::CmpOp, ast::ParenthesizedExpr), TextSize) + ) -> (TextSize, (ast::CmpOp, crate::parser::ParenthesizedExpr), TextSize) { match __symbols.pop() { Some((__l, __Symbol::Variant42(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } - fn __pop_Variant80< + fn __pop_Variant83< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, (ast::Expr, ast::Pattern), TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant80(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant83(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } fn __pop_Variant39< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> - ) -> (TextSize, (ast::ParenthesizedExpr, ast::Identifier), TextSize) + ) -> (TextSize, (crate::parser::ParenthesizedExpr, ast::Identifier), TextSize) { match __symbols.pop() { Some((__l, __Symbol::Variant39(__v), __r)) => (__l, __v, __r), @@ -18463,7 +18466,7 @@ mod __parse__Top { fn __pop_Variant60< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> - ) -> (TextSize, (ast::ParenthesizedExpr, ast::ParenthesizedExpr), TextSize) + ) -> (TextSize, (crate::parser::ParenthesizedExpr, crate::parser::ParenthesizedExpr), TextSize) { match __symbols.pop() { Some((__l, __Symbol::Variant60(__v), __r)) => (__l, __v, __r), @@ -18510,13 +18513,13 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } - fn __pop_Variant92< + fn __pop_Variant95< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> - ) -> (TextSize, Option, TextSize) + ) -> (TextSize, Option, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant92(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant95(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } @@ -18563,40 +18566,40 @@ mod __parse__Top { fn __pop_Variant61< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> - ) -> (TextSize, Vec<(Option>, ast::ParenthesizedExpr)>, TextSize) + ) -> (TextSize, Vec<(Option>, 
crate::parser::ParenthesizedExpr)>, TextSize) { match __symbols.pop() { Some((__l, __Symbol::Variant61(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } - fn __pop_Variant84< + fn __pop_Variant87< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, Vec<(ast::Expr, ast::Pattern)>, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant84(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant87(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } - fn __pop_Variant96< + fn __pop_Variant99< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, Vec, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant96(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant99(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } - fn __pop_Variant73< + fn __pop_Variant76< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, Vec, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant73(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant76(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } @@ -18610,33 +18613,23 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } - fn __pop_Variant82< - >( - __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> - ) -> (TextSize, Vec, TextSize) - { - match __symbols.pop() { - Some((__l, __Symbol::Variant82(__v), __r)) => (__l, __v, __r), - _ => __symbol_type_mismatch() - } - } fn __pop_Variant85< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> - ) -> (TextSize, Vec, TextSize) + ) -> (TextSize, Vec, TextSize) { match __symbols.pop() { Some((__l, __Symbol::Variant85(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } - fn __pop_Variant33< + fn __pop_Variant88< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> - ) -> (TextSize, Vec, TextSize) + ) -> (TextSize, Vec, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant33(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant88(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } @@ -18650,33 +18643,33 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } - fn __pop_Variant83< + fn __pop_Variant86< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, Vec, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant83(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant86(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } - fn __pop_Variant94< + fn __pop_Variant97< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, Vec, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant94(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant97(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } - fn __pop_Variant86< + fn __pop_Variant89< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, Vec, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant86(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant89(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } @@ -18690,6 +18683,16 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } + fn __pop_Variant33< + >( + __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> + ) -> (TextSize, Vec, TextSize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant33(__v), __r)) => (__l, __v, __r), + _ 
=> __symbol_type_mismatch() + } + } fn __pop_Variant32< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> @@ -18703,7 +18706,7 @@ mod __parse__Top { fn __pop_Variant28< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> - ) -> (TextSize, alloc::vec::Vec<(TextSize, ast::ParenthesizedExpr, ast::Suite)>, TextSize) + ) -> (TextSize, alloc::vec::Vec<(TextSize, crate::parser::ParenthesizedExpr, ast::Suite)>, TextSize) { match __symbols.pop() { Some((__l, __Symbol::Variant28(__v), __r)) => (__l, __v, __r), @@ -18713,7 +18716,7 @@ mod __parse__Top { fn __pop_Variant43< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> - ) -> (TextSize, alloc::vec::Vec<(ast::CmpOp, ast::ParenthesizedExpr)>, TextSize) + ) -> (TextSize, alloc::vec::Vec<(ast::CmpOp, crate::parser::ParenthesizedExpr)>, TextSize) { match __symbols.pop() { Some((__l, __Symbol::Variant43(__v), __r)) => (__l, __v, __r), @@ -18730,13 +18733,13 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } - fn __pop_Variant91< + fn __pop_Variant94< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, alloc::vec::Vec, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant91(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant94(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } @@ -18760,23 +18763,23 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } - fn __pop_Variant70< + fn __pop_Variant73< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> - ) -> (TextSize, alloc::vec::Vec, TextSize) + ) -> (TextSize, alloc::vec::Vec, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant70(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant73(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } - fn __pop_Variant78< + fn __pop_Variant81< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, alloc::vec::Vec, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant78(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant81(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } @@ -18790,16 +18793,6 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } - fn __pop_Variant17< - >( - __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> - ) -> (TextSize, alloc::vec::Vec, TextSize) - { - match __symbols.pop() { - Some((__l, __Symbol::Variant17(__v), __r)) => (__l, __v, __r), - _ => __symbol_type_mismatch() - } - } fn __pop_Variant36< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> @@ -18830,6 +18823,16 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } + fn __pop_Variant17< + >( + __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> + ) -> (TextSize, alloc::vec::Vec, TextSize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant17(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } fn __pop_Variant22< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> @@ -18840,23 +18843,23 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } - fn __pop_Variant75< + fn __pop_Variant78< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, alloc::vec::Vec, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant75(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant78(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } - fn __pop_Variant72< + fn __pop_Variant75< >( __symbols: &mut 
alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, ast::Alias, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant72(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant75(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } @@ -18880,13 +18883,13 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } - fn __pop_Variant90< + fn __pop_Variant93< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, ast::Comprehension, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant90(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant93(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } @@ -18920,6 +18923,26 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } + fn __pop_Variant72< + >( + __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> + ) -> (TextSize, ast::FStringElement, TextSize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant72(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant70< + >( + __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> + ) -> (TextSize, ast::FStringFormatSpec, TextSize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant70(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } fn __pop_Variant23< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> @@ -18930,33 +18953,33 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } - fn __pop_Variant77< + fn __pop_Variant80< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, ast::MatchCase, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant77(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant80(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } - fn __pop_Variant95< + fn __pop_Variant98< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, ast::Mod, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant95(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant98(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } - fn __pop_Variant81< + fn __pop_Variant84< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, ast::Number, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant81(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant84(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } @@ -19000,16 +19023,6 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } - fn __pop_Variant15< - >( - __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> - ) -> (TextSize, ast::ParenthesizedExpr, TextSize) - { - match __symbols.pop() { - Some((__l, __Symbol::Variant15(__v), __r)) => (__l, __v, __r), - _ => __symbol_type_mismatch() - } - } fn __pop_Variant35< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> @@ -19020,23 +19033,23 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } - fn __pop_Variant89< + fn __pop_Variant92< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, ast::PatternArguments, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant89(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant92(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } - fn __pop_Variant79< + fn __pop_Variant82< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, ast::PatternKeyword, TextSize) 
{ match __symbols.pop() { - Some((__l, __Symbol::Variant79(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant82(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } @@ -19060,33 +19073,33 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } - fn __pop_Variant97< + fn __pop_Variant100< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, ast::TypeParam, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant97(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant100(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } - fn __pop_Variant98< + fn __pop_Variant101< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, ast::TypeParams, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant98(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant101(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } - fn __pop_Variant100< + fn __pop_Variant103< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, ast::UnaryOp, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant100(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant103(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } @@ -19100,13 +19113,13 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } - fn __pop_Variant71< + fn __pop_Variant74< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, core::option::Option<(Option<(TextSize, TextSize, Option)>, ast::Expr)>, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant71(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant74(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } @@ -19120,13 +19133,13 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } - fn __pop_Variant101< + fn __pop_Variant104< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, core::option::Option<(String, bool)>, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant101(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant104(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } @@ -19160,20 +19173,20 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } - fn __pop_Variant93< + fn __pop_Variant96< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> - ) -> (TextSize, core::option::Option>, TextSize) + ) -> (TextSize, core::option::Option>, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant93(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant96(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } fn __pop_Variant62< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> - ) -> (TextSize, core::option::Option>, ast::ParenthesizedExpr)>>, TextSize) + ) -> (TextSize, core::option::Option>, crate::parser::ParenthesizedExpr)>>, TextSize) { match __symbols.pop() { Some((__l, __Symbol::Variant62(__v), __r)) => (__l, __v, __r), @@ -19190,16 +19203,6 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } - fn __pop_Variant34< - >( - __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> - ) -> (TextSize, core::option::Option>, TextSize) - { - match __symbols.pop() { - Some((__l, __Symbol::Variant34(__v), __r)) => (__l, __v, __r), - _ => __symbol_type_mismatch() - } - } fn __pop_Variant41< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> @@ -19210,6 +19213,16 @@ mod __parse__Top { _ => 
__symbol_type_mismatch() } } + fn __pop_Variant34< + >( + __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> + ) -> (TextSize, core::option::Option>, TextSize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant34(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } fn __pop_Variant51< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> @@ -19230,6 +19243,16 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } + fn __pop_Variant71< + >( + __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> + ) -> (TextSize, core::option::Option, TextSize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant71(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } fn __pop_Variant24< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> @@ -19260,23 +19283,13 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } - fn __pop_Variant16< - >( - __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> - ) -> (TextSize, core::option::Option, TextSize) - { - match __symbols.pop() { - Some((__l, __Symbol::Variant16(__v), __r)) => (__l, __v, __r), - _ => __symbol_type_mismatch() - } - } - fn __pop_Variant88< + fn __pop_Variant91< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, core::option::Option, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant88(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant91(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } @@ -19290,13 +19303,23 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } - fn __pop_Variant99< + fn __pop_Variant102< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, core::option::Option, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant99(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant102(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } + fn __pop_Variant16< + >( + __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> + ) -> (TextSize, core::option::Option, TextSize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant16(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } @@ -19310,6 +19333,16 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } + fn __pop_Variant15< + >( + __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> + ) -> (TextSize, crate::parser::ParenthesizedExpr, TextSize) + { + match __symbols.pop() { + Some((__l, __Symbol::Variant15(__v), __r)) => (__l, __v, __r), + _ => __symbol_type_mismatch() + } + } fn __pop_Variant2< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> @@ -19330,13 +19363,13 @@ mod __parse__Top { _ => __symbol_type_mismatch() } } - fn __pop_Variant74< + fn __pop_Variant77< >( __symbols: &mut alloc::vec::Vec<(TextSize,__Symbol<>,TextSize)> ) -> (TextSize, u32, TextSize) { match __symbols.pop() { - Some((__l, __Symbol::Variant74(__v), __r)) => (__l, __v, __r), + Some((__l, __Symbol::Variant77(__v), __r)) => (__l, __v, __r), _ => __symbol_type_mismatch() } } @@ -21983,7 +22016,7 @@ mod __parse__Top { ) -> (usize, usize) { // Atom<"all"> = Number => ActionFn(1239); - let __sym0 = __pop_Variant81(__symbols); + let __sym0 = __pop_Variant84(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action1239::<>(source_code, mode, __sym0); @@ -22364,7 +22397,7 @@ mod __parse__Top { ) -> (usize, usize) { // Atom<"no-withitems"> = Number => ActionFn(1265); - let __sym0 = 
__pop_Variant81(__symbols); + let __sym0 = __pop_Variant84(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action1265::<>(source_code, mode, __sym0); @@ -23155,7 +23188,7 @@ mod __parse__Top { let __sym5 = __pop_Variant25(__symbols); let __sym4 = __pop_Variant0(__symbols); let __sym3 = __pop_Variant50(__symbols); - let __sym2 = __pop_Variant98(__symbols); + let __sym2 = __pop_Variant101(__symbols); let __sym1 = __pop_Variant23(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; @@ -23200,7 +23233,7 @@ mod __parse__Top { let __sym6 = __pop_Variant25(__symbols); let __sym5 = __pop_Variant0(__symbols); let __sym4 = __pop_Variant50(__symbols); - let __sym3 = __pop_Variant98(__symbols); + let __sym3 = __pop_Variant101(__symbols); let __sym2 = __pop_Variant23(__symbols); let __sym1 = __pop_Variant0(__symbols); let __sym0 = __pop_Variant58(__symbols); @@ -23246,7 +23279,7 @@ mod __parse__Top { assert!(__symbols.len() >= 5); let __sym4 = __pop_Variant25(__symbols); let __sym3 = __pop_Variant0(__symbols); - let __sym2 = __pop_Variant98(__symbols); + let __sym2 = __pop_Variant101(__symbols); let __sym1 = __pop_Variant23(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; @@ -23289,7 +23322,7 @@ mod __parse__Top { assert!(__symbols.len() >= 6); let __sym5 = __pop_Variant25(__symbols); let __sym4 = __pop_Variant0(__symbols); - let __sym3 = __pop_Variant98(__symbols); + let __sym3 = __pop_Variant101(__symbols); let __sym2 = __pop_Variant23(__symbols); let __sym1 = __pop_Variant0(__symbols); let __sym0 = __pop_Variant58(__symbols); @@ -23332,7 +23365,7 @@ mod __parse__Top { { // ClassPattern = MatchName, PatternArguments => ActionFn(1298); assert!(__symbols.len() >= 2); - let __sym1 = __pop_Variant89(__symbols); + let __sym1 = __pop_Variant92(__symbols); let __sym0 = __pop_Variant44(__symbols); let __start = __sym0.0; let __end = __sym1.2; @@ -23351,7 +23384,7 @@ mod __parse__Top { { // ClassPattern = MatchNameOrAttr, PatternArguments => ActionFn(1299); assert!(__symbols.len() >= 2); - let __sym1 = __pop_Variant89(__symbols); + let __sym1 = __pop_Variant92(__symbols); let __sym0 = __pop_Variant44(__symbols); let __start = __sym0.0; let __end = __sym1.2; @@ -23626,7 +23659,7 @@ mod __parse__Top { ) -> (usize, usize) { // CompFor = SingleForComprehension+ => ActionFn(237); - let __sym0 = __pop_Variant91(__symbols); + let __sym0 = __pop_Variant94(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action237::<>(source_code, mode, __sym0); @@ -24899,7 +24932,7 @@ mod __parse__Top { // FStringExpr = FStringStart, FStringMiddlePattern+, FStringEnd => ActionFn(1590); assert!(__symbols.len() >= 3); let __sym2 = __pop_Variant0(__symbols); - let __sym1 = __pop_Variant70(__symbols); + let __sym1 = __pop_Variant73(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; let __end = __sym2.2; @@ -24920,7 +24953,7 @@ mod __parse__Top { let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2.clone())).unwrap_or_default(); let __end = __start.clone(); let __nt = super::__action1591::<>(source_code, mode, &__start, &__end); - __symbols.push((__start, __Symbol::Variant44(__nt), __end)); + __symbols.push((__start, __Symbol::Variant70(__nt), __end)); (0, 141) } pub(crate) fn __reduce368< @@ -24933,11 +24966,11 @@ mod __parse__Top { ) -> (usize, usize) { // FStringFormatSpec = FStringMiddlePattern+ => ActionFn(1592); - let __sym0 = __pop_Variant70(__symbols); + let __sym0 = 
__pop_Variant73(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action1592::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant44(__nt), __end)); + __symbols.push((__start, __Symbol::Variant70(__nt), __end)); (1, 141) } pub(crate) fn __reduce369< @@ -24951,12 +24984,12 @@ mod __parse__Top { { // FStringFormatSpecSuffix = ":", FStringFormatSpec => ActionFn(222); assert!(__symbols.len() >= 2); - let __sym1 = __pop_Variant44(__symbols); + let __sym1 = __pop_Variant70(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; let __end = __sym1.2; let __nt = super::__action222::<>(source_code, mode, __sym0, __sym1); - __symbols.push((__start, __Symbol::Variant44(__nt), __end)); + __symbols.push((__start, __Symbol::Variant70(__nt), __end)); (2, 142) } pub(crate) fn __reduce370< @@ -24969,11 +25002,11 @@ mod __parse__Top { ) -> (usize, usize) { // FStringFormatSpecSuffix? = FStringFormatSpecSuffix => ActionFn(267); - let __sym0 = __pop_Variant44(__symbols); + let __sym0 = __pop_Variant70(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action267::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant45(__nt), __end)); + __symbols.push((__start, __Symbol::Variant71(__nt), __end)); (1, 143) } pub(crate) fn __reduce371< @@ -24989,7 +25022,7 @@ mod __parse__Top { let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2.clone())).unwrap_or_default(); let __end = __start.clone(); let __nt = super::__action268::<>(source_code, mode, &__start, &__end); - __symbols.push((__start, __Symbol::Variant45(__nt), __end)); + __symbols.push((__start, __Symbol::Variant71(__nt), __end)); (0, 143) } pub(crate) fn __reduce372< @@ -25002,11 +25035,11 @@ mod __parse__Top { ) -> (usize, usize) { // FStringMiddlePattern = FStringReplacementField => ActionFn(219); - let __sym0 = __pop_Variant44(__symbols); + let __sym0 = __pop_Variant72(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action219::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant44(__nt), __end)); + __symbols.push((__start, __Symbol::Variant72(__nt), __end)); (1, 144) } pub(crate) fn __reduce374< @@ -25022,7 +25055,7 @@ mod __parse__Top { let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2.clone())).unwrap_or_default(); let __end = __start.clone(); let __nt = super::__action273::<>(source_code, mode, &__start, &__end); - __symbols.push((__start, __Symbol::Variant70(__nt), __end)); + __symbols.push((__start, __Symbol::Variant73(__nt), __end)); (0, 145) } pub(crate) fn __reduce375< @@ -25035,11 +25068,11 @@ mod __parse__Top { ) -> (usize, usize) { // FStringMiddlePattern* = FStringMiddlePattern+ => ActionFn(274); - let __sym0 = __pop_Variant70(__symbols); + let __sym0 = __pop_Variant73(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action274::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant70(__nt), __end)); + __symbols.push((__start, __Symbol::Variant73(__nt), __end)); (1, 145) } pub(crate) fn __reduce376< @@ -25052,11 +25085,11 @@ mod __parse__Top { ) -> (usize, usize) { // FStringMiddlePattern+ = FStringMiddlePattern => ActionFn(456); - let __sym0 = __pop_Variant44(__symbols); + let __sym0 = __pop_Variant72(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action456::<>(source_code, mode, __sym0); - __symbols.push((__start, 
__Symbol::Variant70(__nt), __end)); + __symbols.push((__start, __Symbol::Variant73(__nt), __end)); (1, 146) } pub(crate) fn __reduce377< @@ -25070,12 +25103,12 @@ mod __parse__Top { { // FStringMiddlePattern+ = FStringMiddlePattern+, FStringMiddlePattern => ActionFn(457); assert!(__symbols.len() >= 2); - let __sym1 = __pop_Variant44(__symbols); - let __sym0 = __pop_Variant70(__symbols); + let __sym1 = __pop_Variant72(__symbols); + let __sym0 = __pop_Variant73(__symbols); let __start = __sym0.0; let __end = __sym1.2; let __nt = super::__action457::<>(source_code, mode, __sym0, __sym1); - __symbols.push((__start, __Symbol::Variant70(__nt), __end)); + __symbols.push((__start, __Symbol::Variant73(__nt), __end)); (2, 146) } pub(crate) fn __reduce386< @@ -25090,7 +25123,7 @@ mod __parse__Top { // Factor<"all"> = UnaryOp, Factor<"all"> => ActionFn(1318); assert!(__symbols.len() >= 2); let __sym1 = __pop_Variant15(__symbols); - let __sym0 = __pop_Variant100(__symbols); + let __sym0 = __pop_Variant103(__symbols); let __start = __sym0.0; let __end = __sym1.2; let __nt = super::__action1318::<>(source_code, mode, __sym0, __sym1); @@ -25126,7 +25159,7 @@ mod __parse__Top { // Factor<"no-withitems"> = UnaryOp, Factor<"all"> => ActionFn(1319); assert!(__symbols.len() >= 2); let __sym1 = __pop_Variant15(__symbols); - let __sym0 = __pop_Variant100(__symbols); + let __sym0 = __pop_Variant103(__symbols); let __start = __sym0.0; let __end = __sym1.2; let __nt = super::__action1319::<>(source_code, mode, __sym0, __sym1); @@ -25370,7 +25403,7 @@ mod __parse__Top { let __sym6 = __pop_Variant15(__symbols); let __sym5 = __pop_Variant0(__symbols); let __sym4 = __pop_Variant46(__symbols); - let __sym3 = __pop_Variant98(__symbols); + let __sym3 = __pop_Variant101(__symbols); let __sym2 = __pop_Variant23(__symbols); let __sym1 = __pop_Variant0(__symbols); let __sym0 = __pop_Variant0(__symbols); @@ -25421,7 +25454,7 @@ mod __parse__Top { let __sym7 = __pop_Variant15(__symbols); let __sym6 = __pop_Variant0(__symbols); let __sym5 = __pop_Variant46(__symbols); - let __sym4 = __pop_Variant98(__symbols); + let __sym4 = __pop_Variant101(__symbols); let __sym3 = __pop_Variant23(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); @@ -25472,7 +25505,7 @@ mod __parse__Top { let __sym6 = __pop_Variant25(__symbols); let __sym5 = __pop_Variant0(__symbols); let __sym4 = __pop_Variant46(__symbols); - let __sym3 = __pop_Variant98(__symbols); + let __sym3 = __pop_Variant101(__symbols); let __sym2 = __pop_Variant23(__symbols); let __sym1 = __pop_Variant0(__symbols); let __sym0 = __pop_Variant0(__symbols); @@ -25519,7 +25552,7 @@ mod __parse__Top { let __sym7 = __pop_Variant25(__symbols); let __sym6 = __pop_Variant0(__symbols); let __sym5 = __pop_Variant46(__symbols); - let __sym4 = __pop_Variant98(__symbols); + let __sym4 = __pop_Variant101(__symbols); let __sym3 = __pop_Variant23(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); @@ -25570,7 +25603,7 @@ mod __parse__Top { let __sym5 = __pop_Variant15(__symbols); let __sym4 = __pop_Variant0(__symbols); let __sym3 = __pop_Variant46(__symbols); - let __sym2 = __pop_Variant98(__symbols); + let __sym2 = __pop_Variant101(__symbols); let __sym1 = __pop_Variant23(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; @@ -25619,7 +25652,7 @@ mod __parse__Top { let __sym6 = __pop_Variant15(__symbols); let __sym5 = __pop_Variant0(__symbols); let __sym4 = __pop_Variant46(__symbols); - let __sym3 
= __pop_Variant98(__symbols); + let __sym3 = __pop_Variant101(__symbols); let __sym2 = __pop_Variant23(__symbols); let __sym1 = __pop_Variant0(__symbols); let __sym0 = __pop_Variant58(__symbols); @@ -25668,7 +25701,7 @@ mod __parse__Top { let __sym5 = __pop_Variant25(__symbols); let __sym4 = __pop_Variant0(__symbols); let __sym3 = __pop_Variant46(__symbols); - let __sym2 = __pop_Variant98(__symbols); + let __sym2 = __pop_Variant101(__symbols); let __sym1 = __pop_Variant23(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; @@ -25713,7 +25746,7 @@ mod __parse__Top { let __sym6 = __pop_Variant25(__symbols); let __sym5 = __pop_Variant0(__symbols); let __sym4 = __pop_Variant46(__symbols); - let __sym3 = __pop_Variant98(__symbols); + let __sym3 = __pop_Variant101(__symbols); let __sym2 = __pop_Variant23(__symbols); let __sym1 = __pop_Variant0(__symbols); let __sym0 = __pop_Variant58(__symbols); @@ -25854,7 +25887,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action467::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant71(__nt), __end)); + __symbols.push((__start, __Symbol::Variant74(__nt), __end)); (1, 154) } pub(crate) fn __reduce422< @@ -25870,7 +25903,7 @@ mod __parse__Top { let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2.clone())).unwrap_or_default(); let __end = __start.clone(); let __nt = super::__action468::<>(source_code, mode, &__start, &__end); - __symbols.push((__start, __Symbol::Variant71(__nt), __end)); + __symbols.push((__start, __Symbol::Variant74(__nt), __end)); (0, 154) } pub(crate) fn __reduce423< @@ -25956,7 +25989,7 @@ mod __parse__Top { { // GlobalStatement = "global", OneOrMore => ActionFn(1332); assert!(__symbols.len() >= 2); - let __sym1 = __pop_Variant82(__symbols); + let __sym1 = __pop_Variant85(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; let __end = __sym1.2; @@ -26109,7 +26142,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action1334::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant72(__nt), __end)); + __symbols.push((__start, __Symbol::Variant75(__nt), __end)); (3, 161) } pub(crate) fn __reduce435< @@ -26126,7 +26159,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action1335::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant72(__nt), __end)); + __symbols.push((__start, __Symbol::Variant75(__nt), __end)); (1, 161) } pub(crate) fn __reduce436< @@ -26146,7 +26179,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action1336::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant72(__nt), __end)); + __symbols.push((__start, __Symbol::Variant75(__nt), __end)); (3, 162) } pub(crate) fn __reduce437< @@ -26163,7 +26196,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action1337::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant72(__nt), __end)); + __symbols.push((__start, __Symbol::Variant75(__nt), __end)); (1, 162) } pub(crate) fn __reduce438< @@ -26176,11 +26209,11 @@ mod __parse__Top { ) -> (usize, usize) { // ImportAsNames = OneOrMore> => ActionFn(1338); - let __sym0 = __pop_Variant73(__symbols); + let __sym0 = __pop_Variant76(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = 
super::__action1338::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant73(__nt), __end)); + __symbols.push((__start, __Symbol::Variant76(__nt), __end)); (1, 163) } pub(crate) fn __reduce439< @@ -26196,12 +26229,12 @@ mod __parse__Top { assert!(__symbols.len() >= 4); let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); - let __sym1 = __pop_Variant73(__symbols); + let __sym1 = __pop_Variant76(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; let __end = __sym3.2; let __nt = super::__action1339::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3); - __symbols.push((__start, __Symbol::Variant73(__nt), __end)); + __symbols.push((__start, __Symbol::Variant76(__nt), __end)); (4, 163) } pub(crate) fn __reduce440< @@ -26216,12 +26249,12 @@ mod __parse__Top { // ImportAsNames = "(", OneOrMore>, ")" => ActionFn(1340); assert!(__symbols.len() >= 3); let __sym2 = __pop_Variant0(__symbols); - let __sym1 = __pop_Variant73(__symbols); + let __sym1 = __pop_Variant76(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action1340::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant73(__nt), __end)); + __symbols.push((__start, __Symbol::Variant76(__nt), __end)); (3, 163) } pub(crate) fn __reduce441< @@ -26238,7 +26271,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action1341::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant73(__nt), __end)); + __symbols.push((__start, __Symbol::Variant76(__nt), __end)); (1, 163) } pub(crate) fn __reduce442< @@ -26255,7 +26288,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action64::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant74(__nt), __end)); + __symbols.push((__start, __Symbol::Variant77(__nt), __end)); (1, 164) } pub(crate) fn __reduce443< @@ -26272,7 +26305,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action65::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant74(__nt), __end)); + __symbols.push((__start, __Symbol::Variant77(__nt), __end)); (1, 164) } pub(crate) fn __reduce444< @@ -26288,7 +26321,7 @@ mod __parse__Top { let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2.clone())).unwrap_or_default(); let __end = __start.clone(); let __nt = super::__action391::<>(source_code, mode, &__start, &__end); - __symbols.push((__start, __Symbol::Variant75(__nt), __end)); + __symbols.push((__start, __Symbol::Variant78(__nt), __end)); (0, 165) } pub(crate) fn __reduce445< @@ -26301,11 +26334,11 @@ mod __parse__Top { ) -> (usize, usize) { // ImportDots* = ImportDots+ => ActionFn(392); - let __sym0 = __pop_Variant75(__symbols); + let __sym0 = __pop_Variant78(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action392::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant75(__nt), __end)); + __symbols.push((__start, __Symbol::Variant78(__nt), __end)); (1, 165) } pub(crate) fn __reduce446< @@ -26318,11 +26351,11 @@ mod __parse__Top { ) -> (usize, usize) { // ImportDots+ = ImportDots => ActionFn(389); - let __sym0 = __pop_Variant74(__symbols); + let __sym0 = __pop_Variant77(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action389::<>(source_code, mode, __sym0); - __symbols.push((__start, 
__Symbol::Variant75(__nt), __end)); + __symbols.push((__start, __Symbol::Variant78(__nt), __end)); (1, 166) } pub(crate) fn __reduce447< @@ -26336,12 +26369,12 @@ mod __parse__Top { { // ImportDots+ = ImportDots+, ImportDots => ActionFn(390); assert!(__symbols.len() >= 2); - let __sym1 = __pop_Variant74(__symbols); - let __sym0 = __pop_Variant75(__symbols); + let __sym1 = __pop_Variant77(__symbols); + let __sym0 = __pop_Variant78(__symbols); let __start = __sym0.0; let __end = __sym1.2; let __nt = super::__action390::<>(source_code, mode, __sym0, __sym1); - __symbols.push((__start, __Symbol::Variant75(__nt), __end)); + __symbols.push((__start, __Symbol::Variant78(__nt), __end)); (2, 166) } pub(crate) fn __reduce448< @@ -26358,7 +26391,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action1601::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant76(__nt), __end)); + __symbols.push((__start, __Symbol::Variant79(__nt), __end)); (1, 167) } pub(crate) fn __reduce449< @@ -26373,11 +26406,11 @@ mod __parse__Top { // ImportFromLocation = ImportDots+, DottedName => ActionFn(1602); assert!(__symbols.len() >= 2); let __sym1 = __pop_Variant23(__symbols); - let __sym0 = __pop_Variant75(__symbols); + let __sym0 = __pop_Variant78(__symbols); let __start = __sym0.0; let __end = __sym1.2; let __nt = super::__action1602::<>(source_code, mode, __sym0, __sym1); - __symbols.push((__start, __Symbol::Variant76(__nt), __end)); + __symbols.push((__start, __Symbol::Variant79(__nt), __end)); (2, 167) } pub(crate) fn __reduce450< @@ -26390,11 +26423,11 @@ mod __parse__Top { ) -> (usize, usize) { // ImportFromLocation = ImportDots+ => ActionFn(63); - let __sym0 = __pop_Variant75(__symbols); + let __sym0 = __pop_Variant78(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action63::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant76(__nt), __end)); + __symbols.push((__start, __Symbol::Variant79(__nt), __end)); (1, 167) } pub(crate) fn __reduce451< @@ -26408,7 +26441,7 @@ mod __parse__Top { { // ImportStatement = "import", OneOrMore> => ActionFn(1342); assert!(__symbols.len() >= 2); - let __sym1 = __pop_Variant73(__symbols); + let __sym1 = __pop_Variant76(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; let __end = __sym1.2; @@ -26427,9 +26460,9 @@ mod __parse__Top { { // ImportStatement = "from", ImportFromLocation, "import", ImportAsNames => ActionFn(1343); assert!(__symbols.len() >= 4); - let __sym3 = __pop_Variant73(__symbols); + let __sym3 = __pop_Variant76(__symbols); let __sym2 = __pop_Variant0(__symbols); - let __sym1 = __pop_Variant76(__symbols); + let __sym1 = __pop_Variant79(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; let __end = __sym3.2; @@ -26831,7 +26864,7 @@ mod __parse__Top { assert!(__symbols.len() >= 4); let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); - let __sym1 = __pop_Variant84(__symbols); + let __sym1 = __pop_Variant87(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; let __end = __sym3.2; @@ -26851,7 +26884,7 @@ mod __parse__Top { // MappingPattern = "{", OneOrMore, "}" => ActionFn(1360); assert!(__symbols.len() >= 3); let __sym2 = __pop_Variant0(__symbols); - let __sym1 = __pop_Variant84(__symbols); + let __sym1 = __pop_Variant87(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; let __end = __sym2.2; @@ -26918,7 +26951,7 @@ mod 
__parse__Top { let __sym4 = __pop_Variant23(__symbols); let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); - let __sym1 = __pop_Variant84(__symbols); + let __sym1 = __pop_Variant87(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; let __end = __sym6.2; @@ -26941,7 +26974,7 @@ mod __parse__Top { let __sym4 = __pop_Variant23(__symbols); let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); - let __sym1 = __pop_Variant84(__symbols); + let __sym1 = __pop_Variant87(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; let __end = __sym5.2; @@ -26968,7 +27001,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym4.2; let __nt = super::__action1223::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4); - __symbols.push((__start, __Symbol::Variant77(__nt), __end)); + __symbols.push((__start, __Symbol::Variant80(__nt), __end)); (5, 180) } pub(crate) fn __reduce490< @@ -26989,7 +27022,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym3.2; let __nt = super::__action1224::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3); - __symbols.push((__start, __Symbol::Variant77(__nt), __end)); + __symbols.push((__start, __Symbol::Variant80(__nt), __end)); (4, 180) } pub(crate) fn __reduce491< @@ -27002,11 +27035,11 @@ mod __parse__Top { ) -> (usize, usize) { // MatchCase+ = MatchCase => ActionFn(369); - let __sym0 = __pop_Variant77(__symbols); + let __sym0 = __pop_Variant80(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action369::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant78(__nt), __end)); + __symbols.push((__start, __Symbol::Variant81(__nt), __end)); (1, 181) } pub(crate) fn __reduce492< @@ -27020,12 +27053,12 @@ mod __parse__Top { { // MatchCase+ = MatchCase+, MatchCase => ActionFn(370); assert!(__symbols.len() >= 2); - let __sym1 = __pop_Variant77(__symbols); - let __sym0 = __pop_Variant78(__symbols); + let __sym1 = __pop_Variant80(__symbols); + let __sym0 = __pop_Variant81(__symbols); let __start = __sym0.0; let __end = __sym1.2; let __nt = super::__action370::<>(source_code, mode, __sym0, __sym1); - __symbols.push((__start, __Symbol::Variant78(__nt), __end)); + __symbols.push((__start, __Symbol::Variant81(__nt), __end)); (2, 181) } pub(crate) fn __reduce493< @@ -27045,7 +27078,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action1365::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant79(__nt), __end)); + __symbols.push((__start, __Symbol::Variant82(__nt), __end)); (3, 182) } pub(crate) fn __reduce494< @@ -27065,7 +27098,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action134::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant80(__nt), __end)); + __symbols.push((__start, __Symbol::Variant83(__nt), __end)); (3, 183) } pub(crate) fn __reduce495< @@ -27137,7 +27170,7 @@ mod __parse__Top { // MatchStatement = "match", TestOrStarNamedExpr, ":", "\n", Indent, MatchCase+, Dedent => ActionFn(862); assert!(__symbols.len() >= 7); let __sym6 = __pop_Variant0(__symbols); - let __sym5 = __pop_Variant78(__symbols); + let __sym5 = __pop_Variant81(__symbols); let __sym4 = __pop_Variant0(__symbols); let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); @@ -27161,7 +27194,7 @@ mod __parse__Top { // MatchStatement = "match", 
TestOrStarNamedExpr, ",", ":", "\n", Indent, MatchCase+, Dedent => ActionFn(1369); assert!(__symbols.len() >= 8); let __sym7 = __pop_Variant0(__symbols); - let __sym6 = __pop_Variant78(__symbols); + let __sym6 = __pop_Variant81(__symbols); let __sym5 = __pop_Variant0(__symbols); let __sym4 = __pop_Variant0(__symbols); let __sym3 = __pop_Variant0(__symbols); @@ -27186,7 +27219,7 @@ mod __parse__Top { // MatchStatement = "match", TwoOrMoreSep, ",", ":", "\n", Indent, MatchCase+, Dedent => ActionFn(1370); assert!(__symbols.len() >= 8); let __sym7 = __pop_Variant0(__symbols); - let __sym6 = __pop_Variant78(__symbols); + let __sym6 = __pop_Variant81(__symbols); let __sym5 = __pop_Variant0(__symbols); let __sym4 = __pop_Variant0(__symbols); let __sym3 = __pop_Variant0(__symbols); @@ -27211,7 +27244,7 @@ mod __parse__Top { // MatchStatement = "match", TwoOrMoreSep, ":", "\n", Indent, MatchCase+, Dedent => ActionFn(1371); assert!(__symbols.len() >= 7); let __sym6 = __pop_Variant0(__symbols); - let __sym5 = __pop_Variant78(__symbols); + let __sym5 = __pop_Variant81(__symbols); let __sym4 = __pop_Variant0(__symbols); let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); @@ -27424,7 +27457,7 @@ mod __parse__Top { { // NonlocalStatement = "nonlocal", OneOrMore => ActionFn(1374); assert!(__symbols.len() >= 2); - let __sym1 = __pop_Variant82(__symbols); + let __sym1 = __pop_Variant85(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; let __end = __sym1.2; @@ -27518,7 +27551,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action246::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant81(__nt), __end)); + __symbols.push((__start, __Symbol::Variant84(__nt), __end)); (1, 195) } pub(crate) fn __reduce519< @@ -27535,7 +27568,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action247::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant81(__nt), __end)); + __symbols.push((__start, __Symbol::Variant84(__nt), __end)); (1, 195) } pub(crate) fn __reduce520< @@ -27552,7 +27585,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action248::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant81(__nt), __end)); + __symbols.push((__start, __Symbol::Variant84(__nt), __end)); (1, 195) } pub(crate) fn __reduce521< @@ -27565,7 +27598,7 @@ mod __parse__Top { ) -> (usize, usize) { // NumberAtom = Number => ActionFn(1377); - let __sym0 = __pop_Variant81(__symbols); + let __sym0 = __pop_Variant84(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action1377::<>(source_code, mode, __sym0); @@ -27696,7 +27729,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action379::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant82(__nt), __end)); + __symbols.push((__start, __Symbol::Variant85(__nt), __end)); (1, 200) } pub(crate) fn __reduce529< @@ -27712,11 +27745,11 @@ mod __parse__Top { assert!(__symbols.len() >= 3); let __sym2 = __pop_Variant23(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant82(__symbols); + let __sym0 = __pop_Variant85(__symbols); let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action380::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant82(__nt), __end)); + __symbols.push((__start, 
__Symbol::Variant85(__nt), __end)); (3, 200) } pub(crate) fn __reduce530< @@ -27736,7 +27769,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action1593::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant73(__nt), __end)); + __symbols.push((__start, __Symbol::Variant76(__nt), __end)); (3, 201) } pub(crate) fn __reduce531< @@ -27753,7 +27786,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action1594::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant73(__nt), __end)); + __symbols.push((__start, __Symbol::Variant76(__nt), __end)); (1, 201) } pub(crate) fn __reduce532< @@ -27771,11 +27804,11 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant23(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant73(__symbols); + let __sym0 = __pop_Variant76(__symbols); let __start = __sym0.0; let __end = __sym4.2; let __nt = super::__action1595::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4); - __symbols.push((__start, __Symbol::Variant73(__nt), __end)); + __symbols.push((__start, __Symbol::Variant76(__nt), __end)); (5, 201) } pub(crate) fn __reduce533< @@ -27791,11 +27824,11 @@ mod __parse__Top { assert!(__symbols.len() >= 3); let __sym2 = __pop_Variant23(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant73(__symbols); + let __sym0 = __pop_Variant76(__symbols); let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action1596::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant73(__nt), __end)); + __symbols.push((__start, __Symbol::Variant76(__nt), __end)); (3, 201) } pub(crate) fn __reduce534< @@ -27815,7 +27848,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action1597::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant73(__nt), __end)); + __symbols.push((__start, __Symbol::Variant76(__nt), __end)); (3, 202) } pub(crate) fn __reduce535< @@ -27832,7 +27865,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action1598::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant73(__nt), __end)); + __symbols.push((__start, __Symbol::Variant76(__nt), __end)); (1, 202) } pub(crate) fn __reduce536< @@ -27850,11 +27883,11 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant23(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant73(__symbols); + let __sym0 = __pop_Variant76(__symbols); let __start = __sym0.0; let __end = __sym4.2; let __nt = super::__action1599::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4); - __symbols.push((__start, __Symbol::Variant73(__nt), __end)); + __symbols.push((__start, __Symbol::Variant76(__nt), __end)); (5, 202) } pub(crate) fn __reduce537< @@ -27870,11 +27903,11 @@ mod __parse__Top { assert!(__symbols.len() >= 3); let __sym2 = __pop_Variant23(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant73(__symbols); + let __sym0 = __pop_Variant76(__symbols); let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action1600::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant73(__nt), __end)); + __symbols.push((__start, __Symbol::Variant76(__nt), __end)); (3, 202) } pub(crate) fn __reduce538< @@ 
-27887,11 +27920,11 @@ mod __parse__Top { ) -> (usize, usize) { // OneOrMore = MatchKeywordEntry => ActionFn(348); - let __sym0 = __pop_Variant79(__symbols); + let __sym0 = __pop_Variant82(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action348::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant83(__nt), __end)); + __symbols.push((__start, __Symbol::Variant86(__nt), __end)); (1, 203) } pub(crate) fn __reduce539< @@ -27905,13 +27938,13 @@ mod __parse__Top { { // OneOrMore = OneOrMore, ",", MatchKeywordEntry => ActionFn(349); assert!(__symbols.len() >= 3); - let __sym2 = __pop_Variant79(__symbols); + let __sym2 = __pop_Variant82(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant83(__symbols); + let __sym0 = __pop_Variant86(__symbols); let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action349::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant83(__nt), __end)); + __symbols.push((__start, __Symbol::Variant86(__nt), __end)); (3, 203) } pub(crate) fn __reduce540< @@ -27924,11 +27957,11 @@ mod __parse__Top { ) -> (usize, usize) { // OneOrMore = MatchMappingEntry => ActionFn(352); - let __sym0 = __pop_Variant80(__symbols); + let __sym0 = __pop_Variant83(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action352::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant84(__nt), __end)); + __symbols.push((__start, __Symbol::Variant87(__nt), __end)); (1, 204) } pub(crate) fn __reduce541< @@ -27942,13 +27975,13 @@ mod __parse__Top { { // OneOrMore = OneOrMore, ",", MatchMappingEntry => ActionFn(353); assert!(__symbols.len() >= 3); - let __sym2 = __pop_Variant80(__symbols); + let __sym2 = __pop_Variant83(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant84(__symbols); + let __sym0 = __pop_Variant87(__symbols); let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action353::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant84(__nt), __end)); + __symbols.push((__start, __Symbol::Variant87(__nt), __end)); (3, 204) } pub(crate) fn __reduce542< @@ -27965,7 +27998,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action490::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant85(__nt), __end)); + __symbols.push((__start, __Symbol::Variant88(__nt), __end)); (1, 205) } pub(crate) fn __reduce543< @@ -27981,11 +28014,11 @@ mod __parse__Top { assert!(__symbols.len() >= 3); let __sym2 = __pop_Variant11(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action491::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant85(__nt), __end)); + __symbols.push((__start, __Symbol::Variant88(__nt), __end)); (3, 205) } pub(crate) fn __reduce544< @@ -28002,7 +28035,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action479::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant85(__nt), __end)); + __symbols.push((__start, __Symbol::Variant88(__nt), __end)); (1, 206) } pub(crate) fn __reduce545< @@ -28018,11 +28051,11 @@ mod __parse__Top { assert!(__symbols.len() >= 3); let __sym2 = __pop_Variant11(__symbols); let __sym1 = 
__pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action480::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant85(__nt), __end)); + __symbols.push((__start, __Symbol::Variant88(__nt), __end)); (3, 206) } pub(crate) fn __reduce546< @@ -28183,11 +28216,11 @@ mod __parse__Top { ) -> (usize, usize) { // OneOrMore = TypeParam => ActionFn(289); - let __sym0 = __pop_Variant97(__symbols); + let __sym0 = __pop_Variant100(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action289::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant86(__nt), __end)); + __symbols.push((__start, __Symbol::Variant89(__nt), __end)); (1, 211) } pub(crate) fn __reduce555< @@ -28201,13 +28234,13 @@ mod __parse__Top { { // OneOrMore = OneOrMore, ",", TypeParam => ActionFn(290); assert!(__symbols.len() >= 3); - let __sym2 = __pop_Variant97(__symbols); + let __sym2 = __pop_Variant100(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant86(__symbols); + let __sym0 = __pop_Variant89(__symbols); let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action290::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant86(__nt), __end)); + __symbols.push((__start, __Symbol::Variant89(__nt), __end)); (3, 211) } pub(crate) fn __reduce556< @@ -28400,11 +28433,11 @@ mod __parse__Top { ) -> (usize, usize) { // ParameterDefs = OneOrMore> => ActionFn(446); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action446::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant87(__nt), __end)); + __symbols.push((__start, __Symbol::Variant90(__nt), __end)); (1, 217) } pub(crate) fn __reduce567< @@ -28420,11 +28453,11 @@ mod __parse__Top { assert!(__symbols.len() >= 3); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action701::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant87(__nt), __end)); + __symbols.push((__start, __Symbol::Variant90(__nt), __end)); (3, 217) } pub(crate) fn __reduce568< @@ -28441,11 +28474,11 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym3.2; let __nt = super::__action702::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3); - __symbols.push((__start, __Symbol::Variant87(__nt), __end)); + __symbols.push((__start, __Symbol::Variant90(__nt), __end)); (4, 217) } pub(crate) fn __reduce569< @@ -28458,11 +28491,11 @@ mod __parse__Top { ) -> (usize, usize) { // ParameterDefs = OneOrMore> => ActionFn(454); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action454::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant87(__nt), __end)); + __symbols.push((__start, __Symbol::Variant90(__nt), __end)); (1, 218) } pub(crate) fn __reduce570< @@ -28478,11 +28511,11 
@@ mod __parse__Top { assert!(__symbols.len() >= 3); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action709::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant87(__nt), __end)); + __symbols.push((__start, __Symbol::Variant90(__nt), __end)); (3, 218) } pub(crate) fn __reduce571< @@ -28499,11 +28532,11 @@ mod __parse__Top { let __sym3 = __pop_Variant12(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant0(__symbols); - let __sym0 = __pop_Variant85(__symbols); + let __sym0 = __pop_Variant88(__symbols); let __start = __sym0.0; let __end = __sym3.2; let __nt = super::__action710::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3); - __symbols.push((__start, __Symbol::Variant87(__nt), __end)); + __symbols.push((__start, __Symbol::Variant90(__nt), __end)); (4, 218) } pub(crate) fn __reduce648< @@ -28676,7 +28709,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action429::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant88(__nt), __end)); + __symbols.push((__start, __Symbol::Variant91(__nt), __end)); (1, 227) } pub(crate) fn __reduce752< @@ -28692,7 +28725,7 @@ mod __parse__Top { let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2.clone())).unwrap_or_default(); let __end = __start.clone(); let __nt = super::__action430::<>(source_code, mode, &__start, &__end); - __symbols.push((__start, __Symbol::Variant88(__nt), __end)); + __symbols.push((__start, __Symbol::Variant91(__nt), __end)); (0, 227) } pub(crate) fn __reduce753< @@ -28708,14 +28741,14 @@ mod __parse__Top { assert!(__symbols.len() >= 6); let __sym5 = __pop_Variant0(__symbols); let __sym4 = __pop_Variant0(__symbols); - let __sym3 = __pop_Variant83(__symbols); + let __sym3 = __pop_Variant86(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant53(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; let __end = __sym5.2; let __nt = super::__action1463::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5); - __symbols.push((__start, __Symbol::Variant89(__nt), __end)); + __symbols.push((__start, __Symbol::Variant92(__nt), __end)); (6, 228) } pub(crate) fn __reduce754< @@ -28730,14 +28763,14 @@ mod __parse__Top { // PatternArguments = "(", OneOrMore, ",", OneOrMore, ")" => ActionFn(1464); assert!(__symbols.len() >= 5); let __sym4 = __pop_Variant0(__symbols); - let __sym3 = __pop_Variant83(__symbols); + let __sym3 = __pop_Variant86(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant53(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; let __end = __sym4.2; let __nt = super::__action1464::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4); - __symbols.push((__start, __Symbol::Variant89(__nt), __end)); + __symbols.push((__start, __Symbol::Variant92(__nt), __end)); (5, 228) } pub(crate) fn __reduce755< @@ -28758,7 +28791,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym3.2; let __nt = super::__action1465::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3); - __symbols.push((__start, __Symbol::Variant89(__nt), __end)); + __symbols.push((__start, __Symbol::Variant92(__nt), __end)); (4, 228) } pub(crate) fn __reduce756< @@ -28778,7 +28811,7 @@ mod __parse__Top { 
let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action1466::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant89(__nt), __end)); + __symbols.push((__start, __Symbol::Variant92(__nt), __end)); (3, 228) } pub(crate) fn __reduce757< @@ -28794,12 +28827,12 @@ mod __parse__Top { assert!(__symbols.len() >= 4); let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); - let __sym1 = __pop_Variant83(__symbols); + let __sym1 = __pop_Variant86(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; let __end = __sym3.2; let __nt = super::__action1467::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3); - __symbols.push((__start, __Symbol::Variant89(__nt), __end)); + __symbols.push((__start, __Symbol::Variant92(__nt), __end)); (4, 228) } pub(crate) fn __reduce758< @@ -28814,12 +28847,12 @@ mod __parse__Top { // PatternArguments = "(", OneOrMore, ")" => ActionFn(1468); assert!(__symbols.len() >= 3); let __sym2 = __pop_Variant0(__symbols); - let __sym1 = __pop_Variant83(__symbols); + let __sym1 = __pop_Variant86(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action1468::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant89(__nt), __end)); + __symbols.push((__start, __Symbol::Variant92(__nt), __end)); (3, 228) } pub(crate) fn __reduce759< @@ -28838,7 +28871,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym1.2; let __nt = super::__action1469::<>(source_code, mode, __sym0, __sym1); - __symbols.push((__start, __Symbol::Variant89(__nt), __end)); + __symbols.push((__start, __Symbol::Variant92(__nt), __end)); (2, 228) } pub(crate) fn __reduce760< @@ -29528,7 +29561,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym4.2; let __nt = super::__action1555::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4); - __symbols.push((__start, __Symbol::Variant90(__nt), __end)); + __symbols.push((__start, __Symbol::Variant93(__nt), __end)); (5, 239) } pub(crate) fn __reduce796< @@ -29551,7 +29584,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym5.2; let __nt = super::__action1556::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4, __sym5); - __symbols.push((__start, __Symbol::Variant90(__nt), __end)); + __symbols.push((__start, __Symbol::Variant93(__nt), __end)); (6, 239) } pub(crate) fn __reduce797< @@ -29572,7 +29605,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym3.2; let __nt = super::__action1557::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3); - __symbols.push((__start, __Symbol::Variant90(__nt), __end)); + __symbols.push((__start, __Symbol::Variant93(__nt), __end)); (4, 239) } pub(crate) fn __reduce798< @@ -29594,7 +29627,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym4.2; let __nt = super::__action1558::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4); - __symbols.push((__start, __Symbol::Variant90(__nt), __end)); + __symbols.push((__start, __Symbol::Variant93(__nt), __end)); (5, 239) } pub(crate) fn __reduce799< @@ -29607,11 +29640,11 @@ mod __parse__Top { ) -> (usize, usize) { // SingleForComprehension+ = SingleForComprehension => ActionFn(257); - let __sym0 = __pop_Variant90(__symbols); + let __sym0 = __pop_Variant93(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action257::<>(source_code, mode, __sym0); - __symbols.push((__start, 
__Symbol::Variant91(__nt), __end)); + __symbols.push((__start, __Symbol::Variant94(__nt), __end)); (1, 240) } pub(crate) fn __reduce800< @@ -29625,12 +29658,12 @@ mod __parse__Top { { // SingleForComprehension+ = SingleForComprehension+, SingleForComprehension => ActionFn(258); assert!(__symbols.len() >= 2); - let __sym1 = __pop_Variant90(__symbols); - let __sym0 = __pop_Variant91(__symbols); + let __sym1 = __pop_Variant93(__symbols); + let __sym0 = __pop_Variant94(__symbols); let __start = __sym0.0; let __end = __sym1.2; let __nt = super::__action258::<>(source_code, mode, __sym0, __sym1); - __symbols.push((__start, __Symbol::Variant91(__nt), __end)); + __symbols.push((__start, __Symbol::Variant94(__nt), __end)); (2, 240) } pub(crate) fn __reduce801< @@ -29649,7 +29682,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym1.2; let __nt = super::__action1733::<>(source_code, mode, __sym0, __sym1); - __symbols.push((__start, __Symbol::Variant92(__nt), __end)); + __symbols.push((__start, __Symbol::Variant95(__nt), __end)); (2, 241) } pub(crate) fn __reduce802< @@ -29666,7 +29699,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action1734::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant92(__nt), __end)); + __symbols.push((__start, __Symbol::Variant95(__nt), __end)); (1, 241) } pub(crate) fn __reduce803< @@ -29679,11 +29712,11 @@ mod __parse__Top { ) -> (usize, usize) { // SliceOp? = SliceOp => ActionFn(277); - let __sym0 = __pop_Variant92(__symbols); + let __sym0 = __pop_Variant95(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action277::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant93(__nt), __end)); + __symbols.push((__start, __Symbol::Variant96(__nt), __end)); (1, 242) } pub(crate) fn __reduce804< @@ -29699,7 +29732,7 @@ mod __parse__Top { let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2.clone())).unwrap_or_default(); let __end = __start.clone(); let __nt = super::__action278::<>(source_code, mode, &__start, &__end); - __symbols.push((__start, __Symbol::Variant93(__nt), __end)); + __symbols.push((__start, __Symbol::Variant96(__nt), __end)); (0, 242) } pub(crate) fn __reduce805< @@ -30064,7 +30097,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action1194::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant94(__nt), __end)); + __symbols.push((__start, __Symbol::Variant97(__nt), __end)); (3, 250) } pub(crate) fn __reduce826< @@ -30085,7 +30118,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym3.2; let __nt = super::__action1195::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3); - __symbols.push((__start, __Symbol::Variant94(__nt), __end)); + __symbols.push((__start, __Symbol::Variant97(__nt), __end)); (4, 250) } pub(crate) fn __reduce827< @@ -30104,7 +30137,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym1.2; let __nt = super::__action1196::<>(source_code, mode, __sym0, __sym1); - __symbols.push((__start, __Symbol::Variant94(__nt), __end)); + __symbols.push((__start, __Symbol::Variant97(__nt), __end)); (2, 250) } pub(crate) fn __reduce828< @@ -30124,7 +30157,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action1197::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant94(__nt), __end)); + __symbols.push((__start, 
__Symbol::Variant97(__nt), __end)); (3, 250) } pub(crate) fn __reduce829< @@ -30141,7 +30174,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action10::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant94(__nt), __end)); + __symbols.push((__start, __Symbol::Variant97(__nt), __end)); (1, 250) } pub(crate) fn __reduce830< @@ -30156,11 +30189,11 @@ mod __parse__Top { // Statements = Statements, CompoundStatement => ActionFn(11); assert!(__symbols.len() >= 2); let __sym1 = __pop_Variant37(__symbols); - let __sym0 = __pop_Variant94(__symbols); + let __sym0 = __pop_Variant97(__symbols); let __start = __sym0.0; let __end = __sym1.2; let __nt = super::__action11::<>(source_code, mode, __sym0, __sym1); - __symbols.push((__start, __Symbol::Variant94(__nt), __end)); + __symbols.push((__start, __Symbol::Variant97(__nt), __end)); (2, 250) } pub(crate) fn __reduce831< @@ -30177,11 +30210,11 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant37(__symbols); - let __sym0 = __pop_Variant94(__symbols); + let __sym0 = __pop_Variant97(__symbols); let __start = __sym0.0; let __end = __sym3.2; let __nt = super::__action1198::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3); - __symbols.push((__start, __Symbol::Variant94(__nt), __end)); + __symbols.push((__start, __Symbol::Variant97(__nt), __end)); (4, 250) } pub(crate) fn __reduce832< @@ -30199,11 +30232,11 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant37(__symbols); let __sym1 = __pop_Variant38(__symbols); - let __sym0 = __pop_Variant94(__symbols); + let __sym0 = __pop_Variant97(__symbols); let __start = __sym0.0; let __end = __sym4.2; let __nt = super::__action1199::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3, __sym4); - __symbols.push((__start, __Symbol::Variant94(__nt), __end)); + __symbols.push((__start, __Symbol::Variant97(__nt), __end)); (5, 250) } pub(crate) fn __reduce833< @@ -30219,11 +30252,11 @@ mod __parse__Top { assert!(__symbols.len() >= 3); let __sym2 = __pop_Variant0(__symbols); let __sym1 = __pop_Variant37(__symbols); - let __sym0 = __pop_Variant94(__symbols); + let __sym0 = __pop_Variant97(__symbols); let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action1200::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant94(__nt), __end)); + __symbols.push((__start, __Symbol::Variant97(__nt), __end)); (3, 250) } pub(crate) fn __reduce834< @@ -30240,11 +30273,11 @@ mod __parse__Top { let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant37(__symbols); let __sym1 = __pop_Variant38(__symbols); - let __sym0 = __pop_Variant94(__symbols); + let __sym0 = __pop_Variant97(__symbols); let __start = __sym0.0; let __end = __sym3.2; let __nt = super::__action1201::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3); - __symbols.push((__start, __Symbol::Variant94(__nt), __end)); + __symbols.push((__start, __Symbol::Variant97(__nt), __end)); (4, 250) } pub(crate) fn __reduce835< @@ -30326,7 +30359,7 @@ mod __parse__Top { { // Subscript = Test<"all">, ":", Test<"all">, SliceOp => ActionFn(1735); assert!(__symbols.len() >= 4); - let __sym3 = __pop_Variant92(__symbols); + let __sym3 = __pop_Variant95(__symbols); let __sym2 = __pop_Variant15(__symbols); let __sym1 = __pop_Variant0(__symbols); let __sym0 = __pop_Variant15(__symbols); @@ -30347,7 +30380,7 @@ mod __parse__Top { { // Subscript = Test<"all">, 
":", SliceOp => ActionFn(1736); assert!(__symbols.len() >= 3); - let __sym2 = __pop_Variant92(__symbols); + let __sym2 = __pop_Variant95(__symbols); let __sym1 = __pop_Variant0(__symbols); let __sym0 = __pop_Variant15(__symbols); let __start = __sym0.0; @@ -30367,7 +30400,7 @@ mod __parse__Top { { // Subscript = ":", Test<"all">, SliceOp => ActionFn(1737); assert!(__symbols.len() >= 3); - let __sym2 = __pop_Variant92(__symbols); + let __sym2 = __pop_Variant95(__symbols); let __sym1 = __pop_Variant15(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; @@ -30387,7 +30420,7 @@ mod __parse__Top { { // Subscript = ":", SliceOp => ActionFn(1738); assert!(__symbols.len() >= 2); - let __sym1 = __pop_Variant92(__symbols); + let __sym1 = __pop_Variant95(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; let __end = __sym1.2; @@ -30634,7 +30667,7 @@ mod __parse__Top { // Suite = "\n", Indent, Statements, Dedent => ActionFn(8); assert!(__symbols.len() >= 4); let __sym3 = __pop_Variant0(__symbols); - let __sym2 = __pop_Variant94(__symbols); + let __sym2 = __pop_Variant97(__symbols); let __sym1 = __pop_Variant0(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; @@ -31047,7 +31080,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym1.2; let __nt = super::__action1503::<>(source_code, mode, __sym0, __sym1); - __symbols.push((__start, __Symbol::Variant95(__nt), __end)); + __symbols.push((__start, __Symbol::Variant98(__nt), __end)); (2, 268) } pub(crate) fn __reduce881< @@ -31066,7 +31099,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym1.2; let __nt = super::__action1750::<>(source_code, mode, __sym0, __sym1); - __symbols.push((__start, __Symbol::Variant95(__nt), __end)); + __symbols.push((__start, __Symbol::Variant98(__nt), __end)); (2, 268) } pub(crate) fn __reduce882< @@ -31086,7 +31119,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action1751::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant95(__nt), __end)); + __symbols.push((__start, __Symbol::Variant98(__nt), __end)); (3, 268) } pub(crate) fn __reduce883< @@ -31320,7 +31353,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym1.2; let __nt = super::__action354::<>(source_code, mode, __sym0, __sym1); - __symbols.push((__start, __Symbol::Variant96(__nt), __end)); + __symbols.push((__start, __Symbol::Variant99(__nt), __end)); (2, 270) } pub(crate) fn __reduce893< @@ -31335,11 +31368,11 @@ mod __parse__Top { // TwoOrMore = TwoOrMore, StringLiteral => ActionFn(355); assert!(__symbols.len() >= 2); let __sym1 = __pop_Variant69(__symbols); - let __sym0 = __pop_Variant96(__symbols); + let __sym0 = __pop_Variant99(__symbols); let __start = __sym0.0; let __end = __sym1.2; let __nt = super::__action355::<>(source_code, mode, __sym0, __sym1); - __symbols.push((__start, __Symbol::Variant96(__nt), __end)); + __symbols.push((__start, __Symbol::Variant99(__nt), __end)); (2, 270) } pub(crate) fn __reduce894< @@ -31358,7 +31391,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym1.2; let __nt = super::__action275::<>(source_code, mode, __sym0, __sym1); - __symbols.push((__start, __Symbol::Variant96(__nt), __end)); + __symbols.push((__start, __Symbol::Variant99(__nt), __end)); (2, 271) } pub(crate) fn __reduce895< @@ -31373,11 +31406,11 @@ mod __parse__Top { // TwoOrMore = TwoOrMore, StringLiteralOrFString => ActionFn(276); assert!(__symbols.len() >= 2); let 
__sym1 = __pop_Variant69(__symbols); - let __sym0 = __pop_Variant96(__symbols); + let __sym0 = __pop_Variant99(__symbols); let __start = __sym0.0; let __end = __sym1.2; let __nt = super::__action276::<>(source_code, mode, __sym0, __sym1); - __symbols.push((__start, __Symbol::Variant96(__nt), __end)); + __symbols.push((__start, __Symbol::Variant99(__nt), __end)); (2, 271) } pub(crate) fn __reduce896< @@ -31570,7 +31603,7 @@ mod __parse__Top { assert!(__symbols.len() >= 5); let __sym4 = __pop_Variant15(__symbols); let __sym3 = __pop_Variant0(__symbols); - let __sym2 = __pop_Variant98(__symbols); + let __sym2 = __pop_Variant101(__symbols); let __sym1 = __pop_Variant44(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; @@ -31617,7 +31650,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action1516::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant97(__nt), __end)); + __symbols.push((__start, __Symbol::Variant100(__nt), __end)); (3, 278) } pub(crate) fn __reduce908< @@ -31634,7 +31667,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action1517::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant97(__nt), __end)); + __symbols.push((__start, __Symbol::Variant100(__nt), __end)); (1, 278) } pub(crate) fn __reduce909< @@ -31653,7 +31686,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym1.2; let __nt = super::__action1518::<>(source_code, mode, __sym0, __sym1); - __symbols.push((__start, __Symbol::Variant97(__nt), __end)); + __symbols.push((__start, __Symbol::Variant100(__nt), __end)); (2, 278) } pub(crate) fn __reduce910< @@ -31672,7 +31705,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym1.2; let __nt = super::__action1519::<>(source_code, mode, __sym0, __sym1); - __symbols.push((__start, __Symbol::Variant97(__nt), __end)); + __symbols.push((__start, __Symbol::Variant100(__nt), __end)); (2, 278) } pub(crate) fn __reduce911< @@ -31688,12 +31721,12 @@ mod __parse__Top { assert!(__symbols.len() >= 4); let __sym3 = __pop_Variant0(__symbols); let __sym2 = __pop_Variant0(__symbols); - let __sym1 = __pop_Variant86(__symbols); + let __sym1 = __pop_Variant89(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; let __end = __sym3.2; let __nt = super::__action1520::<>(source_code, mode, __sym0, __sym1, __sym2, __sym3); - __symbols.push((__start, __Symbol::Variant98(__nt), __end)); + __symbols.push((__start, __Symbol::Variant101(__nt), __end)); (4, 279) } pub(crate) fn __reduce912< @@ -31708,12 +31741,12 @@ mod __parse__Top { // TypeParams = "[", OneOrMore, "]" => ActionFn(1521); assert!(__symbols.len() >= 3); let __sym2 = __pop_Variant0(__symbols); - let __sym1 = __pop_Variant86(__symbols); + let __sym1 = __pop_Variant89(__symbols); let __sym0 = __pop_Variant0(__symbols); let __start = __sym0.0; let __end = __sym2.2; let __nt = super::__action1521::<>(source_code, mode, __sym0, __sym1, __sym2); - __symbols.push((__start, __Symbol::Variant98(__nt), __end)); + __symbols.push((__start, __Symbol::Variant101(__nt), __end)); (3, 279) } pub(crate) fn __reduce913< @@ -31726,11 +31759,11 @@ mod __parse__Top { ) -> (usize, usize) { // TypeParams? 
= TypeParams => ActionFn(309); - let __sym0 = __pop_Variant98(__symbols); + let __sym0 = __pop_Variant101(__symbols); let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action309::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant99(__nt), __end)); + __symbols.push((__start, __Symbol::Variant102(__nt), __end)); (1, 280) } pub(crate) fn __reduce914< @@ -31746,7 +31779,7 @@ mod __parse__Top { let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2.clone())).unwrap_or_default(); let __end = __start.clone(); let __nt = super::__action310::<>(source_code, mode, &__start, &__end); - __symbols.push((__start, __Symbol::Variant99(__nt), __end)); + __symbols.push((__start, __Symbol::Variant102(__nt), __end)); (0, 280) } pub(crate) fn __reduce915< @@ -31800,7 +31833,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action204::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant100(__nt), __end)); + __symbols.push((__start, __Symbol::Variant103(__nt), __end)); (1, 282) } pub(crate) fn __reduce918< @@ -31817,7 +31850,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action205::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant100(__nt), __end)); + __symbols.push((__start, __Symbol::Variant103(__nt), __end)); (1, 282) } pub(crate) fn __reduce919< @@ -31834,7 +31867,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action206::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant100(__nt), __end)); + __symbols.push((__start, __Symbol::Variant103(__nt), __end)); (1, 282) } pub(crate) fn __reduce920< @@ -32461,7 +32494,7 @@ mod __parse__Top { let __start = __sym0.0; let __end = __sym0.2; let __nt = super::__action281::<>(source_code, mode, __sym0); - __symbols.push((__start, __Symbol::Variant101(__nt), __end)); + __symbols.push((__start, __Symbol::Variant104(__nt), __end)); (1, 296) } pub(crate) fn __reduce953< @@ -32477,7 +32510,7 @@ mod __parse__Top { let __start = __lookahead_start.cloned().or_else(|| __symbols.last().map(|s| s.2.clone())).unwrap_or_default(); let __end = __start.clone(); let __nt = super::__action282::<>(source_code, mode, &__start, &__end); - __symbols.push((__start, __Symbol::Variant101(__nt), __end)); + __symbols.push((__start, __Symbol::Variant104(__nt), __end)); (0, 296) } } @@ -32518,7 +32551,7 @@ fn __action2< mode: Mode, (_, start, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, body, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, body, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, alloc::vec::Vec, TextSize), (_, end, _): (TextSize, TextSize, TextSize), ) -> ast::Mod @@ -32843,7 +32876,7 @@ fn __action25< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, targets, _): (TextSize, Vec, TextSize), + (_, targets, _): (TextSize, Vec, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> ast::Stmt { @@ -32861,8 +32894,8 @@ fn __action26< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, expression, _): (TextSize, ast::ParenthesizedExpr, TextSize), - (_, suffix, _): (TextSize, alloc::vec::Vec, TextSize), + (_, expression, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), + (_, suffix, _): (TextSize, alloc::vec::Vec, TextSize), (_, 
end_location, _): (TextSize, TextSize, TextSize), ) -> Result> { @@ -32896,9 +32929,9 @@ fn __action27< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, target, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, target, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, op, _): (TextSize, ast::Operator, TextSize), - (_, rhs, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, rhs, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> Result> { @@ -32922,10 +32955,10 @@ fn __action28< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, target, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, target, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, annotation, _): (TextSize, ast::ParenthesizedExpr, TextSize), - (_, rhs, _): (TextSize, core::option::Option, TextSize), + (_, annotation, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), + (_, rhs, _): (TextSize, core::option::Option, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> Result> { @@ -32951,8 +32984,8 @@ fn __action29< source_code: &str, mode: Mode, (_, _, _): (TextSize, token::Tok, TextSize), - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { e } @@ -32964,8 +32997,8 @@ fn __action30< source_code: &str, mode: Mode, (_, _, _): (TextSize, token::Tok, TextSize), - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { e } @@ -32976,8 +33009,8 @@ fn __action31< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -32988,8 +33021,8 @@ fn __action32< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -33000,8 +33033,8 @@ fn __action33< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -33012,8 +33045,8 @@ fn __action34< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -33024,8 +33057,8 @@ fn __action35< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -33036,8 +33069,8 @@ fn __action36< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -33048,8 +33081,8 @@ 
fn __action37< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -33060,8 +33093,8 @@ fn __action38< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -33072,8 +33105,8 @@ fn __action39< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -33275,7 +33308,7 @@ fn __action55< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, value, _): (TextSize, core::option::Option, TextSize), + (_, value, _): (TextSize, core::option::Option, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> ast::Stmt { @@ -33293,7 +33326,7 @@ fn __action56< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, expression, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, expression, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> ast::Stmt { @@ -33342,8 +33375,8 @@ fn __action59< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, exc, _): (TextSize, ast::ParenthesizedExpr, TextSize), - (_, cause, _): (TextSize, core::option::Option, TextSize), + (_, exc, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), + (_, cause, _): (TextSize, core::option::Option, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> ast::Stmt { @@ -33583,8 +33616,8 @@ fn __action73< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, test, _): (TextSize, ast::ParenthesizedExpr, TextSize), - (_, msg, _): (TextSize, core::option::Option, TextSize), + (_, test, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), + (_, msg, _): (TextSize, core::option::Option, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> ast::Stmt { @@ -33637,7 +33670,7 @@ fn __action75< (_, location, _): (TextSize, TextSize, TextSize), (_, c, _): (TextSize, (IpyEscapeKind, String), TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { { if mode == Mode::Ipython { @@ -33669,7 +33702,7 @@ fn __action76< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, suffix, _): (TextSize, alloc::vec::Vec, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> Result> @@ -33846,7 +33879,7 @@ fn __action85< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, subject, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, subject, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), @@ -33881,7 +33914,7 @@ fn __action86< (_, location, _): (TextSize, 
TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, tuple_location, _): (TextSize, TextSize, TextSize), - (_, subject, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, subject, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, tuple_end_location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), @@ -33924,7 +33957,7 @@ fn __action87< (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, tuple_location, _): (TextSize, TextSize, TextSize), - (_, elts, _): (TextSize, Vec, TextSize), + (_, elts, _): (TextSize, Vec, TextSize), (_, _, _): (TextSize, core::option::Option, TextSize), (_, tuple_end_location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), @@ -33992,7 +34025,7 @@ fn __action89< source_code: &str, mode: Mode, (_, _, _): (TextSize, token::Tok, TextSize), - (_, guard, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, guard, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::Expr { { @@ -34350,7 +34383,7 @@ fn __action111< (_, location, _): (TextSize, TextSize, TextSize), (_, value, _): (TextSize, ast::Number, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::Expr::NumberLiteral( ast::ExprNumberLiteral { value, range: (location..end_location).into() } @@ -34363,8 +34396,8 @@ fn __action112< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -34377,9 +34410,9 @@ fn __action113< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, operand, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, operand, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::Expr::UnaryOp( ast::ExprUnaryOp { @@ -34397,11 +34430,11 @@ fn __action114< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, left, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, left, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, op, _): (TextSize, ast::Operator, TextSize), - (_, right, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, right, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprBinOp { left: Box::new(left.into()), @@ -34469,7 +34502,7 @@ fn __action118< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, value, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, value, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> ast::Pattern { @@ -34486,7 +34519,7 @@ fn __action119< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, value, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, value, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> ast::Pattern { @@ -34653,7 +34686,7 @@ fn __action129< >( source_code: &str, mode: 
Mode, - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::Expr { e.into() @@ -34665,7 +34698,7 @@ fn __action130< >( source_code: &str, mode: Mode, - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::Expr { e.into() @@ -35001,10 +35034,10 @@ fn __action146< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, test, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, test, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, body, _): (TextSize, ast::Suite, TextSize), - (_, s2, _): (TextSize, alloc::vec::Vec<(TextSize, ast::ParenthesizedExpr, ast::Suite)>, TextSize), + (_, s2, _): (TextSize, alloc::vec::Vec<(TextSize, crate::parser::ParenthesizedExpr, ast::Suite)>, TextSize), (_, s3, _): (TextSize, core::option::Option<(TextSize, ast::Suite)>, TextSize), ) -> ast::Stmt { @@ -35037,7 +35070,7 @@ fn __action147< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, test, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, test, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, body, _): (TextSize, ast::Suite, TextSize), (_, s2, _): (TextSize, core::option::Option, TextSize), @@ -35070,9 +35103,9 @@ fn __action148< (_, location, _): (TextSize, TextSize, TextSize), (_, is_async, _): (TextSize, core::option::Option, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, target, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, target, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, iter, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, iter, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, body, _): (TextSize, ast::Suite, TextSize), (_, orelse, _): (TextSize, core::option::Option, TextSize), @@ -35206,7 +35239,7 @@ fn __action152< (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, typ, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, typ, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, body, _): (TextSize, ast::Suite, TextSize), ) -> ast::ExceptHandler @@ -35233,7 +35266,7 @@ fn __action153< (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, x, _): (TextSize, (ast::ParenthesizedExpr, ast::Identifier), TextSize), + (_, x, _): (TextSize, (crate::parser::ParenthesizedExpr, ast::Identifier), TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, body, _): (TextSize, ast::Suite, TextSize), ) -> ast::ExceptHandler @@ -35259,7 +35292,7 @@ fn __action154< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, typ, _): (TextSize, core::option::Option, TextSize), + (_, typ, _): (TextSize, core::option::Option, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, body, _): (TextSize, ast::Suite, TextSize), ) -> ast::ExceptHandler @@ -35285,7 +35318,7 @@ fn __action155< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, 
_): (TextSize, token::Tok, TextSize), - (_, x, _): (TextSize, (ast::ParenthesizedExpr, ast::Identifier), TextSize), + (_, x, _): (TextSize, (crate::parser::ParenthesizedExpr, ast::Identifier), TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, body, _): (TextSize, ast::Suite, TextSize), ) -> ast::ExceptHandler @@ -35408,7 +35441,7 @@ fn __action161< >( source_code: &str, mode: Mode, - (_, all, _): (TextSize, Vec, TextSize), + (_, all, _): (TextSize, Vec, TextSize), ) -> Vec { { @@ -35427,9 +35460,9 @@ fn __action162< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, context_expr, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, context_expr, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, optional_vars, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, optional_vars, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> ast::WithItem { @@ -35456,7 +35489,7 @@ fn __action163< (_, name, _): (TextSize, ast::Identifier, TextSize), (_, type_params, _): (TextSize, core::option::Option, TextSize), (_, parameters, _): (TextSize, ast::Parameters, TextSize), - (_, returns, _): (TextSize, core::option::Option, TextSize), + (_, returns, _): (TextSize, core::option::Option, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, body, _): (TextSize, ast::Suite, TextSize), ) -> ast::Stmt @@ -35507,7 +35540,7 @@ fn __action165< (_, name, _): (TextSize, ast::Expr, TextSize), (_, type_params, _): (TextSize, core::option::Option, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, value, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, value, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> ast::Stmt { @@ -35589,7 +35622,7 @@ fn __action169< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, name, _): (TextSize, ast::Identifier, TextSize), - (_, annotation, _): (TextSize, core::option::Option, TextSize), + (_, annotation, _): (TextSize, core::option::Option, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> ast::ParameterWithDefault { @@ -35608,7 +35641,7 @@ fn __action170< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, name, _): (TextSize, ast::Identifier, TextSize), - (_, annotation, _): (TextSize, core::option::Option, TextSize), + (_, annotation, _): (TextSize, core::option::Option, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> ast::Parameter { @@ -35626,7 +35659,7 @@ fn __action171< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, name, _): (TextSize, ast::Identifier, TextSize), - (_, annotation, _): (TextSize, core::option::Option, TextSize), + (_, annotation, _): (TextSize, core::option::Option, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> ast::Parameter { @@ -35697,7 +35730,7 @@ fn __action174< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, name, _): (TextSize, ast::Identifier, TextSize), - (_, bound, _): (TextSize, core::option::Option, TextSize), + (_, bound, _): (TextSize, core::option::Option, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> ast::TypeParam { @@ -35754,7 +35787,7 @@ fn __action177< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, expression, _): (TextSize, ast::ParenthesizedExpr, 
TextSize), + (_, expression, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), ) -> ast::Decorator @@ -35772,9 +35805,9 @@ fn __action178< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, value, _): (TextSize, core::option::Option, TextSize), + (_, value, _): (TextSize, core::option::Option, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprYield { value: value.map(ast::Expr::from).map(Box::new), @@ -35791,9 +35824,9 @@ fn __action179< (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, value, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, value, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprYieldFrom { value: Box::new(value.into()), @@ -35807,8 +35840,8 @@ fn __action180< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -35819,8 +35852,8 @@ fn __action181< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -35834,7 +35867,7 @@ fn __action182< (_, location, _): (TextSize, TextSize, TextSize), (_, id, _): (TextSize, ast::Identifier, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprName { id: id.into(), @@ -35850,11 +35883,11 @@ fn __action183< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, target, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, target, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, value, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, value, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { ast::ExprNamedExpr { @@ -35878,9 +35911,9 @@ fn __action184< (_, end_location_args, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, fstring_middle, _): (TextSize, core::option::Option<(String, bool)>, TextSize), - (_, body, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, body, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { { if fstring_middle.is_some() { @@ -36171,8 +36204,8 @@ fn __action207< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -36184,10 +36217,10 @@ fn __action208< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, s1, _): (TextSize, ast::ParenthesizedExpr, TextSize), + 
(_, s1, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { ast::ExprTuple { @@ -36205,10 +36238,10 @@ fn __action209< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, elts, _): (TextSize, Vec, TextSize), + (_, elts, _): (TextSize, Vec, TextSize), (_, _, _): (TextSize, core::option::Option, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { let elts = elts.into_iter().map(ast::Expr::from).collect(); @@ -36226,8 +36259,8 @@ fn __action210< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -36239,12 +36272,12 @@ fn __action211< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, lower, _): (TextSize, core::option::Option, TextSize), + (_, lower, _): (TextSize, core::option::Option, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, upper, _): (TextSize, core::option::Option, TextSize), - (_, step, _): (TextSize, core::option::Option>, TextSize), + (_, upper, _): (TextSize, core::option::Option, TextSize), + (_, step, _): (TextSize, core::option::Option>, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { let lower = lower.map(ast::Expr::from).map(Box::new); @@ -36264,8 +36297,8 @@ fn __action212< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, e, _): (TextSize, core::option::Option, TextSize), -) -> Option + (_, e, _): (TextSize, core::option::Option, TextSize), +) -> Option { e } @@ -36348,14 +36381,14 @@ fn __action218< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, values, _): (TextSize, alloc::vec::Vec, TextSize), + (_, elements, _): (TextSize, alloc::vec::Vec, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> StringType { { StringType::FString(ast::FString { - values, + elements, range: (location..end_location).into() }) } @@ -36367,8 +36400,8 @@ fn __action219< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::Expr, TextSize), -) -> ast::Expr + (_, __0, _): (TextSize, ast::FStringElement, TextSize), +) -> ast::FStringElement { __0 } @@ -36382,11 +36415,11 @@ fn __action220< (_, location, _): (TextSize, TextSize, TextSize), (_, fstring_middle, _): (TextSize, (String, bool), TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { { let (source, is_raw) = fstring_middle; - Ok(parse_fstring_middle(&source, is_raw, (location..end_location).into())?) + Ok(parse_fstring_literal_element(&source, is_raw, (location..end_location).into())?) 
} } @@ -36398,13 +36431,13 @@ fn __action221< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, value, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, value, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, debug, _): (TextSize, core::option::Option, TextSize), (_, conversion, _): (TextSize, core::option::Option<(TextSize, ast::ConversionFlag)>, TextSize), - (_, format_spec, _): (TextSize, core::option::Option, TextSize), + (_, format_spec, _): (TextSize, core::option::Option, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { { if value.expr.is_lambda_expr() && !value.is_parenthesized() { @@ -36429,16 +36462,15 @@ fn __action221< } }); Ok( - ast::ExprFormattedValue { - value: Box::new(value.into()), + ast::FStringElement::Expression(ast::FStringExpressionElement { + expression: Box::new(value.into()), debug_text, conversion: conversion.map_or(ast::ConversionFlag::None, |(_, conversion_flag)| { conversion_flag }), format_spec: format_spec.map(Box::new), range: (location..end_location).into(), - } - .into() + }) ) } } @@ -36450,8 +36482,8 @@ fn __action222< source_code: &str, mode: Mode, (_, _, _): (TextSize, token::Tok, TextSize), - (_, format_spec, _): (TextSize, ast::Expr, TextSize), -) -> ast::Expr + (_, format_spec, _): (TextSize, ast::FStringFormatSpec, TextSize), +) -> ast::FStringFormatSpec { format_spec } @@ -36463,15 +36495,13 @@ fn __action223< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, values, _): (TextSize, alloc::vec::Vec, TextSize), + (_, elements, _): (TextSize, alloc::vec::Vec, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::Expr +) -> ast::FStringFormatSpec { - { - ast::FString { - values, - range: (location..end_location).into() - }.into() + ast::FStringFormatSpec { + elements, + range: (location..end_location).into(), } } @@ -36507,9 +36537,9 @@ fn __action225< >( source_code: &str, mode: Mode, - (_, e, _): (TextSize, Vec, TextSize), + (_, e, _): (TextSize, Vec, TextSize), (_, _, _): (TextSize, core::option::Option, TextSize), -) -> Vec +) -> Vec { e } @@ -36520,9 +36550,9 @@ fn __action226< >( source_code: &str, mode: Mode, - (_, elements, _): (TextSize, Vec<(Option>, ast::ParenthesizedExpr)>, TextSize), + (_, elements, _): (TextSize, Vec<(Option>, crate::parser::ParenthesizedExpr)>, TextSize), (_, _, _): (TextSize, core::option::Option, TextSize), -) -> Vec<(Option>, ast::ParenthesizedExpr)> +) -> Vec<(Option>, crate::parser::ParenthesizedExpr)> { elements } @@ -36533,10 +36563,10 @@ fn __action227< >( source_code: &str, mode: Mode, - (_, e1, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, e1, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, e2, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> (ast::ParenthesizedExpr, ast::ParenthesizedExpr) + (_, e2, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> (crate::parser::ParenthesizedExpr, crate::parser::ParenthesizedExpr) { (e1, e2) } @@ -36547,8 +36577,8 @@ fn __action228< >( source_code: &str, mode: Mode, - (_, e, _): (TextSize, (ast::ParenthesizedExpr, ast::ParenthesizedExpr), TextSize), -) -> (Option>, ast::ParenthesizedExpr) + (_, e, _): (TextSize, (crate::parser::ParenthesizedExpr, crate::parser::ParenthesizedExpr), TextSize), +) -> (Option>, crate::parser::ParenthesizedExpr) { 
(Some(Box::new(e.0)), e.1) } @@ -36560,8 +36590,8 @@ fn __action229< source_code: &str, mode: Mode, (_, _, _): (TextSize, token::Tok, TextSize), - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> (Option>, ast::ParenthesizedExpr) + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> (Option>, crate::parser::ParenthesizedExpr) { (None, e) } @@ -36572,9 +36602,9 @@ fn __action230< >( source_code: &str, mode: Mode, - (_, e1, _): (TextSize, Vec, TextSize), + (_, e1, _): (TextSize, Vec, TextSize), (_, _, _): (TextSize, core::option::Option, TextSize), -) -> Vec +) -> Vec { e1 } @@ -36585,8 +36615,8 @@ fn __action231< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -36597,8 +36627,8 @@ fn __action232< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -36609,8 +36639,8 @@ fn __action233< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -36621,9 +36651,9 @@ fn __action234< >( source_code: &str, mode: Mode, - (_, elements, _): (TextSize, Vec, TextSize), + (_, elements, _): (TextSize, Vec, TextSize), (_, _, _): (TextSize, core::option::Option, TextSize), -) -> Vec +) -> Vec { elements } @@ -36634,8 +36664,8 @@ fn __action235< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -36648,9 +36678,9 @@ fn __action236< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, value, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, value, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprStarred { value: Box::new(value.into()), @@ -36680,10 +36710,10 @@ fn __action238< (_, location, _): (TextSize, TextSize, TextSize), (_, is_async, _): (TextSize, core::option::Option, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, target, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, target, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, iter, _): (TextSize, ast::ParenthesizedExpr, TextSize), - (_, ifs, _): (TextSize, alloc::vec::Vec, TextSize), + (_, iter, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), + (_, ifs, _): (TextSize, alloc::vec::Vec, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> ast::Comprehension { @@ -36706,8 +36736,8 @@ fn __action239< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -36719,8 +36749,8 @@ fn __action240< source_code: &str, mode: Mode, (_, _, _): (TextSize, token::Tok, 
TextSize), - (_, c, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, c, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { c } @@ -36755,7 +36785,7 @@ fn __action242< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, elt, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, elt, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, generators, _): (TextSize, core::option::Option>, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> (Option<(TextSize, TextSize, Option)>, ast::Expr) @@ -36784,7 +36814,7 @@ fn __action243< (_, location, _): (TextSize, TextSize, TextSize), (_, i, _): (TextSize, ast::Identifier, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> (Option<(TextSize, TextSize, Option)>, ast::Expr) { @@ -36799,7 +36829,7 @@ fn __action244< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, value, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, value, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> (Option<(TextSize, TextSize, Option)>, ast::Expr) { @@ -36819,7 +36849,7 @@ fn __action245< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> (Option<(TextSize, TextSize, Option)>, ast::Expr) { @@ -36927,7 +36957,7 @@ fn __action253< mode: Mode, __lookbehind: &TextSize, __lookahead: &TextSize, -) -> alloc::vec::Vec +) -> alloc::vec::Vec { alloc::vec![] } @@ -36938,8 +36968,8 @@ fn __action254< >( source_code: &str, mode: Mode, - (_, v, _): (TextSize, alloc::vec::Vec, TextSize), -) -> alloc::vec::Vec + (_, v, _): (TextSize, alloc::vec::Vec, TextSize), +) -> alloc::vec::Vec { v } @@ -36951,10 +36981,10 @@ fn __action255< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, values, _): (TextSize, alloc::vec::Vec, TextSize), - (_, last, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, values, _): (TextSize, alloc::vec::Vec, TextSize), + (_, last, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { let values = values.into_iter().chain(std::iter::once(last)).map(ast::Expr::from).collect(); @@ -36968,8 +36998,8 @@ fn __action256< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -37006,14 +37036,14 @@ fn __action259< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, elts, _): (TextSize, Vec, TextSize), + (_, elts, _): (TextSize, Vec, TextSize), (_, trailing_comma, _): (TextSize, core::option::Option, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { if elts.len() == 1 && 
trailing_comma.is_none() { - ast::ParenthesizedExpr { + crate::parser::ParenthesizedExpr { expr: elts.into_iter().next().unwrap().into(), range: (location..end_location).into(), } @@ -37030,8 +37060,8 @@ fn __action260< >( source_code: &str, mode: Mode, - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> Vec + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> Vec { vec![e] } @@ -37042,10 +37072,10 @@ fn __action261< >( source_code: &str, mode: Mode, - (_, mut v, _): (TextSize, Vec, TextSize), + (_, mut v, _): (TextSize, Vec, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> Vec + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> Vec { { v.push(e); @@ -37060,14 +37090,14 @@ fn __action262< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, elts, _): (TextSize, Vec, TextSize), + (_, elts, _): (TextSize, Vec, TextSize), (_, trailing_comma, _): (TextSize, core::option::Option, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { if elts.len() == 1 && trailing_comma.is_none() { - ast::ParenthesizedExpr { + crate::parser::ParenthesizedExpr { expr: elts.into_iter().next().unwrap().into(), range: (location..end_location).into(), } @@ -37084,8 +37114,8 @@ fn __action263< >( source_code: &str, mode: Mode, - (_, e, _): (TextSize, (Option>, ast::ParenthesizedExpr), TextSize), -) -> Vec<(Option>, ast::ParenthesizedExpr)> + (_, e, _): (TextSize, (Option>, crate::parser::ParenthesizedExpr), TextSize), +) -> Vec<(Option>, crate::parser::ParenthesizedExpr)> { vec![e] } @@ -37096,10 +37126,10 @@ fn __action264< >( source_code: &str, mode: Mode, - (_, mut v, _): (TextSize, Vec<(Option>, ast::ParenthesizedExpr)>, TextSize), + (_, mut v, _): (TextSize, Vec<(Option>, crate::parser::ParenthesizedExpr)>, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, e, _): (TextSize, (Option>, ast::ParenthesizedExpr), TextSize), -) -> Vec<(Option>, ast::ParenthesizedExpr)> + (_, e, _): (TextSize, (Option>, crate::parser::ParenthesizedExpr), TextSize), +) -> Vec<(Option>, crate::parser::ParenthesizedExpr)> { { v.push(e); @@ -37113,8 +37143,8 @@ fn __action265< >( source_code: &str, mode: Mode, - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> Vec + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> Vec { vec![e] } @@ -37125,10 +37155,10 @@ fn __action266< >( source_code: &str, mode: Mode, - (_, mut v, _): (TextSize, Vec, TextSize), + (_, mut v, _): (TextSize, Vec, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> Vec + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> Vec { { v.push(e); @@ -37142,8 +37172,8 @@ fn __action267< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::Expr, TextSize), -) -> core::option::Option + (_, __0, _): (TextSize, ast::FStringFormatSpec, TextSize), +) -> core::option::Option { Some(__0) } @@ -37156,7 +37186,7 @@ fn __action268< mode: Mode, __lookbehind: &TextSize, __lookahead: &TextSize, -) -> core::option::Option +) -> core::option::Option { None } @@ -37219,7 +37249,7 @@ fn __action273< mode: Mode, __lookbehind: &TextSize, __lookahead: &TextSize, -) -> alloc::vec::Vec +) -> alloc::vec::Vec { alloc::vec![] } @@ -37230,8 +37260,8 @@ fn __action274< >( source_code: &str, mode: Mode, - (_, v, _): 
(TextSize, alloc::vec::Vec, TextSize), -) -> alloc::vec::Vec + (_, v, _): (TextSize, alloc::vec::Vec, TextSize), +) -> alloc::vec::Vec { v } @@ -37271,8 +37301,8 @@ fn __action277< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, Option, TextSize), -) -> core::option::Option> + (_, __0, _): (TextSize, Option, TextSize), +) -> core::option::Option> { Some(__0) } @@ -37285,7 +37315,7 @@ fn __action278< mode: Mode, __lookbehind: &TextSize, __lookahead: &TextSize, -) -> core::option::Option> +) -> core::option::Option> { None } @@ -37296,10 +37326,10 @@ fn __action279< >( source_code: &str, mode: Mode, - (_, e1, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, e1, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, e2, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> Vec + (_, e2, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> Vec { vec![e1, e2] } @@ -37310,10 +37340,10 @@ fn __action280< >( source_code: &str, mode: Mode, - (_, mut v, _): (TextSize, Vec, TextSize), + (_, mut v, _): (TextSize, Vec, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> Vec + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> Vec { { v.push(e); @@ -37544,8 +37574,8 @@ fn __action293< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> core::option::Option + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> core::option::Option { Some(__0) } @@ -37558,7 +37588,7 @@ fn __action294< mode: Mode, __lookbehind: &TextSize, __lookahead: &TextSize, -) -> core::option::Option +) -> core::option::Option { None } @@ -37570,8 +37600,8 @@ fn __action295< source_code: &str, mode: Mode, (_, _, _): (TextSize, token::Tok, TextSize), - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -37582,8 +37612,8 @@ fn __action296< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> core::option::Option + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> core::option::Option { Some(__0) } @@ -37596,7 +37626,7 @@ fn __action297< mode: Mode, __lookbehind: &TextSize, __lookahead: &TextSize, -) -> core::option::Option +) -> core::option::Option { None } @@ -37608,8 +37638,8 @@ fn __action298< source_code: &str, mode: Mode, (_, _, _): (TextSize, token::Tok, TextSize), - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -37770,8 +37800,8 @@ fn __action306< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> core::option::Option + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> core::option::Option { Some(__0) } @@ -37784,7 +37814,7 @@ fn __action307< mode: Mode, __lookbehind: &TextSize, __lookahead: &TextSize, -) -> core::option::Option +) -> core::option::Option { None } @@ -37796,8 +37826,8 @@ fn __action308< source_code: &str, mode: Mode, (_, _, _): (TextSize, token::Tok, TextSize), - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, 
crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -37858,8 +37888,8 @@ fn __action313< >( source_code: &str, mode: Mode, - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> Vec + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> Vec { vec![e] } @@ -37870,10 +37900,10 @@ fn __action314< >( source_code: &str, mode: Mode, - (_, mut v, _): (TextSize, Vec, TextSize), + (_, mut v, _): (TextSize, Vec, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> Vec + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> Vec { { v.push(e); @@ -37912,7 +37942,7 @@ fn __action317< >( source_code: &str, mode: Mode, - (_, context_expr, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, context_expr, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::WithItem { { @@ -37980,7 +38010,7 @@ fn __action322< >( source_code: &str, mode: Mode, - (_, context_expr, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, context_expr, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::WithItem { { @@ -38048,8 +38078,8 @@ fn __action327< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> core::option::Option + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> core::option::Option { Some(__0) } @@ -38062,7 +38092,7 @@ fn __action328< mode: Mode, __lookbehind: &TextSize, __lookahead: &TextSize, -) -> core::option::Option +) -> core::option::Option { None } @@ -38073,10 +38103,10 @@ fn __action329< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, __1, _): (TextSize, ast::Identifier, TextSize), -) -> (ast::ParenthesizedExpr, ast::Identifier) +) -> (crate::parser::ParenthesizedExpr, ast::Identifier) { (__0, __1) } @@ -38282,7 +38312,7 @@ fn __action345< mode: Mode, __lookbehind: &TextSize, __lookahead: &TextSize, -) -> alloc::vec::Vec<(TextSize, ast::ParenthesizedExpr, ast::Suite)> +) -> alloc::vec::Vec<(TextSize, crate::parser::ParenthesizedExpr, ast::Suite)> { alloc::vec![] } @@ -38293,8 +38323,8 @@ fn __action346< >( source_code: &str, mode: Mode, - (_, v, _): (TextSize, alloc::vec::Vec<(TextSize, ast::ParenthesizedExpr, ast::Suite)>, TextSize), -) -> alloc::vec::Vec<(TextSize, ast::ParenthesizedExpr, ast::Suite)> + (_, v, _): (TextSize, alloc::vec::Vec<(TextSize, crate::parser::ParenthesizedExpr, ast::Suite)>, TextSize), +) -> alloc::vec::Vec<(TextSize, crate::parser::ParenthesizedExpr, ast::Suite)> { v } @@ -38307,10 +38337,10 @@ fn __action347< mode: Mode, (_, __0, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, __1, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, __1, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, __2, _): (TextSize, ast::Suite, TextSize), -) -> (TextSize, ast::ParenthesizedExpr, ast::Suite) +) -> (TextSize, crate::parser::ParenthesizedExpr, ast::Suite) { (__0, __1, __2) } @@ -38592,10 +38622,10 @@ fn __action367< >( source_code: &str, mode: Mode, - (_, e1, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, e1, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, e2, _): (TextSize, 
ast::ParenthesizedExpr, TextSize), -) -> Vec + (_, e2, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> Vec { vec![e1, e2] } @@ -38606,10 +38636,10 @@ fn __action368< >( source_code: &str, mode: Mode, - (_, mut v, _): (TextSize, Vec, TextSize), + (_, mut v, _): (TextSize, Vec, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> Vec + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> Vec { { v.push(e); @@ -38686,11 +38716,11 @@ fn __action374< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, left, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, left, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, right, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, right, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprBinOp { left: Box::new(left.into()), @@ -38706,8 +38736,8 @@ fn __action375< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -38718,8 +38748,8 @@ fn __action376< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> core::option::Option + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> core::option::Option { Some(__0) } @@ -38732,7 +38762,7 @@ fn __action377< mode: Mode, __lookbehind: &TextSize, __lookahead: &TextSize, -) -> core::option::Option +) -> core::option::Option { None } @@ -38744,8 +38774,8 @@ fn __action378< source_code: &str, mode: Mode, (_, _, _): (TextSize, token::Tok, TextSize), - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -38986,8 +39016,8 @@ fn __action396< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> core::option::Option + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> core::option::Option { Some(__0) } @@ -39000,7 +39030,7 @@ fn __action397< mode: Mode, __lookbehind: &TextSize, __lookahead: &TextSize, -) -> core::option::Option +) -> core::option::Option { None } @@ -39012,8 +39042,8 @@ fn __action398< source_code: &str, mode: Mode, (_, _, _): (TextSize, token::Tok, TextSize), - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -39024,8 +39054,8 @@ fn __action399< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> core::option::Option + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> core::option::Option { Some(__0) } @@ -39038,7 +39068,7 @@ fn __action400< mode: Mode, __lookbehind: &TextSize, __lookahead: &TextSize, -) -> core::option::Option +) -> core::option::Option { None } @@ -39049,8 +39079,8 @@ fn __action401< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> core::option::Option + (_, __0, _): (TextSize, 
crate::parser::ParenthesizedExpr, TextSize), +) -> core::option::Option { Some(__0) } @@ -39063,7 +39093,7 @@ fn __action402< mode: Mode, __lookbehind: &TextSize, __lookahead: &TextSize, -) -> core::option::Option +) -> core::option::Option { None } @@ -39075,13 +39105,13 @@ fn __action403< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, body, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, body, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, test, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, test, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, orelse, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, orelse, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprIfExp { test: Box::new(test.into()), @@ -39097,8 +39127,8 @@ fn __action404< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -39109,8 +39139,8 @@ fn __action405< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -39123,7 +39153,7 @@ fn __action406< mode: Mode, __lookbehind: &TextSize, __lookahead: &TextSize, -) -> alloc::vec::Vec +) -> alloc::vec::Vec { alloc::vec![] } @@ -39134,8 +39164,8 @@ fn __action407< >( source_code: &str, mode: Mode, - (_, v, _): (TextSize, alloc::vec::Vec, TextSize), -) -> alloc::vec::Vec + (_, v, _): (TextSize, alloc::vec::Vec, TextSize), +) -> alloc::vec::Vec { v } @@ -39320,8 +39350,8 @@ fn __action422< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> alloc::vec::Vec + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> alloc::vec::Vec { alloc::vec![__0] } @@ -39332,9 +39362,9 @@ fn __action423< >( source_code: &str, mode: Mode, - (_, v, _): (TextSize, alloc::vec::Vec, TextSize), - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> alloc::vec::Vec + (_, v, _): (TextSize, alloc::vec::Vec, TextSize), + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> alloc::vec::Vec { { let mut v = v; v.push(e); v } } @@ -39384,11 +39414,11 @@ fn __action427< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, left, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, left, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, right, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, right, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprBinOp { left: Box::new(left.into()), @@ -39404,8 +39434,8 @@ fn __action428< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -39466,8 
+39496,8 @@ fn __action433< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, (TextSize, ast::ParenthesizedExpr, ast::Suite), TextSize), -) -> alloc::vec::Vec<(TextSize, ast::ParenthesizedExpr, ast::Suite)> + (_, __0, _): (TextSize, (TextSize, crate::parser::ParenthesizedExpr, ast::Suite), TextSize), +) -> alloc::vec::Vec<(TextSize, crate::parser::ParenthesizedExpr, ast::Suite)> { alloc::vec![__0] } @@ -39478,9 +39508,9 @@ fn __action434< >( source_code: &str, mode: Mode, - (_, v, _): (TextSize, alloc::vec::Vec<(TextSize, ast::ParenthesizedExpr, ast::Suite)>, TextSize), - (_, e, _): (TextSize, (TextSize, ast::ParenthesizedExpr, ast::Suite), TextSize), -) -> alloc::vec::Vec<(TextSize, ast::ParenthesizedExpr, ast::Suite)> + (_, v, _): (TextSize, alloc::vec::Vec<(TextSize, crate::parser::ParenthesizedExpr, ast::Suite)>, TextSize), + (_, e, _): (TextSize, (TextSize, crate::parser::ParenthesizedExpr, ast::Suite), TextSize), +) -> alloc::vec::Vec<(TextSize, crate::parser::ParenthesizedExpr, ast::Suite)> { { let mut v = v; v.push(e); v } } @@ -39492,13 +39522,13 @@ fn __action435< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, body, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, body, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, test, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, test, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, orelse, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, orelse, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprIfExp { test: Box::new(test.into()), @@ -39514,8 +39544,8 @@ fn __action436< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -39526,8 +39556,8 @@ fn __action437< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -39813,8 +39843,8 @@ fn __action456< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::Expr, TextSize), -) -> alloc::vec::Vec + (_, __0, _): (TextSize, ast::FStringElement, TextSize), +) -> alloc::vec::Vec { alloc::vec![__0] } @@ -39825,9 +39855,9 @@ fn __action457< >( source_code: &str, mode: Mode, - (_, v, _): (TextSize, alloc::vec::Vec, TextSize), - (_, e, _): (TextSize, ast::Expr, TextSize), -) -> alloc::vec::Vec + (_, v, _): (TextSize, alloc::vec::Vec, TextSize), + (_, e, _): (TextSize, ast::FStringElement, TextSize), +) -> alloc::vec::Vec { { let mut v = v; v.push(e); v } } @@ -39838,8 +39868,8 @@ fn __action458< >( source_code: &str, mode: Mode, - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> Vec + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> Vec { vec![e] } @@ -39850,10 +39880,10 @@ fn __action459< >( source_code: &str, mode: Mode, - (_, mut v, _): (TextSize, Vec, TextSize), + (_, mut v, _): (TextSize, Vec, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> Vec + (_, e, _): (TextSize, 
crate::parser::ParenthesizedExpr, TextSize), +) -> Vec { { v.push(e); @@ -39867,8 +39897,8 @@ fn __action460< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> alloc::vec::Vec + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> alloc::vec::Vec { alloc::vec![__0] } @@ -39879,9 +39909,9 @@ fn __action461< >( source_code: &str, mode: Mode, - (_, v, _): (TextSize, alloc::vec::Vec, TextSize), - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> alloc::vec::Vec + (_, v, _): (TextSize, alloc::vec::Vec, TextSize), + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> alloc::vec::Vec { { let mut v = v; v.push(e); v } } @@ -39892,9 +39922,9 @@ fn __action462< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { __0 } @@ -39906,10 +39936,10 @@ fn __action463< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, values, _): (TextSize, alloc::vec::Vec, TextSize), - (_, last, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, values, _): (TextSize, alloc::vec::Vec, TextSize), + (_, last, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { let values = values.into_iter().chain(std::iter::once(last)).map(ast::Expr::from).collect(); @@ -39923,8 +39953,8 @@ fn __action464< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -39935,8 +39965,8 @@ fn __action465< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> alloc::vec::Vec + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> alloc::vec::Vec { alloc::vec![__0] } @@ -39947,9 +39977,9 @@ fn __action466< >( source_code: &str, mode: Mode, - (_, v, _): (TextSize, alloc::vec::Vec, TextSize), - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> alloc::vec::Vec + (_, v, _): (TextSize, alloc::vec::Vec, TextSize), + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> alloc::vec::Vec { { let mut v = v; v.push(e); v } } @@ -40048,8 +40078,8 @@ fn __action474< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> alloc::vec::Vec + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> alloc::vec::Vec { alloc::vec![__0] } @@ -40060,9 +40090,9 @@ fn __action475< >( source_code: &str, mode: Mode, - (_, v, _): (TextSize, alloc::vec::Vec, TextSize), - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> alloc::vec::Vec + (_, v, _): (TextSize, alloc::vec::Vec, TextSize), + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> alloc::vec::Vec { { let mut v = v; v.push(e); v } } @@ -40073,9 +40103,9 @@ fn __action476< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> 
crate::parser::ParenthesizedExpr { __0 } @@ -40088,9 +40118,9 @@ fn __action477< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, operand, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, operand, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprUnaryOp { operand: Box::new(operand.into()), @@ -40105,8 +40135,8 @@ fn __action478< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -40223,7 +40253,7 @@ fn __action487< mode: Mode, (_, mut i, _): (TextSize, ast::ParameterWithDefault, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, default, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, default, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> ast::ParameterWithDefault { @@ -40371,7 +40401,7 @@ fn __action498< mode: Mode, (_, mut i, _): (TextSize, ast::ParameterWithDefault, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, default, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, default, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), ) -> ast::ParameterWithDefault { @@ -40439,10 +40469,10 @@ fn __action503< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, values, _): (TextSize, alloc::vec::Vec, TextSize), - (_, last, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, values, _): (TextSize, alloc::vec::Vec, TextSize), + (_, last, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { let values = values.into_iter().chain(std::iter::once(last)).map(ast::Expr::from).collect(); @@ -40456,8 +40486,8 @@ fn __action504< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -40469,11 +40499,11 @@ fn __action505< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, left, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, left, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, right, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, right, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprBinOp { left: Box::new(left.into()), @@ -40489,8 +40519,8 @@ fn __action506< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -40502,11 +40532,11 @@ fn __action507< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, left, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, left, _): (TextSize, 
crate::parser::ParenthesizedExpr, TextSize), (_, op, _): (TextSize, ast::Operator, TextSize), - (_, right, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, right, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprBinOp { left: Box::new(left.into()), @@ -40522,8 +40552,8 @@ fn __action508< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -40535,10 +40565,10 @@ fn __action509< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, values, _): (TextSize, alloc::vec::Vec, TextSize), - (_, last, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, values, _): (TextSize, alloc::vec::Vec, TextSize), + (_, last, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { let values = values.into_iter().chain(std::iter::once(last)).map(ast::Expr::from).collect(); @@ -40552,8 +40582,8 @@ fn __action510< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -40615,10 +40645,10 @@ fn __action515< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, left, _): (TextSize, ast::ParenthesizedExpr, TextSize), - (_, comparisons, _): (TextSize, alloc::vec::Vec<(ast::CmpOp, ast::ParenthesizedExpr)>, TextSize), + (_, left, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), + (_, comparisons, _): (TextSize, alloc::vec::Vec<(ast::CmpOp, crate::parser::ParenthesizedExpr)>, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { let (ops, comparators) = comparisons.into_iter().map(|(op, comparator)| (op, ast::Expr::from(comparator))).unzip(); @@ -40632,8 +40662,8 @@ fn __action516< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -40644,8 +40674,8 @@ fn __action517< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, (ast::CmpOp, ast::ParenthesizedExpr), TextSize), -) -> alloc::vec::Vec<(ast::CmpOp, ast::ParenthesizedExpr)> + (_, __0, _): (TextSize, (ast::CmpOp, crate::parser::ParenthesizedExpr), TextSize), +) -> alloc::vec::Vec<(ast::CmpOp, crate::parser::ParenthesizedExpr)> { alloc::vec![__0] } @@ -40656,9 +40686,9 @@ fn __action518< >( source_code: &str, mode: Mode, - (_, v, _): (TextSize, alloc::vec::Vec<(ast::CmpOp, ast::ParenthesizedExpr)>, TextSize), - (_, e, _): (TextSize, (ast::CmpOp, ast::ParenthesizedExpr), TextSize), -) -> alloc::vec::Vec<(ast::CmpOp, ast::ParenthesizedExpr)> + (_, v, _): (TextSize, alloc::vec::Vec<(ast::CmpOp, crate::parser::ParenthesizedExpr)>, TextSize), + (_, e, _): (TextSize, (ast::CmpOp, crate::parser::ParenthesizedExpr), TextSize), +) -> alloc::vec::Vec<(ast::CmpOp, crate::parser::ParenthesizedExpr)> { { let mut v = v; v.push(e); v } } @@ -40670,8 +40700,8 @@ fn 
__action519< source_code: &str, mode: Mode, (_, __0, _): (TextSize, ast::CmpOp, TextSize), - (_, __1, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> (ast::CmpOp, ast::ParenthesizedExpr) + (_, __1, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> (ast::CmpOp, crate::parser::ParenthesizedExpr) { (__0, __1) } @@ -40684,9 +40714,9 @@ fn __action520< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, operand, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, operand, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprUnaryOp { operand: Box::new(operand.into()), @@ -40701,8 +40731,8 @@ fn __action521< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -40714,11 +40744,11 @@ fn __action522< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, left, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, left, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, op, _): (TextSize, ast::Operator, TextSize), - (_, right, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, right, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprBinOp { left: Box::new(left.into()), @@ -40734,8 +40764,8 @@ fn __action523< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -40747,11 +40777,11 @@ fn __action524< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, left, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, left, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, op, _): (TextSize, ast::Operator, TextSize), - (_, right, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, right, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprBinOp { left: Box::new(left.into()), @@ -40767,8 +40797,8 @@ fn __action525< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -40780,10 +40810,10 @@ fn __action526< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, left, _): (TextSize, ast::ParenthesizedExpr, TextSize), - (_, comparisons, _): (TextSize, alloc::vec::Vec<(ast::CmpOp, ast::ParenthesizedExpr)>, TextSize), + (_, left, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), + (_, comparisons, _): (TextSize, alloc::vec::Vec<(ast::CmpOp, crate::parser::ParenthesizedExpr)>, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { let (ops, comparators) = comparisons.into_iter().map(|(op, comparator)| (op, 
ast::Expr::from(comparator))).unzip(); @@ -40797,8 +40827,8 @@ fn __action527< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -40810,11 +40840,11 @@ fn __action528< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, left, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, left, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, right, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, right, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprBinOp { left: Box::new(left.into()), @@ -40830,8 +40860,8 @@ fn __action529< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -40844,9 +40874,9 @@ fn __action530< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, op, _): (TextSize, ast::UnaryOp, TextSize), - (_, operand, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, operand, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprUnaryOp { operand: Box::new(operand.into()), @@ -40861,8 +40891,8 @@ fn __action531< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -40874,11 +40904,11 @@ fn __action532< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, left, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, left, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, right, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, right, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprBinOp { left: Box::new(left.into()), @@ -40894,8 +40924,8 @@ fn __action533< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -40907,11 +40937,11 @@ fn __action534< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, left, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, left, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, right, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, right, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprBinOp { left: Box::new(left.into()), @@ -40927,8 +40957,8 @@ fn __action535< >( source_code: &str, mode: Mode, - (_, __0, _): 
(TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -40940,11 +40970,11 @@ fn __action536< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, left, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, left, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, right, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, right, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprBinOp { left: Box::new(left.into()), @@ -40960,8 +40990,8 @@ fn __action537< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -40974,9 +41004,9 @@ fn __action538< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, value, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, value, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { ast::ExprAwait { value: Box::new(value.into()), range: (location..end_location).into() }.into() @@ -40989,8 +41019,8 @@ fn __action539< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -41001,8 +41031,8 @@ fn __action540< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -41014,10 +41044,10 @@ fn __action541< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, func, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, func, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, arguments, _): (TextSize, ast::Arguments, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprCall { func: Box::new(func.into()), @@ -41033,12 +41063,12 @@ fn __action542< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, value, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, value, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, slice, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, slice, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprSubscript { value: Box::new(value.into()), @@ -41055,11 +41085,11 @@ fn __action543< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, value, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, value, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), 
(_, _, _): (TextSize, token::Tok, TextSize), (_, attr, _): (TextSize, ast::Identifier, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprAttribute { value: Box::new(value.into()), @@ -41076,11 +41106,11 @@ fn __action544< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, left, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, left, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, op, _): (TextSize, ast::Operator, TextSize), - (_, right, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, right, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprBinOp { left: Box::new(left.into()), @@ -41096,8 +41126,8 @@ fn __action545< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -41109,11 +41139,11 @@ fn __action546< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, left, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, left, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, op, _): (TextSize, ast::Operator, TextSize), - (_, right, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, right, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprBinOp { left: Box::new(left.into()), @@ -41129,8 +41159,8 @@ fn __action547< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -41142,7 +41172,7 @@ fn __action548< source_code: &str, mode: Mode, (_, expr, _): (TextSize, ast::Expr, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { expr.into() } @@ -41156,7 +41186,7 @@ fn __action549< (_, location, _): (TextSize, TextSize, TextSize), (_, value, _): (TextSize, ast::Number, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprNumberLiteral { value, @@ -41173,7 +41203,7 @@ fn __action550< (_, location, _): (TextSize, TextSize, TextSize), (_, id, _): (TextSize, ast::Identifier, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprName { id: id.into(), @@ -41190,10 +41220,10 @@ fn __action551< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, elts, _): (TextSize, core::option::Option>, TextSize), + (_, elts, _): (TextSize, core::option::Option>, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { let elts = elts.into_iter().flatten().map(ast::Expr::from).collect(); @@ -41209,11 +41239,11 @@ fn __action552< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, elt, _): (TextSize, 
ast::ParenthesizedExpr, TextSize), + (_, elt, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, generators, _): (TextSize, Vec, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { ast::ExprListComp { elt: Box::new(elt.into()), generators, range: (location..end_location).into() }.into() @@ -41228,15 +41258,15 @@ fn __action553< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, elts, _): (TextSize, Vec, TextSize), + (_, elts, _): (TextSize, Vec, TextSize), (_, trailing_comma, _): (TextSize, core::option::Option, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { if elts.len() == 1 && trailing_comma.is_none() { - ast::ParenthesizedExpr { + crate::parser::ParenthesizedExpr { expr: elts.into_iter().next().unwrap().into(), range: (location..end_location).into(), } @@ -41255,13 +41285,13 @@ fn __action554< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, left, _): (TextSize, core::option::Option>, TextSize), - (_, mid, _): (TextSize, ast::ParenthesizedExpr, TextSize), - (_, right, _): (TextSize, alloc::vec::Vec, TextSize), + (_, left, _): (TextSize, core::option::Option>, TextSize), + (_, mid, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), + (_, right, _): (TextSize, alloc::vec::Vec, TextSize), (_, trailing_comma, _): (TextSize, core::option::Option, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { { if left.is_none() && right.is_empty() && trailing_comma.is_none() { @@ -41271,7 +41301,7 @@ fn __action554< location: mid.start(), })?; } - Ok(ast::ParenthesizedExpr { + Ok(crate::parser::ParenthesizedExpr { expr: mid.into(), range: (location..end_location).into(), }) @@ -41292,7 +41322,7 @@ fn __action555< (_, _, _): (TextSize, token::Tok, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprTuple { elts: Vec::new(), @@ -41309,12 +41339,12 @@ fn __action556< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { - ast::ParenthesizedExpr { + crate::parser::ParenthesizedExpr { expr: e.into(), range: (location..end_location).into(), } @@ -41328,11 +41358,11 @@ fn __action557< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, elt, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, elt, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, generators, _): (TextSize, Vec, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprGeneratorExp { elt: Box::new(elt.into()), @@ -41350,10 +41380,10 @@ fn __action558< (_, _, _): 
(TextSize, token::Tok, TextSize), (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { { Err(LexicalError{ @@ -41371,10 +41401,10 @@ fn __action559< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, e, _): (TextSize, core::option::Option>, ast::ParenthesizedExpr)>>, TextSize), + (_, e, _): (TextSize, core::option::Option>, crate::parser::ParenthesizedExpr)>>, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { let (keys, values) = e @@ -41394,11 +41424,11 @@ fn __action560< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, e1, _): (TextSize, (ast::ParenthesizedExpr, ast::ParenthesizedExpr), TextSize), + (_, e1, _): (TextSize, (crate::parser::ParenthesizedExpr, crate::parser::ParenthesizedExpr), TextSize), (_, generators, _): (TextSize, Vec, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { ast::ExprDictComp { @@ -41418,10 +41448,10 @@ fn __action561< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, elts, _): (TextSize, Vec, TextSize), + (_, elts, _): (TextSize, Vec, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { let elts = elts.into_iter().map(ast::Expr::from).collect(); @@ -41440,11 +41470,11 @@ fn __action562< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, elt, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, elt, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, generators, _): (TextSize, Vec, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprSetComp { elt: Box::new(elt.into()), @@ -41462,7 +41492,7 @@ fn __action563< (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprBooleanLiteral { value: true, range: (location..end_location).into() }.into() } @@ -41476,7 +41506,7 @@ fn __action564< (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprBooleanLiteral { value: false, range: (location..end_location).into() }.into() } @@ -41490,7 +41520,7 @@ fn __action565< (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprNoneLiteral { range: (location..end_location).into() }.into() } @@ 
-41504,7 +41534,7 @@ fn __action566< (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprEllipsisLiteral { range: (location..end_location).into() }.into() } @@ -41515,8 +41545,8 @@ fn __action567< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, Vec<(Option>, ast::ParenthesizedExpr)>, TextSize), -) -> core::option::Option>, ast::ParenthesizedExpr)>> + (_, __0, _): (TextSize, Vec<(Option>, crate::parser::ParenthesizedExpr)>, TextSize), +) -> core::option::Option>, crate::parser::ParenthesizedExpr)>> { Some(__0) } @@ -41529,7 +41559,7 @@ fn __action568< mode: Mode, __lookbehind: &TextSize, __lookahead: &TextSize, -) -> core::option::Option>, ast::ParenthesizedExpr)>> +) -> core::option::Option>, crate::parser::ParenthesizedExpr)>> { None } @@ -41542,7 +41572,7 @@ fn __action569< mode: Mode, __lookbehind: &TextSize, __lookahead: &TextSize, -) -> alloc::vec::Vec +) -> alloc::vec::Vec { alloc::vec![] } @@ -41553,8 +41583,8 @@ fn __action570< >( source_code: &str, mode: Mode, - (_, v, _): (TextSize, alloc::vec::Vec, TextSize), -) -> alloc::vec::Vec + (_, v, _): (TextSize, alloc::vec::Vec, TextSize), +) -> alloc::vec::Vec { v } @@ -41566,8 +41596,8 @@ fn __action571< source_code: &str, mode: Mode, (_, _, _): (TextSize, token::Tok, TextSize), - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -41578,8 +41608,8 @@ fn __action572< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, Vec, TextSize), -) -> core::option::Option> + (_, __0, _): (TextSize, Vec, TextSize), +) -> core::option::Option> { Some(__0) } @@ -41592,7 +41622,7 @@ fn __action573< mode: Mode, __lookbehind: &TextSize, __lookahead: &TextSize, -) -> core::option::Option> +) -> core::option::Option> { None } @@ -41603,9 +41633,9 @@ fn __action574< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, Vec, TextSize), + (_, __0, _): (TextSize, Vec, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), -) -> Vec +) -> Vec { __0 } @@ -41616,8 +41646,8 @@ fn __action575< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, Vec, TextSize), -) -> core::option::Option> + (_, __0, _): (TextSize, Vec, TextSize), +) -> core::option::Option> { Some(__0) } @@ -41630,7 +41660,7 @@ fn __action576< mode: Mode, __lookbehind: &TextSize, __lookahead: &TextSize, -) -> core::option::Option> +) -> core::option::Option> { None } @@ -41642,11 +41672,11 @@ fn __action577< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, left, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, left, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, op, _): (TextSize, ast::Operator, TextSize), - (_, right, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, right, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprBinOp { left: Box::new(left.into()), @@ -41662,8 +41692,8 @@ fn __action578< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr 
{ __0 } @@ -41676,9 +41706,9 @@ fn __action579< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, op, _): (TextSize, ast::UnaryOp, TextSize), - (_, operand, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, operand, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprUnaryOp { operand: Box::new(operand.into()), @@ -41693,8 +41723,8 @@ fn __action580< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -41705,8 +41735,8 @@ fn __action581< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> alloc::vec::Vec + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> alloc::vec::Vec { alloc::vec![__0] } @@ -41717,9 +41747,9 @@ fn __action582< >( source_code: &str, mode: Mode, - (_, v, _): (TextSize, alloc::vec::Vec, TextSize), - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> alloc::vec::Vec + (_, v, _): (TextSize, alloc::vec::Vec, TextSize), + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> alloc::vec::Vec { { let mut v = v; v.push(e); v } } @@ -41731,11 +41761,11 @@ fn __action583< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, left, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, left, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, right, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, right, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprBinOp { left: Box::new(left.into()), @@ -41751,8 +41781,8 @@ fn __action584< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -41765,9 +41795,9 @@ fn __action585< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, value, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, value, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { ast::ExprAwait { value: Box::new(value.into()), range: (location..end_location).into() }.into() @@ -41780,8 +41810,8 @@ fn __action586< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -41792,8 +41822,8 @@ fn __action587< >( source_code: &str, mode: Mode, - (_, __0, _): (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + (_, __0, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { __0 } @@ -41805,10 +41835,10 @@ fn __action588< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, func, _): (TextSize, ast::ParenthesizedExpr, 
TextSize), + (_, func, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, arguments, _): (TextSize, ast::Arguments, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprCall { func: Box::new(func.into()), @@ -41824,12 +41854,12 @@ fn __action589< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, value, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, value, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, slice, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, slice, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprSubscript { value: Box::new(value.into()), @@ -41846,11 +41876,11 @@ fn __action590< source_code: &str, mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), - (_, value, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, value, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, attr, _): (TextSize, ast::Identifier, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprAttribute { value: Box::new(value.into()), @@ -41867,7 +41897,7 @@ fn __action591< source_code: &str, mode: Mode, (_, expr, _): (TextSize, ast::Expr, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { expr.into() } @@ -41881,7 +41911,7 @@ fn __action592< (_, location, _): (TextSize, TextSize, TextSize), (_, value, _): (TextSize, ast::Number, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprNumberLiteral { value, @@ -41898,7 +41928,7 @@ fn __action593< (_, location, _): (TextSize, TextSize, TextSize), (_, id, _): (TextSize, ast::Identifier, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprName { id: id.into(), @@ -41915,10 +41945,10 @@ fn __action594< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, elts, _): (TextSize, core::option::Option>, TextSize), + (_, elts, _): (TextSize, core::option::Option>, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { let elts = elts.into_iter().flatten().map(ast::Expr::from).collect(); @@ -41934,11 +41964,11 @@ fn __action595< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, elt, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, elt, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, generators, _): (TextSize, Vec, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { ast::ExprListComp { elt: Box::new(elt.into()), generators, range: (location..end_location).into() }.into() @@ -41953,13 +41983,13 @@ fn __action596< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, 
TextSize), - (_, left, _): (TextSize, core::option::Option>, TextSize), - (_, mid, _): (TextSize, ast::ParenthesizedExpr, TextSize), - (_, right, _): (TextSize, alloc::vec::Vec, TextSize), + (_, left, _): (TextSize, core::option::Option>, TextSize), + (_, mid, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), + (_, right, _): (TextSize, alloc::vec::Vec, TextSize), (_, trailing_comma, _): (TextSize, core::option::Option, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { { if left.is_none() && right.is_empty() && trailing_comma.is_none() { @@ -41969,7 +41999,7 @@ fn __action596< location: mid.start(), })?; } - Ok(ast::ParenthesizedExpr { + Ok(crate::parser::ParenthesizedExpr { expr: mid.into(), range: (location..end_location).into(), }) @@ -41990,7 +42020,7 @@ fn __action597< (_, _, _): (TextSize, token::Tok, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprTuple { elts: Vec::new(), @@ -42007,12 +42037,12 @@ fn __action598< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { - ast::ParenthesizedExpr { + crate::parser::ParenthesizedExpr { expr: e.into(), range: (location..end_location).into(), } @@ -42026,11 +42056,11 @@ fn __action599< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, elt, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, elt, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, generators, _): (TextSize, Vec, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { ast::ExprGeneratorExp { elt: Box::new(elt.into()), @@ -42048,10 +42078,10 @@ fn __action600< (_, _, _): (TextSize, token::Tok, TextSize), (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, e, _): (TextSize, ast::ParenthesizedExpr, TextSize), + (_, e, _): (TextSize, crate::parser::ParenthesizedExpr, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { { Err(LexicalError{ @@ -42069,10 +42099,10 @@ fn __action601< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, e, _): (TextSize, core::option::Option>, ast::ParenthesizedExpr)>>, TextSize), + (_, e, _): (TextSize, core::option::Option>, crate::parser::ParenthesizedExpr)>>, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), (_, end_location, _): (TextSize, TextSize, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { { let (keys, values) = e @@ -42092,11 +42122,11 @@ fn __action602< mode: Mode, (_, location, _): (TextSize, TextSize, TextSize), (_, _, _): (TextSize, token::Tok, TextSize), - (_, e1, _): (TextSize, (ast::ParenthesizedExpr, ast::ParenthesizedExpr), TextSize), + (_, e1, _): (TextSize, (crate::parser::ParenthesizedExpr, 
[Generated LALRPOP parser actions (hunks @@ -42116 … through @@ -57790 …): a mechanical type migration across the generated `__action*` helpers. Every helper that took or returned `ast::ParenthesizedExpr`, whether directly, in tuples, inside `Vec<…>`, `alloc::vec::Vec<…>`, or `core::option::Option<…>` wrappers, or in the `Result<…>`-returning fallible actions, now uses `crate::parser::ParenthesizedExpr`, and the f-string format-spec action's return type changes from `ast::Expr` to `ast::FStringFormatSpec`. One hunk recovered intact as an example of the pattern:]

@@ -42160,7 +42190,7 @@ fn __action605<
     (_, location, _): (TextSize, TextSize, TextSize),
     (_, _, _): (TextSize, token::Tok, TextSize),
     (_, end_location, _): (TextSize, TextSize, TextSize),
-) -> ast::ParenthesizedExpr
+) -> crate::parser::ParenthesizedExpr
 {
     ast::ExprBooleanLiteral { value: true, range: (location..end_location).into() }.into()
 }
core::option::Option + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> core::option::Option { let __start0 = __0.0; let __end0 = __1.2; @@ -57822,7 +57852,7 @@ fn __action1099< __4: (TextSize, core::option::Option, TextSize), __5: (TextSize, ast::Parameters, TextSize), __6: (TextSize, token::Tok, TextSize), - __7: (TextSize, ast::ParenthesizedExpr, TextSize), + __7: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __8: (TextSize, token::Tok, TextSize), __9: (TextSize, ast::Suite, TextSize), ) -> ast::Stmt @@ -57903,7 +57933,7 @@ fn __action1101< __3: (TextSize, core::option::Option, TextSize), __4: (TextSize, ast::Parameters, TextSize), __5: (TextSize, token::Tok, TextSize), - __6: (TextSize, ast::ParenthesizedExpr, TextSize), + __6: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __7: (TextSize, token::Tok, TextSize), __8: (TextSize, ast::Suite, TextSize), ) -> ast::Stmt @@ -58030,8 +58060,8 @@ fn __action1105< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> core::option::Option + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> core::option::Option { let __start0 = __0.0; let __end0 = __1.2; @@ -58057,7 +58087,7 @@ fn __action1106< mode: Mode, __0: (TextSize, ast::Identifier, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __3: (TextSize, TextSize, TextSize), ) -> ast::Parameter { @@ -58115,7 +58145,7 @@ fn __action1108< mode: Mode, __0: (TextSize, ast::Identifier, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __3: (TextSize, TextSize, TextSize), ) -> ast::TypeParam { @@ -58173,7 +58203,7 @@ fn __action1110< mode: Mode, __0: (TextSize, ast::Identifier, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __3: (TextSize, TextSize, TextSize), ) -> ast::ParameterWithDefault { @@ -58230,8 +58260,8 @@ fn __action1112< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> core::option::Option + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> core::option::Option { let __start0 = __0.0; let __end0 = __1.2; @@ -58257,7 +58287,7 @@ fn __action1113< mode: Mode, __0: (TextSize, ast::Identifier, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __3: (TextSize, TextSize, TextSize), ) -> ast::Parameter { @@ -58414,7 +58444,7 @@ fn __action1119< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, TextSize, TextSize), ) -> ast::Mod { @@ -58444,7 +58474,7 @@ fn __action1120< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, alloc::vec::Vec, TextSize), __3: (TextSize, TextSize, TextSize), ) -> ast::Mod @@ -58645,9 +58675,9 @@ fn __action1127< mode: Mode, __0: (TextSize, token::Tok, TextSize), __1: (TextSize, token::Tok, 
TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __3: (TextSize, token::Tok, TextSize), - __4: (TextSize, ast::ParenthesizedExpr, TextSize), + __4: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __5: (TextSize, token::Tok, TextSize), __6: (TextSize, ast::Suite, TextSize), __7: (TextSize, token::Tok, TextSize), @@ -58687,9 +58717,9 @@ fn __action1128< mode: Mode, __0: (TextSize, token::Tok, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __3: (TextSize, token::Tok, TextSize), - __4: (TextSize, ast::ParenthesizedExpr, TextSize), + __4: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __5: (TextSize, token::Tok, TextSize), __6: (TextSize, ast::Suite, TextSize), ) -> ast::Stmt @@ -58724,9 +58754,9 @@ fn __action1129< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __4: (TextSize, token::Tok, TextSize), __5: (TextSize, ast::Suite, TextSize), __6: (TextSize, token::Tok, TextSize), @@ -58764,9 +58794,9 @@ fn __action1130< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __4: (TextSize, token::Tok, TextSize), __5: (TextSize, ast::Suite, TextSize), ) -> ast::Stmt @@ -58952,7 +58982,7 @@ fn __action1135< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, ast::Suite, TextSize), __4: (TextSize, token::Tok, TextSize), @@ -58988,7 +59018,7 @@ fn __action1136< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, ast::Suite, TextSize), ) -> ast::Stmt @@ -59394,8 +59424,8 @@ fn __action1147< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> core::option::Option + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> core::option::Option { let __start0 = __0.0; let __end0 = __1.2; @@ -59420,9 +59450,9 @@ fn __action1148< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __4: (TextSize, TextSize, TextSize), ) -> ast::Stmt { @@ -59452,7 +59482,7 @@ fn __action1149< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, TextSize, 
TextSize), ) -> ast::Stmt { @@ -59482,10 +59512,10 @@ fn __action1150< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, ast::Suite, TextSize), -) -> alloc::vec::Vec<(TextSize, ast::ParenthesizedExpr, ast::Suite)> +) -> alloc::vec::Vec<(TextSize, crate::parser::ParenthesizedExpr, ast::Suite)> { let __start0 = __0.0; let __end0 = __3.2; @@ -59511,12 +59541,12 @@ fn __action1151< >( source_code: &str, mode: Mode, - __0: (TextSize, alloc::vec::Vec<(TextSize, ast::ParenthesizedExpr, ast::Suite)>, TextSize), + __0: (TextSize, alloc::vec::Vec<(TextSize, crate::parser::ParenthesizedExpr, ast::Suite)>, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __3: (TextSize, token::Tok, TextSize), __4: (TextSize, ast::Suite, TextSize), -) -> alloc::vec::Vec<(TextSize, ast::ParenthesizedExpr, ast::Suite)> +) -> alloc::vec::Vec<(TextSize, crate::parser::ParenthesizedExpr, ast::Suite)> { let __start0 = __1.0; let __end0 = __4.2; @@ -59544,7 +59574,7 @@ fn __action1152< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, ast::Suite, TextSize), __4: (TextSize, core::option::Option<(TextSize, ast::Suite)>, TextSize), @@ -59578,10 +59608,10 @@ fn __action1153< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, ast::Suite, TextSize), - __4: (TextSize, alloc::vec::Vec<(TextSize, ast::ParenthesizedExpr, ast::Suite)>, TextSize), + __4: (TextSize, alloc::vec::Vec<(TextSize, crate::parser::ParenthesizedExpr, ast::Suite)>, TextSize), __5: (TextSize, core::option::Option<(TextSize, ast::Suite)>, TextSize), ) -> ast::Stmt { @@ -59640,7 +59670,7 @@ fn __action1155< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, ast::Suite, TextSize), __4: (TextSize, token::Tok, TextSize), @@ -59676,7 +59706,7 @@ fn __action1156< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, ast::Suite, TextSize), ) -> ast::Stmt @@ -59708,10 +59738,10 @@ fn __action1157< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, ast::Suite, TextSize), - __4: (TextSize, alloc::vec::Vec<(TextSize, ast::ParenthesizedExpr, ast::Suite)>, TextSize), + __4: (TextSize, alloc::vec::Vec<(TextSize, crate::parser::ParenthesizedExpr, ast::Suite)>, TextSize), __5: (TextSize, token::Tok, TextSize), __6: (TextSize, token::Tok, TextSize), __7: (TextSize, ast::Suite, TextSize), @@ -59746,10 +59776,10 @@ fn __action1158< source_code: &str, 
mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, ast::Suite, TextSize), - __4: (TextSize, alloc::vec::Vec<(TextSize, ast::ParenthesizedExpr, ast::Suite)>, TextSize), + __4: (TextSize, alloc::vec::Vec<(TextSize, crate::parser::ParenthesizedExpr, ast::Suite)>, TextSize), ) -> ast::Stmt { let __start0 = __4.2; @@ -59779,9 +59809,9 @@ fn __action1159< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), -) -> alloc::vec::Vec +) -> alloc::vec::Vec { let __start0 = __0.0; let __end0 = __1.2; @@ -59805,10 +59835,10 @@ fn __action1160< >( source_code: &str, mode: Mode, - __0: (TextSize, alloc::vec::Vec, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, alloc::vec::Vec, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), -) -> alloc::vec::Vec +) -> alloc::vec::Vec { let __start0 = __1.0; let __end0 = __2.2; @@ -59939,9 +59969,9 @@ fn __action1165< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), -) -> alloc::vec::Vec +) -> alloc::vec::Vec { let __start0 = __0.0; let __end0 = __1.2; @@ -59965,10 +59995,10 @@ fn __action1166< >( source_code: &str, mode: Mode, - __0: (TextSize, alloc::vec::Vec, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, alloc::vec::Vec, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), -) -> alloc::vec::Vec +) -> alloc::vec::Vec { let __start0 = __1.0; let __end0 = __2.2; @@ -59993,9 +60023,9 @@ fn __action1167< >( source_code: &str, mode: Mode, - __0: (TextSize, Vec, TextSize), + __0: (TextSize, Vec, TextSize), __1: (TextSize, token::Tok, TextSize), -) -> core::option::Option> +) -> core::option::Option> { let __start0 = __0.0; let __end0 = __1.2; @@ -60020,13 +60050,13 @@ fn __action1168< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __4: (TextSize, token::Tok, TextSize), __5: (TextSize, token::Tok, TextSize), __6: (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { let __start0 = __1.0; let __end0 = __2.2; @@ -60056,11 +60086,11 @@ fn __action1169< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, token::Tok, TextSize), __4: (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { let __start0 = __0.2; let __end0 = __1.0; @@ -60090,14 +60120,14 @@ fn __action1170< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), - __4: (TextSize, alloc::vec::Vec, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __4: 
(TextSize, alloc::vec::Vec, TextSize), __5: (TextSize, token::Tok, TextSize), __6: (TextSize, token::Tok, TextSize), __7: (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { let __start0 = __1.0; let __end0 = __2.2; @@ -60128,12 +60158,12 @@ fn __action1171< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), - __2: (TextSize, alloc::vec::Vec, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __2: (TextSize, alloc::vec::Vec, TextSize), __3: (TextSize, token::Tok, TextSize), __4: (TextSize, token::Tok, TextSize), __5: (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { let __start0 = __0.2; let __end0 = __1.0; @@ -60164,12 +60194,12 @@ fn __action1172< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __4: (TextSize, token::Tok, TextSize), __5: (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { let __start0 = __1.0; let __end0 = __2.2; @@ -60198,10 +60228,10 @@ fn __action1173< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { let __start0 = __0.2; let __end0 = __1.0; @@ -60230,13 +60260,13 @@ fn __action1174< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), - __4: (TextSize, alloc::vec::Vec, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __4: (TextSize, alloc::vec::Vec, TextSize), __5: (TextSize, token::Tok, TextSize), __6: (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { let __start0 = __1.0; let __end0 = __2.2; @@ -60266,11 +60296,11 @@ fn __action1175< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), - __2: (TextSize, alloc::vec::Vec, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __2: (TextSize, alloc::vec::Vec, TextSize), __3: (TextSize, token::Tok, TextSize), __4: (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { let __start0 = __0.2; let __end0 = __1.0; @@ -60300,13 +60330,13 @@ fn __action1176< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __4: (TextSize, token::Tok, TextSize), __5: (TextSize, token::Tok, TextSize), __6: (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { let __start0 = __1.0; let __end0 = __2.2; @@ -60336,11 +60366,11 @@ fn __action1177< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, token::Tok, TextSize), __4: (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { let __start0 = 
__0.2; let __end0 = __1.0; @@ -60370,14 +60400,14 @@ fn __action1178< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), - __4: (TextSize, alloc::vec::Vec, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __4: (TextSize, alloc::vec::Vec, TextSize), __5: (TextSize, token::Tok, TextSize), __6: (TextSize, token::Tok, TextSize), __7: (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { let __start0 = __1.0; let __end0 = __2.2; @@ -60408,12 +60438,12 @@ fn __action1179< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), - __2: (TextSize, alloc::vec::Vec, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __2: (TextSize, alloc::vec::Vec, TextSize), __3: (TextSize, token::Tok, TextSize), __4: (TextSize, token::Tok, TextSize), __5: (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { let __start0 = __0.2; let __end0 = __1.0; @@ -60444,12 +60474,12 @@ fn __action1180< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __4: (TextSize, token::Tok, TextSize), __5: (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { let __start0 = __1.0; let __end0 = __2.2; @@ -60478,10 +60508,10 @@ fn __action1181< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { let __start0 = __0.2; let __end0 = __1.0; @@ -60510,13 +60540,13 @@ fn __action1182< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), - __4: (TextSize, alloc::vec::Vec, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __4: (TextSize, alloc::vec::Vec, TextSize), __5: (TextSize, token::Tok, TextSize), __6: (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { let __start0 = __1.0; let __end0 = __2.2; @@ -60546,11 +60576,11 @@ fn __action1183< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), - __2: (TextSize, alloc::vec::Vec, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __2: (TextSize, alloc::vec::Vec, TextSize), __3: (TextSize, token::Tok, TextSize), __4: (TextSize, TextSize, TextSize), -) -> Result> +) -> Result> { let __start0 = __0.2; let __end0 = __1.0; @@ -61220,7 +61250,7 @@ fn __action1206< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, ast::Identifier, TextSize), __4: (TextSize, token::Tok, TextSize), @@ -61255,7 +61285,7 @@ fn __action1207< mode: Mode, __0: (TextSize, token::Tok, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, 
ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __3: (TextSize, token::Tok, TextSize), __4: (TextSize, ast::Identifier, TextSize), __5: (TextSize, token::Tok, TextSize), @@ -61289,7 +61319,7 @@ fn __action1208< >( source_code: &str, mode: Mode, - __0: (TextSize, Vec, TextSize), + __0: (TextSize, Vec, TextSize), __1: (TextSize, token::Tok, TextSize), ) -> Vec { @@ -61316,7 +61346,7 @@ fn __action1209< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, token::Tok, TextSize), ) -> Vec @@ -61346,7 +61376,7 @@ fn __action1210< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), ) -> Vec { @@ -61373,7 +61403,7 @@ fn __action1211< >( source_code: &str, mode: Mode, - __0: (TextSize, Vec, TextSize), + __0: (TextSize, Vec, TextSize), __1: (TextSize, token::Tok, TextSize), ) -> core::option::Option> { @@ -61400,7 +61430,7 @@ fn __action1212< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, ast::WithItem, TextSize), __4: (TextSize, token::Tok, TextSize), @@ -61466,7 +61496,7 @@ fn __action1214< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, ast::WithItem, TextSize), __4: (TextSize, alloc::vec::Vec, TextSize), @@ -61536,7 +61566,7 @@ fn __action1216< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, ast::WithItem, TextSize), __4: (TextSize, token::Tok, TextSize), @@ -61598,7 +61628,7 @@ fn __action1218< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, ast::WithItem, TextSize), __4: (TextSize, alloc::vec::Vec, TextSize), @@ -61664,8 +61694,8 @@ fn __action1220< source_code: &str, mode: Mode, __0: (TextSize, ast::CmpOp, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> alloc::vec::Vec<(ast::CmpOp, ast::ParenthesizedExpr)> + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> alloc::vec::Vec<(ast::CmpOp, crate::parser::ParenthesizedExpr)> { let __start0 = __0.0; let __end0 = __1.2; @@ -61689,10 +61719,10 @@ fn __action1221< >( source_code: &str, mode: Mode, - __0: (TextSize, alloc::vec::Vec<(ast::CmpOp, ast::ParenthesizedExpr)>, TextSize), + __0: (TextSize, alloc::vec::Vec<(ast::CmpOp, crate::parser::ParenthesizedExpr)>, TextSize), __1: (TextSize, ast::CmpOp, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> alloc::vec::Vec<(ast::CmpOp, ast::ParenthesizedExpr)> + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> alloc::vec::Vec<(ast::CmpOp, crate::parser::ParenthesizedExpr)> { let __start0 = __1.0; let __end0 = __2.2; @@ -61889,10 +61919,10 @@ fn __action1228< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, ast::Operator, TextSize), - __2: 
(TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -61919,10 +61949,10 @@ fn __action1229< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -61949,10 +61979,10 @@ fn __action1230< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -61979,9 +62009,9 @@ fn __action1231< >( source_code: &str, mode: Mode, - __0: (TextSize, alloc::vec::Vec, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __0: (TextSize, alloc::vec::Vec, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -62007,9 +62037,9 @@ fn __action1232< >( source_code: &str, mode: Mode, - __0: (TextSize, alloc::vec::Vec, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __0: (TextSize, alloc::vec::Vec, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -62065,10 +62095,10 @@ fn __action1234< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, ast::Operator, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -62095,10 +62125,10 @@ fn __action1235< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, ast::Operator, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -62156,9 +62186,9 @@ fn __action1237< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::Stmt { let __start0 = __3.2; @@ -62188,7 +62218,7 @@ fn __action1238< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::Stmt { let __start0 = __1.2; @@ 
-62216,7 +62246,7 @@ fn __action1239< source_code: &str, mode: Mode, __0: (TextSize, ast::Number, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __0.2; @@ -62242,7 +62272,7 @@ fn __action1240< source_code: &str, mode: Mode, __0: (TextSize, ast::Identifier, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __0.2; @@ -62268,9 +62298,9 @@ fn __action1241< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, core::option::Option>, TextSize), + __1: (TextSize, core::option::Option>, TextSize), __2: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -62298,10 +62328,10 @@ fn __action1242< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, Vec, TextSize), __3: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __3.2; let __end0 = __3.2; @@ -62330,10 +62360,10 @@ fn __action1243< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __3.2; let __end0 = __3.2; @@ -62362,9 +62392,9 @@ fn __action1244< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -62392,12 +62422,12 @@ fn __action1245< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __4: (TextSize, token::Tok, TextSize), __5: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __5.2; let __end0 = __5.2; @@ -62428,10 +62458,10 @@ fn __action1246< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __3.2; let __end0 = __3.2; @@ -62460,13 +62490,13 @@ fn __action1247< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), - __4: (TextSize, alloc::vec::Vec, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __4: (TextSize, alloc::vec::Vec, TextSize), __5: (TextSize, token::Tok, TextSize), __6: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __6.2; let __end0 = __6.2; @@ -62498,11 +62528,11 @@ fn __action1248< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), - __2: (TextSize, 
alloc::vec::Vec, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __2: (TextSize, alloc::vec::Vec, TextSize), __3: (TextSize, token::Tok, TextSize), __4: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __4.2; let __end0 = __4.2; @@ -62532,11 +62562,11 @@ fn __action1249< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __4: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __4.2; let __end0 = __4.2; @@ -62566,9 +62596,9 @@ fn __action1250< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __2.2; let __end0 = __2.2; @@ -62596,12 +62626,12 @@ fn __action1251< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), - __4: (TextSize, alloc::vec::Vec, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __4: (TextSize, alloc::vec::Vec, TextSize), __5: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __5.2; let __end0 = __5.2; @@ -62632,10 +62662,10 @@ fn __action1252< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), - __2: (TextSize, alloc::vec::Vec, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __2: (TextSize, alloc::vec::Vec, TextSize), __3: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __3.2; let __end0 = __3.2; @@ -62665,7 +62695,7 @@ fn __action1253< mode: Mode, __0: (TextSize, token::Tok, TextSize), __1: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -62692,9 +62722,9 @@ fn __action1254< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -62722,10 +62752,10 @@ fn __action1255< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, Vec, TextSize), __3: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __3.2; let __end0 = __3.2; @@ -62755,9 +62785,9 @@ fn __action1256< mode: Mode, __0: (TextSize, token::Tok, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __3: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __3.2; let __end0 = __3.2; @@ -62786,9 +62816,9 @@ fn __action1257< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: 
(TextSize, core::option::Option>, ast::ParenthesizedExpr)>>, TextSize), + __1: (TextSize, core::option::Option>, crate::parser::ParenthesizedExpr)>>, TextSize), __2: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -62816,10 +62846,10 @@ fn __action1258< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, (ast::ParenthesizedExpr, ast::ParenthesizedExpr), TextSize), + __1: (TextSize, (crate::parser::ParenthesizedExpr, crate::parser::ParenthesizedExpr), TextSize), __2: (TextSize, Vec, TextSize), __3: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __3.2; let __end0 = __3.2; @@ -62848,9 +62878,9 @@ fn __action1259< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -62878,10 +62908,10 @@ fn __action1260< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, Vec, TextSize), __3: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __3.2; let __end0 = __3.2; @@ -62910,7 +62940,7 @@ fn __action1261< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __0.2; @@ -62936,7 +62966,7 @@ fn __action1262< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __0.2; @@ -62962,7 +62992,7 @@ fn __action1263< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __0.2; @@ -62988,7 +63018,7 @@ fn __action1264< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __0.2; @@ -63014,7 +63044,7 @@ fn __action1265< source_code: &str, mode: Mode, __0: (TextSize, ast::Number, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __0.2; @@ -63040,7 +63070,7 @@ fn __action1266< source_code: &str, mode: Mode, __0: (TextSize, ast::Identifier, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __0.2; @@ -63066,9 +63096,9 @@ fn __action1267< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, core::option::Option>, TextSize), + __1: (TextSize, core::option::Option>, TextSize), __2: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -63096,10 +63126,10 @@ fn __action1268< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, Vec, TextSize), __3: (TextSize, token::Tok, TextSize), -) -> 
ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __3.2; let __end0 = __3.2; @@ -63128,12 +63158,12 @@ fn __action1269< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __4: (TextSize, token::Tok, TextSize), __5: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __5.2; let __end0 = __5.2; @@ -63164,10 +63194,10 @@ fn __action1270< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __3.2; let __end0 = __3.2; @@ -63196,13 +63226,13 @@ fn __action1271< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), - __4: (TextSize, alloc::vec::Vec, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __4: (TextSize, alloc::vec::Vec, TextSize), __5: (TextSize, token::Tok, TextSize), __6: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __6.2; let __end0 = __6.2; @@ -63234,11 +63264,11 @@ fn __action1272< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), - __2: (TextSize, alloc::vec::Vec, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __2: (TextSize, alloc::vec::Vec, TextSize), __3: (TextSize, token::Tok, TextSize), __4: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __4.2; let __end0 = __4.2; @@ -63268,11 +63298,11 @@ fn __action1273< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __4: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __4.2; let __end0 = __4.2; @@ -63302,9 +63332,9 @@ fn __action1274< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __2.2; let __end0 = __2.2; @@ -63332,12 +63362,12 @@ fn __action1275< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), - __4: (TextSize, alloc::vec::Vec, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __4: (TextSize, alloc::vec::Vec, TextSize), __5: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __5.2; let __end0 = __5.2; @@ -63368,10 +63398,10 @@ fn __action1276< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), - __2: (TextSize, alloc::vec::Vec, 
TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __2: (TextSize, alloc::vec::Vec, TextSize), __3: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __3.2; let __end0 = __3.2; @@ -63401,7 +63431,7 @@ fn __action1277< mode: Mode, __0: (TextSize, token::Tok, TextSize), __1: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -63428,9 +63458,9 @@ fn __action1278< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -63458,10 +63488,10 @@ fn __action1279< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, Vec, TextSize), __3: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __3.2; let __end0 = __3.2; @@ -63491,9 +63521,9 @@ fn __action1280< mode: Mode, __0: (TextSize, token::Tok, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __3: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __3.2; let __end0 = __3.2; @@ -63522,9 +63552,9 @@ fn __action1281< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, core::option::Option>, ast::ParenthesizedExpr)>>, TextSize), + __1: (TextSize, core::option::Option>, crate::parser::ParenthesizedExpr)>>, TextSize), __2: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -63552,10 +63582,10 @@ fn __action1282< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, (ast::ParenthesizedExpr, ast::ParenthesizedExpr), TextSize), + __1: (TextSize, (crate::parser::ParenthesizedExpr, crate::parser::ParenthesizedExpr), TextSize), __2: (TextSize, Vec, TextSize), __3: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __3.2; let __end0 = __3.2; @@ -63584,9 +63614,9 @@ fn __action1283< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -63614,10 +63644,10 @@ fn __action1284< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, Vec, TextSize), __3: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __3.2; let __end0 = __3.2; @@ -63646,7 +63676,7 @@ fn __action1285< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __0.2; @@ -63672,7 +63702,7 @@ fn __action1286< source_code: &str, 
mode: Mode, __0: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __0.2; @@ -63698,7 +63728,7 @@ fn __action1287< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __0.2; @@ -63724,7 +63754,7 @@ fn __action1288< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __0.2; @@ -63749,9 +63779,9 @@ fn __action1289< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, ast::Arguments, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -63777,11 +63807,11 @@ fn __action1290< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __3: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __3.2; let __end0 = __3.2; @@ -63809,10 +63839,10 @@ fn __action1291< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), __2: (TextSize, ast::Identifier, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -63839,9 +63869,9 @@ fn __action1292< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, ast::Arguments, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -63867,11 +63897,11 @@ fn __action1293< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __3: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __3.2; let __end0 = __3.2; @@ -63899,10 +63929,10 @@ fn __action1294< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), __2: (TextSize, ast::Identifier, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -63930,8 +63960,8 @@ fn __action1295< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -63958,8 +63988,8 @@ fn __action1296< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, 
ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -64067,9 +64097,9 @@ fn __action1300< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), - __1: (TextSize, alloc::vec::Vec<(ast::CmpOp, ast::ParenthesizedExpr)>, TextSize), -) -> ast::ParenthesizedExpr + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __1: (TextSize, alloc::vec::Vec<(ast::CmpOp, crate::parser::ParenthesizedExpr)>, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -64095,9 +64125,9 @@ fn __action1301< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), - __1: (TextSize, alloc::vec::Vec<(ast::CmpOp, ast::ParenthesizedExpr)>, TextSize), -) -> ast::ParenthesizedExpr + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __1: (TextSize, alloc::vec::Vec<(ast::CmpOp, crate::parser::ParenthesizedExpr)>, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -64124,7 +64154,7 @@ fn __action1302< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), ) -> ast::Decorator { @@ -64154,7 +64184,7 @@ fn __action1303< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), ) -> ast::Stmt { let __start0 = __1.2; @@ -64237,7 +64267,7 @@ fn __action1306< mode: Mode, __0: (TextSize, ast::Identifier, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::Parameter { let __start0 = __2.2; @@ -64291,10 +64321,10 @@ fn __action1308< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -64321,10 +64351,10 @@ fn __action1309< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -64351,8 +64381,8 @@ fn __action1310< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), - __1: (TextSize, alloc::vec::Vec, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __1: (TextSize, alloc::vec::Vec, TextSize), ) -> Result> { let __start0 = __1.2; @@ -64379,9 +64409,9 @@ fn __action1311< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, ast::Operator, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, 
TextSize), ) -> Result> { let __start0 = __2.2; @@ -64409,10 +64439,10 @@ fn __action1312< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), - __3: (TextSize, core::option::Option, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __3: (TextSize, core::option::Option, TextSize), ) -> Result> { let __start0 = __3.2; @@ -64442,7 +64472,7 @@ fn __action1313< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, alloc::vec::Vec, TextSize), + __1: (TextSize, alloc::vec::Vec, TextSize), __2: (TextSize, token::Tok, TextSize), ) -> StringType { @@ -64471,8 +64501,8 @@ fn __action1314< >( source_code: &str, mode: Mode, - __0: (TextSize, alloc::vec::Vec, TextSize), -) -> ast::Expr + __0: (TextSize, alloc::vec::Vec, TextSize), +) -> ast::FStringFormatSpec { let __start0 = __0.2; let __end0 = __0.2; @@ -64498,7 +64528,7 @@ fn __action1315< source_code: &str, mode: Mode, __0: (TextSize, (String, bool), TextSize), -) -> Result> +) -> Result> { let __start0 = __0.2; let __end0 = __0.2; @@ -64524,12 +64554,12 @@ fn __action1316< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, core::option::Option<(TextSize, ast::ConversionFlag)>, TextSize), - __4: (TextSize, core::option::Option, TextSize), + __4: (TextSize, core::option::Option, TextSize), __5: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __5.2; let __end0 = __5.2; @@ -64560,11 +64590,11 @@ fn __action1317< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, core::option::Option<(TextSize, ast::ConversionFlag)>, TextSize), - __3: (TextSize, core::option::Option, TextSize), + __3: (TextSize, core::option::Option, TextSize), __4: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __4.2; let __end0 = __4.2; @@ -64594,8 +64624,8 @@ fn __action1318< source_code: &str, mode: Mode, __0: (TextSize, ast::UnaryOp, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -64622,8 +64652,8 @@ fn __action1319< source_code: &str, mode: Mode, __0: (TextSize, ast::UnaryOp, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -64702,7 +64732,7 @@ fn __action1322< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, core::option::Option, TextSize), + __1: (TextSize, core::option::Option, TextSize), ) -> ast::Stmt { let __start0 = __1.2; @@ -64729,7 +64759,7 @@ fn __action1323< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::Stmt { let __start0 = __0.2; @@ -64755,7 +64785,7 @@ fn __action1324< >( source_code: &str, 
mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, core::option::Option>, TextSize), ) -> (Option<(TextSize, TextSize, Option)>, ast::Expr) { @@ -64785,7 +64815,7 @@ fn __action1325< mode: Mode, __0: (TextSize, ast::Identifier, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> (Option<(TextSize, TextSize, Option)>, ast::Expr) { let __start0 = __2.2; @@ -64814,7 +64844,7 @@ fn __action1326< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> (Option<(TextSize, TextSize, Option)>, ast::Expr) { let __start0 = __1.2; @@ -64842,7 +64872,7 @@ fn __action1327< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> (Option<(TextSize, TextSize, Option)>, ast::Expr) { let __start0 = __1.2; @@ -64869,9 +64899,9 @@ fn __action1328< >( source_code: &str, mode: Mode, - __0: (TextSize, Vec, TextSize), + __0: (TextSize, Vec, TextSize), __1: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -64897,8 +64927,8 @@ fn __action1329< >( source_code: &str, mode: Mode, - __0: (TextSize, Vec, TextSize), -) -> ast::ParenthesizedExpr + __0: (TextSize, Vec, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __0.2; @@ -64923,9 +64953,9 @@ fn __action1330< >( source_code: &str, mode: Mode, - __0: (TextSize, Vec, TextSize), + __0: (TextSize, Vec, TextSize), __1: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -64951,8 +64981,8 @@ fn __action1331< >( source_code: &str, mode: Mode, - __0: (TextSize, Vec, TextSize), -) -> ast::ParenthesizedExpr + __0: (TextSize, Vec, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __0.2; @@ -65318,7 +65348,7 @@ fn __action1344< source_code: &str, mode: Mode, __0: (TextSize, (IpyEscapeKind, String), TextSize), -) -> Result> +) -> Result> { let __start0 = __0.2; let __end0 = __0.2; @@ -65369,7 +65399,7 @@ fn __action1346< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, alloc::vec::Vec, TextSize), ) -> Result> { @@ -65401,8 +65431,8 @@ fn __action1347< __1: (TextSize, core::option::Option, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, core::option::Option<(String, bool)>, TextSize), - __4: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> Result> + __4: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> Result> { let __start0 = __1.2; let __end0 = __2.0; @@ -65519,7 +65549,7 @@ fn __action1351< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::Pattern { let __start0 = __0.2; @@ -65545,7 +65575,7 @@ fn __action1352< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::Pattern { let 
__start0 = __0.2; @@ -66048,7 +66078,7 @@ fn __action1369< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, token::Tok, TextSize), __4: (TextSize, token::Tok, TextSize), @@ -66088,7 +66118,7 @@ fn __action1370< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, token::Tok, TextSize), __4: (TextSize, token::Tok, TextSize), @@ -66128,7 +66158,7 @@ fn __action1371< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, token::Tok, TextSize), __4: (TextSize, token::Tok, TextSize), @@ -66165,10 +66195,10 @@ fn __action1372< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -66196,7 +66226,7 @@ fn __action1373< source_code: &str, mode: Mode, __0: (TextSize, ast::Identifier, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __0.2; @@ -66250,8 +66280,8 @@ fn __action1375< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -66278,8 +66308,8 @@ fn __action1376< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -66306,7 +66336,7 @@ fn __action1377< source_code: &str, mode: Mode, __0: (TextSize, ast::Number, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __0.2; @@ -66332,8 +66362,8 @@ fn __action1378< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -66385,9 +66415,9 @@ fn __action1380< >( source_code: &str, mode: Mode, - __0: (TextSize, alloc::vec::Vec, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __0: (TextSize, alloc::vec::Vec, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -66413,9 +66443,9 @@ fn __action1381< >( source_code: &str, mode: Mode, - __0: (TextSize, alloc::vec::Vec, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __0: (TextSize, alloc::vec::Vec, TextSize), + __1: (TextSize, 
crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -66443,7 +66473,7 @@ fn __action1382< mode: Mode, __0: (TextSize, ast::ParameterWithDefault, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::ParameterWithDefault { let __start0 = __2.2; @@ -66473,7 +66503,7 @@ fn __action1383< mode: Mode, __0: (TextSize, ast::ParameterWithDefault, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::ParameterWithDefault { let __start0 = __2.2; @@ -69341,10 +69371,10 @@ fn __action1473< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -69371,10 +69401,10 @@ fn __action1474< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -69428,9 +69458,9 @@ fn __action1476< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::Stmt { let __start0 = __3.2; @@ -69460,7 +69490,7 @@ fn __action1477< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::Stmt { let __start0 = __1.2; @@ -69673,10 +69703,10 @@ fn __action1484< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, ast::Operator, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -69703,10 +69733,10 @@ fn __action1485< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, ast::Operator, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -69735,10 +69765,10 @@ fn __action1486< mode: Mode, __0: (TextSize, token::Tok, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __3: 
(TextSize, token::Tok, TextSize), - __4: (TextSize, ast::ParenthesizedExpr, TextSize), - __5: (TextSize, alloc::vec::Vec, TextSize), + __4: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __5: (TextSize, alloc::vec::Vec, TextSize), ) -> ast::Comprehension { let __start0 = __5.2; @@ -69770,10 +69800,10 @@ fn __action1487< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), - __4: (TextSize, alloc::vec::Vec, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __4: (TextSize, alloc::vec::Vec, TextSize), ) -> ast::Comprehension { let __start0 = __4.2; @@ -69804,8 +69834,8 @@ fn __action1488< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -69861,7 +69891,7 @@ fn __action1490< mode: Mode, __0: (TextSize, ast::Identifier, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::Parameter { let __start0 = __2.2; @@ -69993,11 +70023,11 @@ fn __action1495< >( source_code: &str, mode: Mode, - __0: (TextSize, core::option::Option, TextSize), + __0: (TextSize, core::option::Option, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, core::option::Option, TextSize), - __3: (TextSize, core::option::Option>, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, core::option::Option, TextSize), + __3: (TextSize, core::option::Option>, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __3.2; let __end0 = __3.2; @@ -70025,9 +70055,9 @@ fn __action1496< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -70053,9 +70083,9 @@ fn __action1497< >( source_code: &str, mode: Mode, - __0: (TextSize, Vec, TextSize), + __0: (TextSize, Vec, TextSize), __1: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -70081,8 +70111,8 @@ fn __action1498< >( source_code: &str, mode: Mode, - __0: (TextSize, Vec, TextSize), -) -> ast::ParenthesizedExpr + __0: (TextSize, Vec, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __0.2; @@ -70107,10 +70137,10 @@ fn __action1499< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, ast::Operator, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -70137,10 +70167,10 @@ fn __action1500< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, 
ast::Operator, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -70167,12 +70197,12 @@ fn __action1501< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __3: (TextSize, token::Tok, TextSize), - __4: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __4: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __4.2; let __end0 = __4.2; @@ -70201,12 +70231,12 @@ fn __action1502< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __3: (TextSize, token::Tok, TextSize), - __4: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __4: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __4.2; let __end0 = __4.2; @@ -70264,7 +70294,7 @@ fn __action1504< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::Mod { let __start0 = __1.2; @@ -70292,7 +70322,7 @@ fn __action1505< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, alloc::vec::Vec, TextSize), ) -> ast::Mod { @@ -70655,7 +70685,7 @@ fn __action1515< __1: (TextSize, ast::Expr, TextSize), __2: (TextSize, core::option::Option, TextSize), __3: (TextSize, token::Tok, TextSize), - __4: (TextSize, ast::ParenthesizedExpr, TextSize), + __4: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::Stmt { let __start0 = __4.2; @@ -70687,7 +70717,7 @@ fn __action1516< mode: Mode, __0: (TextSize, ast::Identifier, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::TypeParam { let __start0 = __2.2; @@ -70861,7 +70891,7 @@ fn __action1522< mode: Mode, __0: (TextSize, ast::Identifier, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::ParameterWithDefault { let __start0 = __2.2; @@ -70967,9 +70997,9 @@ fn __action1526< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::WithItem { let __start0 = __2.2; @@ -70997,10 +71027,10 @@ fn __action1527< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: 
(TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -71027,10 +71057,10 @@ fn __action1528< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -71058,8 +71088,8 @@ fn __action1529< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, core::option::Option, TextSize), -) -> ast::ParenthesizedExpr + __1: (TextSize, core::option::Option, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.2; let __end0 = __1.2; @@ -71087,8 +71117,8 @@ fn __action1530< mode: Mode, __0: (TextSize, token::Tok, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -71187,7 +71217,7 @@ fn __action1533< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> Result> { let __start0 = __0.2; @@ -71213,8 +71243,8 @@ fn __action1534< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), - __1: (TextSize, alloc::vec::Vec, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __1: (TextSize, alloc::vec::Vec, TextSize), ) -> Result> { let __start0 = __1.0; @@ -71239,10 +71269,10 @@ fn __action1535< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> Result> { let __start0 = __3.0; @@ -71269,9 +71299,9 @@ fn __action1536< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> Result> { let __start0 = __2.2; @@ -71731,7 +71761,7 @@ fn __action1553< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, Vec, TextSize), ) -> (Option<(TextSize, TextSize, Option)>, ast::Expr) { @@ -71757,7 +71787,7 @@ fn __action1554< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> (Option<(TextSize, TextSize, Option)>, ast::Expr) { let __start0 = __0.2; @@ -71785,9 +71815,9 @@ fn __action1555< mode: Mode, __0: (TextSize, token::Tok, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, 
ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __3: (TextSize, token::Tok, TextSize), - __4: (TextSize, ast::ParenthesizedExpr, TextSize), + __4: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::Comprehension { let __start0 = __4.2; @@ -71819,10 +71849,10 @@ fn __action1556< mode: Mode, __0: (TextSize, token::Tok, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __3: (TextSize, token::Tok, TextSize), - __4: (TextSize, ast::ParenthesizedExpr, TextSize), - __5: (TextSize, alloc::vec::Vec, TextSize), + __4: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __5: (TextSize, alloc::vec::Vec, TextSize), ) -> ast::Comprehension { let __start0 = __5.0; @@ -71852,9 +71882,9 @@ fn __action1557< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::Comprehension { let __start0 = __3.2; @@ -71884,10 +71914,10 @@ fn __action1558< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), - __4: (TextSize, alloc::vec::Vec, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __4: (TextSize, alloc::vec::Vec, TextSize), ) -> ast::Comprehension { let __start0 = __4.0; @@ -72061,7 +72091,7 @@ fn __action1563< __3: (TextSize, core::option::Option, TextSize), __4: (TextSize, ast::Parameters, TextSize), __5: (TextSize, token::Tok, TextSize), - __6: (TextSize, ast::ParenthesizedExpr, TextSize), + __6: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __7: (TextSize, token::Tok, TextSize), __8: (TextSize, ast::Suite, TextSize), ) -> ast::Stmt @@ -72104,7 +72134,7 @@ fn __action1564< __4: (TextSize, core::option::Option, TextSize), __5: (TextSize, ast::Parameters, TextSize), __6: (TextSize, token::Tok, TextSize), - __7: (TextSize, ast::ParenthesizedExpr, TextSize), + __7: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __8: (TextSize, token::Tok, TextSize), __9: (TextSize, ast::Suite, TextSize), ) -> ast::Stmt @@ -72220,7 +72250,7 @@ fn __action1567< __2: (TextSize, core::option::Option, TextSize), __3: (TextSize, ast::Parameters, TextSize), __4: (TextSize, token::Tok, TextSize), - __5: (TextSize, ast::ParenthesizedExpr, TextSize), + __5: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __6: (TextSize, token::Tok, TextSize), __7: (TextSize, ast::Suite, TextSize), ) -> ast::Stmt @@ -72261,7 +72291,7 @@ fn __action1568< __3: (TextSize, core::option::Option, TextSize), __4: (TextSize, ast::Parameters, TextSize), __5: (TextSize, token::Tok, TextSize), - __6: (TextSize, ast::ParenthesizedExpr, TextSize), + __6: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __7: (TextSize, token::Tok, TextSize), __8: (TextSize, ast::Suite, TextSize), ) -> ast::Stmt @@ -72368,9 +72398,9 @@ fn __action1571< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec<(Option>, ast::ParenthesizedExpr)>, TextSize), + __1: (TextSize, Vec<(Option>, 
crate::parser::ParenthesizedExpr)>, TextSize), __2: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.0; let __end0 = __1.2; @@ -72397,7 +72427,7 @@ fn __action1572< mode: Mode, __0: (TextSize, token::Tok, TextSize), __1: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __1.0; @@ -72424,9 +72454,9 @@ fn __action1573< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec<(Option>, ast::ParenthesizedExpr)>, TextSize), + __1: (TextSize, Vec<(Option>, crate::parser::ParenthesizedExpr)>, TextSize), __2: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.0; let __end0 = __1.2; @@ -72453,7 +72483,7 @@ fn __action1574< mode: Mode, __0: (TextSize, token::Tok, TextSize), __1: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __1.0; @@ -72532,12 +72562,12 @@ fn __action1577< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, (TextSize, ast::ConversionFlag), TextSize), - __4: (TextSize, core::option::Option, TextSize), + __4: (TextSize, core::option::Option, TextSize), __5: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __3.0; let __end0 = __3.2; @@ -72566,11 +72596,11 @@ fn __action1578< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, core::option::Option, TextSize), + __3: (TextSize, core::option::Option, TextSize), __4: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __2.2; let __end0 = __3.0; @@ -72600,11 +72630,11 @@ fn __action1579< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, (TextSize, ast::ConversionFlag), TextSize), - __3: (TextSize, core::option::Option, TextSize), + __3: (TextSize, core::option::Option, TextSize), __4: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __2.0; let __end0 = __2.2; @@ -72632,10 +72662,10 @@ fn __action1580< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), - __2: (TextSize, core::option::Option, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __2: (TextSize, core::option::Option, TextSize), __3: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __1.2; let __end0 = __2.0; @@ -72664,12 +72694,12 @@ fn __action1581< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, (TextSize, ast::ConversionFlag), TextSize), - __4: (TextSize, ast::Expr, TextSize), + __4: (TextSize, ast::FStringFormatSpec, TextSize), __5: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let 
__start0 = __4.0; let __end0 = __4.2; @@ -72698,11 +72728,11 @@ fn __action1582< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, (TextSize, ast::ConversionFlag), TextSize), __4: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __3.2; let __end0 = __4.0; @@ -72732,11 +72762,11 @@ fn __action1583< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::Expr, TextSize), + __3: (TextSize, ast::FStringFormatSpec, TextSize), __4: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __3.0; let __end0 = __3.2; @@ -72764,10 +72794,10 @@ fn __action1584< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __2.2; let __end0 = __3.0; @@ -72796,11 +72826,11 @@ fn __action1585< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, (TextSize, ast::ConversionFlag), TextSize), - __3: (TextSize, ast::Expr, TextSize), + __3: (TextSize, ast::FStringFormatSpec, TextSize), __4: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __3.0; let __end0 = __3.2; @@ -72828,10 +72858,10 @@ fn __action1586< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, (TextSize, ast::ConversionFlag), TextSize), __3: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __2.2; let __end0 = __3.0; @@ -72860,10 +72890,10 @@ fn __action1587< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), - __2: (TextSize, ast::Expr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __2: (TextSize, ast::FStringFormatSpec, TextSize), __3: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __2.0; let __end0 = __2.2; @@ -72890,9 +72920,9 @@ fn __action1588< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), -) -> Result> +) -> Result> { let __start0 = __1.2; let __end0 = __2.0; @@ -72948,7 +72978,7 @@ fn __action1590< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, alloc::vec::Vec, TextSize), + __1: (TextSize, alloc::vec::Vec, TextSize), __2: (TextSize, token::Tok, TextSize), ) -> StringType { @@ -72977,7 +73007,7 @@ fn __action1591< mode: Mode, __lookbehind: &TextSize, __lookahead: &TextSize, -) -> ast::Expr +) -> ast::FStringFormatSpec { let __start0 = *__lookbehind; let __end0 = *__lookahead; @@ -73001,8 +73031,8 @@ fn __action1592< >( source_code: &str, mode: Mode, - __0: (TextSize, 
alloc::vec::Vec, TextSize), -) -> ast::Expr + __0: (TextSize, alloc::vec::Vec, TextSize), +) -> ast::FStringFormatSpec { let __start0 = __0.0; let __end0 = __0.2; @@ -73302,9 +73332,9 @@ fn __action1603< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.0; let __end0 = __1.2; @@ -73331,7 +73361,7 @@ fn __action1604< mode: Mode, __0: (TextSize, token::Tok, TextSize), __1: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __1.0; @@ -73358,9 +73388,9 @@ fn __action1605< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Vec, TextSize), + __1: (TextSize, Vec, TextSize), __2: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.0; let __end0 = __1.2; @@ -73387,7 +73417,7 @@ fn __action1606< mode: Mode, __0: (TextSize, token::Tok, TextSize), __1: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __1.0; @@ -77633,8 +77663,8 @@ fn __action1727< __1: (TextSize, ast::Parameters, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, core::option::Option<(String, bool)>, TextSize), - __4: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> Result> + __4: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> Result> { let __start0 = __1.0; let __end0 = __1.2; @@ -77664,8 +77694,8 @@ fn __action1728< __0: (TextSize, token::Tok, TextSize), __1: (TextSize, token::Tok, TextSize), __2: (TextSize, core::option::Option<(String, bool)>, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> Result> + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> Result> { let __start0 = __0.2; let __end0 = __1.0; @@ -77693,11 +77723,11 @@ fn __action1729< >( source_code: &str, mode: Mode, - __0: (TextSize, core::option::Option, TextSize), + __0: (TextSize, core::option::Option, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, core::option::Option, TextSize), - __3: (TextSize, Option, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, core::option::Option, TextSize), + __3: (TextSize, Option, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __3.0; let __end0 = __3.2; @@ -77723,10 +77753,10 @@ fn __action1730< >( source_code: &str, mode: Mode, - __0: (TextSize, core::option::Option, TextSize), + __0: (TextSize, core::option::Option, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, core::option::Option, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, core::option::Option, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __2.2; let __end0 = __2.2; @@ -77754,7 +77784,7 @@ fn __action1731< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, ast::Suite, TextSize), ) -> ast::ExceptHandler @@ -77814,8 +77844,8 @@ fn __action1733< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> Option + __1: (TextSize, crate::parser::ParenthesizedExpr, 
TextSize), +) -> Option { let __start0 = __1.0; let __end0 = __1.2; @@ -77840,7 +77870,7 @@ fn __action1734< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), -) -> Option +) -> Option { let __start0 = __0.2; let __end0 = __0.2; @@ -77865,11 +77895,11 @@ fn __action1735< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), - __3: (TextSize, Option, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __3: (TextSize, Option, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.0; let __end0 = __0.2; @@ -77903,10 +77933,10 @@ fn __action1736< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, Option, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, Option, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.0; let __end0 = __0.2; @@ -77942,9 +77972,9 @@ fn __action1737< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), - __2: (TextSize, Option, TextSize), -) -> ast::ParenthesizedExpr + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __2: (TextSize, Option, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.0; let __end0 = __0.0; @@ -77980,8 +78010,8 @@ fn __action1738< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, Option, TextSize), -) -> ast::ParenthesizedExpr + __1: (TextSize, Option, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.0; let __end0 = __0.0; @@ -78017,10 +78047,10 @@ fn __action1739< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.0; let __end0 = __0.2; @@ -78053,9 +78083,9 @@ fn __action1740< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.0; let __end0 = __0.2; @@ -78090,8 +78120,8 @@ fn __action1741< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.0; let __end0 = __0.0; @@ -78126,7 +78156,7 @@ fn __action1742< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.0; let __end0 = __0.0; @@ -78163,9 +78193,9 @@ fn __action1743< mode: Mode, __0: (TextSize, token::Tok, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __3: (TextSize, token::Tok, 
TextSize), - __4: (TextSize, ast::ParenthesizedExpr, TextSize), + __4: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __5: (TextSize, token::Tok, TextSize), __6: (TextSize, ast::Suite, TextSize), __7: (TextSize, token::Tok, TextSize), @@ -78205,9 +78235,9 @@ fn __action1744< mode: Mode, __0: (TextSize, token::Tok, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __3: (TextSize, token::Tok, TextSize), - __4: (TextSize, ast::ParenthesizedExpr, TextSize), + __4: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __5: (TextSize, token::Tok, TextSize), __6: (TextSize, ast::Suite, TextSize), ) -> ast::Stmt @@ -78240,9 +78270,9 @@ fn __action1745< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __4: (TextSize, token::Tok, TextSize), __5: (TextSize, ast::Suite, TextSize), __6: (TextSize, token::Tok, TextSize), @@ -78280,9 +78310,9 @@ fn __action1746< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __4: (TextSize, token::Tok, TextSize), __5: (TextSize, ast::Suite, TextSize), ) -> ast::Stmt @@ -78313,8 +78343,8 @@ fn __action1747< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> core::option::Option + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> core::option::Option { let __start0 = __0.0; let __end0 = __0.2; @@ -78337,8 +78367,8 @@ fn __action1748< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.0; let __end0 = __0.2; @@ -78361,8 +78391,8 @@ fn __action1749< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.0; let __end0 = __0.2; @@ -78386,7 +78416,7 @@ fn __action1750< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::Mod { let __start0 = __1.0; @@ -78412,7 +78442,7 @@ fn __action1751< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __2: (TextSize, alloc::vec::Vec, TextSize), ) -> ast::Mod { @@ -78440,7 +78470,7 @@ fn __action1752< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, ast::ParenthesizedExpr, TextSize), + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::Stmt { let __start0 = __1.0; @@ -78492,8 +78522,8 @@ fn __action1754< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), - __1: (TextSize, 
ast::ParenthesizedExpr, TextSize), -) -> ast::ParenthesizedExpr + __1: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> crate::parser::ParenthesizedExpr { let __start0 = __1.0; let __end0 = __1.2; @@ -78518,7 +78548,7 @@ fn __action1755< source_code: &str, mode: Mode, __0: (TextSize, token::Tok, TextSize), -) -> ast::ParenthesizedExpr +) -> crate::parser::ParenthesizedExpr { let __start0 = __0.2; let __end0 = __0.2; @@ -78543,7 +78573,7 @@ fn __action1756< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> Result> { let __start0 = __0.0; @@ -78567,8 +78597,8 @@ fn __action1757< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), - __1: (TextSize, alloc::vec::Vec, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), + __1: (TextSize, alloc::vec::Vec, TextSize), ) -> Result> { let __start0 = __0.0; @@ -78593,9 +78623,9 @@ fn __action1758< >( source_code: &str, mode: Mode, - __0: (TextSize, ast::ParenthesizedExpr, TextSize), + __0: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __1: (TextSize, ast::Operator, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> Result> { let __start0 = __0.0; @@ -78899,7 +78929,7 @@ fn __action1767< __3: (TextSize, ast::TypeParams, TextSize), __4: (TextSize, ast::Parameters, TextSize), __5: (TextSize, token::Tok, TextSize), - __6: (TextSize, ast::ParenthesizedExpr, TextSize), + __6: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __7: (TextSize, token::Tok, TextSize), __8: (TextSize, ast::Suite, TextSize), ) -> ast::Stmt @@ -78938,7 +78968,7 @@ fn __action1768< __2: (TextSize, ast::Identifier, TextSize), __3: (TextSize, ast::Parameters, TextSize), __4: (TextSize, token::Tok, TextSize), - __5: (TextSize, ast::ParenthesizedExpr, TextSize), + __5: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __6: (TextSize, token::Tok, TextSize), __7: (TextSize, ast::Suite, TextSize), ) -> ast::Stmt @@ -78980,7 +79010,7 @@ fn __action1769< __4: (TextSize, ast::TypeParams, TextSize), __5: (TextSize, ast::Parameters, TextSize), __6: (TextSize, token::Tok, TextSize), - __7: (TextSize, ast::ParenthesizedExpr, TextSize), + __7: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __8: (TextSize, token::Tok, TextSize), __9: (TextSize, ast::Suite, TextSize), ) -> ast::Stmt @@ -79021,7 +79051,7 @@ fn __action1770< __3: (TextSize, ast::Identifier, TextSize), __4: (TextSize, ast::Parameters, TextSize), __5: (TextSize, token::Tok, TextSize), - __6: (TextSize, ast::ParenthesizedExpr, TextSize), + __6: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __7: (TextSize, token::Tok, TextSize), __8: (TextSize, ast::Suite, TextSize), ) -> ast::Stmt @@ -79210,7 +79240,7 @@ fn __action1775< __2: (TextSize, ast::TypeParams, TextSize), __3: (TextSize, ast::Parameters, TextSize), __4: (TextSize, token::Tok, TextSize), - __5: (TextSize, ast::ParenthesizedExpr, TextSize), + __5: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __6: (TextSize, token::Tok, TextSize), __7: (TextSize, ast::Suite, TextSize), ) -> ast::Stmt @@ -79247,7 +79277,7 @@ fn __action1776< __1: (TextSize, ast::Identifier, TextSize), __2: (TextSize, ast::Parameters, TextSize), __3: (TextSize, token::Tok, TextSize), - __4: (TextSize, ast::ParenthesizedExpr, TextSize), + __4: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __5: (TextSize, 
token::Tok, TextSize), __6: (TextSize, ast::Suite, TextSize), ) -> ast::Stmt @@ -79287,7 +79317,7 @@ fn __action1777< __3: (TextSize, ast::TypeParams, TextSize), __4: (TextSize, ast::Parameters, TextSize), __5: (TextSize, token::Tok, TextSize), - __6: (TextSize, ast::ParenthesizedExpr, TextSize), + __6: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __7: (TextSize, token::Tok, TextSize), __8: (TextSize, ast::Suite, TextSize), ) -> ast::Stmt @@ -79326,7 +79356,7 @@ fn __action1778< __2: (TextSize, ast::Identifier, TextSize), __3: (TextSize, ast::Parameters, TextSize), __4: (TextSize, token::Tok, TextSize), - __5: (TextSize, ast::ParenthesizedExpr, TextSize), + __5: (TextSize, crate::parser::ParenthesizedExpr, TextSize), __6: (TextSize, token::Tok, TextSize), __7: (TextSize, ast::Suite, TextSize), ) -> ast::Stmt @@ -79505,7 +79535,7 @@ fn __action1783< __1: (TextSize, ast::Expr, TextSize), __2: (TextSize, ast::TypeParams, TextSize), __3: (TextSize, token::Tok, TextSize), - __4: (TextSize, ast::ParenthesizedExpr, TextSize), + __4: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::Stmt { let __start0 = __2.0; @@ -79536,7 +79566,7 @@ fn __action1784< __0: (TextSize, token::Tok, TextSize), __1: (TextSize, ast::Expr, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), ) -> ast::Stmt { let __start0 = __1.2; @@ -79569,8 +79599,8 @@ fn __action1785< __1: (TextSize, ast::Parameters, TextSize), __2: (TextSize, token::Tok, TextSize), __3: (TextSize, (String, bool), TextSize), - __4: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> Result> + __4: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> Result> { let __start0 = __3.0; let __end0 = __3.2; @@ -79600,8 +79630,8 @@ fn __action1786< __0: (TextSize, token::Tok, TextSize), __1: (TextSize, ast::Parameters, TextSize), __2: (TextSize, token::Tok, TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> Result> + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> Result> { let __start0 = __2.2; let __end0 = __3.0; @@ -79632,8 +79662,8 @@ fn __action1787< __0: (TextSize, token::Tok, TextSize), __1: (TextSize, token::Tok, TextSize), __2: (TextSize, (String, bool), TextSize), - __3: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> Result> + __3: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> Result> { let __start0 = __2.0; let __end0 = __2.2; @@ -79661,8 +79691,8 @@ fn __action1788< mode: Mode, __0: (TextSize, token::Tok, TextSize), __1: (TextSize, token::Tok, TextSize), - __2: (TextSize, ast::ParenthesizedExpr, TextSize), -) -> Result> + __2: (TextSize, crate::parser::ParenthesizedExpr, TextSize), +) -> Result> { let __start0 = __1.2; let __end0 = __2.0; diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__parser__tests__fstrings.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__parser__tests__fstrings.snap index b7e99b5722..58c33b7302 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__parser__tests__fstrings.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__parser__tests__fstrings.snap @@ -14,11 +14,11 @@ expression: parse_ast FString( FString { range: 0..9, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 2..8, - value: StringLiteral( + expression: StringLiteral( ExprStringLiteral { range: 3..7, value: 
StringLiteralValue { @@ -57,11 +57,11 @@ expression: parse_ast FString( FString { range: 10..20, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 12..19, - value: Name( + expression: Name( ExprName { range: 13..16, id: "foo", @@ -93,11 +93,11 @@ expression: parse_ast FString( FString { range: 21..28, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 23..27, - value: Tuple( + expression: Tuple( ExprTuple { range: 24..26, elts: [ @@ -138,11 +138,11 @@ expression: parse_ast FString( FString { range: 29..39, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 31..38, - value: Compare( + expression: Compare( ExprCompare { range: 32..36, left: NumberLiteral( @@ -171,21 +171,10 @@ expression: parse_ast debug_text: None, conversion: None, format_spec: Some( - FString( - ExprFString { - range: 37..37, - value: FStringValue { - inner: Single( - FString( - FString { - range: 37..37, - values: [], - }, - ), - ), - }, - }, - ), + FStringFormatSpec { + range: 37..37, + elements: [], + }, ), }, ), @@ -209,11 +198,11 @@ expression: parse_ast FString( FString { range: 40..55, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 42..54, - value: NumberLiteral( + expression: NumberLiteral( ExprNumberLiteral { range: 43..44, value: Int( @@ -224,58 +213,39 @@ expression: parse_ast debug_text: None, conversion: None, format_spec: Some( - FString( - ExprFString { - range: 45..53, - value: FStringValue { - inner: Single( - FString( - FString { - range: 45..53, - values: [ - FormattedValue( - ExprFormattedValue { - range: 45..50, - value: StringLiteral( - ExprStringLiteral { - range: 46..49, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 46..49, - value: "}", - unicode: false, - }, - ), - }, - }, - ), - debug_text: None, - conversion: None, - format_spec: None, + FStringFormatSpec { + range: 45..53, + elements: [ + Expression( + FStringExpressionElement { + range: 45..50, + expression: StringLiteral( + ExprStringLiteral { + range: 46..49, + value: StringLiteralValue { + inner: Single( + StringLiteral { + range: 46..49, + value: "}", + unicode: false, }, ), - StringLiteral( - ExprStringLiteral { - range: 50..53, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 50..53, - value: ">10", - unicode: false, - }, - ), - }, - }, - ), - ], + }, }, ), - ), - }, - }, - ), + debug_text: None, + conversion: None, + format_spec: None, + }, + ), + Literal( + FStringLiteralElement { + range: 50..53, + value: ">10", + }, + ), + ], + }, ), }, ), @@ -299,11 +269,11 @@ expression: parse_ast FString( FString { range: 56..71, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 58..70, - value: NumberLiteral( + expression: NumberLiteral( ExprNumberLiteral { range: 59..60, value: Int( @@ -314,58 +284,39 @@ expression: parse_ast debug_text: None, conversion: None, format_spec: Some( - FString( - ExprFString { - range: 61..69, - value: FStringValue { - inner: Single( - FString( - FString { - range: 61..69, - values: [ - FormattedValue( - ExprFormattedValue { - range: 61..66, - value: StringLiteral( - ExprStringLiteral { - range: 62..65, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 62..65, - value: "{", - unicode: false, - }, - ), - }, - 
}, - ), - debug_text: None, - conversion: None, - format_spec: None, + FStringFormatSpec { + range: 61..69, + elements: [ + Expression( + FStringExpressionElement { + range: 61..66, + expression: StringLiteral( + ExprStringLiteral { + range: 62..65, + value: StringLiteralValue { + inner: Single( + StringLiteral { + range: 62..65, + value: "{", + unicode: false, }, ), - StringLiteral( - ExprStringLiteral { - range: 66..69, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 66..69, - value: ">10", - unicode: false, - }, - ), - }, - }, - ), - ], + }, }, ), - ), - }, - }, - ), + debug_text: None, + conversion: None, + format_spec: None, + }, + ), + Literal( + FStringLiteralElement { + range: 66..69, + value: ">10", + }, + ), + ], + }, ), }, ), @@ -389,11 +340,11 @@ expression: parse_ast FString( FString { range: 72..86, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 74..85, - value: Name( + expression: Name( ExprName { range: 77..80, id: "foo", @@ -430,11 +381,11 @@ expression: parse_ast FString( FString { range: 87..107, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 89..106, - value: Name( + expression: Name( ExprName { range: 92..95, id: "foo", @@ -449,36 +400,17 @@ expression: parse_ast ), conversion: None, format_spec: Some( - FString( - ExprFString { - range: 100..105, - value: FStringValue { - inner: Single( - FString( - FString { - range: 100..105, - values: [ - StringLiteral( - ExprStringLiteral { - range: 100..105, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 100..105, - value: ".3f ", - unicode: false, - }, - ), - }, - }, - ), - ], - }, - ), - ), - }, - }, - ), + FStringFormatSpec { + range: 100..105, + elements: [ + Literal( + FStringLiteralElement { + range: 100..105, + value: ".3f ", + }, + ), + ], + }, ), }, ), @@ -502,11 +434,11 @@ expression: parse_ast FString( FString { range: 108..126, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 110..125, - value: Name( + expression: Name( ExprName { range: 113..116, id: "foo", @@ -543,11 +475,11 @@ expression: parse_ast FString( FString { range: 127..143, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 129..142, - value: Tuple( + expression: Tuple( ExprTuple { range: 132..136, elts: [ @@ -601,11 +533,11 @@ expression: parse_ast FString( FString { range: 144..170, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 146..169, - value: FString( + expression: FString( ExprFString { range: 147..163, value: FStringValue { @@ -613,11 +545,11 @@ expression: parse_ast FString( FString { range: 147..163, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 149..162, - value: NumberLiteral( + expression: NumberLiteral( ExprNumberLiteral { range: 150..156, value: Float( @@ -633,36 +565,17 @@ expression: parse_ast ), conversion: None, format_spec: Some( - FString( - ExprFString { - range: 158..161, - value: FStringValue { - inner: Single( - FString( - FString { - range: 158..161, - values: [ - StringLiteral( - ExprStringLiteral { - range: 158..161, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 158..161, - value: ".1f", - unicode: false, - }, - ), - }, - }, - ), - ], - }, 
- ), - ), - }, - }, - ), + FStringFormatSpec { + range: 158..161, + elements: [ + Literal( + FStringLiteralElement { + range: 158..161, + value: ".1f", + }, + ), + ], + }, ), }, ), @@ -676,36 +589,17 @@ expression: parse_ast debug_text: None, conversion: None, format_spec: Some( - FString( - ExprFString { - range: 164..168, - value: FStringValue { - inner: Single( - FString( - FString { - range: 164..168, - values: [ - StringLiteral( - ExprStringLiteral { - range: 164..168, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 164..168, - value: "*^20", - unicode: false, - }, - ), - }, - }, - ), - ], - }, - ), - ), - }, - }, - ), + FStringFormatSpec { + range: 164..168, + elements: [ + Literal( + FStringLiteralElement { + range: 164..168, + value: "*^20", + }, + ), + ], + }, ), }, ), @@ -742,25 +636,17 @@ expression: parse_ast FString( FString { range: 180..195, - values: [ - StringLiteral( - ExprStringLiteral { + elements: [ + Literal( + FStringLiteralElement { range: 182..186, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 182..186, - value: "bar ", - unicode: false, - }, - ), - }, + value: "bar ", }, ), - FormattedValue( - ExprFormattedValue { + Expression( + FStringExpressionElement { range: 186..193, - value: BinOp( + expression: BinOp( ExprBinOp { range: 187..192, left: Name( @@ -785,18 +671,10 @@ expression: parse_ast format_spec: None, }, ), - StringLiteral( - ExprStringLiteral { + Literal( + FStringLiteralElement { range: 193..194, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 193..194, - value: " ", - unicode: false, - }, - ), - }, + value: " ", }, ), ], @@ -925,25 +803,17 @@ expression: parse_ast FString( FString { range: 300..317, - values: [ - StringLiteral( - ExprStringLiteral { + elements: [ + Literal( + FStringLiteralElement { range: 302..303, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 302..303, - value: "\\", - unicode: false, - }, - ), - }, + value: "\\", }, ), - FormattedValue( - ExprFormattedValue { + Expression( + FStringExpressionElement { range: 303..308, - value: Name( + expression: Name( ExprName { range: 304..307, id: "foo", @@ -955,24 +825,16 @@ expression: parse_ast format_spec: None, }, ), - StringLiteral( - ExprStringLiteral { + Literal( + FStringLiteralElement { range: 308..309, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 308..309, - value: "\\", - unicode: false, - }, - ), - }, + value: "\\", }, ), - FormattedValue( - ExprFormattedValue { + Expression( + FStringExpressionElement { range: 309..316, - value: Name( + expression: Name( ExprName { range: 310..313, id: "bar", @@ -982,36 +844,17 @@ expression: parse_ast debug_text: None, conversion: None, format_spec: Some( - FString( - ExprFString { - range: 314..315, - value: FStringValue { - inner: Single( - FString( - FString { - range: 314..315, - values: [ - StringLiteral( - ExprStringLiteral { - range: 314..315, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 314..315, - value: "\\", - unicode: false, - }, - ), - }, - }, - ), - ], - }, - ), - ), - }, - }, - ), + FStringFormatSpec { + range: 314..315, + elements: [ + Literal( + FStringLiteralElement { + range: 314..315, + value: "\\", + }, + ), + ], + }, ), }, ), @@ -1035,19 +878,11 @@ expression: parse_ast FString( FString { range: 318..332, - values: [ - StringLiteral( - ExprStringLiteral { + elements: [ + Literal( + FStringLiteralElement { range: 320..331, - value: StringLiteralValue { - inner: 
Single( - StringLiteral { - range: 320..331, - value: "\\{foo\\}", - unicode: false, - }, - ), - }, + value: "\\{foo\\}", }, ), ], @@ -1070,11 +905,11 @@ expression: parse_ast FString( FString { range: 333..373, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 337..370, - value: Name( + expression: Name( ExprName { range: 343..346, id: "foo", @@ -1084,36 +919,17 @@ expression: parse_ast debug_text: None, conversion: None, format_spec: Some( - FString( - ExprFString { - range: 347..369, - value: FStringValue { - inner: Single( - FString( - FString { - range: 347..369, - values: [ - StringLiteral( - ExprStringLiteral { - range: 347..369, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 347..369, - value: "x\n y\n z\n", - unicode: false, - }, - ), - }, - }, - ), - ], - }, - ), - ), - }, - }, - ), + FStringFormatSpec { + range: 347..369, + elements: [ + Literal( + FStringLiteralElement { + range: 347..369, + value: "x\n y\n z\n", + }, + ), + ], + }, ), }, ), diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__parser__tests__fstrings_with_unicode.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__parser__tests__fstrings_with_unicode.snap index 7373b5c2b6..705c6e4d47 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__parser__tests__fstrings_with_unicode.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__parser__tests__fstrings_with_unicode.snap @@ -22,11 +22,11 @@ expression: parse_ast FString( FString { range: 7..15, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 9..14, - value: Name( + expression: Name( ExprName { range: 10..13, id: "bar", @@ -81,11 +81,11 @@ expression: parse_ast FString( FString { range: 36..44, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 38..43, - value: Name( + expression: Name( ExprName { range: 39..42, id: "bar", @@ -140,11 +140,11 @@ expression: parse_ast FString( FString { range: 66..74, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 68..73, - value: Name( + expression: Name( ExprName { range: 69..72, id: "bar", @@ -199,25 +199,17 @@ expression: parse_ast FString( FString { range: 97..116, - values: [ - StringLiteral( - ExprStringLiteral { + elements: [ + Literal( + FStringLiteralElement { range: 99..103, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 99..103, - value: "bar ", - unicode: false, - }, - ), - }, + value: "bar ", }, ), - FormattedValue( - ExprFormattedValue { + Expression( + FStringExpressionElement { range: 103..108, - value: Name( + expression: Name( ExprName { range: 104..107, id: "baz", @@ -229,18 +221,10 @@ expression: parse_ast format_spec: None, }, ), - StringLiteral( - ExprStringLiteral { + Literal( + FStringLiteralElement { range: 108..115, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 108..115, - value: " really", - unicode: false, - }, - ), - }, + value: " really", }, ), ], diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__parser__tests__parse_f_string.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__parser__tests__parse_f_string.snap index d99fa9e549..84364a344e 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__parser__tests__parse_f_string.snap +++ 
b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__parser__tests__parse_f_string.snap @@ -14,19 +14,11 @@ expression: parse_ast FString( FString { range: 0..14, - values: [ - StringLiteral( - ExprStringLiteral { + elements: [ + Literal( + FStringLiteralElement { range: 2..13, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 2..13, - value: "Hello world", - unicode: false, - }, - ), - }, + value: "Hello world", }, ), ], diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__parser__tests__try.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__parser__tests__try.snap index a5e05daf58..f2497947ca 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__parser__tests__try.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__parser__tests__try.snap @@ -86,25 +86,17 @@ expression: parse_ast FString( FString { range: 62..81, - values: [ - StringLiteral( - ExprStringLiteral { + elements: [ + Literal( + FStringLiteralElement { range: 64..71, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 64..71, - value: "caught ", - unicode: false, - }, - ), - }, + value: "caught ", }, ), - FormattedValue( - ExprFormattedValue { + Expression( + FStringExpressionElement { range: 71..80, - value: Call( + expression: Call( ExprCall { range: 72..79, func: Name( @@ -194,25 +186,17 @@ expression: parse_ast FString( FString { range: 114..133, - values: [ - StringLiteral( - ExprStringLiteral { + elements: [ + Literal( + FStringLiteralElement { range: 116..123, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 116..123, - value: "caught ", - unicode: false, - }, - ), - }, + value: "caught ", }, ), - FormattedValue( - ExprFormattedValue { + Expression( + FStringExpressionElement { range: 123..132, - value: Call( + expression: Call( ExprCall { range: 124..131, func: Name( diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__parser__tests__try_star.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__parser__tests__try_star.snap index eb9cfeefb9..220516c86b 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__parser__tests__try_star.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__parser__tests__try_star.snap @@ -204,25 +204,17 @@ expression: parse_ast FString( FString { range: 133..179, - values: [ - StringLiteral( - ExprStringLiteral { + elements: [ + Literal( + FStringLiteralElement { range: 135..142, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 135..142, - value: "caught ", - unicode: false, - }, - ), - }, + value: "caught ", }, ), - FormattedValue( - ExprFormattedValue { + Expression( + FStringExpressionElement { range: 142..151, - value: Call( + expression: Call( ExprCall { range: 143..150, func: Name( @@ -252,24 +244,16 @@ expression: parse_ast format_spec: None, }, ), - StringLiteral( - ExprStringLiteral { + Literal( + FStringLiteralElement { range: 151..164, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 151..164, - value: " with nested ", - unicode: false, - }, - ), - }, + value: " with nested ", }, ), - FormattedValue( - ExprFormattedValue { + Expression( + FStringExpressionElement { range: 164..178, - value: Attribute( + expression: Attribute( ExprAttribute { range: 165..177, value: Name( @@ -351,25 +335,17 @@ expression: parse_ast FString( FString { range: 213..259, - values: [ - StringLiteral( - ExprStringLiteral { + elements: [ + 
Literal( + FStringLiteralElement { range: 215..222, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 215..222, - value: "caught ", - unicode: false, - }, - ), - }, + value: "caught ", }, ), - FormattedValue( - ExprFormattedValue { + Expression( + FStringExpressionElement { range: 222..231, - value: Call( + expression: Call( ExprCall { range: 223..230, func: Name( @@ -399,24 +375,16 @@ expression: parse_ast format_spec: None, }, ), - StringLiteral( - ExprStringLiteral { + Literal( + FStringLiteralElement { range: 231..244, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 231..244, - value: " with nested ", - unicode: false, - }, - ), - }, + value: " with nested ", }, ), - FormattedValue( - ExprFormattedValue { + Expression( + FStringExpressionElement { range: 244..258, - value: Attribute( + expression: Attribute( ExprAttribute { range: 245..257, value: Name( diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_constant_range.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_constant_range.snap index 4575aad44e..ef91cbf428 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_constant_range.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_constant_range.snap @@ -14,25 +14,17 @@ expression: parse_ast FString( FString { range: 0..22, - values: [ - StringLiteral( - ExprStringLiteral { + elements: [ + Literal( + FStringLiteralElement { range: 2..5, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 2..5, - value: "aaa", - unicode: false, - }, - ), - }, + value: "aaa", }, ), - FormattedValue( - ExprFormattedValue { + Expression( + FStringExpressionElement { range: 5..10, - value: Name( + expression: Name( ExprName { range: 6..9, id: "bbb", @@ -44,24 +36,16 @@ expression: parse_ast format_spec: None, }, ), - StringLiteral( - ExprStringLiteral { + Literal( + FStringLiteralElement { range: 10..13, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 10..13, - value: "ccc", - unicode: false, - }, - ), - }, + value: "ccc", }, ), - FormattedValue( - ExprFormattedValue { + Expression( + FStringExpressionElement { range: 13..18, - value: Name( + expression: Name( ExprName { range: 14..17, id: "ddd", @@ -73,18 +57,10 @@ expression: parse_ast format_spec: None, }, ), - StringLiteral( - ExprStringLiteral { + Literal( + FStringLiteralElement { range: 18..21, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 18..21, - value: "eee", - unicode: false, - }, - ), - }, + value: "eee", }, ), ], diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_escaped_character.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_escaped_character.snap index 2ded682ed4..8353b0e11c 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_escaped_character.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_escaped_character.snap @@ -14,25 +14,17 @@ expression: parse_ast FString( FString { range: 0..8, - values: [ - StringLiteral( - ExprStringLiteral { + elements: [ + Literal( + FStringLiteralElement { range: 2..4, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 2..4, - value: "\\", - unicode: false, - }, - ), - }, + value: "\\", }, ), - FormattedValue( - 
ExprFormattedValue { + Expression( + FStringExpressionElement { range: 4..7, - value: Name( + expression: Name( ExprName { range: 5..6, id: "x", diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_escaped_newline.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_escaped_newline.snap index d7090ab4c0..5394a34336 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_escaped_newline.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_escaped_newline.snap @@ -14,25 +14,17 @@ expression: parse_ast FString( FString { range: 0..8, - values: [ - StringLiteral( - ExprStringLiteral { + elements: [ + Literal( + FStringLiteralElement { range: 2..4, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 2..4, - value: "\n", - unicode: false, - }, - ), - }, + value: "\n", }, ), - FormattedValue( - ExprFormattedValue { + Expression( + FStringExpressionElement { range: 4..7, - value: Name( + expression: Name( ExprName { range: 5..6, id: "x", diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_line_continuation.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_line_continuation.snap index 92ac071ae5..10c759fefa 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_line_continuation.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_line_continuation.snap @@ -14,25 +14,17 @@ expression: parse_ast FString( FString { range: 0..9, - values: [ - StringLiteral( - ExprStringLiteral { + elements: [ + Literal( + FStringLiteralElement { range: 3..5, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 3..5, - value: "\\\n", - unicode: false, - }, - ), - }, + value: "\\\n", }, ), - FormattedValue( - ExprFormattedValue { + Expression( + FStringExpressionElement { range: 5..8, - value: Name( + expression: Name( ExprName { range: 6..7, id: "x", diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_parse_self_documenting_base.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_parse_self_documenting_base.snap index 476a5a3f53..f01086439a 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_parse_self_documenting_base.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_parse_self_documenting_base.snap @@ -14,11 +14,11 @@ expression: parse_ast FString( FString { range: 0..10, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 2..9, - value: Name( + expression: Name( ExprName { range: 3..7, id: "user", diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_parse_self_documenting_base_more.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_parse_self_documenting_base_more.snap index 765076eddb..fe5c4f3497 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_parse_self_documenting_base_more.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_parse_self_documenting_base_more.snap @@ -14,25 +14,17 @@ expression: parse_ast FString( FString { range: 0..38, - values: [ - 
StringLiteral( - ExprStringLiteral { + elements: [ + Literal( + FStringLiteralElement { range: 2..6, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 2..6, - value: "mix ", - unicode: false, - }, - ), - }, + value: "mix ", }, ), - FormattedValue( - ExprFormattedValue { + Expression( + FStringExpressionElement { range: 6..13, - value: Name( + expression: Name( ExprName { range: 7..11, id: "user", @@ -49,24 +41,16 @@ expression: parse_ast format_spec: None, }, ), - StringLiteral( - ExprStringLiteral { + Literal( + FStringLiteralElement { range: 13..28, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 13..28, - value: " with text and ", - unicode: false, - }, - ), - }, + value: " with text and ", }, ), - FormattedValue( - ExprFormattedValue { + Expression( + FStringExpressionElement { range: 28..37, - value: Name( + expression: Name( ExprName { range: 29..35, id: "second", diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_parse_self_documenting_format.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_parse_self_documenting_format.snap index d34b5387ad..26d2fb22be 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_parse_self_documenting_format.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_parse_self_documenting_format.snap @@ -14,11 +14,11 @@ expression: parse_ast FString( FString { range: 0..14, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 2..13, - value: Name( + expression: Name( ExprName { range: 3..7, id: "user", @@ -33,36 +33,17 @@ expression: parse_ast ), conversion: None, format_spec: Some( - FString( - ExprFString { - range: 9..12, - value: FStringValue { - inner: Single( - FString( - FString { - range: 9..12, - values: [ - StringLiteral( - ExprStringLiteral { - range: 9..12, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 9..12, - value: ">10", - unicode: false, - }, - ), - }, - }, - ), - ], - }, - ), - ), - }, - }, - ), + FStringFormatSpec { + range: 9..12, + elements: [ + Literal( + FStringLiteralElement { + range: 9..12, + value: ">10", + }, + ), + ], + }, ), }, ), diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_unescaped_newline.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_unescaped_newline.snap index a607945402..55797fae09 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_unescaped_newline.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__fstring_unescaped_newline.snap @@ -14,25 +14,17 @@ expression: parse_ast FString( FString { range: 0..11, - values: [ - StringLiteral( - ExprStringLiteral { + elements: [ + Literal( + FStringLiteralElement { range: 4..5, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 4..5, - value: "\n", - unicode: false, - }, - ), - }, + value: "\n", }, ), - FormattedValue( - ExprFormattedValue { + Expression( + FStringExpressionElement { range: 5..8, - value: Name( + expression: Name( ExprName { range: 6..7, id: "x", diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_empty_fstring.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_empty_fstring.snap index 
a4375d213f..fccf2a0b9e 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_empty_fstring.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_empty_fstring.snap @@ -14,7 +14,7 @@ expression: "parse_suite(r#\"f\"\"\"#, \"\").unwrap()" FString( FString { range: 0..3, - values: [], + elements: [], }, ), ), diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_f_string_concat_1.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_f_string_concat_1.snap index 5e831e22e6..3fa34cf133 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_f_string_concat_1.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_f_string_concat_1.snap @@ -22,19 +22,11 @@ expression: parse_ast FString( FString { range: 9..17, - values: [ - StringLiteral( - ExprStringLiteral { + elements: [ + Literal( + FStringLiteralElement { range: 11..16, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 11..16, - value: "world", - unicode: false, - }, - ), - }, + value: "world", }, ), ], diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_f_string_concat_2.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_f_string_concat_2.snap index 5e831e22e6..3fa34cf133 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_f_string_concat_2.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_f_string_concat_2.snap @@ -22,19 +22,11 @@ expression: parse_ast FString( FString { range: 9..17, - values: [ - StringLiteral( - ExprStringLiteral { + elements: [ + Literal( + FStringLiteralElement { range: 11..16, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 11..16, - value: "world", - unicode: false, - }, - ), - }, + value: "world", }, ), ], diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_f_string_concat_3.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_f_string_concat_3.snap index c59b4bc186..62c35e88a4 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_f_string_concat_3.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_f_string_concat_3.snap @@ -22,25 +22,17 @@ expression: parse_ast FString( FString { range: 9..22, - values: [ - StringLiteral( - ExprStringLiteral { + elements: [ + Literal( + FStringLiteralElement { range: 11..16, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 11..16, - value: "world", - unicode: false, - }, - ), - }, + value: "world", }, ), - FormattedValue( - ExprFormattedValue { + Expression( + FStringExpressionElement { range: 16..21, - value: StringLiteral( + expression: StringLiteral( ExprStringLiteral { range: 17..20, value: StringLiteralValue { diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_f_string_concat_4.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_f_string_concat_4.snap index 4d777817d7..05f12455f3 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_f_string_concat_4.snap +++ 
b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_f_string_concat_4.snap @@ -22,25 +22,17 @@ expression: parse_ast FString( FString { range: 9..22, - values: [ - StringLiteral( - ExprStringLiteral { + elements: [ + Literal( + FStringLiteralElement { range: 11..16, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 11..16, - value: "world", - unicode: false, - }, - ), - }, + value: "world", }, ), - FormattedValue( - ExprFormattedValue { + Expression( + FStringExpressionElement { range: 16..21, - value: StringLiteral( + expression: StringLiteral( ExprStringLiteral { range: 17..20, value: StringLiteralValue { diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring.snap index c389463798..5b24ddca17 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring.snap @@ -14,11 +14,11 @@ expression: parse_ast FString( FString { range: 0..18, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 2..5, - value: Name( + expression: Name( ExprName { range: 3..4, id: "a", @@ -30,10 +30,10 @@ expression: parse_ast format_spec: None, }, ), - FormattedValue( - ExprFormattedValue { + Expression( + FStringExpressionElement { range: 5..10, - value: Name( + expression: Name( ExprName { range: 7..8, id: "b", @@ -45,18 +45,10 @@ expression: parse_ast format_spec: None, }, ), - StringLiteral( - ExprStringLiteral { + Literal( + FStringLiteralElement { range: 10..17, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 10..17, - value: "{foo}", - unicode: false, - }, - ), - }, + value: "{foo}", }, ), ], diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_equals.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_equals.snap index fc8338ee70..0972b502da 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_equals.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_equals.snap @@ -14,11 +14,11 @@ expression: parse_ast FString( FString { range: 0..13, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 2..12, - value: Compare( + expression: Compare( ExprCompare { range: 3..11, left: NumberLiteral( diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_nested_concatenation_string_spec.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_nested_concatenation_string_spec.snap index fdb3a6fc90..fe1418fc69 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_nested_concatenation_string_spec.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_nested_concatenation_string_spec.snap @@ -14,11 +14,11 @@ expression: parse_ast FString( FString { range: 0..16, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 2..15, - value: Name( + expression: Name( ExprName { range: 3..6, id: "foo", @@ -28,54 +28,43 @@ expression: 
parse_ast debug_text: None, conversion: None, format_spec: Some( - FString( - ExprFString { - range: 7..14, - value: FStringValue { - inner: Single( - FString( - FString { - range: 7..14, - values: [ - FormattedValue( - ExprFormattedValue { - range: 7..14, - value: StringLiteral( - ExprStringLiteral { - range: 8..13, - value: StringLiteralValue { - inner: Concatenated( - ConcatenatedStringLiteral { - strings: [ - StringLiteral { - range: 8..10, - value: "", - unicode: false, - }, - StringLiteral { - range: 11..13, - value: "", - unicode: false, - }, - ], - value: "", - }, - ), - }, + FStringFormatSpec { + range: 7..14, + elements: [ + Expression( + FStringExpressionElement { + range: 7..14, + expression: StringLiteral( + ExprStringLiteral { + range: 8..13, + value: StringLiteralValue { + inner: Concatenated( + ConcatenatedStringLiteral { + strings: [ + StringLiteral { + range: 8..10, + value: "", + unicode: false, }, - ), - debug_text: None, - conversion: None, - format_spec: None, + StringLiteral { + range: 11..13, + value: "", + unicode: false, + }, + ], + value: "", }, ), - ], + }, }, ), - ), - }, - }, - ), + debug_text: None, + conversion: None, + format_spec: None, + }, + ), + ], + }, ), }, ), diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_nested_spec.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_nested_spec.snap index 3ffcbc7c9e..d4e72897e0 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_nested_spec.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_nested_spec.snap @@ -14,11 +14,11 @@ expression: parse_ast FString( FString { range: 0..15, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 2..14, - value: Name( + expression: Name( ExprName { range: 3..6, id: "foo", @@ -28,37 +28,26 @@ expression: parse_ast debug_text: None, conversion: None, format_spec: Some( - FString( - ExprFString { - range: 7..13, - value: FStringValue { - inner: Single( - FString( - FString { - range: 7..13, - values: [ - FormattedValue( - ExprFormattedValue { - range: 7..13, - value: Name( - ExprName { - range: 8..12, - id: "spec", - ctx: Load, - }, - ), - debug_text: None, - conversion: None, - format_spec: None, - }, - ), - ], + FStringFormatSpec { + range: 7..13, + elements: [ + Expression( + FStringExpressionElement { + range: 7..13, + expression: Name( + ExprName { + range: 8..12, + id: "spec", + ctx: Load, }, ), - ), - }, - }, - ), + debug_text: None, + conversion: None, + format_spec: None, + }, + ), + ], + }, ), }, ), diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_nested_string_spec.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_nested_string_spec.snap index 06e925c016..c00e35f945 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_nested_string_spec.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_nested_string_spec.snap @@ -14,11 +14,11 @@ expression: parse_ast FString( FString { range: 0..13, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 2..12, - value: Name( + expression: Name( ExprName { range: 3..6, id: "foo", @@ -28,44 +28,33 @@ expression: parse_ast 
debug_text: None, conversion: None, format_spec: Some( - FString( - ExprFString { - range: 7..11, - value: FStringValue { - inner: Single( - FString( - FString { - range: 7..11, - values: [ - FormattedValue( - ExprFormattedValue { - range: 7..11, - value: StringLiteral( - ExprStringLiteral { - range: 8..10, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 8..10, - value: "", - unicode: false, - }, - ), - }, - }, - ), - debug_text: None, - conversion: None, - format_spec: None, + FStringFormatSpec { + range: 7..11, + elements: [ + Expression( + FStringExpressionElement { + range: 7..11, + expression: StringLiteral( + ExprStringLiteral { + range: 8..10, + value: StringLiteralValue { + inner: Single( + StringLiteral { + range: 8..10, + value: "", + unicode: false, }, ), - ], + }, }, ), - ), - }, - }, - ), + debug_text: None, + conversion: None, + format_spec: None, + }, + ), + ], + }, ), }, ), diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_not_equals.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_not_equals.snap index c4aee85024..b1284e5cca 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_not_equals.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_not_equals.snap @@ -14,11 +14,11 @@ expression: parse_ast FString( FString { range: 0..11, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 2..10, - value: Compare( + expression: Compare( ExprCompare { range: 3..9, left: NumberLiteral( diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_not_nested_spec.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_not_nested_spec.snap index 6b8b11fe88..ba4061fdde 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_not_nested_spec.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_not_nested_spec.snap @@ -14,11 +14,11 @@ expression: parse_ast FString( FString { range: 0..13, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 2..12, - value: Name( + expression: Name( ExprName { range: 3..6, id: "foo", @@ -28,36 +28,17 @@ expression: parse_ast debug_text: None, conversion: None, format_spec: Some( - FString( - ExprFString { - range: 7..11, - value: FStringValue { - inner: Single( - FString( - FString { - range: 7..11, - values: [ - StringLiteral( - ExprStringLiteral { - range: 7..11, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 7..11, - value: "spec", - unicode: false, - }, - ), - }, - }, - ), - ], - }, - ), - ), - }, - }, - ), + FStringFormatSpec { + range: 7..11, + elements: [ + Literal( + FStringLiteralElement { + range: 7..11, + value: "spec", + }, + ), + ], + }, ), }, ), diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_self_doc_prec_space.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_self_doc_prec_space.snap index fd024631ef..7c3ec7583d 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_self_doc_prec_space.snap +++ 
b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_self_doc_prec_space.snap @@ -14,11 +14,11 @@ expression: parse_ast FString( FString { range: 0..10, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 2..9, - value: Name( + expression: Name( ExprName { range: 3..4, id: "x", diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_self_doc_trailing_space.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_self_doc_trailing_space.snap index c1f08c3397..47b37caa7f 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_self_doc_trailing_space.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_self_doc_trailing_space.snap @@ -14,11 +14,11 @@ expression: parse_ast FString( FString { range: 0..10, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 2..9, - value: Name( + expression: Name( ExprName { range: 3..4, id: "x", diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_yield_expr.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_yield_expr.snap index 20ccfbf7e1..39219b2044 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_yield_expr.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_fstring_yield_expr.snap @@ -14,11 +14,11 @@ expression: parse_ast FString( FString { range: 0..10, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 2..9, - value: Yield( + expression: Yield( ExprYield { range: 3..8, value: None, diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_u_f_string_concat_1.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_u_f_string_concat_1.snap index c8495e4b31..f28bda3133 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_u_f_string_concat_1.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_u_f_string_concat_1.snap @@ -22,19 +22,11 @@ expression: parse_ast FString( FString { range: 10..18, - values: [ - StringLiteral( - ExprStringLiteral { + elements: [ + Literal( + FStringLiteralElement { range: 12..17, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 12..17, - value: "world", - unicode: false, - }, - ), - }, + value: "world", }, ), ], diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_u_f_string_concat_2.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_u_f_string_concat_2.snap index 9278f809af..dc6afd4e42 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_u_f_string_concat_2.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__parse_u_f_string_concat_2.snap @@ -22,19 +22,11 @@ expression: parse_ast FString( FString { range: 10..18, - values: [ - StringLiteral( - ExprStringLiteral { + elements: [ + Literal( + FStringLiteralElement { range: 12..17, - value: StringLiteralValue { - inner: Single( - StringLiteral { - range: 12..17, - value: 
"world", - unicode: false, - }, - ), - }, + value: "world", }, ), ], diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__raw_fstring.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__raw_fstring.snap index 100bf1ed55..05fe49bbf7 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__raw_fstring.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__raw_fstring.snap @@ -14,11 +14,11 @@ expression: parse_ast FString( FString { range: 0..7, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 3..6, - value: Name( + expression: Name( ExprName { range: 4..5, id: "x", diff --git a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__triple_quoted_raw_fstring.snap b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__triple_quoted_raw_fstring.snap index 872fe090c8..c772d6a052 100644 --- a/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__triple_quoted_raw_fstring.snap +++ b/crates/ruff_python_parser/src/snapshots/ruff_python_parser__string__tests__triple_quoted_raw_fstring.snap @@ -14,11 +14,11 @@ expression: parse_ast FString( FString { range: 0..11, - values: [ - FormattedValue( - ExprFormattedValue { + elements: [ + Expression( + FStringExpressionElement { range: 5..8, - value: Name( + expression: Name( ExprName { range: 6..7, id: "x", diff --git a/crates/ruff_python_parser/src/string.rs b/crates/ruff_python_parser/src/string.rs index 2d4f2c5df9..ab9106ff31 100644 --- a/crates/ruff_python_parser/src/string.rs +++ b/crates/ruff_python_parser/src/string.rs @@ -202,7 +202,7 @@ impl<'a> StringParser<'a> { Ok(()) } - fn parse_fstring_middle(&mut self) -> Result { + fn parse_fstring_middle(&mut self) -> Result { let mut value = String::new(); while let Some(ch) = self.next_char() { match ch { @@ -239,9 +239,8 @@ impl<'a> StringParser<'a> { ch => value.push(ch), } } - Ok(Expr::from(ast::StringLiteral { + Ok(ast::FStringElement::Literal(ast::FStringLiteralElement { value, - unicode: false, range: self.range, })) } @@ -324,11 +323,11 @@ pub(crate) fn parse_string_literal( StringParser::new(source, kind, start_location, range).parse() } -pub(crate) fn parse_fstring_middle( +pub(crate) fn parse_fstring_literal_element( source: &str, is_raw: bool, range: TextRange, -) -> Result { +) -> Result { let kind = if is_raw { StringKind::RawString } else { diff --git a/crates/ruff_python_semantic/src/analyze/class.rs b/crates/ruff_python_semantic/src/analyze/class.rs new file mode 100644 index 0000000000..3efc01ff33 --- /dev/null +++ b/crates/ruff_python_semantic/src/analyze/class.rs @@ -0,0 +1,57 @@ +use rustc_hash::FxHashSet; + +use ruff_python_ast as ast; +use ruff_python_ast::call_path::CallPath; +use ruff_python_ast::helpers::map_subscript; + +use crate::{BindingId, SemanticModel}; + +/// Return `true` if any base class of a class definition matches a predicate. +pub fn any_over_body( + class_def: &ast::StmtClassDef, + semantic: &SemanticModel, + func: &dyn Fn(CallPath) -> bool, +) -> bool { + fn inner( + class_def: &ast::StmtClassDef, + semantic: &SemanticModel, + func: &dyn Fn(CallPath) -> bool, + seen: &mut FxHashSet, + ) -> bool { + class_def.bases().iter().any(|expr| { + // If the base class itself matches the pattern, then this does too. 
+ // Ex) `class Foo(BaseModel): ...` + if semantic + .resolve_call_path(map_subscript(expr)) + .is_some_and(func) + { + return true; + } + + // If the base class extends a class that matches the pattern, then this does too. + // Ex) `class Bar(BaseModel): ...; class Foo(Bar): ...` + if let Some(id) = semantic.lookup_attribute(map_subscript(expr)) { + if seen.insert(id) { + let binding = semantic.binding(id); + if let Some(base_class) = binding + .kind + .as_class_definition() + .map(|id| &semantic.scopes[*id]) + .and_then(|scope| scope.kind.as_class()) + { + if inner(base_class, semantic, func, seen) { + return true; + } + } + } + } + false + }) + } + + if class_def.bases().is_empty() { + return false; + } + + inner(class_def, semantic, func, &mut FxHashSet::default()) +} diff --git a/crates/ruff_python_semantic/src/analyze/imports.rs b/crates/ruff_python_semantic/src/analyze/imports.rs new file mode 100644 index 0000000000..a4a9ddc134 --- /dev/null +++ b/crates/ruff_python_semantic/src/analyze/imports.rs @@ -0,0 +1,55 @@ +use ruff_python_ast::{self as ast, Expr, Stmt}; + +use crate::SemanticModel; + +/// Returns `true` if a [`Stmt`] is a `sys.path` modification, as in: +/// ```python +/// import sys +/// +/// sys.path.append("../") +/// ``` +pub fn is_sys_path_modification(stmt: &Stmt, semantic: &SemanticModel) -> bool { + let Stmt::Expr(ast::StmtExpr { value, range: _ }) = stmt else { + return false; + }; + let Expr::Call(ast::ExprCall { func, .. }) = value.as_ref() else { + return false; + }; + semantic + .resolve_call_path(func.as_ref()) + .is_some_and(|call_path| { + matches!( + call_path.as_slice(), + [ + "sys", + "path", + "append" + | "insert" + | "extend" + | "remove" + | "pop" + | "clear" + | "reverse" + | "sort" + ] + ) + }) +} + +/// Returns `true` if a [`Stmt`] is a `matplotlib.use` activation, as in: +/// ```python +/// import matplotlib +/// +/// matplotlib.use("Agg") +/// ``` +pub fn is_matplotlib_activation(stmt: &Stmt, semantic: &SemanticModel) -> bool { + let Stmt::Expr(ast::StmtExpr { value, range: _ }) = stmt else { + return false; + }; + let Expr::Call(ast::ExprCall { func, .. 
}) = value.as_ref() else { + return false; + }; + semantic + .resolve_call_path(func.as_ref()) + .is_some_and(|call_path| matches!(call_path.as_slice(), ["matplotlib", "use"])) +} diff --git a/crates/ruff_python_semantic/src/analyze/mod.rs b/crates/ruff_python_semantic/src/analyze/mod.rs index 941309a526..0376f63c39 100644 --- a/crates/ruff_python_semantic/src/analyze/mod.rs +++ b/crates/ruff_python_semantic/src/analyze/mod.rs @@ -1,4 +1,6 @@ +pub mod class; pub mod function_type; +pub mod imports; pub mod logging; pub mod type_inference; pub mod typing; diff --git a/crates/ruff_python_semantic/src/analyze/type_inference.rs b/crates/ruff_python_semantic/src/analyze/type_inference.rs index f5261cd683..427bdddca3 100644 --- a/crates/ruff_python_semantic/src/analyze/type_inference.rs +++ b/crates/ruff_python_semantic/src/analyze/type_inference.rs @@ -323,7 +323,6 @@ impl From<&Expr> for ResolvedPythonType { | Expr::YieldFrom(_) | Expr::Compare(_) | Expr::Call(_) - | Expr::FormattedValue(_) | Expr::Attribute(_) | Expr::Subscript(_) | Expr::Starred(_) diff --git a/crates/ruff_python_semantic/src/analyze/typing.rs b/crates/ruff_python_semantic/src/analyze/typing.rs index bd18f70ba8..2dd7f1003e 100644 --- a/crates/ruff_python_semantic/src/analyze/typing.rs +++ b/crates/ruff_python_semantic/src/analyze/typing.rs @@ -568,3 +568,126 @@ pub fn resolve_assignment<'a>( _ => None, } } + +/// Find the assigned [`Expr`] for a given symbol, if any. +/// +/// For example given: +/// ```python +/// foo = 42 +/// (bar, bla) = 1, "str" +/// ``` +/// +/// This function will return a `NumberLiteral` with value `Int(42)` when called with `foo` and a +/// `StringLiteral` with value `"str"` when called with `bla`. +pub fn find_assigned_value<'a>(symbol: &str, semantic: &'a SemanticModel<'a>) -> Option<&'a Expr> { + let binding_id = semantic.lookup_symbol(symbol)?; + let binding = semantic.binding(binding_id); + match binding.kind { + // Ex) `x := 1` + BindingKind::NamedExprAssignment => { + let parent_id = binding.source?; + let parent = semantic + .expressions(parent_id) + .find_map(|expr| expr.as_named_expr_expr()); + if let Some(ast::ExprNamedExpr { target, value, .. }) = parent { + return match_value(symbol, target.as_ref(), value.as_ref()); + } + } + // Ex) `x = 1` + BindingKind::Assignment => { + let parent_id = binding.source?; + let parent = semantic.statement(parent_id); + match parent { + Stmt::Assign(ast::StmtAssign { value, targets, .. }) => { + if let Some(target) = targets.iter().find(|target| defines(symbol, target)) { + return match_value(symbol, target, value.as_ref()); + } + } + Stmt::AnnAssign(ast::StmtAnnAssign { + value: Some(value), + target, + .. + }) => { + return match_value(symbol, target, value.as_ref()); + } + _ => {} + } + } + _ => {} + } + None +} + +/// Given a target and value, find the value that's assigned to the given symbol. +fn match_value<'a>(symbol: &str, target: &Expr, value: &'a Expr) -> Option<&'a Expr> { + match target { + Expr::Name(ast::ExprName { id, .. }) if id.as_str() == symbol => Some(value), + Expr::Tuple(ast::ExprTuple { elts, .. }) | Expr::List(ast::ExprList { elts, .. }) => { + match value { + Expr::Tuple(ast::ExprTuple { + elts: value_elts, .. + }) + | Expr::List(ast::ExprList { + elts: value_elts, .. + }) + | Expr::Set(ast::ExprSet { + elts: value_elts, .. + }) => get_value_by_id(symbol, elts, value_elts), + _ => None, + } + } + _ => None, + } +} + +/// Returns `true` if the [`Expr`] defines the symbol. 
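Note (not part of this diff): the `find_assigned_value` helper added above resolves a symbol back to the expression assigned to it, including through tuple/list unpacking. A minimal sketch of how a caller might use it follows; the module path `ruff_python_semantic::analyze::typing` is taken from the file being added here, while the wrapper function below is purely hypothetical.

use ruff_python_ast::Expr;
use ruff_python_semantic::analyze::typing::find_assigned_value;
use ruff_python_semantic::SemanticModel;

/// Hypothetical caller: report whether `symbol` was bound to a string literal,
/// e.g. `bla` in `(bar, bla) = 1, "str"` from the doc comment above.
fn is_assigned_string_literal(symbol: &str, semantic: &SemanticModel) -> bool {
    matches!(
        find_assigned_value(symbol, semantic),
        Some(Expr::StringLiteral(_))
    )
}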
+fn defines(symbol: &str, expr: &Expr) -> bool { + match expr { + Expr::Name(ast::ExprName { id, .. }) => id == symbol, + Expr::Tuple(ast::ExprTuple { elts, .. }) + | Expr::List(ast::ExprList { elts, .. }) + | Expr::Set(ast::ExprSet { elts, .. }) => elts.iter().any(|elt| defines(symbol, elt)), + _ => false, + } +} + +fn get_value_by_id<'a>(target_id: &str, targets: &[Expr], values: &'a [Expr]) -> Option<&'a Expr> { + for (target, value) in targets.iter().zip(values.iter()) { + match target { + Expr::Tuple(ast::ExprTuple { + elts: target_elts, .. + }) + | Expr::List(ast::ExprList { + elts: target_elts, .. + }) + | Expr::Set(ast::ExprSet { + elts: target_elts, .. + }) => { + // Collection types can be mismatched like in: (a, b, [c, d]) = [1, 2, {3, 4}] + match value { + Expr::Tuple(ast::ExprTuple { + elts: value_elts, .. + }) + | Expr::List(ast::ExprList { + elts: value_elts, .. + }) + | Expr::Set(ast::ExprSet { + elts: value_elts, .. + }) => { + if let Some(result) = get_value_by_id(target_id, target_elts, value_elts) { + return Some(result); + } + } + _ => (), + }; + } + Expr::Name(ast::ExprName { id, .. }) => { + if *id == target_id { + return Some(value); + } + } + _ => (), + } + } + None +} diff --git a/crates/ruff_python_semantic/src/model.rs b/crates/ruff_python_semantic/src/model.rs index 42617ddaf7..fd20a4714c 100644 --- a/crates/ruff_python_semantic/src/model.rs +++ b/crates/ruff_python_semantic/src/model.rs @@ -291,9 +291,12 @@ impl<'a> SemanticModel<'a> { if let Some(binding_id) = self.scopes.global().get(name.id.as_str()) { if !self.bindings[binding_id].is_unbound() { // Mark the binding as used. - let reference_id = - self.resolved_references - .push(ScopeId::global(), name.range, self.flags); + let reference_id = self.resolved_references.push( + ScopeId::global(), + self.node_id, + name.range, + self.flags, + ); self.bindings[binding_id].references.push(reference_id); // Mark any submodule aliases as used. @@ -302,6 +305,7 @@ impl<'a> SemanticModel<'a> { { let reference_id = self.resolved_references.push( ScopeId::global(), + self.node_id, name.range, self.flags, ); @@ -356,18 +360,24 @@ impl<'a> SemanticModel<'a> { if let Some(binding_id) = scope.get(name.id.as_str()) { // Mark the binding as used. - let reference_id = - self.resolved_references - .push(self.scope_id, name.range, self.flags); + let reference_id = self.resolved_references.push( + self.scope_id, + self.node_id, + name.range, + self.flags, + ); self.bindings[binding_id].references.push(reference_id); // Mark any submodule aliases as used. if let Some(binding_id) = self.resolve_submodule(name.id.as_str(), scope_id, binding_id) { - let reference_id = - self.resolved_references - .push(self.scope_id, name.range, self.flags); + let reference_id = self.resolved_references.push( + self.scope_id, + self.node_id, + name.range, + self.flags, + ); self.bindings[binding_id].references.push(reference_id); } @@ -431,9 +441,12 @@ impl<'a> SemanticModel<'a> { // The `x` in `print(x)` should resolve to the `x` in `x = 1`. BindingKind::UnboundException(Some(binding_id)) => { // Mark the binding as used. - let reference_id = - self.resolved_references - .push(self.scope_id, name.range, self.flags); + let reference_id = self.resolved_references.push( + self.scope_id, + self.node_id, + name.range, + self.flags, + ); self.bindings[binding_id].references.push(reference_id); // Mark any submodule aliases as used. 
@@ -442,6 +455,7 @@ impl<'a> SemanticModel<'a> { { let reference_id = self.resolved_references.push( self.scope_id, + self.node_id, name.range, self.flags, ); @@ -747,6 +761,7 @@ impl<'a> SemanticModel<'a> { { return Some(ImportedName { name: format!("{name}.{member}"), + source, range: self.nodes[source].range(), context: binding.context, }); @@ -771,6 +786,7 @@ impl<'a> SemanticModel<'a> { { return Some(ImportedName { name: (*name).to_string(), + source, range: self.nodes[source].range(), context: binding.context, }); @@ -792,6 +808,7 @@ impl<'a> SemanticModel<'a> { { return Some(ImportedName { name: format!("{name}.{member}"), + source, range: self.nodes[source].range(), context: binding.context, }); @@ -979,6 +996,23 @@ impl<'a> SemanticModel<'a> { &self.nodes[node_id] } + /// Given a [`NodeId`], return its parent, if any. + #[inline] + pub fn parent_expression(&self, node_id: NodeId) -> Option<&'a Expr> { + self.nodes + .ancestor_ids(node_id) + .filter_map(|id| self.nodes[id].as_expression()) + .nth(1) + } + + /// Given a [`NodeId`], return the [`NodeId`] of the parent expression, if any. + pub fn parent_expression_id(&self, node_id: NodeId) -> Option { + self.nodes + .ancestor_ids(node_id) + .filter(|id| self.nodes[*id].is_expression()) + .nth(1) + } + /// Return the [`Stmt`] corresponding to the given [`NodeId`]. #[inline] pub fn statement(&self, node_id: NodeId) -> &'a Stmt { @@ -1005,6 +1039,22 @@ impl<'a> SemanticModel<'a> { .nth(1) } + /// Return the [`Expr`] corresponding to the given [`NodeId`]. + #[inline] + pub fn expression(&self, node_id: NodeId) -> Option<&'a Expr> { + self.nodes + .ancestor_ids(node_id) + .find_map(|id| self.nodes[id].as_expression()) + } + + /// Returns an [`Iterator`] over the expressions, starting from the given [`NodeId`]. + /// through to any parents. + pub fn expressions(&self, node_id: NodeId) -> impl Iterator + '_ { + self.nodes + .ancestor_ids(node_id) + .filter_map(move |id| self.nodes[id].as_expression()) + } + /// Set the [`Globals`] for the current [`Scope`]. pub fn set_globals(&mut self, globals: Globals<'a>) { // If any global bindings don't already exist in the global scope, add them. @@ -1169,17 +1219,17 @@ impl<'a> SemanticModel<'a> { /// Add a reference to the given [`BindingId`] in the local scope. pub fn add_local_reference(&mut self, binding_id: BindingId, range: TextRange) { - let reference_id = self - .resolved_references - .push(self.scope_id, range, self.flags); + let reference_id = + self.resolved_references + .push(self.scope_id, self.node_id, range, self.flags); self.bindings[binding_id].references.push(reference_id); } /// Add a reference to the given [`BindingId`] in the global scope. pub fn add_global_reference(&mut self, binding_id: BindingId, range: TextRange) { - let reference_id = self - .resolved_references - .push(ScopeId::global(), range, self.flags); + let reference_id = + self.resolved_references + .push(ScopeId::global(), self.node_id, range, self.flags); self.bindings[binding_id].references.push(reference_id); } @@ -1282,10 +1332,16 @@ impl<'a> SemanticModel<'a> { .intersects(SemanticModelFlags::TYPING_ONLY_ANNOTATION) } - /// Return `true` if the model is in a runtime-required type annotation. - pub const fn in_runtime_annotation(&self) -> bool { + /// Return `true` if the context is in a runtime-evaluated type annotation. 
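Note (not part of this diff): the new `expression`/`expressions`/`parent_expression` accessors let callers walk outward from a node through its enclosing expressions. A rough sketch under assumptions: the helper name below is hypothetical, and it assumes `expressions` yields `&Expr` items starting from the node itself, as the `ancestor_ids` traversal above suggests.

use ruff_python_ast::Expr;
use ruff_python_semantic::{NodeId, SemanticModel};

/// Hypothetical helper: find the innermost f-string enclosing (or equal to) the
/// expression identified by `node_id`.
fn enclosing_f_string<'a>(semantic: &SemanticModel<'a>, node_id: NodeId) -> Option<&'a Expr> {
    semantic
        .expressions(node_id)
        .find(|expr| matches!(expr, Expr::FString(_)))
}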
+ pub const fn in_runtime_evaluated_annotation(&self) -> bool { self.flags - .intersects(SemanticModelFlags::RUNTIME_ANNOTATION) + .intersects(SemanticModelFlags::RUNTIME_EVALUATED_ANNOTATION) + } + + /// Return `true` if the context is in a runtime-required type annotation. + pub const fn in_runtime_required_annotation(&self) -> bool { + self.flags + .intersects(SemanticModelFlags::RUNTIME_REQUIRED_ANNOTATION) } /// Return `true` if the model is in a type definition. @@ -1359,8 +1415,8 @@ impl<'a> SemanticModel<'a> { } /// Return `true` if the model is in a `typing::Literal` annotation. - pub const fn in_literal(&self) -> bool { - self.flags.intersects(SemanticModelFlags::LITERAL) + pub const fn in_typing_literal(&self) -> bool { + self.flags.intersects(SemanticModelFlags::TYPING_LITERAL) } /// Return `true` if the model is in a subscript expression. @@ -1457,8 +1513,9 @@ impl ShadowedBinding { bitflags! { /// Flags indicating the current model state. #[derive(Debug, Default, Copy, Clone, Eq, PartialEq)] - pub struct SemanticModelFlags: u16 { - /// The model is in a typing-time-only type annotation. + pub struct SemanticModelFlags: u32 { + /// The model is in a type annotation that will only be evaluated when running a type + /// checker. /// /// For example, the model could be visiting `int` in: /// ```python @@ -1473,7 +1530,7 @@ bitflags! { /// are any annotated assignments in module or class scopes. const TYPING_ONLY_ANNOTATION = 1 << 0; - /// The model is in a runtime type annotation. + /// The model is in a type annotation that will be evaluated at runtime. /// /// For example, the model could be visiting `int` in: /// ```python @@ -1487,7 +1544,27 @@ bitflags! { /// If `from __future__ import annotations` is used, all annotations are evaluated at /// typing time. Otherwise, all function argument annotations are evaluated at runtime, as /// are any annotated assignments in module or class scopes. - const RUNTIME_ANNOTATION = 1 << 1; + const RUNTIME_EVALUATED_ANNOTATION = 1 << 1; + + /// The model is in a type annotation that is _required_ to be available at runtime. + /// + /// For example, the context could be visiting `int` in: + /// ```python + /// from pydantic import BaseModel + /// + /// class Foo(BaseModel): + /// x: int + /// ``` + /// + /// In this case, Pydantic requires that the type annotation be available at runtime + /// in order to perform runtime type-checking. + /// + /// Unlike [`RUNTIME_EVALUATED_ANNOTATION`], annotations that are marked as + /// [`RUNTIME_REQUIRED_ANNOTATION`] cannot be deferred to typing time via conversion to a + /// forward reference (e.g., by wrapping the type in quotes), as the annotations are not + /// only required by the Python interpreter, but by runtime type checkers too. + const RUNTIME_REQUIRED_ANNOTATION = 1 << 2; + /// The model is in a type definition. /// @@ -1501,7 +1578,7 @@ bitflags! { /// All type annotations are also type definitions, but the converse is not true. /// In our example, `int` is a type definition but not a type annotation, as it /// doesn't appear in a type annotation context, but rather in a type definition. - const TYPE_DEFINITION = 1 << 2; + const TYPE_DEFINITION = 1 << 3; /// The model is in a (deferred) "simple" string type definition. /// @@ -1512,7 +1589,7 @@ bitflags! { /// /// "Simple" string type definitions are those that consist of a single string literal, /// as opposed to an implicitly concatenated string literal. 
- const SIMPLE_STRING_TYPE_DEFINITION = 1 << 3; + const SIMPLE_STRING_TYPE_DEFINITION = 1 << 4; /// The model is in a (deferred) "complex" string type definition. /// @@ -1523,7 +1600,7 @@ bitflags! { /// /// "Complex" string type definitions are those that consist of a implicitly concatenated /// string literals. These are uncommon but valid. - const COMPLEX_STRING_TYPE_DEFINITION = 1 << 4; + const COMPLEX_STRING_TYPE_DEFINITION = 1 << 5; /// The model is in a (deferred) `__future__` type definition. /// @@ -1536,7 +1613,7 @@ bitflags! { /// /// `__future__`-style type annotations are only enabled if the `annotations` feature /// is enabled via `from __future__ import annotations`. - const FUTURE_TYPE_DEFINITION = 1 << 5; + const FUTURE_TYPE_DEFINITION = 1 << 6; /// The model is in an exception handler. /// @@ -1547,7 +1624,7 @@ bitflags! { /// except Exception: /// x: int = 1 /// ``` - const EXCEPTION_HANDLER = 1 << 6; + const EXCEPTION_HANDLER = 1 << 7; /// The model is in an f-string. /// @@ -1555,7 +1632,7 @@ bitflags! { /// ```python /// f'{x}' /// ``` - const F_STRING = 1 << 7; + const F_STRING = 1 << 8; /// The model is in a boolean test. /// @@ -1567,7 +1644,7 @@ bitflags! { /// /// The implication is that the actual value returned by the current expression is /// not used, only its truthiness. - const BOOLEAN_TEST = 1 << 8; + const BOOLEAN_TEST = 1 << 9; /// The model is in a `typing::Literal` annotation. /// @@ -1576,7 +1653,7 @@ bitflags! { /// def f(x: Literal["A", "B", "C"]): /// ... /// ``` - const LITERAL = 1 << 9; + const TYPING_LITERAL = 1 << 10; /// The model is in a subscript expression. /// @@ -1584,7 +1661,7 @@ bitflags! { /// ```python /// x["a"]["b"] /// ``` - const SUBSCRIPT = 1 << 10; + const SUBSCRIPT = 1 << 11; /// The model is in a type-checking block. /// @@ -1596,7 +1673,7 @@ bitflags! { /// if TYPE_CHECKING: /// x: int = 1 /// ``` - const TYPE_CHECKING_BLOCK = 1 << 11; + const TYPE_CHECKING_BLOCK = 1 << 12; /// The model has traversed past the "top-of-file" import boundary. /// @@ -1609,7 +1686,7 @@ bitflags! { /// /// x: int = 1 /// ``` - const IMPORT_BOUNDARY = 1 << 12; + const IMPORT_BOUNDARY = 1 << 13; /// The model has traversed past the `__future__` import boundary. /// @@ -1624,7 +1701,7 @@ bitflags! { /// /// Python considers it a syntax error to import from `__future__` after /// any other non-`__future__`-importing statements. - const FUTURES_BOUNDARY = 1 << 13; + const FUTURES_BOUNDARY = 1 << 14; /// `__future__`-style type annotations are enabled in this model. /// @@ -1636,7 +1713,7 @@ bitflags! { /// def f(x: int) -> int: /// ... /// ``` - const FUTURE_ANNOTATIONS = 1 << 14; + const FUTURE_ANNOTATIONS = 1 << 15; /// The model is in a type parameter definition. /// @@ -1646,10 +1723,11 @@ bitflags! { /// /// Record = TypeVar("Record") /// - const TYPE_PARAM_DEFINITION = 1 << 15; + const TYPE_PARAM_DEFINITION = 1 << 16; /// The context is in any type annotation. - const ANNOTATION = Self::TYPING_ONLY_ANNOTATION.bits() | Self::RUNTIME_ANNOTATION.bits(); + const ANNOTATION = Self::TYPING_ONLY_ANNOTATION.bits() | Self::RUNTIME_EVALUATED_ANNOTATION.bits() | Self::RUNTIME_REQUIRED_ANNOTATION.bits(); + /// The context is in any string type definition. const STRING_TYPE_DEFINITION = Self::SIMPLE_STRING_TYPE_DEFINITION.bits() @@ -1753,6 +1831,8 @@ pub enum ReadResult { pub struct ImportedName { /// The name to which the imported symbol is bound. name: String, + /// The statement from which the symbol is imported. 
+ source: NodeId, /// The range at which the symbol is imported. range: TextRange, /// The context in which the symbol is imported. @@ -1767,6 +1847,10 @@ impl ImportedName { pub const fn context(&self) -> ExecutionContext { self.context } + + pub fn statement<'a>(&self, semantic: &'a SemanticModel) -> &'a Stmt { + semantic.statement(self.source) + } } impl Ranged for ImportedName { diff --git a/crates/ruff_python_semantic/src/reference.rs b/crates/ruff_python_semantic/src/reference.rs index a963895b21..6bb807e1c8 100644 --- a/crates/ruff_python_semantic/src/reference.rs +++ b/crates/ruff_python_semantic/src/reference.rs @@ -8,11 +8,14 @@ use ruff_text_size::{Ranged, TextRange}; use crate::context::ExecutionContext; use crate::scope::ScopeId; -use crate::{Exceptions, SemanticModelFlags}; +use crate::{Exceptions, NodeId, SemanticModelFlags}; /// A resolved read reference to a name in a program. #[derive(Debug, Clone)] pub struct ResolvedReference { + /// The expression that the reference occurs in. `None` if the reference is a global + /// reference or a reference via an augmented assignment. + node_id: Option<NodeId>, /// The scope in which the reference is defined. scope_id: ScopeId, /// The range of the reference in the source code. @@ -22,6 +25,11 @@ pub struct ResolvedReference { } impl ResolvedReference { + /// The expression that the reference occurs in. + pub const fn expression_id(&self) -> Option<NodeId> { + self.node_id + } + /// The scope in which the reference is defined. pub const fn scope_id(&self) -> ScopeId { self.scope_id @@ -35,6 +43,48 @@ impl ResolvedReference { ExecutionContext::Runtime } } + + /// Return `true` if the context is in a typing-only type annotation. + pub const fn in_typing_only_annotation(&self) -> bool { + self.flags + .intersects(SemanticModelFlags::TYPING_ONLY_ANNOTATION) + } + + /// Return `true` if the context is in a runtime-evaluated type annotation. + pub const fn in_runtime_evaluated_annotation(&self) -> bool { + self.flags + .intersects(SemanticModelFlags::RUNTIME_EVALUATED_ANNOTATION) + } + + /// Return `true` if the context is in a "simple" string type definition. + pub const fn in_simple_string_type_definition(&self) -> bool { + self.flags + .intersects(SemanticModelFlags::SIMPLE_STRING_TYPE_DEFINITION) + } + + /// Return `true` if the context is in a "complex" string type definition. + pub const fn in_complex_string_type_definition(&self) -> bool { + self.flags + .intersects(SemanticModelFlags::COMPLEX_STRING_TYPE_DEFINITION) + } + + /// Return `true` if the context is in a `__future__` type definition. + pub const fn in_future_type_definition(&self) -> bool { + self.flags + .intersects(SemanticModelFlags::FUTURE_TYPE_DEFINITION) + } + + /// Return `true` if the context is in any kind of deferred type definition. + pub const fn in_deferred_type_definition(&self) -> bool { + self.flags + .intersects(SemanticModelFlags::DEFERRED_TYPE_DEFINITION) + } + + /// Return `true` if the context is in a type-checking block.
+ pub const fn in_type_checking_block(&self) -> bool { + self.flags + .intersects(SemanticModelFlags::TYPE_CHECKING_BLOCK) + } } impl Ranged for ResolvedReference { @@ -57,10 +107,12 @@ impl ResolvedReferences { pub(crate) fn push( &mut self, scope_id: ScopeId, + node_id: Option<NodeId>, range: TextRange, flags: SemanticModelFlags, ) -> ResolvedReferenceId { self.0.push(ResolvedReference { + node_id, scope_id, range, flags, diff --git a/crates/ruff_shrinking/Cargo.toml b/crates/ruff_shrinking/Cargo.toml index caa568c997..f655da8bab 100644 --- a/crates/ruff_shrinking/Cargo.toml +++ b/crates/ruff_shrinking/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ruff_shrinking" -version = "0.1.6" +version = "0.1.8" edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html @@ -8,7 +8,7 @@ edition = "2021" [dependencies] anyhow = { workspace = true } clap = { workspace = true } -fs-err = "2.10.0" +fs-err = "2.11.0" regex = { workspace = true } ruff_python_ast = { path = "../ruff_python_ast" } ruff_python_parser = { path = "../ruff_python_parser" } diff --git a/crates/ruff_wasm/Cargo.toml b/crates/ruff_wasm/Cargo.toml index a4998df0fb..861862b54d 100644 --- a/crates/ruff_wasm/Cargo.toml +++ b/crates/ruff_wasm/Cargo.toml @@ -36,12 +36,12 @@ console_log = { version = "1.0.0" } log = { workspace = true } serde = { workspace = true } -serde-wasm-bindgen = { version = "0.6.1" } +serde-wasm-bindgen = { version = "0.6.3" } wasm-bindgen = { version = "0.2.84" } -js-sys = { version = "0.3.65" } +js-sys = { version = "0.3.66" } [dev-dependencies] -wasm-bindgen-test = { version = "0.3.38" } +wasm-bindgen-test = { version = "0.3.39" } [lints] workspace = true diff --git a/crates/ruff_workspace/src/configuration.rs b/crates/ruff_workspace/src/configuration.rs index 4c38249b0e..fe95418e69 100644 --- a/crates/ruff_workspace/src/configuration.rs +++ b/crates/ruff_workspace/src/configuration.rs @@ -34,7 +34,9 @@ use ruff_linter::settings::{ use ruff_linter::{ fs, warn_user, warn_user_once, warn_user_once_by_id, RuleSelector, RUFF_PKG_VERSION, }; -use ruff_python_formatter::{MagicTrailingComma, QuoteStyle}; +use ruff_python_formatter::{ + DocstringCode, DocstringCodeLineWidth, MagicTrailingComma, QuoteStyle, +}; use crate::options::{ Flake8AnnotationsOptions, Flake8BanditOptions, Flake8BugbearOptions, Flake8BuiltinsOptions, @@ -158,12 +160,21 @@ impl Configuration { let format = self.format; let format_defaults = FormatterSettings::default(); + let quote_style = format.quote_style.unwrap_or(format_defaults.quote_style); + let format_preview = match format.preview.unwrap_or(global_preview) { + PreviewMode::Disabled => ruff_python_formatter::PreviewMode::Disabled, + PreviewMode::Enabled => ruff_python_formatter::PreviewMode::Enabled, + }; + + if quote_style == QuoteStyle::Preserve && !format_preview.is_enabled() { + return Err(anyhow!( + "'quote-style = preserve' is a preview only feature. Run with '--preview' to enable it."
+ )); + } + let formatter = FormatterSettings { exclude: FilePatternSet::try_from_iter(format.exclude.unwrap_or_default())?, - preview: match format.preview.unwrap_or(global_preview) { - PreviewMode::Disabled => ruff_python_formatter::PreviewMode::Disabled, - PreviewMode::Enabled => ruff_python_formatter::PreviewMode::Enabled, - }, + preview: format_preview, line_width: self .line_length .map_or(format_defaults.line_width, |length| { @@ -176,10 +187,16 @@ impl Configuration { .map_or(format_defaults.indent_width, |tab_size| { ruff_formatter::IndentWidth::from(NonZeroU8::from(tab_size)) }), - quote_style: format.quote_style.unwrap_or(format_defaults.quote_style), + quote_style, magic_trailing_comma: format .magic_trailing_comma .unwrap_or(format_defaults.magic_trailing_comma), + docstring_code_format: format + .docstring_code_format + .unwrap_or(format_defaults.docstring_code_format), + docstring_code_line_width: format + .docstring_code_line_width + .unwrap_or(format_defaults.docstring_code_line_width), }; let lint = self.lint; @@ -1011,6 +1028,8 @@ pub struct FormatConfiguration { pub quote_style: Option<QuoteStyle>, pub magic_trailing_comma: Option<MagicTrailingComma>, pub line_ending: Option<LineEnding>, + pub docstring_code_format: Option<DocstringCode>, + pub docstring_code_line_width: Option<DocstringCodeLineWidth>, } impl FormatConfiguration { @@ -1037,6 +1056,14 @@ impl FormatConfiguration { } }), line_ending: options.line_ending, + docstring_code_format: options.docstring_code_format.map(|yes| { + if yes { + DocstringCode::Enabled + } else { + DocstringCode::Disabled + } + }), + docstring_code_line_width: options.docstring_code_line_length, }) } @@ -1050,6 +1077,10 @@ impl FormatConfiguration { quote_style: self.quote_style.or(other.quote_style), magic_trailing_comma: self.magic_trailing_comma.or(other.magic_trailing_comma), line_ending: self.line_ending.or(other.line_ending), + docstring_code_format: self.docstring_code_format.or(other.docstring_code_format), + docstring_code_line_width: self + .docstring_code_line_width + .or(other.docstring_code_line_width), } } } diff --git a/crates/ruff_workspace/src/options.rs b/crates/ruff_workspace/src/options.rs index 05f4522710..169b8c3da5 100644 --- a/crates/ruff_workspace/src/options.rs +++ b/crates/ruff_workspace/src/options.rs @@ -27,7 +27,7 @@ use ruff_linter::settings::types::{ }; use ruff_linter::{warn_user_once, RuleSelector}; use ruff_macros::{CombineOptions, OptionsMetadata}; -use ruff_python_formatter::QuoteStyle; +use ruff_python_formatter::{DocstringCodeLineWidth, QuoteStyle}; use crate::settings::LineEnding; @@ -93,8 +93,10 @@ pub struct Options { pub fix: Option<bool>, /// Enable application of unsafe fixes. + /// If excluded, a hint will be displayed when unsafe fixes are available. + /// If set to false, the hint will be hidden. #[option( - default = "false", + default = r#"null"#, value_type = "bool", example = "unsafe-fixes = true" )] @@ -1640,6 +1642,57 @@ pub struct Flake8TypeCheckingOptions { "# )] pub runtime_evaluated_decorators: Option<Vec<String>>, + + /// Whether to add quotes around type annotations, if doing so would allow + /// the corresponding import to be moved into a type-checking block. + /// + /// For example, in the following, Python requires that `Sequence` be + /// available at runtime, despite the fact that it's only used in a type + /// annotation: + /// + /// ```python + /// from collections.abc import Sequence + /// + /// + /// def func(value: Sequence[int]) -> None: + /// ...
+ /// ``` + /// + /// In other words, moving `from collections.abc import Sequence` into an + /// `if TYPE_CHECKING:` block above would cause a runtime error, as the + /// type would no longer be available at runtime. + /// + /// By default, Ruff will respect such runtime semantics and avoid moving + /// the import to prevent such runtime errors. + /// + /// Setting `quote-annotations` to `true` will instruct Ruff to add quotes + /// around the annotation (e.g., `"Sequence[int]"`), which in turn enables + /// Ruff to move the import into an `if TYPE_CHECKING:` block, like so: + /// + /// ```python + /// from typing import TYPE_CHECKING + /// + /// if TYPE_CHECKING: + /// from collections.abc import Sequence + /// + /// + /// def func(value: "Sequence[int]") -> None: + /// ... + /// ``` + /// + /// Note that this setting has no effect when `from __future__ import annotations` + /// is present, as `__future__` annotations are always treated equivalently + /// to quoted annotations. + #[option( + default = "false", + value_type = "bool", + example = r#" + # Add quotes around type annotations, if doing so would allow + # an import to be moved into a type-checking block. + quote-annotations = true + "# + )] + pub quote_annotations: Option<bool>, } impl Flake8TypeCheckingOptions { @@ -1649,8 +1702,9 @@ impl Flake8TypeCheckingOptions { exempt_modules: self .exempt_modules .unwrap_or_else(|| vec!["typing".to_string()]), - runtime_evaluated_base_classes: self.runtime_evaluated_base_classes.unwrap_or_default(), - runtime_evaluated_decorators: self.runtime_evaluated_decorators.unwrap_or_default(), + runtime_required_base_classes: self.runtime_evaluated_base_classes.unwrap_or_default(), + runtime_required_decorators: self.runtime_evaluated_decorators.unwrap_or_default(), + quote_annotations: self.quote_annotations.unwrap_or_default(), } } } @@ -2109,6 +2163,13 @@ impl IsortOptions { warn_user_once!("`sections` is ignored when `no-sections` is set to `true`"); } + // Verify that if `force_sort_within_sections` is `true`, then `lines_between_types` is set to `0`. + let force_sort_within_sections = self.force_sort_within_sections.unwrap_or_default(); + let lines_between_types = self.lines_between_types.unwrap_or_default(); + if force_sort_within_sections && lines_between_types != 0 { + warn_user_once!("`lines-between-types` is ignored when `force-sort-within-sections` is set to `true`"); + } + // Extract any configuration options that deal with user-defined sections.
let mut section_order: Vec<_> = self .section_order @@ -2240,7 +2301,7 @@ impl IsortOptions { required_imports: BTreeSet::from_iter(self.required_imports.unwrap_or_default()), combine_as_imports: self.combine_as_imports.unwrap_or(false), force_single_line: self.force_single_line.unwrap_or(false), - force_sort_within_sections: self.force_sort_within_sections.unwrap_or(false), + force_sort_within_sections, case_sensitive: self.case_sensitive.unwrap_or(false), force_wrap_aliases: self.force_wrap_aliases.unwrap_or(false), detect_same_package: self.detect_same_package.unwrap_or(true), @@ -2263,7 +2324,7 @@ impl IsortOptions { variables: BTreeSet::from_iter(self.variables.unwrap_or_default()), no_lines_before: BTreeSet::from_iter(no_lines_before), lines_after_imports: self.lines_after_imports.unwrap_or(-1), - lines_between_types: self.lines_between_types.unwrap_or_default(), + lines_between_types, forced_separate: Vec::from_iter(self.forced_separate.unwrap_or_default()), section_order, no_sections, @@ -2635,6 +2696,18 @@ pub struct PylintOptions { #[option(default = r"5", value_type = "int", example = r"max-args = 5")] pub max_args: Option<usize>, + /// Maximum number of positional arguments allowed for a function or method definition + /// (see: `PLR0917`). + /// + /// If not specified, defaults to the value of `max-args`. + #[option(default = r"3", value_type = "int", example = r"max-pos-args = 3")] + pub max_positional_args: Option<usize>, + + /// Maximum number of local variables allowed for a function or method body (see: + /// `PLR0914`). + #[option(default = r"15", value_type = "int", example = r"max-locals = 15")] + pub max_locals: Option<usize>, + /// Maximum number of statements allowed for a function or method body (see: /// `PLR0915`). #[option(default = r"50", value_type = "int", example = r"max-statements = 50")] @@ -2663,6 +2736,10 @@ impl PylintOptions { .unwrap_or(defaults.allow_magic_value_types), allow_dunder_method_names: self.allow_dunder_method_names.unwrap_or_default(), max_args: self.max_args.unwrap_or(defaults.max_args), + max_positional_args: self + .max_positional_args + .or(self.max_args) + .unwrap_or(defaults.max_positional_args), max_bool_expr: self.max_bool_expr.unwrap_or(defaults.max_bool_expr), max_returns: self.max_returns.unwrap_or(defaults.max_returns), max_branches: self.max_branches.unwrap_or(defaults.max_branches), @@ -2670,6 +2747,7 @@ impl PylintOptions { max_public_methods: self .max_public_methods .unwrap_or(defaults.max_public_methods), + max_locals: self.max_locals.unwrap_or(defaults.max_locals), } } } @@ -2801,13 +2879,18 @@ pub struct FormatOptions { )] pub indent_style: Option<IndentStyle>, - /// Whether to prefer single `'` or double `"` quotes for strings. Defaults to double quotes. + /// Configures the preferred quote character for strings. Valid options are: + /// + /// * `double` (default): Use double quotes `"` + /// * `single`: Use single quotes `'` + /// * `preserve` (preview only): Keeps the existing quote character. We don't recommend using this option except for projects + /// that already use a mixture of single and double quotes and can't migrate to using double or single quotes. /// /// In compliance with [PEP 8](https://peps.python.org/pep-0008/) and [PEP 257](https://peps.python.org/pep-0257/), /// Ruff prefers double quotes for multiline strings and docstrings, regardless of the /// configured quote style.
/// - /// Ruff may also deviate from this option if using the configured quotes would require + /// Ruff may also deviate from using the configured quotes if doing so requires /// escaping quote characters within the string. For example, given: /// /// ```python @@ -2816,11 +2899,11 @@ pub struct FormatOptions { /// ``` /// /// Ruff will change `a` to use single quotes when using `quote-style = "single"`. However, - /// `b` will be unchanged, as converting to single quotes would require the inner `'` to be - /// escaped, which leads to less readable code: `'It\'s monday morning'`. + /// `b` remains unchanged, as converting to single quotes requires escaping the inner `'`, + /// which leads to less readable code: `'It\'s monday morning'`. This does not apply when using `preserve`. #[option( default = r#"double"#, - value_type = r#""double" | "single""#, + value_type = r#""double" | "single" | "preserve""#, example = r#" # Prefer single quotes over double quotes. quote-style = "single" @@ -2871,6 +2954,156 @@ pub struct FormatOptions { "# )] pub line_ending: Option<LineEnding>, + + /// Whether to format code snippets in docstrings. + /// + /// When this is enabled, Python code examples within docstrings are + /// automatically reformatted. + /// + /// For example, when this is enabled, the following code: + /// + /// ```python + /// def f(x): + /// """ + /// Something about `f`. And an example in doctest format: + /// + /// >>> f( x ) + /// + /// Markdown is also supported: + /// + /// ```py + /// f( x ) + /// ``` + /// + /// As are reStructuredText literal blocks:: + /// + /// f( x ) + /// + /// + /// And reStructuredText code blocks: + /// + /// .. code-block:: python + /// + /// f( x ) + /// """ + /// pass + /// ``` + /// + /// ... will be reformatted (assuming the rest of the options are set to + /// their defaults) as: + /// + /// ```python + /// def f(x): + /// """ + /// Something about `f`. And an example in doctest format: + /// + /// >>> f(x) + /// + /// Markdown is also supported: + /// + /// ```py + /// f(x) + /// ``` + /// + /// As are reStructuredText literal blocks:: + /// + /// f(x) + /// + /// + /// And reStructuredText code blocks: + /// + /// .. code-block:: python + /// + /// f(x) + /// """ + /// pass + /// ``` + /// + /// If a code snippet in a docstring contains invalid Python code or if the + /// formatter would otherwise write invalid Python code, then the code + /// example is ignored by the formatter and kept as-is. + /// + /// Currently, doctest, Markdown, reStructuredText literal blocks, and + /// reStructuredText code blocks are all supported and automatically + /// recognized. In the case of unlabeled fenced code blocks in Markdown and + /// reStructuredText literal blocks, the contents are assumed to be Python + /// and reformatted. As with any other format, if the contents aren't valid + /// Python, then the block is left untouched automatically. + #[option( + default = "false", + value_type = "bool", + example = r#" + # Enable reformatting of code snippets in docstrings. + docstring-code-format = true + "# + )] + pub docstring_code_format: Option<bool>, + + /// Set the line length used when formatting code snippets in docstrings. + /// + /// This only has an effect when the `docstring-code-format` setting is + /// enabled. + /// + /// The default value for this setting is `"dynamic"`, which has the effect + /// of ensuring that any reformatted code examples in docstrings adhere to + /// the global line length configuration that is used for the surrounding + /// Python code.
The point of this setting is that it takes the indentation + /// of the docstring into account when reformatting code examples. + /// + /// Alternatively, this can be set to a fixed integer, which will result + /// in the same line length limit being applied to all reformatted code + /// examples in docstrings. When set to a fixed integer, the indent of the + /// docstring is not taken into account. That is, this may result in lines + /// in the reformatted code example that exceed the globally configured + /// line length limit. + /// + /// For example, when this is set to `20` and `docstring-code-format` is + /// enabled, then this code: + /// + /// ```python + /// def f(x): + /// ''' + /// Something about `f`. And an example: + /// + /// .. code-block:: python + /// + /// foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear) + /// ''' + /// pass + /// ``` + /// + /// ... will be reformatted (assuming the rest of the options are set + /// to their defaults) as: + /// + /// ```python + /// def f(x): + /// """ + /// Something about `f`. And an example: + /// + /// .. code-block:: python + /// + /// ( + /// foo, + /// bar, + /// quux, + /// ) = this_is_a_long_line( + /// lion, + /// hippo, + /// lemur, + /// bear, + /// ) + /// """ + /// pass + /// ``` + #[option( + default = r#""dynamic""#, + value_type = r#"int | "dynamic""#, + example = r#" + # Format all docstring code snippets with a line length of 60. + docstring-code-line-length = 60 + "# + )] + pub docstring_code_line_length: Option, } #[cfg(test)] diff --git a/crates/ruff_workspace/src/settings.rs b/crates/ruff_workspace/src/settings.rs index 982732e487..fe7c82a85e 100644 --- a/crates/ruff_workspace/src/settings.rs +++ b/crates/ruff_workspace/src/settings.rs @@ -5,7 +5,10 @@ use ruff_linter::settings::types::{FilePattern, FilePatternSet, SerializationFor use ruff_linter::settings::LinterSettings; use ruff_macros::CacheKey; use ruff_python_ast::PySourceType; -use ruff_python_formatter::{MagicTrailingComma, PreviewMode, PyFormatOptions, QuoteStyle}; +use ruff_python_formatter::{ + DocstringCode, DocstringCodeLineWidth, MagicTrailingComma, PreviewMode, PyFormatOptions, + QuoteStyle, +}; use ruff_source_file::find_newline; use std::path::{Path, PathBuf}; @@ -86,6 +89,7 @@ pub(crate) static EXCLUDE: &[FilePattern] = &[ FilePattern::Builtin("build"), FilePattern::Builtin("dist"), FilePattern::Builtin("node_modules"), + FilePattern::Builtin("site-packages"), FilePattern::Builtin("venv"), ]; @@ -124,6 +128,9 @@ pub struct FormatterSettings { pub magic_trailing_comma: MagicTrailingComma, pub line_ending: LineEnding, + + pub docstring_code_format: DocstringCode, + pub docstring_code_line_width: DocstringCodeLineWidth, } impl FormatterSettings { @@ -157,6 +164,8 @@ impl FormatterSettings { .with_preview(self.preview) .with_line_ending(line_ending) .with_line_width(self.line_width) + .with_docstring_code(self.docstring_code_format) + .with_docstring_code_line_width(self.docstring_code_line_width) } } @@ -173,6 +182,8 @@ impl Default for FormatterSettings { indent_width: default_options.indent_width(), quote_style: default_options.quote_style(), magic_trailing_comma: default_options.magic_trailing_comma(), + docstring_code_format: default_options.docstring_code(), + docstring_code_line_width: default_options.docstring_code_line_width(), } } } diff --git a/docs/configuration.md b/docs/configuration.md index 85071a439f..ced72b235f 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -21,20 +21,25 @@ If left unspecified, 
Ruff's default configuration is equivalent to: ".git", ".git-rewrite", ".hg", + ".ipynb_checkpoints", ".mypy_cache", ".nox", ".pants.d", + ".pyenv", + ".pytest_cache", ".pytype", ".ruff_cache", ".svn", ".tox", ".venv", + ".vscode", "__pypackages__", "_build", "buck-out", "build", "dist", "node_modules", + "site-packages", "venv", ] @@ -71,6 +76,20 @@ If left unspecified, Ruff's default configuration is equivalent to: # Like Black, automatically detect the appropriate line ending. line-ending = "auto" + + # Enable auto-formatting of code examples in docstrings. Markdown, + # reStructuredText code/literal blocks and doctests are all supported. + # + # This is currently disabled by default, but it is planned for this + # to be opt-out in the future. + docstring-code-format = false + + # Set the line length limit used when formatting code snippets in + # docstrings. + # + # This only has an effect when the `docstring-code-format` setting is + # enabled. + docstring-code-line-length = "dynamic" ``` === "ruff.toml" @@ -84,20 +103,25 @@ If left unspecified, Ruff's default configuration is equivalent to: ".git", ".git-rewrite", ".hg", + ".ipynb_checkpoints", ".mypy_cache", ".nox", ".pants.d", + ".pyenv", + ".pytest_cache", ".pytype", ".ruff_cache", ".svn", ".tox", ".venv", + ".vscode", "__pypackages__", "_build", "buck-out", "build", "dist", "node_modules", + "site-packages", "venv", ] @@ -134,6 +158,20 @@ If left unspecified, Ruff's default configuration is equivalent to: # Like Black, automatically detect the appropriate line ending. line-ending = "auto" + + # Enable auto-formatting of code examples in docstrings. Markdown, + # reStructuredText code/literal blocks and doctests are all supported. + # + # This is currently disabled by default, but it is planned for this + # to be opt-out in the future. + docstring-code-format = false + + # Set the line length limit used when formatting code snippets in + # docstrings. + # + # This only has an effect when the `docstring-code-format` setting is + # enabled. + docstring-code-line-length = "dynamic" ``` As an example, the following would configure Ruff to: @@ -301,21 +339,20 @@ For example, `ruff check /path/to/excluded/file.py` will always lint `file.py`. ### Default inclusions -By default, Ruff will discover files matching `*.py`, `*.ipy`, or `pyproject.toml`. +By default, Ruff will discover files matching `*.py`, `*.ipy`, or `pyproject.toml`. To lint or format files with additional file extensions, use the [`extend-include`](settings.md#extend-include) setting. === "pyproject.toml" ```toml - [tool.ruff.lint] + [tool.ruff] extend-include = ["*.ipynb"] ``` === "ruff.toml" ```toml - [lint] extend-include = ["*.ipynb"] ``` @@ -325,20 +362,19 @@ You can also change the default selection using the [`include`](settings.md#incl === "pyproject.toml" ```toml - [tool.ruff.lint] + [tool.ruff] include = ["pyproject.toml", "src/**/*.py", "scripts/**/*.py"] ``` === "ruff.toml" ```toml - [lint] include = ["pyproject.toml", "src/**/*.py", "scripts/**/*.py"] ``` !!! warning Paths provided to `include` _must_ match files. For example, `include = ["src"]` will fail since it -matches a directory. + matches a directory. 
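As a rough illustration of the warning above, here is a small Python sketch (the file paths are hypothetical, and `fnmatch` only approximates Ruff's actual `globset`-based matching) showing that `include` patterns are tested against file paths, which is why a bare directory name like `src` never matches:

```python
import fnmatch

# Hypothetical file paths discovered during project traversal.
files = ["pyproject.toml", "src/app/main.py", "scripts/run.py"]

for pattern in ["src", "src/**/*.py", "pyproject.toml"]:
    matched = [f for f in files if fnmatch.fnmatch(f, pattern)]
    # "src" matches nothing (it names a directory, not a file), while
    # "src/**/*.py" and "pyproject.toml" match concrete files.
    print(pattern, "->", matched)
```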
## Jupyter Notebook discovery @@ -483,7 +519,7 @@ Options: --ignore-noqa Ignore any `# noqa` comments --output-format - Output serialization format for violations [env: RUFF_OUTPUT_FORMAT=] [possible values: text, json, json-lines, junit, grouped, github, gitlab, pylint, azure] + Output serialization format for violations [env: RUFF_OUTPUT_FORMAT=] [possible values: text, json, json-lines, junit, grouped, github, gitlab, pylint, azure, sarif] -o, --output-file Specify file to write the linter output to (default: stdout) --target-version diff --git a/docs/formatter.md b/docs/formatter.md index bbbc944c00..547f32eee2 100644 --- a/docs/formatter.md +++ b/docs/formatter.md @@ -1,7 +1,10 @@ # The Ruff Formatter The Ruff formatter is an extremely fast Python code formatter designed as a drop-in replacement for -[Black](https://pypi.org/project/black/), available as part of the `ruff` CLI (as of Ruff v0.0.289). +[Black](https://pypi.org/project/black/), available as part of the `ruff` CLI via `ruff format`. + +The Ruff formatter is available as a [production-ready Beta](https://astral.sh/blog/the-ruff-formatter) +as of Ruff v0.1.2. ## `ruff format` @@ -100,10 +103,12 @@ Going forward, the Ruff Formatter will support Black's preview style under Ruff' ## Configuration The Ruff Formatter exposes a small set of configuration options, some of which are also supported -by Black (like line width), some of which are unique to Ruff (like quote and indentation style). +by Black (like line width), some of which are unique to Ruff (like quote, indentation style and +formatting code examples in docstrings). -For example, to configure the formatter to use single quotes, a line width of 100, and -tab indentation, add the following to your configuration file: +For example, to configure the formatter to use single quotes, format code +examples in docstrings, a line width of 100, and tab indentation, add the +following to your configuration file: === "pyproject.toml" @@ -114,6 +119,7 @@ tab indentation, add the following to your configuration file: [tool.ruff.format] quote-style = "single" indent-style = "tab" + docstring-code-format = true ``` === "ruff.toml" @@ -124,6 +130,7 @@ tab indentation, add the following to your configuration file: [format] quote-style = "single" indent-style = "tab" + docstring-code-format = true ``` @@ -134,6 +141,97 @@ Given the focus on Black compatibility (and unlike formatters like [YAPF](https: Ruff does not currently expose any configuration options to modify core formatting behavior outside of these trivia-related settings. +## Docstring formatting + +The Ruff formatter provides an opt-in feature for automatically formatting +Python code examples in docstrings. The Ruff formatter currently recognizes +code examples in the following formats: + +* The Python [doctest] format. +* CommonMark [fenced code blocks] with the following info strings: `python`, +`py`, `python3`, or `py3`. Fenced code blocks without an info string are +assumed to be Python code examples and also formatted. +* reStructuredText [literal blocks]. While literal blocks may contain things +other than Python, this is meant to reflect a long-standing convention in the +Python ecosystem where literal blocks often contain Python code. +* reStructuredText [`code-block` and `sourcecode` directives]. As with +Markdown, the language names recognized for Python are `python`, `py`, +`python3`, or `py3`. 
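To make the formats listed above concrete, here is a minimal, hypothetical docstring (not taken from Ruff's documentation or test suite) that mixes a doctest with a reStructuredText literal block; with `docstring-code-format = true` and otherwise default settings, the formatter would rewrite each embedded snippet (for example, `f( x )` becomes `f(x)`):

```python
def f(x):
    """Do something with `x`.

    A doctest example that the formatter reformats:

        >>> f( x )

    A reStructuredText literal block, conventionally treated as Python::

        f( x )
    """
    return x
```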
+ +If a code example is recognized and treated as Python, the Ruff formatter will +automatically skip it if the code does not parse as valid Python or if the +reformatted code would produce an invalid Python program. + +Users may also configure the line length limit used for reformatting Python +code examples in docstrings. The default is a special value, `dynamic`, which +instructs the formatter to respect the line length limit setting for the +surrounding Python code. The `dynamic` setting ensures that even when code +examples are found inside indented docstrings, the line length limit configured +for the surrounding Python code will not be exceeded. Users may also configure +a fixed line length limit for code examples in docstrings. + +For example, this configuration shows how to enable docstring code formatting +with a fixed line length limit: + +=== "pyproject.toml" + + ```toml + [tool.ruff.format] + docstring-code-format = true + docstring-code-line-length = 20 + ``` + +=== "ruff.toml" + + ```toml + [format] + docstring-code-format = true + docstring-code-line-length = 20 + ``` + +With the above configuration, this code: + +```python +def f(x): + ''' + Something about `f`. And an example: + + .. code-block:: python + + foo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear) + ''' + pass +``` + +... will be reformatted (assuming the rest of the options are set +to their defaults) as: + +```python +def f(x): + """ + Something about `f`. And an example: + + .. code-block:: python + + ( + foo, + bar, + quux, + ) = this_is_a_long_line( + lion, + hippo, + lemur, + bear, + ) + """ + pass +``` + +[doctest]: https://docs.python.org/3/library/doctest.html +[fenced code blocks]: https://spec.commonmark.org/0.30/#fenced-code-blocks +[literal blocks]: https://docutils.sourceforge.io/docs/ref/rst/restructuredtext.html#literal-blocks +[`code-block` and `sourcecode` directives]: https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-code-block + ## Format suppression Like Black, Ruff supports `# fmt: on`, `# fmt: off`, and `# fmt: skip` pragma comments, which can @@ -294,3 +392,15 @@ flag. Black promotes some of its preview styling to stable at the end of each year. Ruff will similarly implement formatting changes under the [`preview`](https://docs.astral.sh/ruff/settings/#preview) flag, promoting them to stable through minor releases, in accordance with our [versioning policy](https://github.com/astral-sh/ruff/discussions/6998#discussioncomment-7016766). + +## Sorting imports + +Currently, the Ruff formatter does not sort imports. In order to both sort imports and format, +call the Ruff linter and then the formatter: + +```shell +ruff check --select I --fix . +ruff format . +``` + +A unified command for both linting and formatting is [planned](https://github.com/astral-sh/ruff/issues/8232). diff --git a/docs/installation.md b/docs/installation.md index e0a1e882e9..a5ef50f350 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -48,6 +48,12 @@ on the testing repositories: apk add ruff ``` +For **openSUSE Tumbleweed** users, Ruff is also available in the distribution repository: + +```shell +sudo zypper install python3-ruff +``` + On **Docker**, it is published as `ghcr.io/astral-sh/ruff`, tagged for each release and `latest` for the latest release. 
diff --git a/docs/integrations.md b/docs/integrations.md index c9babce79c..50ae43c9ca 100644 --- a/docs/integrations.md +++ b/docs/integrations.md @@ -14,7 +14,7 @@ Ruff can be used as a [pre-commit](https://pre-commit.com) hook via [`ruff-pre-c ```yaml - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.1.6 + rev: v0.1.8 hooks: # Run the linter. - id: ruff @@ -27,7 +27,7 @@ To enable lint fixes, add the `--fix` argument to the lint hook: ```yaml - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.1.6 + rev: v0.1.8 hooks: # Run the linter. - id: ruff @@ -41,7 +41,7 @@ To run the hooks over Jupyter Notebooks too, add `jupyter` to the list of allowe ```yaml - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.1.6 + rev: v0.1.8 hooks: # Run the linter. - id: ruff diff --git a/docs/linter.md b/docs/linter.md index 01d258d98c..5594789dad 100644 --- a/docs/linter.md +++ b/docs/linter.md @@ -203,6 +203,9 @@ ruff check . --unsafe-fixes ruff check . --fix --unsafe-fixes ``` +By default, Ruff will display a hint when unsafe fixes are available but not enabled. The suggestion can be silenced +by setting the [`unsafe-fixes`](settings.md#unsafe-fixes) setting to `false` or using the `--no-unsafe-fixes` flag. + The safety of fixes can be adjusted per rule using the [`extend-safe-fixes`](settings.md#extend-safe-fixes) and [`extend-unsafe-fixes`](settings.md#extend-unsafe-fixes) settings. For example, the following configuration would promote unsafe fixes for `F601` to safe fixes and demote safe fixes for `UP034` to unsafe fixes: diff --git a/pyproject.toml b/pyproject.toml index 3d9facb8c9..9a1a059a9a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "maturin" [project] name = "ruff" -version = "0.1.6" +version = "0.1.8" description = "An extremely fast Python linter and code formatter, written in Rust." authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }] readme = "README.md" diff --git a/python/ruff-ecosystem/ruff_ecosystem/check.py b/python/ruff-ecosystem/ruff_ecosystem/check.py index fe5ea0c246..89d4320570 100644 --- a/python/ruff-ecosystem/ruff_ecosystem/check.py +++ b/python/ruff-ecosystem/ruff_ecosystem/check.py @@ -52,6 +52,8 @@ def markdown_check_result(result: Result) -> str: """ Render a `ruff check` ecosystem check result as markdown. 
""" + projects_with_changes = 0 + # Calculate the total number of rule changes all_rule_changes = RuleChanges() project_diffs = { @@ -63,6 +65,9 @@ def markdown_check_result(result: Result) -> str: project_rule_changes[project] = changes = RuleChanges.from_diff(diff) all_rule_changes.update(changes) + if diff: + projects_with_changes += 1 + lines: list[str] = [] total_removed = all_rule_changes.total_removed_violations() total_added = all_rule_changes.total_added_violations() @@ -88,11 +93,17 @@ def markdown_check_result(result: Result) -> str: change_summary = ( f"{markdown_plus_minus(total_added, total_removed)} violations, " f"{markdown_plus_minus(total_added_fixes, total_removed_fixes)} fixes " - f"in {len(result.completed)} projects" + f"in {projects_with_changes} projects" ) if error_count: s = "s" if error_count != 1 else "" change_summary += f"; {error_count} project error{s}" + + unchanged_projects = len(result.completed) - projects_with_changes + if unchanged_projects: + s = "s" if unchanged_projects != 1 else "" + change_summary += f"; {unchanged_projects} project{s} unchanged" + lines.append( f"\u2139\ufe0f ecosystem check **detected linter changes**. ({change_summary})" ) diff --git a/python/ruff-ecosystem/ruff_ecosystem/format.py b/python/ruff-ecosystem/ruff_ecosystem/format.py index 2a15e62920..b448c208da 100644 --- a/python/ruff-ecosystem/ruff_ecosystem/format.py +++ b/python/ruff-ecosystem/ruff_ecosystem/format.py @@ -28,6 +28,7 @@ def markdown_format_result(result: Result) -> str: lines: list[str] = [] total_lines_removed = total_lines_added = 0 total_files_modified = 0 + projects_with_changes = 0 error_count = len(result.errored) patch_sets: list[PatchSet] = [] @@ -39,6 +40,9 @@ def markdown_format_result(result: Result) -> str: patch_sets.append(patch_set) total_files_modified += len(patch_set.modified_files) + if comparison.diff: + projects_with_changes += 1 + if total_lines_removed == 0 and total_lines_added == 0 and error_count == 0: return "\u2705 ecosystem check detected no format changes." @@ -51,11 +55,21 @@ def markdown_format_result(result: Result) -> str: ) else: s = "s" if total_files_modified != 1 else "" - changes = f"+{total_lines_added} -{total_lines_removed} lines in {total_files_modified} file{s} in {len(result.completed)} projects" + changes = ( + f"+{total_lines_added} -{total_lines_removed} lines " + f"in {total_files_modified} file{s} in " + f"{projects_with_changes} projects" + ) + if error_count: s = "s" if error_count != 1 else "" changes += f"; {error_count} project error{s}" + unchanged_projects = len(result.completed) - projects_with_changes + if unchanged_projects: + s = "s" if unchanged_projects != 1 else "" + changes += f"; {unchanged_projects} project{s} unchanged" + lines.append( f"\u2139\ufe0f ecosystem check **detected format changes**. 
({changes})" ) diff --git a/python/ruff-ecosystem/ruff_ecosystem/types.py b/python/ruff-ecosystem/ruff_ecosystem/types.py index 687ab31ce8..e9b9664aee 100644 --- a/python/ruff-ecosystem/ruff_ecosystem/types.py +++ b/python/ruff-ecosystem/ruff_ecosystem/types.py @@ -32,11 +32,15 @@ class Diff(Serializable): line[2:] for line in self.lines if line.startswith("+" + " " * leading_spaces) + # Do not include patch headers + and not line.startswith("+++") ) self.removed = list( line[2:] for line in self.lines if line.startswith("-" + " " * leading_spaces) + # Do not include patch headers + and not line.startswith("---") ) def __bool__(self) -> bool: diff --git a/ruff.schema.json b/ruff.schema.json index b0584cad84..3d566f7964 100644 --- a/ruff.schema.json +++ b/ruff.schema.json @@ -690,7 +690,7 @@ } }, "unsafe-fixes": { - "description": "Enable application of unsafe fixes.", + "description": "Enable application of unsafe fixes. If excluded, a hint will be displayed when unsafe fixes are available. If set to false, the hint will be hidden.", "type": [ "boolean", "null" @@ -747,6 +747,16 @@ } ] }, + "DocstringCodeLineWidth": { + "anyOf": [ + { + "$ref": "#/definitions/LineWidth" + }, + { + "type": "null" + } + ] + }, "Flake8AnnotationsOptions": { "type": "object", "properties": { @@ -1194,6 +1204,13 @@ "type": "string" } }, + "quote-annotations": { + "description": "Whether to add quotes around type annotations, if doing so would allow the corresponding import to be moved into a type-checking block.\n\nFor example, in the following, Python requires that `Sequence` be available at runtime, despite the fact that it's only used in a type annotation:\n\n```python from collections.abc import Sequence\n\ndef func(value: Sequence[int]) -> None: ... ```\n\nIn other words, moving `from collections.abc import Sequence` into an `if TYPE_CHECKING:` block above would cause a runtime error, as the type would no longer be available at runtime.\n\nBy default, Ruff will respect such runtime semantics and avoid moving the import to prevent such runtime errors.\n\nSetting `quote-annotations` to `true` will instruct Ruff to add quotes around the annotation (e.g., `\"Sequence[int]\"`), which in turn enables Ruff to move the import into an `if TYPE_CHECKING:` block, like so:\n\n```python from typing import TYPE_CHECKING\n\nif TYPE_CHECKING: from collections.abc import Sequence\n\ndef func(value: \"Sequence[int]\") -> None: ... ```\n\nNote that this setting has no effect when `from __future__ import annotations` is present, as `__future__` annotations are always treated equivalently to quoted annotations.", + "type": [ + "boolean", + "null" + ] + }, "runtime-evaluated-base-classes": { "description": "Exempt classes that list any of the enumerated classes as a base class from needing to be moved into type-checking blocks.\n\nCommon examples include Pydantic's `pydantic.BaseModel` and SQLAlchemy's `sqlalchemy.orm.DeclarativeBase`, but can also support user-defined classes that inherit from those base classes. 
For example, if you define a common `DeclarativeBase` subclass that's used throughout your project (e.g., `class Base(DeclarativeBase) ...` in `base.py`), you can add it to this list (`runtime-evaluated-base-classes = [\"base.Base\"]`) to exempt models from being moved into type-checking blocks.", "type": [ @@ -1241,6 +1258,24 @@ "description": "Experimental: Configures how `ruff format` formats your code.\n\nPlease provide feedback in [this discussion](https://github.com/astral-sh/ruff/discussions/7310).", "type": "object", "properties": { + "docstring-code-format": { + "description": "Whether to format code snippets in docstrings.\n\nWhen this is enabled, Python code examples within docstrings are automatically reformatted.\n\nFor example, when this is enabled, the following code:\n\n```python def f(x): \"\"\" Something about `f`. And an example in doctest format:\n\n>>> f( x )\n\nMarkdown is also supported:\n\n```py f( x ) ```\n\nAs are reStructuredText literal blocks::\n\nf( x )\n\nAnd reStructuredText code blocks:\n\n.. code-block:: python\n\nf( x ) \"\"\" pass ```\n\n... will be reformatted (assuming the rest of the options are set to their defaults) as:\n\n```python def f(x): \"\"\" Something about `f`. And an example in doctest format:\n\n>>> f(x)\n\nMarkdown is also supported:\n\n```py f(x) ```\n\nAs are reStructuredText literal blocks::\n\nf(x)\n\nAnd reStructuredText code blocks:\n\n.. code-block:: python\n\nf(x) \"\"\" pass ```\n\nIf a code snippt in a docstring contains invalid Python code or if the formatter would otherwise write invalid Python code, then the code example is ignored by the formatter and kept as-is.\n\nCurrently, doctest, Markdown, reStructuredText literal blocks, and reStructuredText code blocks are all supported and automatically recognized. In the case of unlabeled fenced code blocks in Markdown and reStructuredText literal blocks, the contents are assumed to be Python and reformatted. As with any other format, if the contents aren't valid Python, then the block is left untouched automatically.", + "type": [ + "boolean", + "null" + ] + }, + "docstring-code-line-length": { + "description": "Set the line length used when formatting code snippets in docstrings.\n\nThis only has an effect when the `docstring-code-format` setting is enabled.\n\nThe default value for this setting is `\"dynamic\"`, which has the effect of ensuring that any reformatted code examples in docstrings adhere to the global line length configuration that is used for the surrounding Python code. The point of this setting is that it takes the indentation of the docstring into account when reformatting code examples.\n\nAlternatively, this can be set to a fixed integer, which will result in the same line length limit being applied to all reformatted code examples in docstrings. When set to a fixed integer, the indent of the docstring is not taken into account. That is, this may result in lines in the reformatted code example that exceed the globally configured line length limit.\n\nFor example, when this is set to `20` and `docstring-code-format` is enabled, then this code:\n\n```python def f(x): ''' Something about `f`. And an example:\n\n.. code-block:: python\n\nfoo, bar, quux = this_is_a_long_line(lion, hippo, lemur, bear) ''' pass ```\n\n... will be reformatted (assuming the rest of the options are set to their defaults) as:\n\n```python def f(x): \"\"\" Something about `f`. And an example:\n\n.. 
code-block:: python\n\n( foo, bar, quux, ) = this_is_a_long_line( lion, hippo, lemur, bear, ) \"\"\" pass ```", + "anyOf": [ + { + "$ref": "#/definitions/DocstringCodeLineWidth" + }, + { + "type": "null" + } + ] + }, "exclude": { "description": "A list of file patterns to exclude from formatting in addition to the files excluded globally (see [`exclude`](#exclude), and [`extend-exclude`](#extend-exclude)).\n\nExclusions are based on globs, and can be either:\n\n- Single-path patterns, like `.mypy_cache` (to exclude any directory named `.mypy_cache` in the tree), `foo.py` (to exclude any file named `foo.py`), or `foo_*.py` (to exclude any file matching `foo_*.py` ). - Relative patterns, like `directory/foo.py` (to exclude that specific file) or `directory/*.py` (to exclude any Python files in `directory`). Note that these paths are relative to the project root (e.g., the directory containing your `pyproject.toml`).\n\nFor more information on the glob syntax, refer to the [`globset` documentation](https://docs.rs/globset/latest/globset/#syntax).", "type": [ @@ -1281,7 +1316,7 @@ ] }, "quote-style": { - "description": "Whether to prefer single `'` or double `\"` quotes for strings. Defaults to double quotes.\n\nIn compliance with [PEP 8](https://peps.python.org/pep-0008/) and [PEP 257](https://peps.python.org/pep-0257/), Ruff prefers double quotes for multiline strings and docstrings, regardless of the configured quote style.\n\nRuff may also deviate from this option if using the configured quotes would require escaping quote characters within the string. For example, given:\n\n```python a = \"a string without any quotes\" b = \"It's monday morning\" ```\n\nRuff will change `a` to use single quotes when using `quote-style = \"single\"`. However, `b` will be unchanged, as converting to single quotes would require the inner `'` to be escaped, which leads to less readable code: `'It\\'s monday morning'`.", + "description": "Configures the preferred quote character for strings. Valid options are:\n\n* `double` (default): Use double quotes `\"` * `single`: Use single quotes `'` * `preserve` (preview only): Keeps the existing quote character. We don't recommend using this option except for projects that already use a mixture of single and double quotes and can't migrate to using double or single quotes.\n\nIn compliance with [PEP 8](https://peps.python.org/pep-0008/) and [PEP 257](https://peps.python.org/pep-0257/), Ruff prefers double quotes for multiline strings and docstrings, regardless of the configured quote style.\n\nRuff may also deviate from using the configured quotes if doing so requires escaping quote characters within the string. For example, given:\n\n```python a = \"a string without any quotes\" b = \"It's monday morning\" ```\n\nRuff will change `a` to use single quotes when using `quote-style = \"single\"`. However, `b` remains unchanged, as converting to single quotes requires escaping the inner `'`, which leads to less readable code: `'It\\'s monday morning'`. This does not apply when using `preserve`.", "anyOf": [ { "$ref": "#/definitions/QuoteStyle" @@ -1645,6 +1680,12 @@ "maximum": 320.0, "minimum": 1.0 }, + "LineWidth": { + "description": "The maximum visual width to which the formatter should try to limit a line.", + "type": "integer", + "format": "uint16", + "minimum": 1.0 + }, "LintOptions": { "description": "Experimental section to configure Ruff's linting. 
This new section will eventually replace the top-level linting options.\n\nOptions specified in the `lint` section take precedence over the top-level settings.", "type": "object", @@ -2369,6 +2410,24 @@ "format": "uint", "minimum": 0.0 }, + "max-locals": { + "description": "Maximum number of local variables allowed for a function or method body (see: `PLR0914`).", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 0.0 + }, + "max-positional-args": { + "description": "Maximum number of positional arguments allowed for a function or method definition (see: `PLR0917`).\n\nIf not specified, defaults to the value of `max-args`.", + "type": [ + "integer", + "null" + ], + "format": "uint", + "minimum": 0.0 + }, "max-public-methods": { "description": "Maximum number of public methods allowed for a class (see: `PLR0904`).", "type": [ @@ -2432,7 +2491,8 @@ "type": "string", "enum": [ "single", - "double" + "double", + "preserve" ] }, "RelativeImportsOrder": { @@ -2852,6 +2912,7 @@ "FURB105", "FURB11", "FURB113", + "FURB118", "FURB13", "FURB131", "FURB132", @@ -2869,6 +2930,8 @@ "FURB17", "FURB171", "FURB177", + "FURB18", + "FURB181", "G", "G0", "G00", @@ -3115,8 +3178,10 @@ "PLR0911", "PLR0912", "PLR0913", + "PLR0914", "PLR0915", "PLR0916", + "PLR0917", "PLR1", "PLR17", "PLR170", @@ -3508,6 +3573,7 @@ "TCH003", "TCH004", "TCH005", + "TCH006", "TD", "TD0", "TD00", @@ -3643,7 +3709,8 @@ "github", "gitlab", "pylint", - "azure" + "azure", + "sarif" ] }, "Strictness": { diff --git a/scripts/benchmarks/pyproject.toml b/scripts/benchmarks/pyproject.toml index fedde2ac9d..c7b4070e1a 100644 --- a/scripts/benchmarks/pyproject.toml +++ b/scripts/benchmarks/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "scripts" -version = "0.1.6" +version = "0.1.8" description = "" authors = ["Charles Marsh "] diff --git a/scripts/check_ecosystem.py b/scripts/check_ecosystem.py index f45fae776c..f13d623391 100755 --- a/scripts/check_ecosystem.py +++ b/scripts/check_ecosystem.py @@ -128,6 +128,7 @@ REPOSITORIES: list[Repository] = [ Repository("docker", "docker-py", "main"), Repository("freedomofpress", "securedrop", "develop"), Repository("fronzbot", "blinkpy", "dev"), + Repository("binary-husky", "gpt_academic", "master"), Repository("ibis-project", "ibis", "master"), Repository("ing-bank", "probatus", "main"), Repository("jrnl-org", "jrnl", "develop"), diff --git a/scripts/formatter_ecosystem_checks.sh b/scripts/formatter_ecosystem_checks.sh index 7acf0974a1..349e82077c 100755 --- a/scripts/formatter_ecosystem_checks.sh +++ b/scripts/formatter_ecosystem_checks.sh @@ -67,7 +67,7 @@ git -C "$dir/home-assistant" checkout -q 88296c1998fd1943576e0167ab190d25af17525 if [ ! -d "$dir/poetry/.git" ]; then git clone --filter=tree:0 https://github.com/python-poetry/poetry "$dir/poetry" fi -git -C "$dir/poetry" checkout -q f5cb9f0fb19063cf280faf5e39c82d5691da9939 +git -C "$dir/poetry" checkout -q f310a592ad3ab41bb8d635af6bacaf044a1fefef # cpython itself if [ ! -d "$dir/cpython/.git" ]; then @@ -75,12 +75,6 @@ if [ ! -d "$dir/cpython/.git" ]; then fi git -C "$dir/cpython" checkout -q b75186f69edcf54615910a5cd707996144163ef7 -# poetry itself -if [ ! 
-d "$dir/poetry/.git" ]; then - git clone --filter=tree:0 https://github.com/python-poetry/poetry "$dir/poetry" -fi -git -C "$dir/poetry" checkout -q 611033a7335f3c8e2b74dd58688fb9021cf84a5b - # Uncomment if you want to update the hashes #for i in "$dir"/*/; do git -C "$i" switch main && git -C "$i" pull; done #for i in "$dir"/*/; do echo "# $(basename "$i") $(git -C "$i" rev-parse HEAD)"; done