mirror of https://github.com/astral-sh/uv
Merge branch 'main' into ag/package-level-conflict
This commit is contained in: commit dd410d0b1d
@@ -718,7 +718,7 @@ jobs:
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}
           args: --release --locked --out dist --features self-update
-      - uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2
+      - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
         name: "Test wheel"
         with:
           arch: ${{ matrix.platform.arch }}
@@ -767,7 +767,7 @@ jobs:
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
-      - uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2
+      - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
         name: "Test wheel uv-build"
         with:
           arch: ${{ matrix.platform.arch }}

@@ -225,6 +225,7 @@ jobs:
           cat <<EOF > Dockerfile
           FROM ${BASE_IMAGE}
           COPY --from=${{ env.UV_GHCR_IMAGE }}:latest /uv /uvx /usr/local/bin/
+          ENV UV_TOOL_BIN_DIR="/usr/local/bin"
           ENTRYPOINT []
           CMD ["/usr/local/bin/uv"]
           EOF

@@ -82,7 +82,7 @@ jobs:
         run: rustup component add rustfmt

       - name: "Install uv"
-        uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
+        uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1

       - name: "rustfmt"
         run: cargo fmt --all --check
@@ -188,7 +188,7 @@ jobs:
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - name: "Install cargo shear"
-        uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
+        uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
         with:
           tool: cargo-shear
       - run: cargo shear
@@ -213,16 +213,19 @@ jobs:
       - name: "Install Rust toolchain"
         run: rustup show

-      - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
+      - uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
       - name: "Install required Python versions"
         run: uv python install

       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
+        uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
         with:
           tool: cargo-nextest

       - name: "Cargo test"
+        env:
+          # Retry more than default to reduce flakes in CI
+          UV_HTTP_RETRIES: 5
         run: |
           cargo nextest run \
             --features python-patch \
@@ -233,7 +236,7 @@ jobs:
     timeout-minutes: 15
     needs: determine_changes
     # Only run macOS tests on main without opt-in
-    if: ${{ contains(github.event.pull_request.labels.*.name, 'test:macos' || github.ref == 'refs/heads/main') }}
+    if: ${{ contains(github.event.pull_request.labels.*.name, 'test:macos') || github.ref == 'refs/heads/main' }}
     runs-on: macos-latest-xlarge # github-macos-14-aarch64-6
     name: "cargo test | macos"
     steps:
@@ -246,16 +249,19 @@ jobs:
       - name: "Install Rust toolchain"
         run: rustup show

-      - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
+      - uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
       - name: "Install required Python versions"
         run: uv python install

       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
+        uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
         with:
           tool: cargo-nextest

       - name: "Cargo test"
+        env:
+          # Retry more than default to reduce flakes in CI
+          UV_HTTP_RETRIES: 5
         run: |
           cargo nextest run \
             --no-default-features \
@@ -280,7 +286,7 @@ jobs:
         run: |
           Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse

-      - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
+      - uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
       - name: "Install required Python versions"
         run: uv python install

@@ -293,13 +299,15 @@ jobs:
         run: rustup show

       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
+        uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
         with:
           tool: cargo-nextest

       - name: "Cargo test"
         working-directory: ${{ env.UV_WORKSPACE }}
         env:
+          # Retry more than default to reduce flakes in CI
+          UV_HTTP_RETRIES: 5
           # Avoid permission errors during concurrent tests
           # See https://github.com/astral-sh/uv/issues/6940
           UV_LINK_MODE: copy
@@ -344,7 +352,7 @@ jobs:
           rustup component add rust-src --target ${{ matrix.target-arch }}-pc-windows-msvc

       - name: "Install cargo-bloat"
-        uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
+        uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
         with:
           tool: cargo-bloat

@@ -431,7 +439,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           fetch-depth: 0
-      - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
+      - uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
       - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
       - name: "Add SSH key"
         if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
@@ -646,8 +654,8 @@ jobs:
             ${{ env.UV_WORKSPACE }}/target/aarch64-pc-windows-msvc/debug/uvx.exe
           retention-days: 1

-  cargo-build-msrv:
-    name: "cargo build (msrv)"
+  build-binary-msrv:
+    name: "build binary | msrv"
     needs: determine_changes
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
     runs-on: github-ubuntu-24.04-x86_64-8
@@ -1034,7 +1042,7 @@ jobs:

       - name: "Create a virtual environment (uv)"
         run: |
-          ./uv venv -p 3.13t --managed-python
+          ./uv venv -c -p 3.13t --managed-python

       - name: "Check version (uv)"
         run: |
@@ -1079,7 +1087,7 @@ jobs:

       - name: "Create a virtual environment (uv)"
         run: |
-          ./uv venv -p 3.13 --managed-python
+          ./uv venv -c -p 3.13 --managed-python

       - name: "Check version (uv)"
         run: |
@@ -1124,7 +1132,7 @@ jobs:

       - name: "Create a virtual environment (uv)"
         run: |
-          ./uv venv -p 3.13 --managed-python
+          ./uv venv -c -p 3.13 --managed-python

       - name: "Check version (uv)"
         run: |
@@ -1586,7 +1594,7 @@ jobs:
         run: chmod +x ./uv

       - name: "Configure AWS credentials"
-        uses: aws-actions/configure-aws-credentials@f503a1870408dcf2c35d5c2b8a68e69211042c7d
+        uses: aws-actions/configure-aws-credentials@a159d7bb5354cf786f855f2f5d1d8d768d9a08d1
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -1605,12 +1613,12 @@ jobs:

       - name: "Authenticate with GCP"
         id: "auth"
-        uses: "google-github-actions/auth@0920706a19e9d22c3d0da43d1db5939c6ad837a8"
+        uses: "google-github-actions/auth@140bb5113ffb6b65a7e9b937a81fa96cf5064462"
         with:
           credentials_json: "${{ secrets.GCP_SERVICE_ACCOUNT_KEY }}"

       - name: "Set up GCP SDK"
-        uses: "google-github-actions/setup-gcloud@a8b58010a5b2a061afd605f50e88629c9ec7536b"
+        uses: "google-github-actions/setup-gcloud@6a7c903a70c8625ed6700fa299f5ddb4ca6022e9"

       - name: "Get GCP Artifact Registry token"
         id: get_token
@@ -1750,14 +1758,14 @@ jobs:
           ./uv run --no-project python -c "from built_by_uv import greet; print(greet())"

           # Test both `build_wheel` and `build_sdist` through uv
-          ./uv venv -v
+          ./uv venv -c -v
           ./uv build -v --force-pep517 scripts/packages/built-by-uv --find-links crates/uv-build/dist --offline
           ./uv pip install -v scripts/packages/built-by-uv/dist/*.tar.gz --find-links crates/uv-build/dist --offline --no-deps
           ./uv run --no-project python -c "from built_by_uv import greet; print(greet())"

           # Test both `build_wheel` and `build_sdist` through the official `build`
           rm -rf scripts/packages/built-by-uv/dist/
-          ./uv venv -v
+          ./uv venv -c -v
           ./uv pip install build
           # Add the uv binary to PATH for `build` to find
           PATH="$(pwd):$PATH" UV_OFFLINE=1 UV_FIND_LINKS=crates/uv-build/dist ./uv run --no-project python -m build -v --installer uv scripts/packages/built-by-uv
@@ -2253,7 +2261,7 @@ jobs:
           name: uv-windows-aarch64-${{ github.sha }}

       - name: "Validate global Python install"
-        run: py -3.13 ./scripts/check_system_python.py --uv ./uv.exe
+        run: py -3.13-arm64 ./scripts/check_system_python.py --uv ./uv.exe

   # Test our PEP 514 integration that installs Python into the Windows registry.
   system-test-windows-registry:
@@ -2508,7 +2516,7 @@ jobs:
         run: rustup show

       - name: "Install codspeed"
-        uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
+        uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
         with:
           tool: cargo-codspeed

@@ -2524,7 +2532,7 @@ jobs:
         run: cargo codspeed build --profile profiling --features codspeed -p uv-bench

       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@0010eb0ca6e89b80c88e8edaaa07cfe5f3e6664d # v3.5.0
+        uses: CodSpeedHQ/action@c28fe9fbe7d57a3da1b7834ae3761c1d8217612d # v3.7.0
         with:
           run: cargo codspeed run
           token: ${{ secrets.CODSPEED_TOKEN }}
@@ -2545,7 +2553,7 @@ jobs:
         run: rustup show

       - name: "Install codspeed"
-        uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
+        uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
         with:
           tool: cargo-codspeed

@@ -2561,7 +2569,7 @@ jobs:
         run: cargo codspeed build --profile profiling --features codspeed -p uv-bench

       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@0010eb0ca6e89b80c88e8edaaa07cfe5f3e6664d # v3.5.0
+        uses: CodSpeedHQ/action@c28fe9fbe7d57a3da1b7834ae3761c1d8217612d # v3.7.0
         with:
           run: cargo codspeed run
           token: ${{ secrets.CODSPEED_TOKEN }}

@@ -22,7 +22,7 @@ jobs:
       id-token: write
     steps:
       - name: "Install uv"
-        uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
+        uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
       - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
         with:
           pattern: wheels_uv-*
@@ -43,7 +43,7 @@ jobs:
       id-token: write
     steps:
       - name: "Install uv"
-        uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
+        uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
       - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
         with:
           pattern: wheels_uv_build-*

@@ -17,7 +17,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
+      - uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
         with:
           version: "latest"
           enable-cache: true

@@ -42,7 +42,7 @@ repos:
         types_or: [yaml, json5]

   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.12.2
+    rev: v0.12.4
     hooks:
       - id: ruff-format
       - id: ruff

@@ -6,7 +6,6 @@
 3.8.20
 # The following are required for packse scenarios
-3.9.20
 3.9.18
 3.9.12
 # The following is needed for `==3.13` request tests
 3.13.0

CHANGELOG.md

@@ -3,6 +3,226 @@
<!-- prettier-ignore-start -->

## 0.8.0

Since we released uv [0.7.0](https://github.com/astral-sh/uv/releases/tag/0.7.0) in April, we've accumulated various changes that improve correctness and user experience, but could break some workflows. This release contains those changes; many have been marked as breaking out of an abundance of caution. We expect most users to be able to upgrade without making changes.

This release also includes the stabilization of a couple of `uv python install` features, which have been available in preview since late last year.

### Breaking changes

- **Install Python executables into a directory on the `PATH` ([#14626](https://github.com/astral-sh/uv/pull/14626))**

  `uv python install` now installs a versioned Python executable (e.g., `python3.13`) into a directory on the `PATH` (e.g., `~/.local/bin`) by default. This behavior has been available under the `--preview` flag since [Oct 2024](https://github.com/astral-sh/uv/pull/8458). This change should not be breaking unless it shadows a Python executable elsewhere on the `PATH`.

  To install unversioned executables, i.e., `python3` and `python`, use the `--default` flag. The `--default` flag has also been in preview, but is not stabilized in this release.

  Note that these executables point to the base Python installation and only include the standard library. That means they will not include dependencies from your current project (use `uv run python` instead) and you cannot install packages into their environment (use `uvx --with <package> python` instead).

  As with tool installation, the target directory respects common variables like `XDG_BIN_HOME` and can be overridden with a `UV_PYTHON_BIN_DIR` variable.

  You can opt out of this behavior with `uv python install --no-bin` or `UV_PYTHON_INSTALL_BIN=0`.

  See the [documentation on installing Python executables](https://docs.astral.sh/uv/concepts/python-versions/#installing-python-executables) for more details.
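
  As a sketch of the new default flow (the install directory in the comments is illustrative):

  ```console
  $ uv python install 3.13            # installs a `python3.13` link into e.g. ~/.local/bin
  $ uv python install 3.13 --default  # also links `python` and `python3` (still preview)
  $ uv python install 3.13 --no-bin   # opt out of installing executables
  ```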

- **Register Python versions with the Windows Registry ([#14625](https://github.com/astral-sh/uv/pull/14625))**

  `uv python install` now registers the installed Python version with the Windows Registry as specified by [PEP 514](https://peps.python.org/pep-0514/). This allows using uv-installed Python versions via the `py` launcher. This behavior has been available under the `--preview` flag since [Jan 2025](https://github.com/astral-sh/uv/pull/10634). This change should not be breaking, as using the uv Python versions with `py` requires explicit opt-in.

  You can opt out of this behavior with `uv python install --no-registry` or `UV_PYTHON_INSTALL_REGISTRY=0`.
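
  For example, on Windows, a uv-installed interpreter should now appear when listing interpreters with the `py` launcher (output omitted here):

  ```console
  $ uv python install 3.13
  $ py --list
  $ uv python install 3.13 --no-registry  # skip PEP 514 registration
  ```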

- **Prompt before removing an existing directory in `uv venv` ([#14309](https://github.com/astral-sh/uv/pull/14309))**

  Previously, `uv venv` would remove an existing virtual environment without confirmation. While this is consistent with the behavior of project commands (e.g., `uv sync`), it's surprising to users that are using imperative workflows (i.e., `uv pip`). Now, `uv venv` will prompt for confirmation before removing an existing virtual environment. **If not in an interactive context, uv will still remove the virtual environment for backwards compatibility. However, this behavior is likely to change in a future release.**

  The behavior for other commands (e.g., `uv sync`) is unchanged.

  You can opt out of this behavior by setting `UV_VENV_CLEAR=1` or passing the `--clear` flag.
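
  A sketch of the non-interactive opt-out (`-c` is the short form of `--clear`, as used in uv's own CI):

  ```console
  $ uv venv --clear          # replace an existing .venv without prompting
  $ UV_VENV_CLEAR=1 uv venv  # same, via the environment
  ```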

- **Validate that discovered interpreters meet the Python preference ([#7934](https://github.com/astral-sh/uv/pull/7934))**

  uv allows opting out of its managed Python versions with the `--no-managed-python` and `python-preference` options.

  Previously, uv would not enforce this option for Python interpreters discovered on the `PATH`. For example, if a symlink to a managed Python interpreter was created, uv would allow it to be used even if `--no-managed-python` was provided. Now, uv ignores Python interpreters that do not match the Python preference _unless_ they are in an active virtual environment or are explicitly requested, e.g., with `--python /path/to/python3.13`.

  Similarly, uv would previously not invalidate existing project environments if they did not match the Python preference. Now, uv will invalidate and recreate project environments when the Python preference changes.

  You can opt out of this behavior by providing an explicit path to the Python interpreter, or by passing the `--managed-python` / `--no-managed-python` flag matching the interpreter you want.
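
  For example (the interpreter path is illustrative):

  ```console
  $ uv venv --no-managed-python --python 3.12  # only non-managed interpreters are considered
  $ uv venv --python /usr/bin/python3.12       # explicit paths are always honored
  ```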

- **Install dependencies without build systems when they are `path` sources ([#14413](https://github.com/astral-sh/uv/pull/14413))**

  When working on a project, uv uses the [presence of a build system](https://docs.astral.sh/uv/concepts/projects/config/#build-systems) to determine if it should be built and installed into the environment. However, when a project is a dependency of another project, it can be surprising for the dependency to be missing from the environment.

  Previously, uv would not build and install dependencies with [`path` sources](https://docs.astral.sh/uv/concepts/projects/dependencies/#path) unless they declared a build system or set `tool.uv.package = true`. Now, dependencies with `path` sources are built and installed regardless of the presence of a build system. If a build system is not present, the `setuptools.build_meta:__legacy__` backend will be used (per [PEP 517](https://peps.python.org/pep-0517/#source-trees)).

  You can opt out of this behavior by setting `package = false` in the source declaration, e.g.:

  ```toml
  [tool.uv.sources]
  foo = { path = "./foo", package = false }
  ```

  Or, by setting `tool.uv.package = false` in the dependent `pyproject.toml`.

  See the documentation on [virtual dependencies](https://docs.astral.sh/uv/concepts/projects/dependencies/#virtual-dependencies) for details.

- **Install dependencies without build systems when they are workspace members ([#14663](https://github.com/astral-sh/uv/pull/14663))**

  As described above for dependencies with `path` sources, uv previously would not build and install workspace members that did not declare a build system. Now, uv will build and install workspace members that are a dependency of _another_ workspace member regardless of the presence of a build system. The behavior is unchanged for workspace members that are not included in the `project.dependencies`, `project.optional-dependencies`, or `dependency-groups` tables of another workspace member.

  You can opt out of this behavior by setting `tool.uv.package = false` in the workspace member's `pyproject.toml`, as in the sketch below.

  See the documentation on [virtual dependencies](https://docs.astral.sh/uv/concepts/projects/dependencies/#virtual-dependencies) for details.
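
  A minimal sketch of that opt-out in a member's `pyproject.toml` (the `example-member` name is hypothetical):

  ```toml
  [project]
  name = "example-member"
  version = "0.1.0"

  [tool.uv]
  package = false
  ```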

- **Bump `--python-platform linux` to `manylinux_2_28` ([#14300](https://github.com/astral-sh/uv/pull/14300))**

  uv allows performing [platform-specific resolution](https://docs.astral.sh/uv/concepts/resolution/#platform-specific-resolution) for explicit targets and provides short aliases, e.g., `linux`, for common targets.

  Previously, the default target for `--python-platform linux` was `manylinux_2_17`, which is compatible with most Linux distributions from 2014 or newer. We now default to `manylinux_2_28`, which is compatible with most Linux distributions from 2019 or newer. This change follows the lead of other tools, such as `cibuildwheel`, which changed their default to `manylinux_2_28` in [Mar 2025](https://github.com/pypa/cibuildwheel/pull/2330).

  This change only affects users requesting a specific target platform. Otherwise, uv detects the `manylinux` target from your local glibc version.

  You can opt out of this behavior by using `--python-platform x86_64-manylinux_2_17` instead.
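
  For example, to keep resolving against the old baseline (assuming an x86_64 target and a `requirements.in` input):

  ```console
  $ uv pip compile requirements.in --python-platform x86_64-manylinux_2_17
  ```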

- **Remove `uv version` fallback ([#14161](https://github.com/astral-sh/uv/pull/14161))**

  In [Apr 2025](https://github.com/astral-sh/uv/pull/12349), uv changed the `uv version` command to an interface for viewing and updating the version of the current project. However, when outside a project, `uv version` would continue to display uv's version for backwards compatibility. Now, when used outside of a project, `uv version` will fail.

  You cannot opt out of this behavior. Use `uv self version` instead.
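
  In short:

  ```console
  $ uv self version  # uv's own version
  $ uv version       # the current project's version; errors outside a project
  ```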

- **Require `--global` for removal of the global Python pin ([#14169](https://github.com/astral-sh/uv/pull/14169))**

  Previously, `uv python pin --rm` would allow you to remove the global Python pin without opt-in. Now, uv requires the `--global` flag to remove the global Python pin.

  You cannot opt out of this behavior. Use the `--global` flag instead.
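
  That is:

  ```console
  $ uv python pin --rm           # removes a local (project) pin only
  $ uv python pin --rm --global  # required to remove the global pin
  ```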

- **Support conflicting editable settings across groups ([#14197](https://github.com/astral-sh/uv/pull/14197))**

  Previously, uv would always treat a package as editable if any requirement requested it as editable. However, this prevented users from declaring `path` sources that toggled the `editable` setting across dependency groups. Now, uv allows declaring different `editable` values for conflicting groups. However, if a project includes a path dependency twice, once with `editable = true` and once without any editable annotation, those are now considered conflicting, and uv will exit with an error.

  You cannot opt out of this behavior. Use consistent `editable` settings or [mark groups as conflicting](https://docs.astral.sh/uv/concepts/projects/config/#conflicting-dependencies).
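
  A sketch of the now-supported shape, assuming `foo` is a local package and the two groups are declared as conflicting (names are hypothetical):

  ```toml
  [dependency-groups]
  dev = ["foo"]
  release = ["foo"]

  [tool.uv]
  conflicts = [[{ group = "dev" }, { group = "release" }]]

  [tool.uv.sources]
  foo = [
    { path = "./foo", editable = true, group = "dev" },
    { path = "./foo", editable = false, group = "release" },
  ]
  ```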

- **Make `uv_build` the default build backend in `uv init` ([#14661](https://github.com/astral-sh/uv/pull/14661))**

  The uv build backend (`uv_build`) was [stabilized in uv 0.7.19](https://github.com/astral-sh/uv/releases/tag/0.7.19). Now, it is the default build backend for `uv init --package` and `uv init --lib`. Previously, `hatchling` was the default build backend. A build backend is still not used without opt-in in `uv init`, but we expect to change this in a future release.

  You can opt out of this behavior with `uv init --build-backend hatchling`.
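
  The generated `[build-system]` table now looks roughly like this (the version bounds are illustrative):

  ```toml
  [build-system]
  requires = ["uv_build>=0.8.0,<0.9.0"]
  build-backend = "uv_build"
  ```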

- **Set default `UV_TOOL_BIN_DIR` on Docker images ([#13391](https://github.com/astral-sh/uv/pull/13391))**

  Previously, `UV_TOOL_BIN_DIR` was not set in Docker images, which meant that `uv tool install` did not install tools into a directory on the `PATH` without additional configuration. Now, `UV_TOOL_BIN_DIR` is set to `/usr/local/bin` in all Docker-derived images.

  When the default image user is overridden (e.g., `USER <UID>`) with a less privileged user, this may cause `uv tool install` to fail.

  You can opt out of this behavior by setting an alternative `UV_TOOL_BIN_DIR`.

- **Update `--check` to return an exit code of 1 ([#14167](https://github.com/astral-sh/uv/pull/14167))**

  uv uses an exit code of 1 to indicate a "successful failure" and an exit code of 2 to indicate an "error".

  Previously, `uv lock --check` and `uv sync --check` would exit with a code of 2 when the lockfile or environment were outdated. Now, uv will exit with a code of 1.

  You cannot opt out of this behavior.
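
  The distinction matters for CI scripts that branch on the cause of a failure; a sketch:

  ```console
  $ uv lock --check
  $ echo $?  # 1 if the lockfile is outdated, 2 on an actual error
  ```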

- **Use an ephemeral environment for `uv run --with` invocations ([#14447](https://github.com/astral-sh/uv/pull/14447))**

  When using `uv run --with`, uv layers the requirements requested using `--with` into another virtual environment and caches it. Previously, uv would invoke the Python interpreter in this layered environment. However, this allows poisoning the cached environment and introduces race conditions for concurrent invocations. Now, uv will layer _another_ empty virtual environment on top of the cached environment and invoke the Python interpreter there. This should only cause breakage in cases where the environment is being inspected at runtime.

  You cannot opt out of this behavior.
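
  Invocation is unchanged; only the environment layering differs (`rich` here is just an example package):

  ```console
  $ uv run --with rich python -c "import rich; print(rich.__name__)"
  ```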

- **Restructure the `uv venv` command output and exit codes ([#14546](https://github.com/astral-sh/uv/pull/14546))**

  Previously, uv used `miette` to format the `uv venv` output. However, this was inconsistent with most of the uv CLI. Now, the output is a little different and the exit code has switched from 1 to 2 for some error cases.

  You cannot opt out of this behavior.

- **Default to `--workspace` when adding subdirectories ([#14529](https://github.com/astral-sh/uv/pull/14529))**

  When using `uv add` to add a subdirectory in a workspace, uv now defaults to adding the target as a workspace member.

  You can opt out of this behavior by providing `--no-workspace`.
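
  For example (the `packages/utils` path is hypothetical):

  ```console
  $ uv add ./packages/utils                 # added as a workspace member
  $ uv add --no-workspace ./packages/utils  # added as a plain path source
  ```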

- **Add missing validations for disallowed `uv.toml` fields ([#14322](https://github.com/astral-sh/uv/pull/14322))**

  uv does not allow some settings in the `uv.toml`. Previously, some settings were silently ignored when present in the `uv.toml`. Now, uv will error.

  You cannot opt out of this behavior. Use `--no-config` or remove the invalid settings.

### Configuration

- Add support for toggling Python bin and registry install options via env vars ([#14662](https://github.com/astral-sh/uv/pull/14662))

## 0.7.22

### Python

- Upgrade GraalPy to 24.2.2

  See the [GraalPy release notes](https://github.com/oracle/graalpython/releases/tag/graal-24.2.2) for more details.

### Configuration

- Add `UV_COMPILE_BYTECODE_TIMEOUT` environment variable ([#14369](https://github.com/astral-sh/uv/pull/14369))
- Allow users to override index `cache-control` headers ([#14620](https://github.com/astral-sh/uv/pull/14620))
- Add `UV_LIBC` to override libc selection in multi-libc environments ([#14646](https://github.com/astral-sh/uv/pull/14646))

### Bug fixes

- Fix `--all-arches` when paired with `--only-downloads` ([#14629](https://github.com/astral-sh/uv/pull/14629))
- Skip Windows Python interpreters that return a broken MSIX package code ([#14636](https://github.com/astral-sh/uv/pull/14636))
- Warn on invalid `uv.toml` when provided via direct path ([#14653](https://github.com/astral-sh/uv/pull/14653))
- Improve async signal safety in the Windows exception handler ([#14619](https://github.com/astral-sh/uv/pull/14619))

### Documentation

- Mention the `revision` in the lockfile versioning doc ([#14634](https://github.com/astral-sh/uv/pull/14634))
- Move "Conflicting dependencies" to the "Resolution" page ([#14633](https://github.com/astral-sh/uv/pull/14633))
- Rename the "Dependency specifiers" section to exclude the PEP 508 reference ([#14631](https://github.com/astral-sh/uv/pull/14631))
- Suggest `uv cache clean` prior to `--reinstall` ([#14659](https://github.com/astral-sh/uv/pull/14659))

### Preview features

- Make preview Python registration on Windows non-fatal ([#14614](https://github.com/astral-sh/uv/pull/14614))
- Update preview installation of Python executables to be non-fatal ([#14612](https://github.com/astral-sh/uv/pull/14612))
- Add `uv python update-shell` ([#14627](https://github.com/astral-sh/uv/pull/14627))

## 0.7.21

### Python

- Restore the SQLite `fts4`, `fts5`, `rtree`, and `geopoly` extensions on macOS and Linux

  See the [`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250712) for more details.

### Enhancements

- Add `--python-platform` to `uv sync` ([#14320](https://github.com/astral-sh/uv/pull/14320))
- Support pre-releases in `uv version --bump` ([#13578](https://github.com/astral-sh/uv/pull/13578))
- Add a `-w` shorthand for `--with` ([#14530](https://github.com/astral-sh/uv/pull/14530))
- Add an exception handler on Windows to display information on crash ([#14582](https://github.com/astral-sh/uv/pull/14582))
- Add a hint when Python downloads are disabled ([#14522](https://github.com/astral-sh/uv/pull/14522))
- Add `UV_HTTP_RETRIES` to customize retry counts ([#14544](https://github.com/astral-sh/uv/pull/14544))
- Follow leaf symlinks matched by globs in `cache-key` ([#13438](https://github.com/astral-sh/uv/pull/13438))
- Support parent path components (`..`) in globs in `cache-key` ([#13469](https://github.com/astral-sh/uv/pull/13469))
- Improve `cache-key` performance ([#13469](https://github.com/astral-sh/uv/pull/13469))

### Preview features

- Add `uv sync --output-format json` ([#13689](https://github.com/astral-sh/uv/pull/13689))

### Bug fixes

- Do not re-resolve with a new Python version in `uv tool` if it is incompatible with `--python` ([#14606](https://github.com/astral-sh/uv/pull/14606))

### Documentation

- Document how to nest dependency groups with `include-group` ([#14539](https://github.com/astral-sh/uv/pull/14539))
- Fix a repeated word in the Pyodide doc ([#14554](https://github.com/astral-sh/uv/pull/14554))
- Update CONTRIBUTING.md with instructions to format Markdown files via Docker ([#14246](https://github.com/astral-sh/uv/pull/14246))
- Fix the version number for `setup-python` ([#14533](https://github.com/astral-sh/uv/pull/14533))

## 0.7.20

### Python

@@ -165,6 +165,13 @@ After making changes to the documentation, format the markdown files with:
npx prettier --prose-wrap always --write "**/*.md"
```

Note that the command above requires Node.js and npm to be installed on your system. As an alternative, you can run this command using Docker:

```console
$ docker run --rm -v .:/src/ -w /src/ node:alpine npx prettier --prose-wrap always --write "**/*.md"
```

## Releases

Releases can only be performed by Astral team members.

@@ -251,7 +251,7 @@ dependencies = [
 [[package]]
 name = "async_zip"
 version = "0.0.17"
-source = "git+https://github.com/charliermarsh/rs-async-zip?rev=c909fda63fcafe4af496a07bfda28a5aae97e58d#c909fda63fcafe4af496a07bfda28a5aae97e58d"
+source = "git+https://github.com/astral-sh/rs-async-zip?rev=c909fda63fcafe4af496a07bfda28a5aae97e58d#c909fda63fcafe4af496a07bfda28a5aae97e58d"
 dependencies = [
  "async-compression",
  "crc32fast",
@@ -619,9 +619,9 @@ dependencies = [

 [[package]]
 name = "clap"
-version = "4.5.40"
+version = "4.5.41"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "40b6887a1d8685cebccf115538db5c0efe625ccac9696ad45c409d96566e910f"
+checksum = "be92d32e80243a54711e5d7ce823c35c41c9d929dc4ab58e1276f625841aadf9"
 dependencies = [
  "clap_builder",
  "clap_derive",
@@ -629,9 +629,9 @@ dependencies = [

 [[package]]
 name = "clap_builder"
-version = "4.5.40"
+version = "4.5.41"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e0c66c08ce9f0c698cbce5c0279d0bb6ac936d8674174fe48f736533b964f59e"
+checksum = "707eab41e9622f9139419d573eca0900137718000c517d47da73045f54331c3d"
 dependencies = [
  "anstream",
  "anstyle",
@@ -672,9 +672,9 @@ dependencies = [

 [[package]]
 name = "clap_derive"
-version = "4.5.40"
+version = "4.5.41"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d2c7947ae4cc3d851207c1adb5b5e260ff0cca11446b1d6d1423788e442257ce"
+checksum = "ef4f52386a59ca4c860f7393bcf8abd8dfd91ecccc0f774635ff68e92eeef491"
 dependencies = [
  "heck",
  "proc-macro2",
@@ -690,9 +690,9 @@ checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"

 [[package]]
 name = "codspeed"
-version = "3.0.2"
+version = "3.0.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "922018102595f6668cdd09c03f4bff2d951ce2318c6dca4fe11bdcb24b65b2bf"
+checksum = "a7524e02ff6173bc143d9abc01b518711b77addb60de871bbe5686843f88fb48"
 dependencies = [
  "anyhow",
  "bincode",
@@ -708,9 +708,9 @@ dependencies = [

 [[package]]
 name = "codspeed-criterion-compat"
-version = "3.0.2"
+version = "3.0.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "24d8ad82d2383cb74995f58993cbdd2914aed57b2f91f46580310dd81dc3d05a"
+checksum = "2f71662331c4f854131a42b95055f3f8cbca53640348985f699635b1f96d8c26"
 dependencies = [
  "codspeed",
  "codspeed-criterion-compat-walltime",
@@ -719,9 +719,9 @@ dependencies = [

 [[package]]
 name = "codspeed-criterion-compat-walltime"
-version = "3.0.2"
+version = "3.0.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "61badaa6c452d192a29f8387147888f0ab358553597c3fe9bf8a162ef7c2fa64"
+checksum = "e3c9bd9e895e0aa263d139a8b5f58a4ea4abb86d5982ec7f58d3c7b8465c1e01"
 dependencies = [
  "anes",
  "cast",
@@ -761,7 +761,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c"
 dependencies = [
  "lazy_static",
- "windows-sys 0.59.0",
+ "windows-sys 0.48.0",
 ]

 [[package]]
@@ -788,10 +788,22 @@ dependencies = [
  "encode_unicode",
  "libc",
  "once_cell",
  "unicode-width 0.2.1",
  "windows-sys 0.59.0",
 ]

+[[package]]
+name = "console"
+version = "0.16.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e09ced7ebbccb63b4c65413d821f2e00ce54c5ca4514ddc6b3c892fdbcbc69d"
+dependencies = [
+ "encode_unicode",
+ "libc",
+ "once_cell",
+ "unicode-width 0.2.1",
+ "windows-sys 0.60.2",
+]
+
 [[package]]
 name = "core-foundation"
 version = "0.10.0"
@@ -1240,9 +1252,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"

 [[package]]
 name = "foldhash"
-version = "0.1.4"
+version = "0.1.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f"
+checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"

 [[package]]
 name = "fontconfig-parser"
@@ -1703,9 +1715,9 @@ dependencies = [

 [[package]]
 name = "hyper-util"
-version = "0.1.14"
+version = "0.1.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc2fdfdbff08affe55bb779f33b053aa1fe5dd5b54c257343c17edfa55711bdb"
+checksum = "7f66d5bd4c6f02bf0542fad85d626775bab9258cf795a4256dcaf3161114d1df"
 dependencies = [
  "base64 0.22.1",
  "bytes",
@@ -1910,14 +1922,14 @@ dependencies = [

 [[package]]
 name = "indicatif"
-version = "0.17.11"
+version = "0.18.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "183b3088984b400f4cfac3620d5e076c84da5364016b4f49473de574b2586235"
+checksum = "70a646d946d06bedbbc4cac4c218acf4bbf2d87757a784857025f4d447e4e1cd"
 dependencies = [
- "console",
- "number_prefix",
+ "console 0.16.0",
  "portable-atomic",
  "unicode-width 0.2.1",
+ "unit-prefix",
  "web-time",
 ]

@@ -1933,7 +1945,7 @@ version = "1.43.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "154934ea70c58054b556dd430b99a98c2a7ff5309ac9891597e339b5c28f4371"
 dependencies = [
- "console",
+ "console 0.15.11",
  "once_cell",
  "pest",
  "pest_derive",
@@ -1942,6 +1954,17 @@ dependencies = [
  "similar",
 ]

+[[package]]
+name = "io-uring"
+version = "0.7.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b86e202f00093dcba4275d4636b93ef9dd75d025ae560d2521b45ea28ab49013"
+dependencies = [
+ "bitflags 2.9.1",
+ "cfg-if",
+ "libc",
+]
+
 [[package]]
 name = "ipnet"
 version = "2.11.0"
@@ -2454,12 +2477,6 @@ dependencies = [
  "libc",
 ]

-[[package]]
-name = "number_prefix"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
-
 [[package]]
 name = "object"
 version = "0.36.7"
@@ -3016,7 +3033,7 @@ checksum = "78c81d000a2c524133cc00d2f92f019d399e57906c3b7119271a2495354fe895"
 dependencies = [
  "cfg-if",
  "libc",
- "rustix 1.0.7",
+ "rustix 1.0.8",
  "windows 0.61.1",
 ]

@@ -3322,9 +3339,9 @@ dependencies = [

 [[package]]
 name = "rustix"
-version = "1.0.7"
+version = "1.0.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266"
+checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8"
 dependencies = [
  "bitflags 2.9.1",
  "errno",
@@ -3567,9 +3584,9 @@ dependencies = [

 [[package]]
 name = "serde_json"
-version = "1.0.140"
+version = "1.0.141"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
+checksum = "30b9eff21ebe718216c6ec64e1d9ac57087aad11efc64e32002bce4a0d4c03d3"
 dependencies = [
  "itoa",
  "memchr",
@@ -3579,9 +3596,9 @@ dependencies = [

 [[package]]
 name = "serde_spanned"
-version = "0.6.9"
+version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3"
+checksum = "40734c41988f7306bb04f0ecf60ec0f3f1caa34290e4e8ea471dcd3346483b83"
 dependencies = [
  "serde",
 ]
@@ -3729,9 +3746,9 @@ dependencies = [

 [[package]]
 name = "spdx"
-version = "0.10.8"
+version = "0.10.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "58b69356da67e2fc1f542c71ea7e654a361a79c938e4424392ecf4fa065d2193"
+checksum = "c3e17e880bafaeb362a7b751ec46bdc5b61445a188f80e0606e68167cd540fa3"
 dependencies = [
  "smallvec",
 ]
@@ -3912,7 +3929,7 @@ dependencies = [
  "fastrand",
  "getrandom 0.3.1",
  "once_cell",
- "rustix 1.0.7",
+ "rustix 1.0.8",
  "windows-sys 0.59.0",
 ]

@@ -4134,17 +4151,19 @@ source = "git+https://github.com/astral-sh/tl.git?rev=6e25b2ee2513d75385101a8ff9

 [[package]]
 name = "tokio"
-version = "1.45.1"
+version = "1.46.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "75ef51a33ef1da925cea3e4eb122833cb377c61439ca401b770f54902b806779"
+checksum = "0cc3a2344dafbe23a245241fe8b09735b521110d30fcefbbd5feb1797ca35d17"
 dependencies = [
  "backtrace",
  "bytes",
+ "io-uring",
  "libc",
  "mio",
  "parking_lot",
  "pin-project-lite",
  "signal-hook-registry",
+ "slab",
  "socket2",
  "tokio-macros",
  "windows-sys 0.52.0",
@@ -4199,44 +4218,58 @@ dependencies = [

 [[package]]
 name = "toml"
-version = "0.8.23"
+version = "0.9.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362"
+checksum = "ed0aee96c12fa71097902e0bb061a5e1ebd766a6636bb605ba401c45c1650eac"
 dependencies = [
+ "foldhash",
  "indexmap",
  "serde",
  "serde_spanned",
  "toml_datetime",
- "toml_edit",
+ "toml_parser",
+ "toml_writer",
+ "winnow",
 ]

 [[package]]
 name = "toml_datetime"
-version = "0.6.11"
+version = "0.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c"
+checksum = "bade1c3e902f58d73d3f294cd7f20391c1cb2fbcb643b73566bc773971df91e3"
 dependencies = [
  "serde",
 ]

 [[package]]
 name = "toml_edit"
-version = "0.22.27"
+version = "0.23.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a"
+checksum = "d1dee9dc43ac2aaf7d3b774e2fba5148212bf2bd9374f4e50152ebe9afd03d42"
 dependencies = [
  "indexmap",
  "serde",
  "serde_spanned",
  "toml_datetime",
- "toml_write",
+ "toml_parser",
+ "toml_writer",
  "winnow",
 ]

 [[package]]
-name = "toml_write"
-version = "0.1.2"
+name = "toml_parser"
+version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801"
+checksum = "97200572db069e74c512a14117b296ba0a80a30123fbbb5aa1f4a348f639ca30"
+dependencies = [
+ "winnow",
+]
+
+[[package]]
+name = "toml_writer"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fcc842091f2def52017664b53082ecbbeb5c7731092bad69d2c63050401dfd64"

 [[package]]
 name = "tower"
@@ -4510,6 +4543,12 @@ version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c"

+[[package]]
+name = "unit-prefix"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "323402cff2dd658f39ca17c789b502021b3f18707c91cdf22e3838e1b4023817"
+
 [[package]]
 name = "unsafe-libyaml"
 version = "0.2.11"
@@ -4608,17 +4647,18 @@ dependencies = [

 [[package]]
 name = "uv"
-version = "0.7.20"
+version = "0.8.0"
 dependencies = [
  "anstream",
  "anyhow",
  "arrayvec",
  "assert_cmd",
  "assert_fs",
  "axoupdater",
  "base64 0.22.1",
  "byteorder",
  "clap",
- "console",
+ "console 0.15.11",
  "ctrlc",
  "dotenvy",
  "dunce",
@@ -4670,7 +4710,6 @@ dependencies = [
  "uv-client",
  "uv-configuration",
- "uv-console",
  "uv-dirs",
  "uv-dispatch",
  "uv-distribution",
  "uv-distribution-filename",
@@ -4709,6 +4748,8 @@ dependencies = [
  "walkdir",
  "which",
  "whoami",
+ "windows 0.59.0",
+ "windows-result 0.3.4",
  "wiremock",
  "zip",
 ]
@@ -4772,7 +4813,7 @@ dependencies = [

 [[package]]
 name = "uv-build"
-version = "0.7.20"
+version = "0.8.0"
 dependencies = [
  "anyhow",
  "uv-build-backend",
@@ -4790,6 +4831,7 @@ dependencies = [
  "indoc",
+ "insta",
  "itertools 0.14.0",
  "regex",
  "rustc-hash",
  "schemars",
  "serde",
@@ -4881,10 +4923,12 @@ dependencies = [
 name = "uv-cache-info"
 version = "0.0.1"
 dependencies = [
  "anyhow",
  "fs-err 3.1.1",
+ "globwalk",
  "schemars",
  "serde",
+ "tempfile",
  "thiserror 2.0.12",
  "toml",
  "tracing",
@@ -5025,7 +5069,7 @@ dependencies = [
 name = "uv-console"
 version = "0.0.1"
 dependencies = [
- "console",
+ "console 0.15.11",
 ]

 [[package]]
@@ -5259,7 +5303,7 @@ dependencies = [
  "junction",
  "path-slash",
  "percent-encoding",
- "rustix 1.0.7",
+ "rustix 1.0.8",
  "same-file",
  "schemars",
  "serde",
@@ -5662,7 +5706,7 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "configparser",
- "console",
+ "console 0.15.11",
  "fs-err 3.1.1",
  "futures",
  "rustc-hash",
@@ -5963,24 +6007,28 @@ dependencies = [

 [[package]]
 name = "uv-version"
-version = "0.7.20"
+version = "0.8.0"

 [[package]]
 name = "uv-virtualenv"
 version = "0.0.4"
 dependencies = [
+ "console 0.15.11",
  "fs-err 3.1.1",
  "itertools 0.14.0",
  "owo-colors",
  "pathdiff",
  "self-replace",
  "thiserror 2.0.12",
  "tracing",
  "uv-configuration",
+ "uv-console",
  "uv-fs",
  "uv-pypi-types",
  "uv-python",
  "uv-shell",
  "uv-version",
  "uv-warnings",
 ]

 [[package]]
@@ -6250,7 +6298,7 @@ checksum = "d3fabb953106c3c8eea8306e4393700d7657561cb43122571b172bbfb7c7ba1d"
 dependencies = [
  "env_home",
  "regex",
- "rustix 1.0.7",
+ "rustix 1.0.8",
  "winsafe",
 ]

@@ -6293,7 +6341,7 @@ version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
 dependencies = [
- "windows-sys 0.59.0",
+ "windows-sys 0.48.0",
 ]

 [[package]]
@@ -6319,7 +6367,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7f919aee0a93304be7f62e8e5027811bbba96bcb1de84d6618be56e43f8a32a1"
 dependencies = [
  "windows-core 0.59.0",
- "windows-targets 0.53.0",
+ "windows-targets 0.53.2",
 ]

 [[package]]
@@ -6366,7 +6414,7 @@ dependencies = [
  "windows-interface 0.59.1",
  "windows-result 0.3.4",
  "windows-strings 0.3.1",
- "windows-targets 0.53.0",
+ "windows-targets 0.53.2",
 ]

 [[package]]
@@ -6537,6 +6585,15 @@ dependencies = [
  "windows-targets 0.52.6",
 ]

+[[package]]
+name = "windows-sys"
+version = "0.60.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
+dependencies = [
+ "windows-targets 0.53.2",
+]
+
 [[package]]
 name = "windows-targets"
 version = "0.48.5"
@@ -6570,9 +6627,9 @@ dependencies = [

 [[package]]
 name = "windows-targets"
-version = "0.53.0"
+version = "0.53.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b1e4c7e8ceaaf9cb7d7507c974735728ab453b67ef8f18febdd7c11fe59dca8b"
+checksum = "c66f69fcc9ce11da9966ddb31a40968cad001c5bedeb5c2b82ede4253ab48aef"
 dependencies = [
  "windows_aarch64_gnullvm 0.53.0",
  "windows_aarch64_msvc 0.53.0",

Cargo.toml

@@ -75,12 +75,13 @@ uv-workspace = { path = "crates/uv-workspace" }
 anstream = { version = "0.6.15" }
 anyhow = { version = "1.0.89" }
+arcstr = { version = "1.2.0" }
 arrayvec = { version = "0.7.6" }
 astral-tokio-tar = { version = "0.5.1" }
 async-channel = { version = "2.3.1" }
 async-compression = { version = "0.4.12", features = ["bzip2", "gzip", "xz", "zstd"] }
 async-trait = { version = "0.1.82" }
 async_http_range_reader = { version = "0.9.1" }
-async_zip = { git = "https://github.com/charliermarsh/rs-async-zip", rev = "c909fda63fcafe4af496a07bfda28a5aae97e58d", features = ["bzip2", "deflate", "lzma", "tokio", "xz", "zstd"] }
+async_zip = { git = "https://github.com/astral-sh/rs-async-zip", rev = "c909fda63fcafe4af496a07bfda28a5aae97e58d", features = ["bzip2", "deflate", "lzma", "tokio", "xz", "zstd"] }
 axoupdater = { version = "0.9.0", default-features = false }
 backon = { version = "1.3.0" }
 base64 = { version = "0.22.1" }
@@ -116,7 +117,7 @@ home = { version = "0.5.9" }
 html-escape = { version = "0.2.13" }
 http = { version = "1.1.0" }
 indexmap = { version = "2.5.0" }
-indicatif = { version = "0.17.8" }
+indicatif = { version = "0.18.0" }
 indoc = { version = "2.0.5" }
 itertools = { version = "0.14.0" }
 jiff = { version = "0.2.0", features = ["serde"] }
@@ -171,8 +172,8 @@ tl = { git = "https://github.com/astral-sh/tl.git", rev = "6e25b2ee2513d75385101
 tokio = { version = "1.40.0", features = ["fs", "io-util", "macros", "process", "rt", "signal", "sync"] }
 tokio-stream = { version = "0.1.16" }
 tokio-util = { version = "0.7.12", features = ["compat", "io"] }
-toml = { version = "0.8.19" }
-toml_edit = { version = "0.22.21", features = ["serde"] }
+toml = { version = "0.9.2", features = ["fast_hash"] }
+toml_edit = { version = "0.23.2", features = ["serde"] }
 tracing = { version = "0.1.40" }
 tracing-durations-export = { version = "0.3.0", features = ["plot"] }
 tracing-subscriber = { version = "0.3.18", features = ["env-filter", "json", "registry"] }
@@ -184,7 +185,7 @@ url = { version = "2.5.2", features = ["serde"] }
 version-ranges = { git = "https://github.com/astral-sh/pubgrub", rev = "06ec5a5f59ffaeb6cf5079c6cb184467da06c9db" }
 walkdir = { version = "2.5.0" }
 which = { version = "8.0.0", features = ["regex"] }
-windows = { version = "0.59.0", features = ["Win32_Storage_FileSystem"] }
+windows = { version = "0.59.0", features = ["Win32_Globalization", "Win32_System_Console", "Win32_System_Kernel", "Win32_System_Diagnostics_Debug", "Win32_Storage_FileSystem"] }
 windows-core = { version = "0.59.0" }
 windows-registry = { version = "0.5.0" }
 windows-result = { version = "0.3.0" }

@@ -15,6 +15,7 @@ mod credentials;
 mod index;
 mod keyring;
 mod middleware;
+mod providers;
 mod realm;

 // TODO(zanieb): Consider passing a cache explicitly throughout

@@ -7,6 +7,7 @@ use reqwest::{Request, Response};
 use reqwest_middleware::{Error, Middleware, Next};
 use tracing::{debug, trace, warn};

+use crate::providers::HuggingFaceProvider;
 use crate::{
     CREDENTIALS_CACHE, CredentialsCache, KeyringProvider,
     cache::FetchUrl,
@@ -457,9 +458,8 @@ impl AuthMiddleware {
             Some(credentials)
         };

-        return self
-            .complete_request(credentials, request, extensions, next, auth_policy)
-            .await;
+        self.complete_request(credentials, request, extensions, next, auth_policy)
+            .await
     }

     /// Fetch credentials for a URL.
@@ -503,6 +503,13 @@ impl AuthMiddleware {
             return credentials;
         }

+        // Support for known providers, like Hugging Face.
+        if let Some(credentials) = HuggingFaceProvider::credentials_for(url).map(Arc::new) {
+            debug!("Found Hugging Face credentials for {url}");
+            self.cache().fetches.done(key, Some(credentials.clone()));
+            return Some(credentials);
+        }
+
         // Netrc support based on: <https://github.com/gribouille/netrc>.
         let credentials = if let Some(credentials) = self.netrc.get().and_then(|netrc| {
             debug!("Checking netrc for credentials for {url}");

@@ -0,0 +1,49 @@
+use std::sync::LazyLock;
+use tracing::debug;
+use url::Url;
+
+use uv_static::EnvVars;
+
+use crate::Credentials;
+use crate::realm::{Realm, RealmRef};
+
+/// The [`Realm`] for the Hugging Face platform.
+static HUGGING_FACE_REALM: LazyLock<Realm> = LazyLock::new(|| {
+    let url = Url::parse("https://huggingface.co").expect("Failed to parse Hugging Face URL");
+    Realm::from(&url)
+});
+
+/// The authentication token for the Hugging Face platform, if set.
+static HUGGING_FACE_TOKEN: LazyLock<Option<Vec<u8>>> = LazyLock::new(|| {
+    // Extract the Hugging Face token from the environment variable, if it exists.
+    let hf_token = std::env::var(EnvVars::HF_TOKEN)
+        .ok()
+        .map(String::into_bytes)
+        .filter(|token| !token.is_empty())?;
+
+    if std::env::var_os(EnvVars::UV_NO_HF_TOKEN).is_some() {
+        debug!("Ignoring Hugging Face token from environment due to `UV_NO_HF_TOKEN`");
+        return None;
+    }
+
+    debug!("Found Hugging Face token in environment");
+    Some(hf_token)
+});
+
+/// A provider for authentication credentials for the Hugging Face platform.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub(crate) struct HuggingFaceProvider;
+
+impl HuggingFaceProvider {
+    /// Returns the credentials for the Hugging Face platform, if available.
+    pub(crate) fn credentials_for(url: &Url) -> Option<Credentials> {
+        if RealmRef::from(url) == *HUGGING_FACE_REALM {
+            if let Some(token) = HUGGING_FACE_TOKEN.as_ref() {
+                return Some(Credentials::Bearer {
+                    token: token.clone(),
+                });
+            }
+        }
+        None
+    }
+}
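
Taken together with the middleware change above: requests to the `https://huggingface.co` realm pick up `HF_TOKEN` as a bearer credential, and `UV_NO_HF_TOKEN` suppresses it. A minimal sketch (the package URL is hypothetical):

```console
$ export HF_TOKEN=hf_example  # illustrative token value
$ uv pip install "pkg @ https://huggingface.co/example/resolve/main/pkg-1.0-py3-none-any.whl"
$ UV_NO_HF_TOKEN=1 uv pip install "pkg @ https://huggingface.co/example/resolve/main/pkg-1.0-py3-none-any.whl"  # token ignored
```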

@@ -1,5 +1,5 @@
+use std::hash::{Hash, Hasher};
 use std::{fmt::Display, fmt::Formatter};

 use url::Url;
 use uv_small_str::SmallString;
@@ -22,7 +22,7 @@ use uv_small_str::SmallString;
 // The port is only allowed to differ if it matches the "default port" for the scheme.
 // However, `url` (and therefore `reqwest`) sets the `port` to `None` if it matches the default port
 // so we do not need any special handling here.
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone)]
 pub(crate) struct Realm {
     scheme: SmallString,
     host: Option<SmallString>,
@@ -59,6 +59,76 @@ impl Display for Realm {
     }
 }

+impl PartialEq for Realm {
+    fn eq(&self, other: &Self) -> bool {
+        RealmRef::from(self) == RealmRef::from(other)
+    }
+}
+
+impl Eq for Realm {}
+
+impl Hash for Realm {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        RealmRef::from(self).hash(state);
+    }
+}
+
+/// A reference to a [`Realm`] that can be used for zero-allocation comparisons.
+#[derive(Debug, Copy, Clone)]
+pub(crate) struct RealmRef<'a> {
+    scheme: &'a str,
+    host: Option<&'a str>,
+    port: Option<u16>,
+}
+
+impl<'a> From<&'a Url> for RealmRef<'a> {
+    fn from(url: &'a Url) -> Self {
+        Self {
+            scheme: url.scheme(),
+            host: url.host_str(),
+            port: url.port(),
+        }
+    }
+}
+
+impl PartialEq for RealmRef<'_> {
+    fn eq(&self, other: &Self) -> bool {
+        self.scheme == other.scheme && self.host == other.host && self.port == other.port
+    }
+}
+
+impl Eq for RealmRef<'_> {}
+
+impl Hash for RealmRef<'_> {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.scheme.hash(state);
+        self.host.hash(state);
+        self.port.hash(state);
+    }
+}
+
+impl<'a> PartialEq<RealmRef<'a>> for Realm {
+    fn eq(&self, rhs: &RealmRef<'a>) -> bool {
+        RealmRef::from(self) == *rhs
+    }
+}
+
+impl PartialEq<Realm> for RealmRef<'_> {
+    fn eq(&self, rhs: &Realm) -> bool {
+        *self == RealmRef::from(rhs)
+    }
+}
+
+impl<'a> From<&'a Realm> for RealmRef<'a> {
+    fn from(realm: &'a Realm) -> Self {
+        Self {
+            scheme: &realm.scheme,
+            host: realm.host.as_deref(),
+            port: realm.port,
+        }
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use url::{ParseError, Url};
|
||||
|
|
|
|||
|
|
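Since `RealmRef` borrows scheme, host, and port, equality checks between URLs and realms allocate nothing. A minimal sketch of the intended pattern (crate-internal, as `RealmRef` is `pub(crate)`):

```rust
use url::Url;

// Borrow scheme/host/port from each URL; no `Realm` is constructed.
fn same_realm(a: &Url, b: &Url) -> bool {
    RealmRef::from(a) == RealmRef::from(b)
}

fn sketch() {
    let a = Url::parse("https://example.com/simple").unwrap();
    let b = Url::parse("https://example.com:443/other").unwrap();
    // `url` normalizes the default port to `None`, so these compare equal,
    // matching the comment on `Realm` above.
    assert!(same_realm(&a, &b));
}
```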
@@ -86,8 +86,8 @@ mod resolver {
    use uv_cache::Cache;
    use uv_client::RegistryClient;
    use uv_configuration::{
        BuildOptions, Concurrency, ConfigSettings, Constraints, IndexStrategy, PreviewMode,
        SourceStrategy,
        BuildOptions, Concurrency, ConfigSettings, Constraints, IndexStrategy,
        PackageConfigSettings, PreviewMode, SourceStrategy,
    };
    use uv_dispatch::{BuildDispatch, SharedState};
    use uv_distribution::DistributionDatabase;

@@ -144,6 +144,7 @@ mod resolver {
    let build_options = BuildOptions::default();
    let concurrency = Concurrency::default();
    let config_settings = ConfigSettings::default();
    let config_settings_package = PackageConfigSettings::default();
    let exclude_newer = Some(
        jiff::civil::date(2024, 9, 1)
            .to_zoned(jiff::tz::TimeZone::UTC)

@@ -184,6 +185,7 @@ mod resolver {
    state,
    IndexStrategy::default(),
    &config_settings,
    &config_settings_package,
    build_isolation,
    LinkMode::default(),
    &build_options,
@@ -57,4 +57,5 @@ schemars = ["dep:schemars", "uv-pypi-types/schemars"]
[dev-dependencies]
indoc = { workspace = true }
insta = { version = "1.40.0", features = ["filters"] }
regex = { workspace = true }
tempfile = { workspace = true }
@@ -355,6 +355,7 @@ mod tests {
    use indoc::indoc;
    use insta::assert_snapshot;
    use itertools::Itertools;
    use regex::Regex;
    use sha2::Digest;
    use std::io::{BufReader, Read};
    use std::iter;

@@ -362,6 +363,8 @@ mod tests {
    use uv_distribution_filename::{SourceDistFilename, WheelFilename};
    use uv_fs::{copy_dir_all, relative_to};

    const MOCK_UV_VERSION: &str = "1.0.0+test";

    fn format_err(err: &Error) -> String {
        let context = iter::successors(std::error::Error::source(&err), |&err| err.source())
            .map(|err| format!("  Caused by: {err}"))

@@ -388,19 +391,19 @@ mod tests {
    fn build(source_root: &Path, dist: &Path) -> Result<BuildResults, Error> {
        // Build a direct wheel, capture all its properties to compare it with the indirect wheel
        // later, and remove it since it has the same filename as the indirect wheel.
        let (_name, direct_wheel_list_files) = list_wheel(source_root, "1.0.0+test")?;
        let direct_wheel_filename = build_wheel(source_root, dist, None, "1.0.0+test")?;
        let (_name, direct_wheel_list_files) = list_wheel(source_root, MOCK_UV_VERSION)?;
        let direct_wheel_filename = build_wheel(source_root, dist, None, MOCK_UV_VERSION)?;
        let direct_wheel_path = dist.join(direct_wheel_filename.to_string());
        let direct_wheel_contents = wheel_contents(&direct_wheel_path);
        let direct_wheel_hash = sha2::Sha256::digest(fs_err::read(&direct_wheel_path)?);
        fs_err::remove_file(&direct_wheel_path)?;

        // Build a source distribution.
        let (_name, source_dist_list_files) = list_source_dist(source_root, "1.0.0+test")?;
        let (_name, source_dist_list_files) = list_source_dist(source_root, MOCK_UV_VERSION)?;
        // TODO(konsti): This should run in the unpacked source dist tempdir, but we need to
        // normalize the path.
        let (_name, wheel_list_files) = list_wheel(source_root, "1.0.0+test")?;
        let source_dist_filename = build_source_dist(source_root, dist, "1.0.0+test")?;
        let (_name, wheel_list_files) = list_wheel(source_root, MOCK_UV_VERSION)?;
        let source_dist_filename = build_source_dist(source_root, dist, MOCK_UV_VERSION)?;
        let source_dist_path = dist.join(source_dist_filename.to_string());
        let source_dist_contents = sdist_contents(&source_dist_path);

@@ -414,7 +417,7 @@ mod tests {
        source_dist_filename.name.as_dist_info_name(),
        source_dist_filename.version
    ));
    let wheel_filename = build_wheel(&sdist_top_level_directory, dist, None, "1.0.0+test")?;
    let wheel_filename = build_wheel(&sdist_top_level_directory, dist, None, MOCK_UV_VERSION)?;
    let wheel_contents = wheel_contents(&dist.join(wheel_filename.to_string()));

    // Check that direct and indirect wheels are identical.

@@ -515,14 +518,14 @@ mod tests {
    ] {
        copy_dir_all(built_by_uv.join(dir), src.path().join(dir)).unwrap();
    }
    for dir in [
    for filename in [
        "pyproject.toml",
        "README.md",
        "uv.lock",
        "LICENSE-APACHE",
        "LICENSE-MIT",
    ] {
        fs_err::copy(built_by_uv.join(dir), src.path().join(dir)).unwrap();
        fs_err::copy(built_by_uv.join(filename), src.path().join(filename)).unwrap();
    }

    // Clear executable bit on Unix to build the same archive between Unix and Windows.

@@ -539,6 +542,14 @@ mod tests {
        fs_err::set_permissions(&path, perms).unwrap();
    }

    // Redact the uv_build version to keep the hash stable across releases
    let pyproject_toml = fs_err::read_to_string(src.path().join("pyproject.toml")).unwrap();
    let current_requires =
        Regex::new(r#"requires = \["uv_build>=[0-9.]+,<[0-9.]+"\]"#).unwrap();
    let mocked_requires = r#"requires = ["uv_build>=1,<2"]"#;
    let pyproject_toml = current_requires.replace(pyproject_toml.as_str(), mocked_requires);
    fs_err::write(src.path().join("pyproject.toml"), pyproject_toml.as_bytes()).unwrap();

    // Add some files to be excluded
    let module_root = src.path().join("src").join("built_by_uv");
    fs_err::create_dir_all(module_root.join("__pycache__")).unwrap();
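The redaction step normalizes the only version-dependent bytes in the archive so the SHA-256 snapshot below stays stable across releases. A miniature of the same substitution (the regex is copied from the hunk; the sample input is invented):

```rust
use regex::Regex;

fn sketch() {
    let pyproject = r#"requires = ["uv_build>=0.8.0,<0.9.0"]"#;
    let current = Regex::new(r#"requires = \["uv_build>=[0-9.]+,<[0-9.]+"\]"#).unwrap();
    // Replace the real bounds with fixed placeholder bounds before hashing.
    let redacted = current.replace(pyproject, r#"requires = ["uv_build>=1,<2"]"#);
    assert_eq!(redacted, r#"requires = ["uv_build>=1,<2"]"#);
}
```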
@@ -557,7 +568,7 @@ mod tests {
    // Check that the source dist is reproducible across platforms.
    assert_snapshot!(
        format!("{:x}", sha2::Sha256::digest(fs_err::read(&source_dist_path).unwrap())),
        @"dab46bcc4d66960a11cfdc19604512a8e1a3241a67536f7e962166760e9c575c"
        @"871d1f859140721b67cbeaca074e7a2740c88c38028d0509eba87d1285f1da9e"
    );
    // Check both the files we report and the actual files
    assert_snapshot!(format_file_list(build.source_dist_list_files, src.path()), @r"
@@ -669,7 +680,7 @@ mod tests {
    license = { file = "license.txt" }

    [build-system]
    requires = ["uv_build>=0.5.15,<0.6"]
    requires = ["uv_build>=0.5.15,<0.6.0"]
    build-backend = "uv_build"
    "#
},

@@ -737,7 +748,7 @@ mod tests {
    version = "1.0.0"

    [build-system]
    requires = ["uv_build>=0.5.15,<0.6"]
    requires = ["uv_build>=0.5.15,<0.6.0"]
    build-backend = "uv_build"
    "#
},

@@ -801,7 +812,7 @@ mod tests {
    version = "1.0.0"

    [build-system]
    requires = ["uv_build>=0.5.15,<0.6"]
    requires = ["uv_build>=0.5.15,<0.6.0"]
    build-backend = "uv_build"

    [tool.uv.build-backend]

@@ -843,7 +854,7 @@ mod tests {
    version = "1.0.0"

    [build-system]
    requires = ["uv_build>=0.5.15,<0.6"]
    requires = ["uv_build>=0.5.15,<0.6.0"]
    build-backend = "uv_build"

    [tool.uv.build-backend]

@@ -868,7 +879,7 @@ mod tests {
    version = "1.0.0"

    [build-system]
    requires = ["uv_build>=0.5.15,<0.6"]
    requires = ["uv_build>=0.5.15,<0.6.0"]
    build-backend = "uv_build"

    [tool.uv.build-backend]

@@ -917,7 +928,7 @@ mod tests {
    version = "1.0.0"

    [build-system]
    requires = ["uv_build>=0.5.15,<0.6"]
    requires = ["uv_build>=0.5.15,<0.6.0"]
    build-backend = "uv_build"

    [tool.uv.build-backend]

@@ -948,7 +959,7 @@ mod tests {
    version = "1.0.0"

    [build-system]
    requires = ["uv_build>=0.5.15,<0.6"]
    requires = ["uv_build>=0.5.15,<0.6.0"]
    build-backend = "uv_build"
    "#
};

@@ -999,7 +1010,7 @@ mod tests {
    version = "1.0.0"

    [build-system]
    requires = ["uv_build>=0.5.15,<0.6"]
    requires = ["uv_build>=0.5.15,<0.6.0"]
    build-backend = "uv_build"

    [tool.uv.build-backend]

@@ -1025,7 +1036,7 @@ mod tests {
    module-name = "simple_namespace.part"

    [build-system]
    requires = ["uv_build>=0.5.15,<0.6"]
    requires = ["uv_build>=0.5.15,<0.6.0"]
    build-backend = "uv_build"
    "#
};

@@ -1093,7 +1104,7 @@ mod tests {
    namespace = true

    [build-system]
    requires = ["uv_build>=0.5.15,<0.6"]
    requires = ["uv_build>=0.5.15,<0.6.0"]
    build-backend = "uv_build"
    "#
};

@@ -1116,7 +1127,7 @@ mod tests {
    namespace = true

    [build-system]
    requires = ["uv_build>=0.5.15,<0.6"]
    requires = ["uv_build>=0.5.15,<0.6.0"]
    build-backend = "uv_build"
    "#
};

@@ -1177,7 +1188,7 @@ mod tests {
    namespace = true

    [build-system]
    requires = ["uv_build>=0.5.15,<0.6"]
    requires = ["uv_build>=0.5.15,<0.6.0"]
    build-backend = "uv_build"
    "#
};

@@ -1200,7 +1211,7 @@ mod tests {
    module-name = "cloud-stubs.db.schema"

    [build-system]
    requires = ["uv_build>=0.5.15,<0.6"]
    requires = ["uv_build>=0.5.15,<0.6.0"]
    build-backend = "uv_build"
    "#
};

@@ -1250,7 +1261,7 @@ mod tests {
    module-name = ["foo", "simple_namespace.part_a", "simple_namespace.part_b"]

    [build-system]
    requires = ["uv_build>=0.5.15,<0.6"]
    requires = ["uv_build>=0.5.15,<0.6.0"]
    build-backend = "uv_build"
    "#
};
@@ -171,7 +171,7 @@ impl PyProjectToml {
    ///
    /// ```toml
    /// [build-system]
    /// requires = ["uv_build>=0.4.15,<5"]
    /// requires = ["uv_build>=0.4.15,<0.5.0"]
    /// build-backend = "uv_build"
    /// ```
    pub fn check_build_system(&self, uv_version: &str) -> Vec<String> {

@@ -703,7 +703,7 @@ struct Project {
/// The optional `project.readme` key in a pyproject.toml as specified in
/// <https://packaging.python.org/en/latest/specifications/pyproject-toml/#readme>.
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged, rename_all = "kebab-case")]
#[serde(untagged, rename_all_fields = "kebab-case")]
pub(crate) enum Readme {
    /// Relative path to the README.
    String(PathBuf),

@@ -713,7 +713,7 @@ pub(crate) enum Readme {
        content_type: String,
        charset: Option<String>,
    },
    /// The full description of the project as inline value.
    /// The full description of the project as an inline value.
    Text {
        text: String,
        content_type: String,
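The swap from `rename_all` to `rename_all_fields` matters because untagged enums never serialize their variant names; only the fields need the kebab-case mapping, e.g. `content-type` in TOML onto `content_type` in the `File` and `Text` variants. A hedged demo of the accepted shapes (the standalone `Readme` and `Probe` types here are illustrative re-creations, not uv's own):

```rust
use serde::Deserialize;
use std::path::PathBuf;

#[derive(Deserialize, Debug)]
#[serde(untagged, rename_all_fields = "kebab-case")]
enum Readme {
    String(PathBuf),
    File { file: PathBuf, content_type: String, charset: Option<String> },
    Text { text: String, content_type: String },
}

#[derive(Deserialize)]
struct Probe { readme: Readme }

fn sketch() {
    // Plain string form -> Readme::String.
    let _: Probe = toml::from_str(r#"readme = "README.md""#).unwrap();
    // Table form: `content-type` maps onto `content_type` only via `rename_all_fields`.
    let _: Probe = toml::from_str(
        r#"readme = { file = "Readme.md", content-type = "text/markdown" }"#,
    ).unwrap();
}
```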
@@ -826,7 +826,7 @@ mod tests {
    {payload}

    [build-system]
    requires = ["uv_build>=0.4.15,<5"]
    requires = ["uv_build>=0.4.15,<0.5.0"]
    build-backend = "uv_build"
    "#
}

@@ -909,7 +909,7 @@ mod tests {
    foo-bar = "foo:bar"

    [build-system]
    requires = ["uv_build>=0.4.15,<5"]
    requires = ["uv_build>=0.4.15,<0.5.0"]
    build-backend = "uv_build"
    "#
};
@@ -965,6 +965,65 @@ mod tests {
        "###);
    }

    #[test]
    fn readme() {
        let temp_dir = TempDir::new().unwrap();

        fs_err::write(
            temp_dir.path().join("Readme.md"),
            indoc! {r"
                # Foo

                This is the foo library.
            "},
        )
        .unwrap();

        fs_err::write(
            temp_dir.path().join("License.txt"),
            indoc! {r#"
                THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
                INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
                PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
                HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
                CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
                OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
            "#},
        )
        .unwrap();

        let contents = indoc! {r#"
            # See https://github.com/pypa/sampleproject/blob/main/pyproject.toml for another example

            [project]
            name = "hello-world"
            version = "0.1.0"
            description = "A Python package"
            readme = { file = "Readme.md", content-type = "text/markdown" }
            requires_python = ">=3.12"

            [build-system]
            requires = ["uv_build>=0.4.15,<0.5"]
            build-backend = "uv_build"
        "#
        };

        let pyproject_toml = PyProjectToml::parse(contents).unwrap();
        let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();

        assert_snapshot!(metadata.core_metadata_format(), @r"
        Metadata-Version: 2.3
        Name: hello-world
        Version: 0.1.0
        Summary: A Python package
        Description-Content-Type: text/markdown

        # Foo

        This is the foo library.
        ");
    }

    #[test]
    fn self_extras() {
        let temp_dir = TempDir::new().unwrap();
@@ -1036,7 +1095,7 @@ mod tests {
    foo-bar = "foo:bar"

    [build-system]
    requires = ["uv_build>=0.4.15,<5"]
    requires = ["uv_build>=0.4.15,<0.5.0"]
    build-backend = "uv_build"
    "#
};

@@ -1104,7 +1163,7 @@ mod tests {
    let contents = extend_project("");
    let pyproject_toml = PyProjectToml::parse(&contents).unwrap();
    assert_snapshot!(
        pyproject_toml.check_build_system("1.0.0+test").join("\n"),
        pyproject_toml.check_build_system("0.4.15+test").join("\n"),
        @""
    );
}

@@ -1135,7 +1194,7 @@ mod tests {
    version = "0.1.0"

    [build-system]
    requires = ["uv_build>=0.4.15,<5", "wheel"]
    requires = ["uv_build>=0.4.15,<0.5.0", "wheel"]
    build-backend = "uv_build"
    "#};
    let pyproject_toml = PyProjectToml::parse(contents).unwrap();

@@ -1171,7 +1230,7 @@ mod tests {
    version = "0.1.0"

    [build-system]
    requires = ["uv_build>=0.4.15,<5"]
    requires = ["uv_build>=0.4.15,<0.5.0"]
    build-backend = "setuptools"
    "#};
    let pyproject_toml = PyProjectToml::parse(contents).unwrap();
@@ -19,8 +19,8 @@ use fs_err as fs;
use indoc::formatdoc;
use itertools::Itertools;
use rustc_hash::FxHashMap;
use serde::de::{IntoDeserializer, SeqAccess, Visitor, value};
use serde::{Deserialize, Deserializer, de};
use serde::de::{self, IntoDeserializer, SeqAccess, Visitor, value};
use serde::{Deserialize, Deserializer};
use tempfile::TempDir;
use tokio::io::AsyncBufReadExt;
use tokio::process::Command;

@@ -331,7 +331,7 @@ impl SourceBuild {
    interpreter.clone(),
    uv_virtualenv::Prompt::None,
    false,
    false,
    uv_virtualenv::OnExisting::Remove,
    false,
    false,
    false,

@@ -511,11 +511,9 @@ impl SourceBuild {
    ) -> Result<(Pep517Backend, Option<Project>), Box<Error>> {
        match fs::read_to_string(source_tree.join("pyproject.toml")) {
            Ok(toml) => {
                let pyproject_toml: toml_edit::ImDocument<_> =
                    toml_edit::ImDocument::from_str(&toml)
                let pyproject_toml = toml_edit::Document::from_str(&toml)
                    .map_err(Error::InvalidPyprojectTomlSyntax)?;
                let pyproject_toml: PyProjectToml =
                    PyProjectToml::deserialize(pyproject_toml.into_deserializer())
                let pyproject_toml = PyProjectToml::deserialize(pyproject_toml.into_deserializer())
                    .map_err(Error::InvalidPyprojectTomlSchema)?;

                let backend = if let Some(build_system) = pyproject_toml.build_system {
@@ -1,6 +1,6 @@
[package]
name = "uv-build"
version = "0.7.20"
version = "0.8.0"
edition.workspace = true
rust-version.workspace = true
homepage.workspace = true

@@ -1,6 +1,6 @@
[project]
name = "uv-build"
version = "0.7.20"
version = "0.8.0"
description = "The uv build backend"
authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
requires-python = ">=3.8"

@@ -24,3 +24,7 @@ thiserror = { workspace = true }
toml = { workspace = true }
tracing = { workspace = true }
walkdir = { workspace = true }

[dev-dependencies]
anyhow = { workspace = true }
tempfile = { workspace = true }
@@ -7,6 +7,7 @@ use serde::Deserialize;
use tracing::{debug, warn};

use crate::git_info::{Commit, Tags};
use crate::glob::cluster_globs;
use crate::timestamp::Timestamp;

#[derive(Debug, thiserror::Error)]

@@ -212,9 +213,13 @@ impl CacheInfo {
        }
    }

    // If we have any globs, process them in a single pass.
    // If we have any globs, first cluster them using LCP and then do a single pass on each group.
    if !globs.is_empty() {
        let walker = globwalk::GlobWalkerBuilder::from_patterns(directory, &globs)
        for (glob_base, glob_patterns) in cluster_globs(&globs) {
            let walker = globwalk::GlobWalkerBuilder::from_patterns(
                directory.join(glob_base),
                &glob_patterns,
            )
            .file_type(globwalk::FileType::FILE | globwalk::FileType::SYMLINK)
            .build()?;
            for entry in walker {
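Clustering means each group's base directory is walked once, rather than one walk rooted at `directory` per pattern. A worked illustration of the grouping (expected groups match the `cluster_globs` tests later in this diff):

```rust
fn sketch() {
    let globs = ["docs/important/*.{doc,xls}", "docs/important/very/*", "assets/*.png"];
    // Two walks in total:
    //   base "docs/important" with patterns ["*.{doc,xls}", "very/*"]
    //   base "assets"         with patterns ["*.png"]
    for (base, patterns) in cluster_globs(&globs) {
        let _ = (base, patterns);
    }
}
```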
@@ -225,23 +230,38 @@ impl CacheInfo {
                continue;
            }
        };
        let metadata = match entry.metadata() {
        let metadata = if entry.path_is_symlink() {
            // resolve symlinks for leaf entries without following symlinks while globbing
            match fs_err::metadata(entry.path()) {
                Ok(metadata) => metadata,
                Err(err) => {
                    warn!("Failed to resolve symlink for glob entry: {err}");
                    continue;
                }
            }
        } else {
            match entry.metadata() {
                Ok(metadata) => metadata,
                Err(err) => {
                    warn!("Failed to read metadata for glob entry: {err}");
                    continue;
                }
            }
        };
        if !metadata.is_file() {
            if !entry.path_is_symlink() {
                // don't warn if it was a symlink - it may legitimately resolve to a directory
                warn!(
                    "Expected file for cache key, but found directory: `{}`",
                    entry.path().display()
                );
            }
            continue;
        }
        timestamp = max(timestamp, Some(Timestamp::from_metadata(&metadata)));
    }
}

debug!(
    "Computed cache info: {timestamp:?}, {commit:?}, {tags:?}, {env:?}, {directories:?}"
@@ -340,3 +360,71 @@ enum DirectoryTimestamp {
    Timestamp(Timestamp),
    Inode(u64),
}

#[cfg(all(test, unix))]
mod tests_unix {
    use anyhow::Result;

    use super::{CacheInfo, Timestamp};

    #[test]
    fn test_cache_info_symlink_resolve() -> Result<()> {
        let dir = tempfile::tempdir()?;
        let dir = dir.path().join("dir");
        fs_err::create_dir_all(&dir)?;

        let write_manifest = |cache_key: &str| {
            fs_err::write(
                dir.join("pyproject.toml"),
                format!(
                    r#"
                    [tool.uv]
                    cache-keys = [
                        "{cache_key}"
                    ]
                    "#
                ),
            )
        };

        let touch = |path: &str| -> Result<_> {
            let path = dir.join(path);
            fs_err::create_dir_all(path.parent().unwrap())?;
            fs_err::write(&path, "")?;
            Ok(Timestamp::from_metadata(&path.metadata()?))
        };

        let cache_timestamp = || -> Result<_> { Ok(CacheInfo::from_directory(&dir)?.timestamp) };

        write_manifest("x/**")?;
        assert_eq!(cache_timestamp()?, None);
        let y = touch("x/y")?;
        assert_eq!(cache_timestamp()?, Some(y));
        let z = touch("x/z")?;
        assert_eq!(cache_timestamp()?, Some(z));

        // leaf entry symlink should be resolved
        let a = touch("../a")?;
        fs_err::os::unix::fs::symlink(dir.join("../a"), dir.join("x/a"))?;
        assert_eq!(cache_timestamp()?, Some(a));

        // symlink directories should not be followed while globbing
        let c = touch("../b/c")?;
        fs_err::os::unix::fs::symlink(dir.join("../b"), dir.join("x/b"))?;
        assert_eq!(cache_timestamp()?, Some(a));

        // no globs, should work as expected
        write_manifest("x/y")?;
        assert_eq!(cache_timestamp()?, Some(y));
        write_manifest("x/a")?;
        assert_eq!(cache_timestamp()?, Some(a));
        write_manifest("x/b/c")?;
        assert_eq!(cache_timestamp()?, Some(c));

        // symlink pointing to a directory
        write_manifest("x/*b*")?;
        assert_eq!(cache_timestamp()?, None);

        Ok(())
    }
}
@@ -0,0 +1,318 @@
use std::{
    collections::BTreeMap,
    path::{Component, Components, Path, PathBuf},
};

/// Check if a component of the path looks like it may be a glob pattern.
///
/// Note: this function is being used when splitting a glob pattern into a long possible
/// base and the glob remainder (scanning through components until we hit the first component
/// for which this function returns true). It is acceptable for this function to return
/// false positives (e.g. patterns like 'foo[bar' or 'foo{bar') in which case correctness
/// will not be affected but efficiency might be (because we'll traverse more than we should),
/// however it should not return false negatives.
fn is_glob_like(part: Component) -> bool {
    matches!(part, Component::Normal(_))
        && part.as_os_str().to_str().is_some_and(|part| {
            ["*", "{", "}", "?", "[", "]"]
                .into_iter()
                .any(|c| part.contains(c))
        })
}

#[derive(Debug, Default, Clone, PartialEq, Eq)]
struct GlobParts {
    base: PathBuf,
    pattern: PathBuf,
}

/// Split a glob into longest possible base + shortest possible glob pattern.
fn split_glob(pattern: impl AsRef<str>) -> GlobParts {
    let pattern: &Path = pattern.as_ref().as_ref();

    let mut glob = GlobParts::default();
    let mut globbing = false;
    let mut last = None;

    for part in pattern.components() {
        if let Some(last) = last {
            if last != Component::CurDir {
                if globbing {
                    glob.pattern.push(last);
                } else {
                    glob.base.push(last);
                }
            }
        }
        if !globbing {
            globbing = is_glob_like(part);
        }
        // we don't know if this part is the last one, defer handling it by one iteration
        last = Some(part);
    }

    if let Some(last) = last {
        // defer handling the last component to prevent draining entire pattern into base
        if globbing || matches!(last, Component::Normal(_)) {
            glob.pattern.push(last);
        } else {
            glob.base.push(last);
        }
    }
    glob
}
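Concretely, `split_glob` walks components until the first glob-like one, keeping everything before it as the literal base; the final component always stays in the pattern so the base never swallows the whole input. A few illustrative outcomes, consistent with the unit tests at the end of this file:

```rust
fn sketch() {
    // split_glob("c/d/*")     -> base "c/d", pattern "*"
    // split_glob("a/{b,c}/d") -> base "a",   pattern "{b,c}/d"
    // split_glob("a/b")       -> base "a",   pattern "b"
    let parts = split_glob("src/**/*.rs");
    assert_eq!(parts.base, std::path::PathBuf::from("src"));
    assert_eq!(parts.pattern, std::path::PathBuf::from("**/*.rs"));
}
```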
/// Classic trie with edges being path components and values being glob patterns.
#[derive(Default)]
struct Trie<'a> {
    children: BTreeMap<Component<'a>, Trie<'a>>,
    patterns: Vec<&'a Path>,
}

impl<'a> Trie<'a> {
    fn insert(&mut self, mut components: Components<'a>, pattern: &'a Path) {
        if let Some(part) = components.next() {
            self.children
                .entry(part)
                .or_default()
                .insert(components, pattern);
        } else {
            self.patterns.push(pattern);
        }
    }

    #[allow(clippy::needless_pass_by_value)]
    fn collect_patterns(
        &self,
        pattern_prefix: PathBuf,
        group_prefix: PathBuf,
        patterns: &mut Vec<PathBuf>,
        groups: &mut Vec<(PathBuf, Vec<PathBuf>)>,
    ) {
        // collect all patterns beneath and including this node
        for pattern in &self.patterns {
            patterns.push(pattern_prefix.join(pattern));
        }
        for (part, child) in &self.children {
            if let Component::Normal(_) = part {
                // for normal components, collect all descendant patterns ('normal' edges only)
                child.collect_patterns(
                    pattern_prefix.join(part),
                    group_prefix.join(part),
                    patterns,
                    groups,
                );
            } else {
                // for non-normal component edges, kick off separate group collection at this node
                child.collect_groups(group_prefix.join(part), groups);
            }
        }
    }

    #[allow(clippy::needless_pass_by_value)]
    fn collect_groups(&self, prefix: PathBuf, groups: &mut Vec<(PathBuf, Vec<PathBuf>)>) {
        // LCP-style grouping of patterns
        if self.patterns.is_empty() {
            // no patterns in this node; child nodes can form independent groups
            for (part, child) in &self.children {
                child.collect_groups(prefix.join(part), groups);
            }
        } else {
            // pivot point, we've hit a pattern node; we have to stop here and form a group
            let mut group = Vec::new();
            self.collect_patterns(PathBuf::new(), prefix.clone(), &mut group, groups);
            groups.push((prefix, group));
        }
    }
}

/// Given a collection of globs, cluster them into (base, globs) groups so that:
/// - base doesn't contain any glob symbols
/// - each directory would only be walked at most once
/// - base of each group is the longest common prefix of globs in the group
pub(crate) fn cluster_globs(patterns: &[impl AsRef<str>]) -> Vec<(PathBuf, Vec<String>)> {
    // split all globs into base/pattern
    let globs: Vec<_> = patterns.iter().map(split_glob).collect();

    // construct a path trie out of all split globs
    let mut trie = Trie::default();
    for glob in &globs {
        trie.insert(glob.base.components(), &glob.pattern);
    }

    // run LCP-style aggregation of patterns in the trie into groups
    let mut groups = Vec::new();
    trie.collect_groups(PathBuf::new(), &mut groups);

    // finally, convert resulting patterns to strings
    groups
        .into_iter()
        .map(|(base, patterns)| {
            (
                base,
                patterns
                    .iter()
                    // NOTE: this unwrap is ok because input patterns are valid utf-8
                    .map(|p| p.to_str().unwrap().to_owned())
                    .collect(),
            )
        })
        .collect()
}

#[cfg(test)]
mod tests {
    use super::{GlobParts, cluster_globs, split_glob};

    fn windowsify(path: &str) -> String {
        if cfg!(windows) {
            path.replace('/', "\\")
        } else {
            path.to_owned()
        }
    }

    #[test]
    fn test_split_glob() {
        #[track_caller]
        fn check(input: &str, base: &str, pattern: &str) {
            let result = split_glob(input);
            let expected = GlobParts {
                base: base.into(),
                pattern: pattern.into(),
            };
            assert_eq!(result, expected, "{input:?} != {base:?} + {pattern:?}");
        }

        check("", "", "");
        check("a", "", "a");
        check("a/b", "a", "b");
        check("a/b/", "a", "b");
        check("a/.//b/", "a", "b");
        check("./a/b/c", "a/b", "c");
        check("c/d/*", "c/d", "*");
        check("c/d/*/../*", "c/d", "*/../*");
        check("a/?b/c", "a", "?b/c");
        check("/a/b/*", "/a/b", "*");
        check("../x/*", "../x", "*");
        check("a/{b,c}/d", "a", "{b,c}/d");
        check("a/[bc]/d", "a", "[bc]/d");
        check("*", "", "*");
        check("*/*", "", "*/*");
        check("..", "..", "");
        check("/", "/", "");
    }

    #[test]
    fn test_cluster_globs() {
        #[track_caller]
        fn check(input: &[&str], expected: &[(&str, &[&str])]) {
            let input = input.iter().map(|s| windowsify(s)).collect::<Vec<_>>();

            let mut result_sorted = cluster_globs(&input);
            for (_, patterns) in &mut result_sorted {
                patterns.sort_unstable();
            }
            result_sorted.sort_unstable();

            let mut expected_sorted = Vec::new();
            for (base, patterns) in expected {
                let mut patterns_sorted = Vec::new();
                for pattern in *patterns {
                    patterns_sorted.push(windowsify(pattern));
                }
                patterns_sorted.sort_unstable();
                expected_sorted.push((windowsify(base).into(), patterns_sorted));
            }
            expected_sorted.sort_unstable();

            assert_eq!(
                result_sorted, expected_sorted,
                "{input:?} != {expected_sorted:?} (got: {result_sorted:?})"
            );
        }

        check(&["a/b/*", "a/c/*"], &[("a/b", &["*"]), ("a/c", &["*"])]);
        check(&["./a/b/*", "a/c/*"], &[("a/b", &["*"]), ("a/c", &["*"])]);
        check(&["/a/b/*", "/a/c/*"], &[("/a/b", &["*"]), ("/a/c", &["*"])]);
        check(
            &["../a/b/*", "../a/c/*"],
            &[("../a/b", &["*"]), ("../a/c", &["*"])],
        );
        check(&["x/*", "y/*"], &[("x", &["*"]), ("y", &["*"])]);
        check(&[], &[]);
        check(
            &["./*", "a/*", "../foo/*.png"],
            &[("", &["*", "a/*"]), ("../foo", &["*.png"])],
        );
        check(
            &[
                "?",
                "/foo/?",
                "/foo/bar/*",
                "../bar/*.png",
                "../bar/../baz/*.jpg",
            ],
            &[
                ("", &["?"]),
                ("/foo", &["?", "bar/*"]),
                ("../bar", &["*.png"]),
                ("../bar/../baz", &["*.jpg"]),
            ],
        );
        check(&["/abs/path/*"], &[("/abs/path", &["*"])]);
        check(&["/abs/*", "rel/*"], &[("/abs", &["*"]), ("rel", &["*"])]);
        check(&["a/{b,c}/*", "a/d?/*"], &[("a", &["{b,c}/*", "d?/*"])]);
        check(
            &[
                "../shared/a/[abc].png",
                "../shared/a/b/*",
                "../shared/b/c/?x/d",
                "docs/important/*.{doc,xls}",
                "docs/important/very/*",
            ],
            &[
                ("../shared/a", &["[abc].png", "b/*"]),
                ("../shared/b/c", &["?x/d"]),
                ("docs/important", &["*.{doc,xls}", "very/*"]),
            ],
        );
        check(&["file.txt"], &[("", &["file.txt"])]);
        check(&["/"], &[("/", &[""])]);
        check(&[".."], &[("..", &[""])]);
        check(
            &["file1.txt", "file2.txt"],
            &[("", &["file1.txt", "file2.txt"])],
        );
        check(
            &["a/file1.txt", "a/file2.txt"],
            &[("a", &["file1.txt", "file2.txt"])],
        );
        check(
            &["*", "a/b/*", "a/../c/*.jpg", "a/../c/*.png", "/a/*", "/b/*"],
            &[
                ("", &["*", "a/b/*"]),
                ("a/../c", &["*.jpg", "*.png"]),
                ("/a", &["*"]),
                ("/b", &["*"]),
            ],
        );

        if cfg!(windows) {
            check(
                &[
                    r"\\foo\bar\shared/a/[abc].png",
                    r"\\foo\bar\shared/a/b/*",
                    r"\\foo\bar/shared/b/c/?x/d",
                    r"D:\docs\important/*.{doc,xls}",
                    r"D:\docs/important/very/*",
                ],
                &[
                    (r"\\foo\bar\shared\a", &["[abc].png", r"b\*"]),
                    (r"\\foo\bar\shared\b\c", &[r"?x\d"]),
                    (r"D:\docs\important", &["*.{doc,xls}", r"very\*"]),
                ],
            );
        }
    }
}
@@ -3,4 +3,5 @@ pub use crate::timestamp::*;

mod cache_info;
mod git_info;
mod glob;
mod timestamp;
@@ -266,9 +266,6 @@ enum Resolver {
/// These represent a subset of the `virtualenv` interface that uv supports by default.
#[derive(Args)]
pub struct VenvCompatArgs {
    #[clap(long, hide = true)]
    clear: bool,

    #[clap(long, hide = true)]
    no_seed: bool,

@@ -289,12 +286,6 @@ impl CompatArgs for VenvCompatArgs {
    /// behavior. If an argument is passed that does _not_ match uv's behavior, this method will
    /// return an error.
    fn validate(&self) -> Result<()> {
        if self.clear {
            warn_user!(
                "virtualenv's `--clear` has no effect (uv always clears the virtual environment)"
            );
        }

        if self.no_seed {
            warn_user!(
                "virtualenv's `--no-seed` has no effect (uv omits seed packages by default)"
@@ -10,8 +10,9 @@ use clap::{Args, Parser, Subcommand};

use uv_cache::CacheArgs;
use uv_configuration::{
    ConfigSettingEntry, ExportFormat, IndexStrategy, KeyringProviderType, PackageNameSpecifier,
    ProjectBuildBackend, TargetTriple, TrustedHost, TrustedPublishing, VersionControlSystem,
    ConfigSettingEntry, ConfigSettingPackageEntry, ExportFormat, IndexStrategy,
    KeyringProviderType, PackageNameSpecifier, ProjectBuildBackend, TargetTriple, TrustedHost,
    TrustedPublishing, VersionControlSystem,
};
use uv_distribution_types::{Index, IndexUrl, Origin, PipExtraIndex, PipFindLinks, PipIndex};
use uv_normalize::{ExtraName, GroupName, PackageName, PipGroupName};

@@ -46,6 +47,15 @@ pub enum PythonListFormat {
    Json,
}

#[derive(Debug, Default, Clone, Copy, clap::ValueEnum)]
pub enum SyncFormat {
    /// Display the result in a human-readable format.
    #[default]
    Text,
    /// Display the result in JSON format.
    Json,
}

#[derive(Debug, Default, Clone, clap::ValueEnum)]
pub enum ListFormat {
    /// Display the list of packages in a human-readable table.
@@ -1192,6 +1202,14 @@ pub struct PipCompileArgs {
    #[arg(long, overrides_with("all_extras"), hide = true)]
    pub no_all_extras: bool,

    /// Install the specified dependency group from a `pyproject.toml`.
    ///
    /// If no path is provided, the `pyproject.toml` in the working directory is used.
    ///
    /// May be provided multiple times.
    #[arg(long, group = "sources")]
    pub group: Vec<PipGroupName>,

    #[command(flatten)]
    pub resolver: ResolverArgs,

@@ -1206,14 +1224,6 @@ pub struct PipCompileArgs {
    #[arg(long, overrides_with("no_deps"), hide = true)]
    pub deps: bool,

    /// Install the specified dependency group from a `pyproject.toml`.
    ///
    /// If no path is provided, the `pyproject.toml` in the working directory is used.
    ///
    /// May be provided multiple times.
    #[arg(long, group = "sources")]
    pub group: Vec<PipGroupName>,

    /// Write the compiled requirements to the given `requirements.txt` or `pylock.toml` file.
    ///
    /// If the file already exists, the existing versions will be preferred when resolving
@@ -1508,6 +1518,30 @@ pub struct PipSyncArgs {
    #[arg(long, short, alias = "build-constraint", env = EnvVars::UV_BUILD_CONSTRAINT, value_delimiter = ' ', value_parser = parse_maybe_file_path)]
    pub build_constraints: Vec<Maybe<PathBuf>>,

    /// Include optional dependencies from the specified extra name; may be provided more than once.
    ///
    /// Only applies to `pylock.toml`, `pyproject.toml`, `setup.py`, and `setup.cfg` sources.
    #[arg(long, conflicts_with = "all_extras", value_parser = extra_name_with_clap_error)]
    pub extra: Option<Vec<ExtraName>>,

    /// Include all optional dependencies.
    ///
    /// Only applies to `pylock.toml`, `pyproject.toml`, `setup.py`, and `setup.cfg` sources.
    #[arg(long, conflicts_with = "extra", overrides_with = "no_all_extras")]
    pub all_extras: bool,

    #[arg(long, overrides_with("all_extras"), hide = true)]
    pub no_all_extras: bool,

    /// Install the specified dependency group from a `pylock.toml` or `pyproject.toml`.
    ///
    /// If no path is provided, the `pylock.toml` or `pyproject.toml` in the working directory is
    /// used.
    ///
    /// May be provided multiple times.
    #[arg(long, group = "sources")]
    pub group: Vec<PipGroupName>,

    #[command(flatten)]
    pub installer: InstallerArgs,
@@ -1788,19 +1822,28 @@ pub struct PipInstallArgs {

    /// Include optional dependencies from the specified extra name; may be provided more than once.
    ///
    /// Only applies to `pyproject.toml`, `setup.py`, and `setup.cfg` sources.
    /// Only applies to `pylock.toml`, `pyproject.toml`, `setup.py`, and `setup.cfg` sources.
    #[arg(long, conflicts_with = "all_extras", value_parser = extra_name_with_clap_error)]
    pub extra: Option<Vec<ExtraName>>,

    /// Include all optional dependencies.
    ///
    /// Only applies to `pyproject.toml`, `setup.py`, and `setup.cfg` sources.
    /// Only applies to `pylock.toml`, `pyproject.toml`, `setup.py`, and `setup.cfg` sources.
    #[arg(long, conflicts_with = "extra", overrides_with = "no_all_extras")]
    pub all_extras: bool,

    #[arg(long, overrides_with("all_extras"), hide = true)]
    pub no_all_extras: bool,

    /// Install the specified dependency group from a `pylock.toml` or `pyproject.toml`.
    ///
    /// If no path is provided, the `pylock.toml` or `pyproject.toml` in the working directory is
    /// used.
    ///
    /// May be provided multiple times.
    #[arg(long, group = "sources")]
    pub group: Vec<PipGroupName>,

    #[command(flatten)]
    pub installer: ResolverInstallerArgs,

@@ -1815,14 +1858,6 @@ pub struct PipInstallArgs {
    #[arg(long, overrides_with("no_deps"), hide = true)]
    pub deps: bool,

    /// Install the specified dependency group from a `pyproject.toml`.
    ///
    /// If no path is provided, the `pyproject.toml` in the working directory is used.
    ///
    /// May be provided multiple times.
    #[arg(long, group = "sources")]
    pub group: Vec<PipGroupName>,

    /// Require a matching hash for each requirement.
    ///
    /// By default, uv will verify any available hashes in the requirements file, but will not
@@ -2606,16 +2641,23 @@ pub struct VenvArgs {
    #[arg(long, value_parser = clap::builder::BoolishValueParser::new(), env = EnvVars::UV_VENV_SEED)]
    pub seed: bool,

    /// Remove any existing files or directories at the target path.
    ///
    /// By default, `uv venv` will exit with an error if the given path is non-empty. The
    /// `--clear` option will instead clear a non-empty path before creating a new virtual
    /// environment.
    #[clap(long, short, overrides_with = "allow_existing", value_parser = clap::builder::BoolishValueParser::new(), env = EnvVars::UV_VENV_CLEAR)]
    pub clear: bool,

    /// Preserve any existing files or directories at the target path.
    ///
    /// By default, `uv venv` will remove an existing virtual environment at the given path, and
    /// exit with an error if the path is non-empty but _not_ a virtual environment. The
    /// By default, `uv venv` will exit with an error if the given path is non-empty. The
    /// `--allow-existing` option will instead write to the given path, regardless of its contents,
    /// and without clearing it beforehand.
    ///
    /// WARNING: This option can lead to unexpected behavior if the existing virtual environment and
    /// the newly-created virtual environment are linked to different Python interpreters.
    #[clap(long)]
    #[clap(long, overrides_with = "clear")]
    pub allow_existing: bool,

    /// The path to the virtual environment to create.
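`--clear` and `--allow-existing` now form a last-one-wins pair via clap's `overrides_with`, instead of `--allow-existing` standing alone. A minimal standalone sketch of that pattern (not uv's actual CLI struct):

```rust
use clap::Parser;

#[derive(Parser)]
struct Opts {
    /// Clear a non-empty target before creating the environment.
    #[clap(long, overrides_with = "allow_existing")]
    clear: bool,

    /// Write into the target without clearing it first.
    #[clap(long, overrides_with = "clear")]
    allow_existing: bool,
}

// `prog --clear --allow-existing` leaves only `allow_existing` set;
// reversing the order leaves only `clear` set.
```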
@@ -3045,7 +3087,7 @@ pub struct RunArgs {
    /// When used in a project, these dependencies will be layered on top of the project environment
    /// in a separate, ephemeral environment. These dependencies are allowed to conflict with those
    /// specified by the project.
    #[arg(long)]
    #[arg(short = 'w', long)]
    pub with: Vec<comma::CommaSeparatedRequirements>,

    /// Run with the given packages installed in editable mode.

@@ -3207,6 +3249,10 @@ pub struct SyncArgs {
    #[arg(long, conflicts_with = "all_extras", value_parser = extra_name_with_clap_error)]
    pub extra: Option<Vec<ExtraName>>,

    /// Select the output format.
    #[arg(long, value_enum, default_value_t = SyncFormat::default())]
    pub output_format: SyncFormat,

    /// Include all optional dependencies.
    ///
    /// When two or more extras are declared as conflicting in `tool.uv.conflicts`, using this flag

@@ -3439,6 +3485,23 @@ pub struct SyncArgs {
    )]
    pub python: Option<Maybe<String>>,

    /// The platform for which requirements should be installed.
    ///
    /// Represented as a "target triple", a string that describes the target platform in terms of
    /// its CPU, vendor, and operating system name, like `x86_64-unknown-linux-gnu` or
    /// `aarch64-apple-darwin`.
    ///
    /// When targeting macOS (Darwin), the default minimum version is `12.0`. Use
    /// `MACOSX_DEPLOYMENT_TARGET` to specify a different minimum version, e.g., `13.0`.
    ///
    /// WARNING: When specified, uv will select wheels that are compatible with the _target_
    /// platform; as a result, the installed distributions may not be compatible with the _current_
    /// platform. Conversely, any distributions that are built from source may be incompatible with
    /// the _target_ platform, as they will be built for the _current_ platform. The
    /// `--python-platform` option is intended for advanced use cases.
    #[arg(long)]
    pub python_platform: Option<TargetTriple>,

    /// Check if the Python environment is synchronized with the project.
    ///
    /// If the environment is not up to date, uv will exit with an error.
@@ -3696,10 +3759,19 @@ pub struct AddArgs {

    /// Add the dependency as a workspace member.
    ///
    /// When used with a path dependency, the package will be added to the workspace's `members`
    /// list in the root `pyproject.toml` file.
    #[arg(long)]
    /// By default, uv will add path dependencies that are within the workspace directory
    /// as workspace members. When used with a path dependency, the package will be added
    /// to the workspace's `members` list in the root `pyproject.toml` file.
    #[arg(long, overrides_with = "no_workspace")]
    pub workspace: bool,

    /// Don't add the dependency as a workspace member.
    ///
    /// By default, when adding a dependency that's a local path and is within the workspace
    /// directory, uv will add it as a workspace member; pass `--no-workspace` to add the package
    /// as direct path dependency instead.
    #[arg(long, overrides_with = "workspace")]
    pub no_workspace: bool,
}

#[derive(Args)]

@@ -4256,7 +4328,7 @@ pub struct ToolRunArgs {
    pub from: Option<String>,

    /// Run with the given packages installed.
    #[arg(long)]
    #[arg(short = 'w', long)]
    pub with: Vec<comma::CommaSeparatedRequirements>,

    /// Run with the given packages installed in editable mode

@@ -4371,7 +4443,7 @@ pub struct ToolInstallArgs {
    pub from: Option<String>,

    /// Include the following additional requirements.
    #[arg(long)]
    #[arg(short = 'w', long)]
    pub with: Vec<comma::CommaSeparatedRequirements>,

    /// Include all requirements listed in the given `requirements.txt` files.

@@ -4647,6 +4719,14 @@ pub struct ToolUpgradeArgs {
    )]
    pub config_setting: Option<Vec<ConfigSettingEntry>>,

    /// Settings to pass to the PEP 517 build backend for a specific package, specified as `PACKAGE:KEY=VALUE` pairs.
    #[arg(
        long,
        alias = "config-settings-package",
        help_heading = "Build options"
    )]
    pub config_setting_package: Option<Vec<ConfigSettingPackageEntry>>,

    /// Disable isolation when building source distributions.
    ///
    /// Assumes that build dependencies specified by PEP 518 are already installed.
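The `PACKAGE:KEY=VALUE` shape presumably splits on the first `:` and then the first `=`; a hedged sketch of such a parse (the real `ConfigSettingPackageEntry` parser may differ in details like validation):

```rust
// "numpy:editable_mode=compat" -> ("numpy", "editable_mode", "compat")
fn parse_package_setting(s: &str) -> Option<(&str, &str, &str)> {
    let (package, rest) = s.split_once(':')?;
    let (key, value) = rest.split_once('=')?;
    Some((package, key, value))
}

fn sketch() {
    assert_eq!(
        parse_package_setting("numpy:editable_mode=compat"),
        Some(("numpy", "editable_mode", "compat")),
    );
}
```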
@@ -4764,10 +4844,9 @@ pub enum PythonCommand {
    /// Python versions are installed into the uv Python directory, which can be retrieved with `uv
    /// python dir`.
    ///
    /// A `python` executable is not made globally available, managed Python versions are only used
    /// in uv commands or in active virtual environments. There is experimental support for adding
    /// Python executables to a directory on the path — use the `--preview` flag to enable this
    /// behavior and `uv python dir --bin` to retrieve the target directory.
    /// By default, Python executables are added to a directory on the path with a minor version
    /// suffix, e.g., `python3.13`. To install `python3` and `python`, use the `--default` flag. Use
    /// `uv python dir --bin` to see the target directory.
    ///
    /// Multiple Python versions may be requested.
    ///

@@ -4826,6 +4905,19 @@ pub enum PythonCommand {

    /// Uninstall Python versions.
    Uninstall(PythonUninstallArgs),

    /// Ensure that the Python executable directory is on the `PATH`.
    ///
    /// If the Python executable directory is not present on the `PATH`, uv will attempt to add it to
    /// the relevant shell configuration files.
    ///
    /// If the shell configuration files already include a blurb to add the executable directory to
    /// the path, but the directory is not present on the `PATH`, uv will exit with an error.
    ///
    /// The Python executable directory is determined according to the XDG standard and can be
    /// retrieved with `uv python dir --bin`.
    #[command(alias = "ensurepath")]
    UpdateShell,
}

#[derive(Args)]

@@ -4911,6 +5003,38 @@ pub struct PythonInstallArgs {
    #[arg(long, short, env = EnvVars::UV_PYTHON_INSTALL_DIR)]
    pub install_dir: Option<PathBuf>,

    /// Install a Python executable into the `bin` directory.
    ///
    /// This is the default behavior. If this flag is provided explicitly, uv will error if the
    /// executable cannot be installed.
    ///
    /// This can also be set with `UV_PYTHON_INSTALL_BIN=1`.
    ///
    /// See `UV_PYTHON_BIN_DIR` to customize the target directory.
    #[arg(long, overrides_with("no_bin"), hide = true)]
    pub bin: bool,

    /// Do not install a Python executable into the `bin` directory.
    ///
    /// This can also be set with `UV_PYTHON_INSTALL_BIN=0`.
    #[arg(long, overrides_with("bin"), conflicts_with("default"))]
    pub no_bin: bool,

    /// Register the Python installation in the Windows registry.
    ///
    /// This is the default behavior on Windows. If this flag is provided explicitly, uv will error if the
    /// registry entry cannot be created.
    ///
    /// This can also be set with `UV_PYTHON_INSTALL_REGISTRY=1`.
    #[arg(long, overrides_with("no_registry"), hide = true)]
    pub registry: bool,

    /// Do not register the Python installation in the Windows registry.
    ///
    /// This can also be set with `UV_PYTHON_INSTALL_REGISTRY=0`.
    #[arg(long, overrides_with("registry"))]
    pub no_registry: bool,

    /// The Python version(s) to install.
    ///
    /// If not provided, the requested Python version(s) will be read from the `UV_PYTHON`
@@ -4973,7 +5097,7 @@ pub struct PythonInstallArgs {
    /// and `python`.
    ///
    /// If multiple Python versions are requested, uv will exit with an error.
    #[arg(long)]
    #[arg(long, conflicts_with("no_bin"))]
    pub default: bool,
}

@@ -5394,6 +5518,14 @@ pub struct InstallerArgs {
    )]
    pub config_setting: Option<Vec<ConfigSettingEntry>>,

    /// Settings to pass to the PEP 517 build backend for a specific package, specified as `PACKAGE:KEY=VALUE` pairs.
    #[arg(
        long,
        alias = "config-settings-package",
        help_heading = "Build options"
    )]
    pub config_settings_package: Option<Vec<ConfigSettingPackageEntry>>,

    /// Disable isolation when building source distributions.
    ///
    /// Assumes that build dependencies specified by PEP 518 are already installed.

@@ -5581,6 +5713,14 @@ pub struct ResolverArgs {
    )]
    pub config_setting: Option<Vec<ConfigSettingEntry>>,

    /// Settings to pass to the PEP 517 build backend for a specific package, specified as `PACKAGE:KEY=VALUE` pairs.
    #[arg(
        long,
        alias = "config-settings-package",
        help_heading = "Build options"
    )]
    pub config_settings_package: Option<Vec<ConfigSettingPackageEntry>>,

    /// Disable isolation when building source distributions.
    ///
    /// Assumes that build dependencies specified by PEP 518 are already installed.

@@ -5770,6 +5910,14 @@ pub struct ResolverInstallerArgs {
    )]
    pub config_setting: Option<Vec<ConfigSettingEntry>>,

    /// Settings to pass to the PEP 517 build backend for a specific package, specified as `PACKAGE:KEY=VALUE` pairs.
    #[arg(
        long,
        alias = "config-settings-package",
        help_heading = "Build options"
    )]
    pub config_settings_package: Option<Vec<ConfigSettingPackageEntry>>,

    /// Disable isolation when building source distributions.
    ///
    /// Assumes that build dependencies specified by PEP 518 are already installed.
@@ -1,7 +1,7 @@
use anstream::eprintln;

use uv_cache::Refresh;
use uv_configuration::ConfigSettings;
use uv_configuration::{ConfigSettings, PackageConfigSettings};
use uv_resolver::PrereleaseMode;
use uv_settings::{Combine, PipOptions, ResolverInstallerOptions, ResolverOptions};
use uv_warnings::owo_colors::OwoColorize;

@@ -62,6 +62,7 @@ impl From<ResolverArgs> for PipOptions {
    pre,
    fork_strategy,
    config_setting,
    config_settings_package,
    no_build_isolation,
    no_build_isolation_package,
    build_isolation,

@@ -84,6 +85,11 @@ impl From<ResolverArgs> for PipOptions {
    },
    config_settings: config_setting
        .map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
    config_settings_package: config_settings_package.map(|config_settings| {
        config_settings
            .into_iter()
            .collect::<PackageConfigSettings>()
    }),
    no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
    no_build_isolation_package: Some(no_build_isolation_package),
    exclude_newer,

@@ -104,6 +110,7 @@ impl From<InstallerArgs> for PipOptions {
    index_strategy,
    keyring_provider,
    config_setting,
    config_settings_package,
    no_build_isolation,
    build_isolation,
    exclude_newer,

@@ -120,6 +127,11 @@ impl From<InstallerArgs> for PipOptions {
    keyring_provider,
    config_settings: config_setting
        .map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
    config_settings_package: config_settings_package.map(|config_settings| {
        config_settings
            .into_iter()
            .collect::<PackageConfigSettings>()
    }),
    no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
    exclude_newer,
    link_mode,

@@ -147,6 +159,7 @@ impl From<ResolverInstallerArgs> for PipOptions {
    pre,
    fork_strategy,
    config_setting,
    config_settings_package,
    no_build_isolation,
    no_build_isolation_package,
    build_isolation,

@@ -173,6 +186,11 @@ impl From<ResolverInstallerArgs> for PipOptions {
    fork_strategy,
    config_settings: config_setting
        .map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
    config_settings_package: config_settings_package.map(|config_settings| {
        config_settings
            .into_iter()
            .collect::<PackageConfigSettings>()
    }),
    no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
    no_build_isolation_package: Some(no_build_isolation_package),
    exclude_newer,

@@ -260,6 +278,7 @@ pub fn resolver_options(
    pre,
    fork_strategy,
    config_setting,
    config_settings_package,
    no_build_isolation,
    no_build_isolation_package,
    build_isolation,

@@ -321,6 +340,11 @@ pub fn resolver_options(
    dependency_metadata: None,
    config_settings: config_setting
        .map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
    config_settings_package: config_settings_package.map(|config_settings| {
        config_settings
            .into_iter()
            .collect::<PackageConfigSettings>()
    }),
    no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
    no_build_isolation_package: Some(no_build_isolation_package),
    exclude_newer,

@@ -353,6 +377,7 @@ pub fn resolver_installer_options(
    pre,
    fork_strategy,
    config_setting,
    config_settings_package,
    no_build_isolation,
    no_build_isolation_package,
    build_isolation,

@@ -428,6 +453,11 @@ pub fn resolver_installer_options(
    dependency_metadata: None,
    config_settings: config_setting
        .map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
    config_settings_package: config_settings_package.map(|config_settings| {
        config_settings
            .into_iter()
            .collect::<PackageConfigSettings>()
    }),
    no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
    no_build_isolation_package: if no_build_isolation_package.is_empty() {
        None
@@ -6,6 +6,7 @@ use std::sync::Arc;
use std::time::Duration;
use std::{env, io, iter};

use anyhow::Context;
use anyhow::anyhow;
use http::{
    HeaderMap, HeaderName, HeaderValue, Method, StatusCode,

@@ -166,6 +167,25 @@ impl<'a> BaseClientBuilder<'a> {
        self
    }

    /// Read the retry count from [`EnvVars::UV_HTTP_RETRIES`] if set, otherwise, make no change.
    ///
    /// Errors when [`EnvVars::UV_HTTP_RETRIES`] is not a valid u32.
    pub fn retries_from_env(self) -> anyhow::Result<Self> {
        // TODO(zanieb): We should probably parse this in another layer, but there's not a natural
        // fit for it right now
        if let Some(value) = env::var_os(EnvVars::UV_HTTP_RETRIES) {
            Ok(self.retries(
                value
                    .to_string_lossy()
                    .as_ref()
                    .parse::<u32>()
                    .context("Failed to parse `UV_HTTP_RETRIES`")?,
            ))
        } else {
            Ok(self)
        }
    }
#[must_use]
|
||||
pub fn native_tls(mut self, native_tls: bool) -> Self {
|
||||
self.native_tls = native_tls;
|
||||
|
|
@ -238,7 +258,11 @@ impl<'a> BaseClientBuilder<'a> {
|
|||
|
||||
/// Create a [`RetryPolicy`] for the client.
|
||||
fn retry_policy(&self) -> ExponentialBackoff {
|
||||
ExponentialBackoff::builder().build_with_max_retries(self.retries)
|
||||
let mut builder = ExponentialBackoff::builder();
|
||||
if env::var_os(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY).is_some() {
|
||||
builder = builder.retry_bounds(Duration::from_millis(0), Duration::from_millis(0));
|
||||
}
|
||||
builder.build_with_max_retries(self.retries)
|
||||
}
|
||||
|
||||
pub fn build(&self) -> BaseClient {
|
||||
|
|
@ -896,18 +920,34 @@ pub fn is_extended_transient_error(err: &dyn Error) -> bool {
|
|||
}
|
||||
|
||||
// IO Errors may be nested through custom IO errors.
|
||||
let mut has_io_error = false;
|
||||
for io_err in find_sources::<io::Error>(&err) {
|
||||
if io_err.kind() == io::ErrorKind::ConnectionReset
|
||||
|| io_err.kind() == io::ErrorKind::UnexpectedEof
|
||||
|| io_err.kind() == io::ErrorKind::BrokenPipe
|
||||
{
|
||||
trace!("Retrying error: `ConnectionReset` or `UnexpectedEof`");
|
||||
has_io_error = true;
|
||||
let retryable_io_err_kinds = [
|
||||
// https://github.com/astral-sh/uv/issues/12054
|
||||
io::ErrorKind::BrokenPipe,
|
||||
// From reqwest-middleware
|
||||
io::ErrorKind::ConnectionAborted,
|
||||
// https://github.com/astral-sh/uv/issues/3514
|
||||
io::ErrorKind::ConnectionReset,
|
||||
// https://github.com/astral-sh/uv/issues/14699
|
||||
io::ErrorKind::InvalidData,
|
||||
// https://github.com/astral-sh/uv/issues/9246
|
||||
io::ErrorKind::UnexpectedEof,
|
||||
];
|
||||
if retryable_io_err_kinds.contains(&io_err.kind()) {
|
||||
trace!("Retrying error: `{}`", io_err.kind());
|
||||
return true;
|
||||
}
|
||||
trace!("Cannot retry IO error: not one of `ConnectionReset` or `UnexpectedEof`");
|
||||
trace!(
|
||||
"Cannot retry IO error `{}`, not a retryable IO error kind",
|
||||
io_err.kind()
|
||||
);
|
||||
}
|
||||
|
||||
trace!("Cannot retry error: not an IO error");
|
||||
if !has_io_error {
|
||||
trace!("Cannot retry error: not an extended IO error");
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
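A rough standalone sketch of the `UV_HTTP_RETRIES` behavior added above, reduced to std-only Rust (the function name and default here are illustrative, not the uv API):

use std::env;

/// Simplified stand-in for `BaseClientBuilder::retries_from_env`: read an
/// optional retry count from the environment, failing loudly on garbage.
fn retries_from_env(default: u32) -> Result<u32, String> {
    match env::var_os("UV_HTTP_RETRIES") {
        // Not set: keep the builder's existing default.
        None => Ok(default),
        // Set: the value must parse as a u32, mirroring the `anyhow::Context` error above.
        Some(value) => value
            .to_string_lossy()
            .parse::<u32>()
            .map_err(|err| format!("Failed to parse `UV_HTTP_RETRIES`: {err}")),
    }
}

fn main() {
    // With the CI setting from this commit (`UV_HTTP_RETRIES: 5`), this yields Ok(5).
    println!("retries = {:?}", retries_from_env(3));
}
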
@@ -196,16 +196,18 @@ impl<E: Into<Self> + std::error::Error + 'static> From<CachedClientError<E>> for
}

#[derive(Debug, Clone, Copy)]
pub enum CacheControl {
pub enum CacheControl<'a> {
    /// Respect the `cache-control` header from the response.
    None,
    /// Apply `max-age=0, must-revalidate` to the request.
    MustRevalidate,
    /// Allow the client to return stale responses.
    AllowStale,
    /// Override the cache control header with a custom value.
    Override(&'a str),
}

impl From<Freshness> for CacheControl {
impl From<Freshness> for CacheControl<'_> {
    fn from(value: Freshness) -> Self {
        match value {
            Freshness::Fresh => Self::None,

@@ -259,7 +261,7 @@ impl CachedClient {
&self,
req: Request,
cache_entry: &CacheEntry,
cache_control: CacheControl,
cache_control: CacheControl<'_>,
response_callback: Callback,
) -> Result<Payload, CachedClientError<CallBackError>> {
    let payload = self

@@ -292,7 +294,7 @@ impl CachedClient {
&self,
req: Request,
cache_entry: &CacheEntry,
cache_control: CacheControl,
cache_control: CacheControl<'_>,
response_callback: Callback,
) -> Result<Payload::Target, CachedClientError<CallBackError>> {
    let fresh_req = req.try_clone().expect("HTTP request must be cloneable");

@@ -302,7 +304,7 @@ impl CachedClient {
    .await?
} else {
    debug!("No cache entry for: {}", req.url());
    let (response, cache_policy) = self.fresh_request(req).await?;
    let (response, cache_policy) = self.fresh_request(req, cache_control).await?;
    CachedResponse::ModifiedOrNew {
        response,
        cache_policy,

@@ -316,7 +318,12 @@ impl CachedClient {
    "Broken fresh cache entry (for payload) at {}, removing: {err}",
    cache_entry.path().display()
);
self.resend_and_heal_cache(fresh_req, cache_entry, response_callback)
self.resend_and_heal_cache(
    fresh_req,
    cache_entry,
    cache_control,
    response_callback,
)
.await
}
},

@@ -337,7 +344,12 @@ impl CachedClient {
    (for payload) at {}, removing: {err}",
    cache_entry.path().display()
);
self.resend_and_heal_cache(fresh_req, cache_entry, response_callback)
self.resend_and_heal_cache(
    fresh_req,
    cache_entry,
    cache_control,
    response_callback,
)
.await
}
}

@@ -353,7 +365,12 @@ impl CachedClient {
// ETag didn't match). We need to make a fresh request.
if response.status() == http::StatusCode::NOT_MODIFIED {
    warn!("Server returned unusable 304 for: {}", fresh_req.url());
    self.resend_and_heal_cache(fresh_req, cache_entry, response_callback)
    self.resend_and_heal_cache(
        fresh_req,
        cache_entry,
        cache_control,
        response_callback,
    )
    .await
} else {
    self.run_response_callback(

@@ -377,9 +394,10 @@ impl CachedClient {
&self,
req: Request,
cache_entry: &CacheEntry,
cache_control: CacheControl<'_>,
response_callback: Callback,
) -> Result<Payload, CachedClientError<CallBackError>> {
    let (response, cache_policy) = self.fresh_request(req).await?;
    let (response, cache_policy) = self.fresh_request(req, cache_control).await?;

    let payload = self
        .run_response_callback(cache_entry, cache_policy, response, async |resp| {

@@ -399,10 +417,11 @@ impl CachedClient {
&self,
req: Request,
cache_entry: &CacheEntry,
cache_control: CacheControl<'_>,
response_callback: Callback,
) -> Result<Payload::Target, CachedClientError<CallBackError>> {
    let _ = fs_err::tokio::remove_file(&cache_entry.path()).await;
    let (response, cache_policy) = self.fresh_request(req).await?;
    let (response, cache_policy) = self.fresh_request(req, cache_control).await?;
    self.run_response_callback(cache_entry, cache_policy, response, response_callback)
        .await
}

@@ -469,12 +488,12 @@ impl CachedClient {
async fn send_cached(
    &self,
    mut req: Request,
    cache_control: CacheControl,
    cache_control: CacheControl<'_>,
    cached: DataWithCachePolicy,
) -> Result<CachedResponse, Error> {
    // Apply the cache control header, if necessary.
    match cache_control {
        CacheControl::None | CacheControl::AllowStale => {}
        CacheControl::None | CacheControl::AllowStale | CacheControl::Override(..) => {}
        CacheControl::MustRevalidate => {
            req.headers_mut().insert(
                http::header::CACHE_CONTROL,

@@ -488,9 +507,14 @@ impl CachedClient {
    CachedResponse::FreshCache(cached)
}
BeforeRequest::Stale(new_cache_policy_builder) => match cache_control {
    CacheControl::None | CacheControl::MustRevalidate => {
    CacheControl::None | CacheControl::MustRevalidate | CacheControl::Override(_) => {
        debug!("Found stale response for: {}", req.url());
        self.send_cached_handle_stale(req, cached, new_cache_policy_builder)
        self.send_cached_handle_stale(
            req,
            cache_control,
            cached,
            new_cache_policy_builder,
        )
        .await?
    }
    CacheControl::AllowStale => {

@@ -504,7 +528,7 @@ impl CachedClient {
    "Cached request doesn't match current request for: {}",
    req.url()
);
let (response, cache_policy) = self.fresh_request(req).await?;
let (response, cache_policy) = self.fresh_request(req, cache_control).await?;
CachedResponse::ModifiedOrNew {
    response,
    cache_policy,

@@ -516,12 +540,13 @@ impl CachedClient {
async fn send_cached_handle_stale(
    &self,
    req: Request,
    cache_control: CacheControl<'_>,
    cached: DataWithCachePolicy,
    new_cache_policy_builder: CachePolicyBuilder,
) -> Result<CachedResponse, Error> {
    let url = DisplaySafeUrl::from(req.url().clone());
    debug!("Sending revalidation request for: {url}");
    let response = self
    let mut response = self
        .0
        .execute(req)
        .instrument(info_span!("revalidation_request", url = url.as_str()))

@@ -529,6 +554,16 @@ impl CachedClient {
    .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?
    .error_for_status()
    .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;

// If the user set a custom `Cache-Control` header, override it.
if let CacheControl::Override(header) = cache_control {
    response.headers_mut().insert(
        http::header::CACHE_CONTROL,
        http::HeaderValue::from_str(header)
            .expect("Cache-Control header must be valid UTF-8"),
    );
}

match cached
    .cache_policy
    .after_response(new_cache_policy_builder, &response)

@@ -557,16 +592,26 @@ impl CachedClient {
async fn fresh_request(
    &self,
    req: Request,
    cache_control: CacheControl<'_>,
) -> Result<(Response, Option<Box<CachePolicy>>), Error> {
    let url = DisplaySafeUrl::from(req.url().clone());
    trace!("Sending fresh {} request for {}", req.method(), url);
    let cache_policy_builder = CachePolicyBuilder::new(&req);
    let response = self
    let mut response = self
        .0
        .execute(req)
        .await
        .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?;

    // If the user set a custom `Cache-Control` header, override it.
    if let CacheControl::Override(header) = cache_control {
        response.headers_mut().insert(
            http::header::CACHE_CONTROL,
            http::HeaderValue::from_str(header)
                .expect("Cache-Control header must be valid UTF-8"),
        );
    }

    let retry_count = response
        .extensions()
        .get::<reqwest_retry::RetryCount>()

@@ -599,7 +644,7 @@ impl CachedClient {
&self,
req: Request,
cache_entry: &CacheEntry,
cache_control: CacheControl,
cache_control: CacheControl<'_>,
response_callback: Callback,
) -> Result<Payload, CachedClientError<CallBackError>> {
    let payload = self

@@ -623,7 +668,7 @@ impl CachedClient {
&self,
req: Request,
cache_entry: &CacheEntry,
cache_control: CacheControl,
cache_control: CacheControl<'_>,
response_callback: Callback,
) -> Result<Payload::Target, CachedClientError<CallBackError>> {
    let mut past_retries = 0;

@@ -681,6 +726,7 @@ impl CachedClient {
&self,
req: Request,
cache_entry: &CacheEntry,
cache_control: CacheControl<'_>,
response_callback: Callback,
) -> Result<Payload, CachedClientError<CallBackError>> {
    let mut past_retries = 0;

@@ -689,7 +735,7 @@ impl CachedClient {
loop {
    let fresh_req = req.try_clone().expect("HTTP request must be cloneable");
    let result = self
        .skip_cache(fresh_req, cache_entry, &response_callback)
        .skip_cache(fresh_req, cache_entry, cache_control, &response_callback)
        .await;

    // Check if the middleware already performed retries
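The new `CacheControl::Override` variant lets an index-configured header win over freshness-based policy, while offline mode keeps its stale-tolerant behavior. A simplified, std-only mirror of that selection logic (the function and parameters here are illustrative, not the uv API):

/// Simplified mirror of the new cache-control selection: an index-level
/// override wins; otherwise freshness decides between reuse and revalidation.
#[derive(Debug)]
enum CacheControl<'a> {
    None,
    MustRevalidate,
    AllowStale,
    Override(&'a str),
}

fn select<'a>(offline: bool, override_header: Option<&'a str>, fresh: bool) -> CacheControl<'a> {
    if offline {
        // Offline mode always tolerates stale cache entries.
        return CacheControl::AllowStale;
    }
    match (override_header, fresh) {
        (Some(header), _) => CacheControl::Override(header),
        (None, true) => CacheControl::None,
        (None, false) => CacheControl::MustRevalidate,
    }
}

fn main() {
    println!("{:?}", select(false, Some("max-age=600"), true)); // Override("max-age=600")
    println!("{:?}", select(true, None, false)); // AllowStale
    println!("{:?}", select(false, None, false)); // MustRevalidate
}
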
@@ -259,6 +259,9 @@ pub enum ErrorKind {
    "Network connectivity is disabled, but the requested data wasn't found in the cache for: `{0}`"
)]
Offline(String),

#[error("Invalid cache control header: `{0}`")]
InvalidCacheControl(String),
}

impl ErrorKind {
@@ -115,6 +115,11 @@ impl<'a> RegistryClientBuilder<'a> {
    self
}

pub fn retries_from_env(mut self) -> anyhow::Result<Self> {
    self.base_client_builder = self.base_client_builder.retries_from_env()?;
    Ok(self)
}

#[must_use]
pub fn native_tls(mut self, native_tls: bool) -> Self {
    self.base_client_builder = self.base_client_builder.native_tls(native_tls);

@@ -506,11 +511,17 @@ impl RegistryClient {
    format!("{package_name}.rkyv"),
);
let cache_control = match self.connectivity {
    Connectivity::Online => CacheControl::from(
    Connectivity::Online => {
        if let Some(header) = self.index_urls.simple_api_cache_control_for(index) {
            CacheControl::Override(header)
        } else {
            CacheControl::from(
                self.cache
                    .freshness(&cache_entry, Some(package_name), None)
                    .map_err(ErrorKind::Io)?,
            ),
            )
        }
    }
    Connectivity::Offline => CacheControl::AllowStale,
};

@@ -566,7 +577,7 @@ impl RegistryClient {
package_name: &PackageName,
url: &DisplaySafeUrl,
cache_entry: &CacheEntry,
cache_control: CacheControl,
cache_control: CacheControl<'_>,
) -> Result<OwnedArchive<SimpleMetadata>, Error> {
    let simple_request = self
        .uncached_client(url)

@@ -778,11 +789,17 @@ impl RegistryClient {
    format!("{}.msgpack", filename.cache_key()),
);
let cache_control = match self.connectivity {
    Connectivity::Online => CacheControl::from(
    Connectivity::Online => {
        if let Some(header) = self.index_urls.artifact_cache_control_for(index) {
            CacheControl::Override(header)
        } else {
            CacheControl::from(
                self.cache
                    .freshness(&cache_entry, Some(&filename.name), None)
                    .map_err(ErrorKind::Io)?,
            ),
            )
        }
    }
    Connectivity::Offline => CacheControl::AllowStale,
};

@@ -848,11 +865,25 @@ impl RegistryClient {
    format!("{}.msgpack", filename.cache_key()),
);
let cache_control = match self.connectivity {
    Connectivity::Online => CacheControl::from(
    Connectivity::Online => {
        if let Some(index) = index {
            if let Some(header) = self.index_urls.artifact_cache_control_for(index) {
                CacheControl::Override(header)
            } else {
                CacheControl::from(
                    self.cache
                        .freshness(&cache_entry, Some(&filename.name), None)
                        .map_err(ErrorKind::Io)?,
                ),
                )
            }
        } else {
            CacheControl::from(
                self.cache
                    .freshness(&cache_entry, Some(&filename.name), None)
                    .map_err(ErrorKind::Io)?,
            )
        }
    }
    Connectivity::Offline => CacheControl::AllowStale,
};
@@ -3,6 +3,7 @@ use std::{
    str::FromStr,
};
use uv_cache_key::CacheKeyHasher;
use uv_normalize::PackageName;

#[derive(Debug, Clone)]
pub struct ConfigSettingEntry {

@@ -28,6 +29,32 @@ impl FromStr for ConfigSettingEntry {
    }
}

#[derive(Debug, Clone)]
pub struct ConfigSettingPackageEntry {
    /// The package name to apply the setting to.
    package: PackageName,
    /// The config setting entry.
    setting: ConfigSettingEntry,
}

impl FromStr for ConfigSettingPackageEntry {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let Some((package_str, config_str)) = s.split_once(':') else {
            return Err(format!(
                "Invalid config setting: {s} (expected `PACKAGE:KEY=VALUE`)"
            ));
        };

        let package = PackageName::from_str(package_str.trim())
            .map_err(|e| format!("Invalid package name: {e}"))?;
        let setting = ConfigSettingEntry::from_str(config_str)?;

        Ok(Self { package, setting })
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema), schemars(untagged))]
enum ConfigSettingValue {

@@ -212,6 +239,111 @@ impl<'de> serde::Deserialize<'de> for ConfigSettings {
    }
}

/// Settings to pass to PEP 517 build backends on a per-package basis.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct PackageConfigSettings(BTreeMap<PackageName, ConfigSettings>);

impl FromIterator<ConfigSettingPackageEntry> for PackageConfigSettings {
    fn from_iter<T: IntoIterator<Item = ConfigSettingPackageEntry>>(iter: T) -> Self {
        let mut package_configs: BTreeMap<PackageName, Vec<ConfigSettingEntry>> = BTreeMap::new();

        for entry in iter {
            package_configs
                .entry(entry.package)
                .or_default()
                .push(entry.setting);
        }

        let configs = package_configs
            .into_iter()
            .map(|(package, entries)| (package, entries.into_iter().collect()))
            .collect();

        Self(configs)
    }
}

impl PackageConfigSettings {
    /// Returns the config settings for a specific package, if any.
    pub fn get(&self, package: &PackageName) -> Option<&ConfigSettings> {
        self.0.get(package)
    }

    /// Returns `true` if there are no package-specific settings.
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }

    /// Merge two sets of package config settings, with the values in `self` taking precedence.
    #[must_use]
    pub fn merge(mut self, other: PackageConfigSettings) -> PackageConfigSettings {
        for (package, settings) in other.0 {
            match self.0.entry(package) {
                Entry::Vacant(vacant) => {
                    vacant.insert(settings);
                }
                Entry::Occupied(mut occupied) => {
                    let merged = occupied.get().clone().merge(settings);
                    occupied.insert(merged);
                }
            }
        }
        self
    }
}

impl uv_cache_key::CacheKey for PackageConfigSettings {
    fn cache_key(&self, state: &mut CacheKeyHasher) {
        for (package, settings) in &self.0 {
            package.to_string().cache_key(state);
            settings.cache_key(state);
        }
    }
}

impl serde::Serialize for PackageConfigSettings {
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        use serde::ser::SerializeMap;

        let mut map = serializer.serialize_map(Some(self.0.len()))?;
        for (key, value) in &self.0 {
            map.serialize_entry(&key.to_string(), value)?;
        }
        map.end()
    }
}

impl<'de> serde::Deserialize<'de> for PackageConfigSettings {
    fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        struct Visitor;

        impl<'de> serde::de::Visitor<'de> for Visitor {
            type Value = PackageConfigSettings;

            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                formatter.write_str("a map from package name to config settings")
            }

            fn visit_map<A: serde::de::MapAccess<'de>>(
                self,
                mut map: A,
            ) -> Result<Self::Value, A::Error> {
                let mut config = BTreeMap::default();
                while let Some((key, value)) = map.next_entry::<String, ConfigSettings>()? {
                    let package = PackageName::from_str(&key).map_err(|e| {
                        serde::de::Error::custom(format!("Invalid package name: {e}"))
                    })?;
                    config.insert(package, value);
                }
                Ok(PackageConfigSettings(config))
            }
        }

        deserializer.deserialize_map(Visitor)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

@@ -291,4 +423,56 @@ mod tests {
    );
    assert_eq!(settings.escape_for_python(), r#"{"key":"val\\1 {}value"}"#);
}

#[test]
fn parse_config_setting_package_entry() {
    // Test valid parsing
    let entry = ConfigSettingPackageEntry::from_str("numpy:editable_mode=compat").unwrap();
    assert_eq!(entry.package.as_ref(), "numpy");
    assert_eq!(entry.setting.key, "editable_mode");
    assert_eq!(entry.setting.value, "compat");

    // Test with package name containing hyphens
    let entry = ConfigSettingPackageEntry::from_str("my-package:some_key=value").unwrap();
    assert_eq!(entry.package.as_ref(), "my-package");
    assert_eq!(entry.setting.key, "some_key");
    assert_eq!(entry.setting.value, "value");

    // Test with spaces around values
    let entry = ConfigSettingPackageEntry::from_str(" numpy : key = value ").unwrap();
    assert_eq!(entry.package.as_ref(), "numpy");
    assert_eq!(entry.setting.key, "key");
    assert_eq!(entry.setting.value, "value");
}

#[test]
fn collect_config_settings_package() {
    let settings: PackageConfigSettings = vec![
        ConfigSettingPackageEntry::from_str("numpy:editable_mode=compat").unwrap(),
        ConfigSettingPackageEntry::from_str("numpy:another_key=value").unwrap(),
        ConfigSettingPackageEntry::from_str("scipy:build_option=fast").unwrap(),
    ]
    .into_iter()
    .collect();

    let numpy_settings = settings
        .get(&PackageName::from_str("numpy").unwrap())
        .unwrap();
    assert_eq!(
        numpy_settings.0.get("editable_mode"),
        Some(&ConfigSettingValue::String("compat".to_string()))
    );
    assert_eq!(
        numpy_settings.0.get("another_key"),
        Some(&ConfigSettingValue::String("value".to_string()))
    );

    let scipy_settings = settings
        .get(&PackageName::from_str("scipy").unwrap())
        .unwrap();
    assert_eq!(
        scipy_settings.0.get("build_option"),
        Some(&ConfigSettingValue::String("fast".to_string()))
    );
}
}
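The `PACKAGE:KEY=VALUE` grammar above splits once on `:` for the package and delegates the rest to the existing `KEY=VALUE` parser. A std-only sketch of that two-stage split (real code validates the package via `PackageName::from_str`; the helper below is hypothetical):

use std::collections::BTreeMap;

/// Standalone sketch of the `PACKAGE:KEY=VALUE` parsing introduced above.
fn parse(s: &str) -> Result<(String, String, String), String> {
    let (package, rest) = s
        .split_once(':')
        .ok_or_else(|| format!("Invalid config setting: {s} (expected `PACKAGE:KEY=VALUE`)"))?;
    let (key, value) = rest
        .split_once('=')
        .ok_or_else(|| format!("Invalid config setting: {rest} (expected `KEY=VALUE`)"))?;
    Ok((
        package.trim().to_string(),
        key.trim().to_string(),
        value.trim().to_string(),
    ))
}

fn main() {
    // Group parsed entries by package, as `PackageConfigSettings::from_iter` does.
    let mut by_package: BTreeMap<String, Vec<(String, String)>> = BTreeMap::new();
    for arg in ["numpy:editable_mode=compat", "scipy:build_option=fast"] {
        let (package, key, value) = parse(arg).unwrap();
        by_package.entry(package).or_default().push((key, value));
    }
    assert_eq!(
        by_package["numpy"],
        vec![("editable_mode".to_string(), "compat".to_string())]
    );
}
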
@@ -186,6 +186,18 @@ impl DependencyGroupsInner {
    self.include.names().chain(&self.exclude)
}

/// Returns an iterator over all groups that are included in the specification,
/// assuming `all_names` is an iterator over all groups.
pub fn group_names<'a, Names>(
    &'a self,
    all_names: Names,
) -> impl Iterator<Item = &'a GroupName> + 'a
where
    Names: Iterator<Item = &'a GroupName> + 'a,
{
    all_names.filter(move |name| self.contains(name))
}

/// Iterate over all groups the user explicitly asked for on the CLI
pub fn explicit_names(&self) -> impl Iterator<Item = &GroupName> {
    let DependencyGroupsHistory {
@@ -155,7 +155,8 @@ impl ExtrasSpecificationInner {
    self.include.names().chain(&self.exclude)
}

/// Returns `true` if the specification includes the given extra.
/// Returns an iterator over all extras that are included in the specification,
/// assuming `all_names` is an iterator over all extras.
pub fn extra_names<'a, Names>(
    &'a self,
    all_names: Names,
@@ -33,7 +33,7 @@ pub enum TargetTriple {
#[serde(rename = "i686-pc-windows-msvc")]
I686PcWindowsMsvc,

/// An x86 Linux target. Equivalent to `x86_64-manylinux_2_17`.
/// An x86 Linux target. Equivalent to `x86_64-manylinux_2_28`.
#[cfg_attr(feature = "clap", value(name = "x86_64-unknown-linux-gnu"))]
#[serde(rename = "x86_64-unknown-linux-gnu")]
#[serde(alias = "x8664-unknown-linux-gnu")]

@@ -56,7 +56,7 @@ pub enum TargetTriple {
#[serde(alias = "x8664-apple-darwin")]
X8664AppleDarwin,

/// An ARM64 Linux target. Equivalent to `aarch64-manylinux_2_17`.
/// An ARM64 Linux target. Equivalent to `aarch64-manylinux_2_28`.
#[cfg_attr(feature = "clap", value(name = "aarch64-unknown-linux-gnu"))]
#[serde(rename = "aarch64-unknown-linux-gnu")]
Aarch64UnknownLinuxGnu,

@@ -227,7 +227,7 @@ pub enum TargetTriple {
#[serde(alias = "aarch64-manylinux240")]
Aarch64Manylinux240,

/// A wasm32 target using the the Pyodide 2024 platform. Meant for use with Python 3.12.
/// A wasm32 target using the Pyodide 2024 platform. Meant for use with Python 3.12.
#[cfg_attr(feature = "clap", value(name = "wasm32-pyodide2024"))]
Wasm32Pyodide2024,
}

@@ -240,7 +240,7 @@ impl TargetTriple {
Self::Linux | Self::X8664UnknownLinuxGnu => Platform::new(
    Os::Manylinux {
        major: 2,
        minor: 17,
        minor: 28,
    },
    Arch::X86_64,
),

@@ -262,7 +262,7 @@ impl TargetTriple {
Self::Aarch64UnknownLinuxGnu => Platform::new(
    Os::Manylinux {
        major: 2,
        minor: 17,
        minor: 28,
    },
    Arch::Aarch64,
),
@@ -6,6 +6,25 @@ use std::{cmp::Ordering, iter};
/// This is a slimmed-down version of `dialoguer::Confirm`, with the post-confirmation report
/// enabled.
pub fn confirm(message: &str, term: &Term, default: bool) -> std::io::Result<bool> {
    confirm_inner(message, None, term, default)
}

/// Prompt the user for confirmation in the given [`Term`], with a hint.
pub fn confirm_with_hint(
    message: &str,
    hint: &str,
    term: &Term,
    default: bool,
) -> std::io::Result<bool> {
    confirm_inner(message, Some(hint), term, default)
}

fn confirm_inner(
    message: &str,
    hint: Option<&str>,
    term: &Term,
    default: bool,
) -> std::io::Result<bool> {
    let prompt = format!(
        "{} {} {} {} {}",
        style("?".to_string()).for_stderr().yellow(),

@@ -18,6 +37,13 @@ pub fn confirm(message: &str, term: &Term, default: bool) -> std::io::Result<boo
);

term.write_str(&prompt)?;
if let Some(hint) = hint {
    term.write_str(&format!(
        "\n\n{}{} {hint}",
        style("hint").for_stderr().bold().cyan(),
        style(":").for_stderr().bold()
    ))?;
}
term.hide_cursor()?;
term.flush()?;

@@ -56,7 +82,14 @@ pub fn confirm(message: &str, term: &Term, default: bool) -> std::io::Result<boo
    .cyan(),
);

if hint.is_some() {
    term.clear_last_lines(2)?;
    // It's not clear why we need to clear to the end of the screen here, but it fixes lingering
    // display of the hint on `bash` (the issue did not reproduce on `zsh`).
    term.clear_to_end_of_screen()?;
} else {
    term.clear_line()?;
}
term.write_line(&report)?;
term.show_cursor()?;
term.flush()?;
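A hedged usage sketch of the new hint-aware prompt, assuming the helper is exported from the uv console crate and that `Term` is the `console` crate's terminal handle (the message and hint strings are purely illustrative):

use console::Term;

fn main() -> std::io::Result<()> {
    let term = Term::stderr();
    // `confirm_with_hint` renders the prompt plus a `hint: ...` line below it,
    // then clears both (and, per the fix above, the rest of the screen) once
    // the user answers.
    let proceed = uv_console::confirm_with_hint(
        "Replace the existing environment?", // hypothetical message
        "re-run with a different path to keep it", // hypothetical hint
        &term,
        false, // default answer
    )?;
    println!("proceed = {proceed}");
    Ok(())
}
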
@@ -11,7 +11,7 @@ use crate::ROOT_DIR;
use crate::generate_all::Mode;

/// Contains current supported targets
const TARGETS_YML_URL: &str = "https://raw.githubusercontent.com/astral-sh/python-build-standalone/refs/tags/20250708/cpython-unix/targets.yml";
const TARGETS_YML_URL: &str = "https://raw.githubusercontent.com/astral-sh/python-build-standalone/refs/tags/20250712/cpython-unix/targets.yml";

#[derive(clap::Args)]
pub(crate) struct Args {

@@ -130,7 +130,7 @@ async fn generate() -> Result<String> {
output.push_str("//! DO NOT EDIT\n");
output.push_str("//!\n");
output.push_str("//! Generated with `cargo run dev generate-sysconfig-metadata`\n");
output.push_str("//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250708/cpython-unix/targets.yml>\n");
output.push_str("//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250712/cpython-unix/targets.yml>\n");
output.push_str("//!\n");

// Disable clippy/fmt
@@ -17,8 +17,8 @@ use uv_build_frontend::{SourceBuild, SourceBuildContext};
use uv_cache::Cache;
use uv_client::RegistryClient;
use uv_configuration::{
    BuildKind, BuildOptions, ConfigSettings, Constraints, IndexStrategy, PreviewMode, Reinstall,
    SourceStrategy,
    BuildKind, BuildOptions, ConfigSettings, Constraints, IndexStrategy, PackageConfigSettings,
    PreviewMode, Reinstall, SourceStrategy,
};
use uv_configuration::{BuildOutput, Concurrency};
use uv_distribution::DistributionDatabase;

@@ -91,6 +91,7 @@ pub struct BuildDispatch<'a> {
link_mode: uv_install_wheel::LinkMode,
build_options: &'a BuildOptions,
config_settings: &'a ConfigSettings,
config_settings_package: &'a PackageConfigSettings,
hasher: &'a HashStrategy,
exclude_newer: Option<ExcludeNewer>,
source_build_context: SourceBuildContext,

@@ -113,6 +114,7 @@ impl<'a> BuildDispatch<'a> {
shared_state: SharedState,
index_strategy: IndexStrategy,
config_settings: &'a ConfigSettings,
config_settings_package: &'a PackageConfigSettings,
build_isolation: BuildIsolation<'a>,
link_mode: uv_install_wheel::LinkMode,
build_options: &'a BuildOptions,

@@ -134,6 +136,7 @@ impl<'a> BuildDispatch<'a> {
dependency_metadata,
index_strategy,
config_settings,
config_settings_package,
build_isolation,
link_mode,
build_options,

@@ -200,6 +203,10 @@ impl BuildContext for BuildDispatch<'_> {
    self.config_settings
}

fn config_settings_package(&self) -> &PackageConfigSettings {
    self.config_settings_package
}

fn sources(&self) -> SourceStrategy {
    self.sources
}

@@ -295,6 +302,7 @@ impl BuildContext for BuildDispatch<'_> {
self.hasher,
self.index_locations,
self.config_settings,
self.config_settings_package,
self.cache(),
venv,
tags,

@@ -418,6 +426,17 @@ impl BuildContext for BuildDispatch<'_> {
    build_stack.insert(dist.distribution_id());
}

// Get package-specific config settings if available; otherwise, use global settings.
let config_settings = if let Some(name) = dist_name {
    if let Some(package_settings) = self.config_settings_package.get(name) {
        package_settings.clone().merge(self.config_settings.clone())
    } else {
        self.config_settings.clone()
    }
} else {
    self.config_settings.clone()
};

let builder = SourceBuild::setup(
    source,
    subdirectory,

@@ -431,7 +450,7 @@ impl BuildContext for BuildDispatch<'_> {
self.index_locations,
sources,
self.workspace_cache(),
self.config_settings.clone(),
config_settings,
self.build_isolation,
&build_stack,
build_kind,
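The merge direction above matters: `package_settings.clone().merge(self.config_settings.clone())` puts the per-package map in the `self` position, and `merge` keeps `self`'s values on conflict, so package-specific settings win and global settings only fill gaps. A std-only sketch of that precedence rule, assuming `ConfigSettings::merge` follows the same self-wins behavior documented on `PackageConfigSettings::merge`:

use std::collections::BTreeMap;

/// Standalone mirror of the merge as used above: keys already present in
/// `this` (the per-package map) win over `other` (the global map).
fn merge(
    mut this: BTreeMap<String, String>,
    other: BTreeMap<String, String>,
) -> BTreeMap<String, String> {
    for (key, value) in other {
        this.entry(key).or_insert(value);
    }
    this
}

fn main() {
    let package = BTreeMap::from([("editable_mode".to_string(), "compat".to_string())]);
    let global = BTreeMap::from([
        ("editable_mode".to_string(), "strict".to_string()),
        ("jobs".to_string(), "4".to_string()),
    ]);
    let effective = merge(package, global);
    // The per-package value survives; unrelated global keys are inherited.
    assert_eq!(effective["editable_mode"], "compat");
    assert_eq!(effective["jobs"], "4");
}
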
@@ -124,7 +124,10 @@ impl SourceUrl<'_> {
pub fn is_editable(&self) -> bool {
    matches!(
        self,
        Self::Directory(DirectorySourceUrl { editable: true, .. })
        Self::Directory(DirectorySourceUrl {
            editable: Some(true),
            ..
        })
    )
}

@@ -210,7 +213,7 @@ impl<'a> From<&'a PathSourceDist> for PathSourceUrl<'a> {
pub struct DirectorySourceUrl<'a> {
    pub url: &'a DisplaySafeUrl,
    pub install_path: Cow<'a, Path>,
    pub editable: bool,
    pub editable: Option<bool>,
}

impl std::fmt::Display for DirectorySourceUrl<'_> {
@@ -30,21 +30,20 @@ impl DependencyMetadata {

if let Some(version) = version {
    // If a specific version was requested, search for an exact match, then a global match.
    let metadata = versions
    let metadata = if let Some(metadata) = versions
        .iter()
        .find(|v| v.version.as_ref() == Some(version))
        .inspect(|_| {
        .find(|entry| entry.version.as_ref() == Some(version))
    {
        debug!("Found dependency metadata entry for `{package}=={version}`");
        })
        .or_else(|| versions.iter().find(|v| v.version.is_none()))
        .inspect(|_| {
        metadata
    } else if let Some(metadata) = versions.iter().find(|entry| entry.version.is_none()) {
        debug!("Found global metadata entry for `{package}`");
        });
    let Some(metadata) = metadata else {
        metadata
    } else {
        warn!("No dependency metadata entry found for `{package}=={version}`");
        return None;
    };
    debug!("Found dependency metadata entry for `{package}=={version}`");

    Some(ResolutionMetadata {
        name: metadata.name.clone(),
        version: version.clone(),

@@ -65,6 +64,7 @@ impl DependencyMetadata {
    return None;
};
debug!("Found dependency metadata entry for `{package}` (assuming: `{version}`)");

Some(ResolutionMetadata {
    name: metadata.name.clone(),
    version,

@@ -86,7 +86,7 @@ impl DependencyMetadata {
/// <https://packaging.python.org/specifications/core-metadata/>.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "kebab-case")]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
pub struct StaticMetadata {
    // Mandatory fields
    pub name: PackageName,
@@ -131,11 +131,11 @@ impl DerivationChain {
));
let target = edge.source();
let extra = match edge.weight() {
    Edge::Optional(extra, ..) => Some(extra.clone()),
    Edge::Optional(extra) => Some(extra.clone()),
    _ => None,
};
let group = match edge.weight() {
    Edge::Dev(group, ..) => Some(group.clone()),
    Edge::Dev(group) => Some(group.clone()),
    _ => None,
};
queue.push_back((target, extra, group, path));
@@ -6,11 +6,23 @@ use thiserror::Error;

use uv_auth::{AuthPolicy, Credentials};
use uv_redacted::DisplaySafeUrl;
use uv_small_str::SmallString;

use crate::index_name::{IndexName, IndexNameError};
use crate::origin::Origin;
use crate::{IndexStatusCodeStrategy, IndexUrl, IndexUrlError, SerializableStatusCode};

/// Cache control configuration for an index.
#[derive(Debug, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Default)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "kebab-case")]
pub struct IndexCacheControl {
    /// Cache control header for Simple API requests.
    pub api: Option<SmallString>,
    /// Cache control header for file downloads.
    pub files: Option<SmallString>,
}

#[derive(Debug, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[serde(rename_all = "kebab-case")]

@@ -104,6 +116,19 @@ pub struct Index {
/// ```
#[serde(default)]
pub ignore_error_codes: Option<Vec<SerializableStatusCode>>,
/// Cache control configuration for this index.
///
/// When set, these headers will override the server's cache control headers
/// for both package metadata requests and artifact downloads.
///
/// ```toml
/// [[tool.uv.index]]
/// name = "my-index"
/// url = "https://<omitted>/simple"
/// cache-control = { api = "max-age=600", files = "max-age=3600" }
/// ```
#[serde(default)]
pub cache_control: Option<IndexCacheControl>,
}

#[derive(

@@ -142,6 +167,7 @@ impl Index {
    publish_url: None,
    authenticate: AuthPolicy::default(),
    ignore_error_codes: None,
    cache_control: None,
}
}

@@ -157,6 +183,7 @@ impl Index {
    publish_url: None,
    authenticate: AuthPolicy::default(),
    ignore_error_codes: None,
    cache_control: None,
}
}

@@ -172,6 +199,7 @@ impl Index {
    publish_url: None,
    authenticate: AuthPolicy::default(),
    ignore_error_codes: None,
    cache_control: None,
}
}

@@ -250,6 +278,7 @@ impl From<IndexUrl> for Index {
    publish_url: None,
    authenticate: AuthPolicy::default(),
    ignore_error_codes: None,
    cache_control: None,
}
}
}

@@ -273,6 +302,7 @@ impl FromStr for Index {
    publish_url: None,
    authenticate: AuthPolicy::default(),
    ignore_error_codes: None,
    cache_control: None,
});
}
}

@@ -289,6 +319,7 @@ impl FromStr for Index {
    publish_url: None,
    authenticate: AuthPolicy::default(),
    ignore_error_codes: None,
    cache_control: None,
})
}
}

@@ -384,3 +415,55 @@ pub enum IndexSourceError {
#[error("Index included a name, but the name was empty")]
EmptyName,
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_index_cache_control_headers() {
        // Test that cache control headers are properly parsed from TOML
        let toml_str = r#"
name = "test-index"
url = "https://test.example.com/simple"
cache-control = { api = "max-age=600", files = "max-age=3600" }
"#;

        let index: Index = toml::from_str(toml_str).unwrap();
        assert_eq!(index.name.as_ref().unwrap().as_ref(), "test-index");
        assert!(index.cache_control.is_some());
        let cache_control = index.cache_control.as_ref().unwrap();
        assert_eq!(cache_control.api.as_deref(), Some("max-age=600"));
        assert_eq!(cache_control.files.as_deref(), Some("max-age=3600"));
    }

    #[test]
    fn test_index_without_cache_control() {
        // Test that indexes work without cache control headers
        let toml_str = r#"
name = "test-index"
url = "https://test.example.com/simple"
"#;

        let index: Index = toml::from_str(toml_str).unwrap();
        assert_eq!(index.name.as_ref().unwrap().as_ref(), "test-index");
        assert_eq!(index.cache_control, None);
    }

    #[test]
    fn test_index_partial_cache_control() {
        // Test that cache control can have just one field
        let toml_str = r#"
name = "test-index"
url = "https://test.example.com/simple"
cache-control = { api = "max-age=300" }
"#;

        let index: Index = toml::from_str(toml_str).unwrap();
        assert_eq!(index.name.as_ref().unwrap().as_ref(), "test-index");
        assert!(index.cache_control.is_some());
        let cache_control = index.cache_control.as_ref().unwrap();
        assert_eq!(cache_control.api.as_deref(), Some("max-age=300"));
        assert_eq!(cache_control.files, None);
    }
}
@@ -39,33 +39,8 @@ impl IndexUrl {
/// If no root directory is provided, relative paths are resolved against the current working
/// directory.
pub fn parse(path: &str, root_dir: Option<&Path>) -> Result<Self, IndexUrlError> {
    let url = match split_scheme(path) {
        Some((scheme, ..)) => {
            match Scheme::parse(scheme) {
                Some(_) => {
                    // Ex) `https://pypi.org/simple`
                    VerbatimUrl::parse_url(path)?
                }
                None => {
                    // Ex) `C:\Users\user\index`
                    if let Some(root_dir) = root_dir {
                        VerbatimUrl::from_path(path, root_dir)?
                    } else {
                        VerbatimUrl::from_absolute_path(std::path::absolute(path)?)?
                    }
                }
            }
        }
        None => {
            // Ex) `/Users/user/index`
            if let Some(root_dir) = root_dir {
                VerbatimUrl::from_path(path, root_dir)?
            } else {
                VerbatimUrl::from_absolute_path(std::path::absolute(path)?)?
            }
        }
    };
    Ok(Self::from(url.with_given(path)))
    let url = VerbatimUrl::from_url_or_path(path, root_dir)?;
    Ok(Self::from(url))
}

/// Return the root [`Url`] of the index, if applicable.

@@ -466,6 +441,26 @@ impl<'a> IndexLocations {
        }
    }
}

/// Return the Simple API cache control header for an [`IndexUrl`], if configured.
pub fn simple_api_cache_control_for(&self, url: &IndexUrl) -> Option<&str> {
    for index in &self.indexes {
        if index.url() == url {
            return index.cache_control.as_ref()?.api.as_deref();
        }
    }
    None
}

/// Return the artifact cache control header for an [`IndexUrl`], if configured.
pub fn artifact_cache_control_for(&self, url: &IndexUrl) -> Option<&str> {
    for index in &self.indexes {
        if index.url() == url {
            return index.cache_control.as_ref()?.files.as_deref();
        }
    }
    None
}
}

impl From<&IndexLocations> for uv_auth::Indexes {

@@ -599,6 +594,26 @@ impl<'a> IndexUrls {
    }
    IndexStatusCodeStrategy::Default
}

/// Return the Simple API cache control header for an [`IndexUrl`], if configured.
pub fn simple_api_cache_control_for(&self, url: &IndexUrl) -> Option<&str> {
    for index in &self.indexes {
        if index.url() == url {
            return index.cache_control.as_ref()?.api.as_deref();
        }
    }
    None
}

/// Return the artifact cache control header for an [`IndexUrl`], if configured.
pub fn artifact_cache_control_for(&self, url: &IndexUrl) -> Option<&str> {
    for index in &self.indexes {
        if index.url() == url {
            return index.cache_control.as_ref()?.files.as_deref();
        }
    }
    None
}
}

bitflags::bitflags! {

@@ -717,4 +732,64 @@ mod tests {
    "git+https://github.com/example/repo.git"
));
}

#[test]
fn test_cache_control_lookup() {
    use std::str::FromStr;

    use uv_small_str::SmallString;

    use crate::IndexFormat;
    use crate::index_name::IndexName;

    let indexes = vec![
        Index {
            name: Some(IndexName::from_str("index1").unwrap()),
            url: IndexUrl::from_str("https://index1.example.com/simple").unwrap(),
            cache_control: Some(crate::IndexCacheControl {
                api: Some(SmallString::from("max-age=300")),
                files: Some(SmallString::from("max-age=1800")),
            }),
            explicit: false,
            default: false,
            origin: None,
            format: IndexFormat::Simple,
            publish_url: None,
            authenticate: uv_auth::AuthPolicy::default(),
            ignore_error_codes: None,
        },
        Index {
            name: Some(IndexName::from_str("index2").unwrap()),
            url: IndexUrl::from_str("https://index2.example.com/simple").unwrap(),
            cache_control: None,
            explicit: false,
            default: false,
            origin: None,
            format: IndexFormat::Simple,
            publish_url: None,
            authenticate: uv_auth::AuthPolicy::default(),
            ignore_error_codes: None,
        },
    ];

    let index_urls = IndexUrls::from_indexes(indexes);

    let url1 = IndexUrl::from_str("https://index1.example.com/simple").unwrap();
    assert_eq!(
        index_urls.simple_api_cache_control_for(&url1),
        Some("max-age=300")
    );
    assert_eq!(
        index_urls.artifact_cache_control_for(&url1),
        Some("max-age=1800")
    );

    let url2 = IndexUrl::from_str("https://index2.example.com/simple").unwrap();
    assert_eq!(index_urls.simple_api_cache_control_for(&url2), None);
    assert_eq!(index_urls.artifact_cache_control_for(&url2), None);

    let url3 = IndexUrl::from_str("https://index3.example.com/simple").unwrap();
    assert_eq!(index_urls.simple_api_cache_control_for(&url3), None);
    assert_eq!(index_urls.artifact_cache_control_for(&url3), None);
}
}
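The `IndexUrl::parse` simplification above folds the old scheme-matching into a single `VerbatimUrl::from_url_or_path` call. A rough std-only sketch of the dispatch that call encapsulates (the type and function names below are stand-ins, not the uv API):

use std::path::{Path, PathBuf};

/// Stand-in for the URL-or-path decision: input with a plausible multi-character
/// scheme is a URL; anything else is a path, resolved against `root_dir` when given.
#[derive(Debug)]
enum IndexTarget {
    Url(String),
    Path(PathBuf),
}

fn parse_index(input: &str, root_dir: Option<&Path>) -> std::io::Result<IndexTarget> {
    // Require a multi-character scheme so Windows drive letters (`C:\...`)
    // keep being classified as paths, as in the replaced code.
    let is_url = input.split_once(':').is_some_and(|(scheme, _)| {
        scheme.len() > 1
            && scheme
                .chars()
                .all(|c| c.is_ascii_alphanumeric() || matches!(c, '+' | '-' | '.'))
    });
    Ok(if is_url {
        IndexTarget::Url(input.to_string())
    } else if let Some(root) = root_dir {
        IndexTarget::Path(root.join(input))
    } else {
        IndexTarget::Path(std::path::absolute(input)?)
    })
}

fn main() -> std::io::Result<()> {
    println!("{:?}", parse_index("https://pypi.org/simple", None)?);
    println!("{:?}", parse_index("relative/index", Some(Path::new("/srv")))?);
    Ok(())
}
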
@@ -343,9 +343,9 @@ pub struct DirectorySourceDist {
/// The absolute path to the distribution which we use for installing.
pub install_path: Box<Path>,
/// Whether the package should be installed in editable mode.
pub editable: bool,
pub editable: Option<bool>,
/// Whether the package should be built and installed.
pub r#virtual: bool,
pub r#virtual: Option<bool>,
/// The URL as it was provided by the user.
pub url: VerbatimUrl,
}

@@ -452,8 +452,8 @@ impl Dist {
name: PackageName,
url: VerbatimUrl,
install_path: &Path,
editable: bool,
r#virtual: bool,
editable: Option<bool>,
r#virtual: Option<bool>,
) -> Result<Dist, Error> {
    // Convert to an absolute path.
    let install_path = path::absolute(install_path)?;

@@ -655,7 +655,7 @@ impl SourceDist {
/// Returns `true` if the distribution is editable.
pub fn is_editable(&self) -> bool {
    match self {
        Self::Directory(DirectorySourceDist { editable, .. }) => *editable,
        Self::Directory(DirectorySourceDist { editable, .. }) => editable.unwrap_or(false),
        _ => false,
    }
}

@@ -663,7 +663,7 @@ impl SourceDist {
/// Returns `true` if the distribution is virtual.
pub fn is_virtual(&self) -> bool {
    match self {
        Self::Directory(DirectorySourceDist { r#virtual, .. }) => *r#virtual,
        Self::Directory(DirectorySourceDist { r#virtual, .. }) => r#virtual.unwrap_or(false),
        _ => false,
    }
}
@@ -429,9 +429,9 @@ pub enum RequirementSource {
/// The absolute path to the distribution which we use for installing.
install_path: Box<Path>,
/// For a source tree (a directory), whether to install as an editable.
editable: bool,
editable: Option<bool>,
/// For a source tree (a directory), whether the project should be built and installed.
r#virtual: bool,
r#virtual: Option<bool>,
/// The PEP 508 style URL in the format
/// `file:///<path>#subdirectory=<subdirectory>`.
url: VerbatimUrl,

@@ -545,7 +545,13 @@ impl RequirementSource {

/// Returns `true` if the source is editable.
pub fn is_editable(&self) -> bool {
    matches!(self, Self::Directory { editable: true, .. })
    matches!(
        self,
        Self::Directory {
            editable: Some(true),
            ..
        }
    )
}

/// Returns `true` if the source is empty.

@@ -792,11 +798,11 @@ impl From<RequirementSource> for RequirementSourceWire {
r#virtual,
url: _,
} => {
    if editable {
    if editable.unwrap_or(false) {
        Self::Editable {
            editable: PortablePathBuf::from(install_path),
        }
    } else if r#virtual {
    } else if r#virtual.unwrap_or(false) {
        Self::Virtual {
            r#virtual: PortablePathBuf::from(install_path),
        }

@@ -908,8 +914,8 @@ impl TryFrom<RequirementSourceWire> for RequirementSource {
))?;
Ok(Self::Directory {
    install_path: directory,
    editable: false,
    r#virtual: false,
    editable: Some(false),
    r#virtual: Some(false),
    url,
})
}

@@ -920,8 +926,8 @@ impl TryFrom<RequirementSourceWire> for RequirementSource {
))?;
Ok(Self::Directory {
    install_path: editable,
    editable: true,
    r#virtual: false,
    editable: Some(true),
    r#virtual: Some(false),
    url,
})
}

@@ -932,8 +938,8 @@ impl TryFrom<RequirementSourceWire> for RequirementSource {
))?;
Ok(Self::Directory {
    install_path: r#virtual,
    editable: false,
    r#virtual: true,
    editable: Some(false),
    r#virtual: Some(true),
    url,
})
}

@@ -980,8 +986,8 @@ mod tests {
marker: MarkerTree::TRUE,
source: RequirementSource::Directory {
    install_path: PathBuf::from(path).into_boxed_path(),
    editable: false,
    r#virtual: false,
    editable: Some(false),
    r#virtual: Some(false),
    url: VerbatimUrl::from_absolute_path(path).unwrap(),
},
origin: None,
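The widening of `editable`/`r#virtual` from `bool` to `Option<bool>` makes "unspecified" representable alongside an explicit `false`; every read path in this commit collapses `None` via `unwrap_or(false)`, while pattern matches can still distinguish the three states. A tiny runnable illustration of that tri-state:

/// The tri-state used above: `None` means "not specified", which readers
/// collapse to `false` at use sites.
fn is_editable(editable: Option<bool>) -> bool {
    editable.unwrap_or(false)
}

fn main() {
    assert!(is_editable(Some(true)));
    assert!(!is_editable(Some(false)));
    // Unspecified behaves like `false` at read time...
    assert!(!is_editable(None));
    // ...but stays distinguishable from an explicit `false` when matched,
    // as in `matches!(self, Self::Directory { editable: Some(true), .. })`.
    assert_ne!(None::<bool>, Some(false));
}
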
@@ -1,6 +1,5 @@
use uv_distribution_filename::DistExtension;
use uv_normalize::{ExtraName, GroupName, PackageName};
use uv_pep508::MarkerTree;
use uv_pypi_types::{HashDigest, HashDigests};

use crate::{

@@ -202,12 +201,12 @@ impl Node {
    }
}

/// An edge in the resolution graph, along with the marker that must be satisfied to traverse it.
/// An edge in the resolution graph.
#[derive(Debug, Clone)]
pub enum Edge {
    Prod(MarkerTree),
    Optional(ExtraName, MarkerTree),
    Dev(GroupName, MarkerTree),
    Prod,
    Optional(ExtraName),
    Dev(GroupName),
}

impl From<&ResolvedDist> for RequirementSource {
@@ -20,7 +20,7 @@ use uv_client::{
};
use uv_distribution_filename::WheelFilename;
use uv_distribution_types::{
    BuildableSource, BuiltDist, Dist, HashPolicy, Hashed, InstalledDist, Name, SourceDist,
    BuildableSource, BuiltDist, Dist, HashPolicy, Hashed, IndexUrl, InstalledDist, Name, SourceDist,
};
use uv_extract::hash::Hasher;
use uv_fs::write_atomic;

@@ -201,6 +201,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
match self
    .stream_wheel(
        url.clone(),
        dist.index(),
        &wheel.filename,
        wheel.file.size,
        &wheel_entry,

@@ -236,6 +237,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
let archive = self
    .download_wheel(
        url,
        dist.index(),
        &wheel.filename,
        wheel.file.size,
        &wheel_entry,

@@ -272,6 +274,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
match self
    .stream_wheel(
        wheel.url.raw().clone(),
        None,
        &wheel.filename,
        None,
        &wheel_entry,

@@ -301,6 +304,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
let archive = self
    .download_wheel(
        wheel.url.raw().clone(),
        None,
        &wheel.filename,
        None,
        &wheel_entry,

@@ -534,6 +538,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
async fn stream_wheel(
    &self,
    url: DisplaySafeUrl,
    index: Option<&IndexUrl>,
    filename: &WheelFilename,
    size: Option<u64>,
    wheel_entry: &CacheEntry,

@@ -616,13 +621,24 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
// Fetch the archive from the cache, or download it if necessary.
let req = self.request(url.clone())?;

// Determine the cache control policy for the URL.
let cache_control = match self.client.unmanaged.connectivity() {
    Connectivity::Online => CacheControl::from(
    Connectivity::Online => {
        if let Some(header) = index.and_then(|index| {
            self.build_context
                .locations()
                .artifact_cache_control_for(index)
        }) {
            CacheControl::Override(header)
        } else {
            CacheControl::from(
                self.build_context
                    .cache()
                    .freshness(&http_entry, Some(&filename.name), None)
                    .map_err(Error::CacheRead)?,
            ),
            )
        }
    }
    Connectivity::Offline => CacheControl::AllowStale,
};

@@ -654,7 +670,12 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
.managed(async |client| {
    client
        .cached_client()
        .skip_cache_with_retry(self.request(url)?, &http_entry, download)
        .skip_cache_with_retry(
            self.request(url)?,
            &http_entry,
            cache_control,
            download,
        )
        .await
        .map_err(|err| match err {
            CachedClientError::Callback { err, .. } => err,

@@ -671,6 +692,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
async fn download_wheel(
    &self,
    url: DisplaySafeUrl,
    index: Option<&IndexUrl>,
    filename: &WheelFilename,
    size: Option<u64>,
    wheel_entry: &CacheEntry,

@@ -783,13 +805,24 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
// Fetch the archive from the cache, or download it if necessary.
let req = self.request(url.clone())?;

// Determine the cache control policy for the URL.
let cache_control = match self.client.unmanaged.connectivity() {
    Connectivity::Online => CacheControl::from(
    Connectivity::Online => {
        if let Some(header) = index.and_then(|index| {
            self.build_context
                .locations()
                .artifact_cache_control_for(index)
        }) {
            CacheControl::Override(header)
        } else {
            CacheControl::from(
                self.build_context
                    .cache()
                    .freshness(&http_entry, Some(&filename.name), None)
                    .map_err(Error::CacheRead)?,
            ),
            )
        }
    }
    Connectivity::Offline => CacheControl::AllowStale,
};

@@ -821,7 +854,12 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
.managed(async |client| {
    client
        .cached_client()
        .skip_cache_with_retry(self.request(url)?, &http_entry, download)
        .skip_cache_with_retry(
            self.request(url)?,
            &http_entry,
            cache_control,
            download,
        )
        .await
        .map_err(|err| match err {
            CachedClientError::Callback { err, .. } => err,
@ -1,10 +1,12 @@
|
|||
use std::borrow::Cow;
|
||||
use uv_cache::{Cache, CacheBucket, CacheShard, WheelCache};
|
||||
use uv_cache_info::CacheInfo;
|
||||
use uv_cache_key::cache_digest;
|
||||
use uv_configuration::ConfigSettings;
|
||||
use uv_configuration::{ConfigSettings, PackageConfigSettings};
|
||||
use uv_distribution_types::{
|
||||
DirectUrlSourceDist, DirectorySourceDist, GitSourceDist, Hashed, PathSourceDist,
|
||||
};
|
||||
use uv_normalize::PackageName;
|
||||
use uv_platform_tags::Tags;
|
||||
use uv_types::HashStrategy;
|
||||
|
||||
|
|
@ -18,7 +20,8 @@ pub struct BuiltWheelIndex<'a> {
|
|||
cache: &'a Cache,
|
||||
tags: &'a Tags,
|
||||
hasher: &'a HashStrategy,
|
||||
build_configuration: &'a ConfigSettings,
|
||||
config_settings: &'a ConfigSettings,
|
||||
config_settings_package: &'a PackageConfigSettings,
|
||||
}
|
||||
|
||||
impl<'a> BuiltWheelIndex<'a> {
|
||||
|
|
@ -27,13 +30,15 @@ impl<'a> BuiltWheelIndex<'a> {
|
|||
cache: &'a Cache,
|
||||
tags: &'a Tags,
|
||||
hasher: &'a HashStrategy,
|
||||
build_configuration: &'a ConfigSettings,
|
||||
config_settings: &'a ConfigSettings,
|
||||
config_settings_package: &'a PackageConfigSettings,
|
||||
) -> Self {
|
||||
Self {
|
||||
cache,
|
||||
tags,
|
||||
hasher,
|
||||
build_configuration,
|
||||
config_settings,
|
||||
config_settings_package,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@@ -63,10 +68,11 @@ impl<'a> BuiltWheelIndex<'a> {
         let cache_shard = cache_shard.shard(revision.id());

         // If there are build settings, we need to scope to a cache shard.
-        let cache_shard = if self.build_configuration.is_empty() {
+        let config_settings = self.config_settings_for(&source_dist.name);
+        let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(self.build_configuration))
+            cache_shard.shard(cache_digest(&config_settings))
         };

         Ok(self.find(&cache_shard))
@@ -100,10 +106,11 @@ impl<'a> BuiltWheelIndex<'a> {
         let cache_shard = cache_shard.shard(revision.id());

         // If there are build settings, we need to scope to a cache shard.
-        let cache_shard = if self.build_configuration.is_empty() {
+        let config_settings = self.config_settings_for(&source_dist.name);
+        let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(self.build_configuration))
+            cache_shard.shard(cache_digest(&config_settings))
         };

         Ok(self
@@ -119,7 +126,7 @@ impl<'a> BuiltWheelIndex<'a> {
     ) -> Result<Option<CachedWheel>, Error> {
         let cache_shard = self.cache.shard(
             CacheBucket::SourceDistributions,
-            if source_dist.editable {
+            if source_dist.editable.unwrap_or(false) {
                 WheelCache::Editable(&source_dist.url).root()
             } else {
                 WheelCache::Path(&source_dist.url).root()
@@ -148,10 +155,11 @@ impl<'a> BuiltWheelIndex<'a> {
         let cache_shard = cache_shard.shard(revision.id());

         // If there are build settings, we need to scope to a cache shard.
-        let cache_shard = if self.build_configuration.is_empty() {
+        let config_settings = self.config_settings_for(&source_dist.name);
+        let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(self.build_configuration))
+            cache_shard.shard(cache_digest(&config_settings))
         };

         Ok(self
@@ -174,10 +182,11 @@ impl<'a> BuiltWheelIndex<'a> {
         );

         // If there are build settings, we need to scope to a cache shard.
-        let cache_shard = if self.build_configuration.is_empty() {
+        let config_settings = self.config_settings_for(&source_dist.name);
+        let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(self.build_configuration))
+            cache_shard.shard(cache_digest(&config_settings))
         };

         self.find(&cache_shard)
@@ -239,4 +248,13 @@ impl<'a> BuiltWheelIndex<'a> {
         candidate
     }
+
+    /// Determine the [`ConfigSettings`] for the given package name.
+    fn config_settings_for(&self, name: &PackageName) -> Cow<'_, ConfigSettings> {
+        if let Some(package_settings) = self.config_settings_package.get(name) {
+            Cow::Owned(package_settings.clone().merge(self.config_settings.clone()))
+        } else {
+            Cow::Borrowed(self.config_settings)
+        }
+    }
 }
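`config_settings_for` is the core of the package-level settings lookup: it only allocates when a package actually has an entry in the per-package table, and borrows the global settings otherwise. A minimal sketch of the same `Cow` pattern with plain `HashMap`s, assuming (as the `merge` call above suggests) that package-specific keys take precedence over global ones:

use std::borrow::Cow;
use std::collections::HashMap;

// `Settings` is a hypothetical stand-in for uv's `ConfigSettings`.
type Settings = HashMap<String, String>;

fn settings_for<'a>(
    global: &'a Settings,
    per_package: &HashMap<String, Settings>,
    name: &str,
) -> Cow<'a, Settings> {
    if let Some(package) = per_package.get(name) {
        // Package-specific keys win; fill in the rest from the global settings.
        let mut merged = package.clone();
        for (key, value) in global {
            merged.entry(key.clone()).or_insert_with(|| value.clone());
        }
        Cow::Owned(merged)
    } else {
        // No per-package entry: borrow the global settings, no allocation.
        Cow::Borrowed(global)
    }
}

fn main() {
    let global = Settings::from([("editable".to_string(), "false".to_string())]);
    let per_package = HashMap::from([(
        "numpy".to_string(),
        Settings::from([("editable".to_string(), "true".to_string())]),
    )]);
    assert_eq!(settings_for(&global, &per_package, "numpy")["editable"], "true");
    assert!(matches!(settings_for(&global, &per_package, "requests"), Cow::Borrowed(_)));
}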
@@ -306,19 +306,22 @@ impl LoweredRequirement {
                 },
                 url,
             }
-        } else if member.pyproject_toml().is_package() {
+        } else if member
+            .pyproject_toml()
+            .is_package(!workspace.is_required_member(&requirement.name))
+        {
             RequirementSource::Directory {
                 install_path: install_path.into_boxed_path(),
                 url,
-                editable: true,
-                r#virtual: false,
+                editable: Some(true),
+                r#virtual: Some(false),
             }
         } else {
             RequirementSource::Directory {
                 install_path: install_path.into_boxed_path(),
                 url,
-                editable: false,
-                r#virtual: true,
+                editable: Some(false),
+                r#virtual: Some(true),
             }
         };
         (source, marker)
@@ -724,26 +727,31 @@ fn path_source(
         Ok(RequirementSource::Directory {
             install_path: install_path.into_boxed_path(),
             url,
-            editable: true,
-            r#virtual: false,
+            editable,
+            r#virtual: Some(false),
         })
     } else {
         // Determine whether the project is a package or virtual.
        // If the `package` option is unset, check if `tool.uv.package` is set
        // on the path source (otherwise, default to `true`).
         let is_package = package.unwrap_or_else(|| {
             let pyproject_path = install_path.join("pyproject.toml");
             fs_err::read_to_string(&pyproject_path)
                 .ok()
                 .and_then(|contents| PyProjectToml::from_string(contents).ok())
-                .map(|pyproject_toml| pyproject_toml.is_package())
+                // We don't require a build system for path dependencies
+                .map(|pyproject_toml| pyproject_toml.is_package(false))
                 .unwrap_or(true)
         });

+        // If the project is not a package, treat it as a virtual dependency.
+        let r#virtual = !is_package;
+
         Ok(RequirementSource::Directory {
             install_path: install_path.into_boxed_path(),
             url,
-            editable: false,
-            // If a project is not a package, treat it as a virtual dependency.
-            r#virtual: !is_package,
+            editable: Some(false),
+            r#virtual: Some(r#virtual),
         })
     }
 } else {
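The fallback chain in `path_source` is worth spelling out: an explicit `package` option on the source wins; otherwise the dependency's own `pyproject.toml` is consulted (now without requiring a build system); and if that file is missing or unreadable, the dependency is assumed to be a package. A minimal sketch of the decision, with `pyproject_is_package` standing in for the parsed `tool.uv.package` value:

fn is_package(explicit: Option<bool>, pyproject_is_package: Option<bool>) -> bool {
    // Explicit flag first, then the dependency's own metadata, then `true`.
    explicit.unwrap_or_else(|| pyproject_is_package.unwrap_or(true))
}

fn main() {
    assert!(is_package(None, None)); // no metadata: assume a real package
    assert!(!is_package(None, Some(false))); // tool.uv.package = false: virtual
    assert!(is_package(Some(true), Some(false))); // explicit flag wins
}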
@@ -618,14 +618,13 @@ mod test {
             tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
         "#};

-        assert_snapshot!(format_err(input).await, @r###"
-        error: TOML parse error at line 8, column 16
-          |
-        8 | tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
-          |                ^
-        invalid string
-        expected `"`, `'`
-        "###);
+        assert_snapshot!(format_err(input).await, @r#"
+        error: TOML parse error at line 8, column 28
+          |
+        8 | tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
+          |                            ^
+        missing comma between key-value pairs, expected `,`
+        "#);
     }

     #[tokio::test]
@@ -29,10 +29,10 @@ use uv_cache_key::cache_digest;
 use uv_client::{
     CacheControl, CachedClientError, Connectivity, DataWithCachePolicy, RegistryClient,
 };
-use uv_configuration::{BuildKind, BuildOutput, SourceStrategy};
+use uv_configuration::{BuildKind, BuildOutput, ConfigSettings, SourceStrategy};
 use uv_distribution_filename::{SourceDistExtension, WheelFilename};
 use uv_distribution_types::{
-    BuildableSource, DirectorySourceUrl, GitSourceUrl, HashPolicy, Hashed, PathSourceUrl,
+    BuildableSource, DirectorySourceUrl, GitSourceUrl, HashPolicy, Hashed, IndexUrl, PathSourceUrl,
     SourceDist, SourceUrl,
 };
 use uv_extract::hash::Hasher;
@@ -148,6 +148,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
             self.url(
                 source,
                 &url,
+                Some(&dist.index),
                 &cache_shard,
                 None,
                 dist.ext,

@@ -168,6 +169,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
             self.url(
                 source,
                 &dist.url,
+                None,
                 &cache_shard,
                 dist.subdirectory.as_deref(),
                 dist.ext,

@@ -213,6 +215,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                 self.url(
                     source,
                     resource.url,
+                    None,
                     &cache_shard,
                     resource.subdirectory,
                     resource.ext,

@@ -288,7 +291,16 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                 .await;
             }

-            self.url_metadata(source, &url, &cache_shard, None, dist.ext, hashes, client)
+            self.url_metadata(
+                source,
+                &url,
+                Some(&dist.index),
+                &cache_shard,
+                None,
+                dist.ext,
+                hashes,
+                client,
+            )
                 .boxed_local()
                 .await?
         }

@@ -302,6 +314,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
             self.url_metadata(
                 source,
                 &dist.url,
+                None,
                 &cache_shard,
                 dist.subdirectory.as_deref(),
                 dist.ext,

@@ -340,6 +353,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                 self.url_metadata(
                     source,
                     resource.url,
+                    None,
                     &cache_shard,
                     resource.subdirectory,
                     resource.ext,
@@ -373,11 +387,29 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         Ok(metadata)
     }

+    /// Determine the [`ConfigSettings`] for the given package name.
+    fn config_settings_for(&self, name: Option<&PackageName>) -> Cow<'_, ConfigSettings> {
+        if let Some(name) = name {
+            if let Some(package_settings) = self.build_context.config_settings_package().get(name) {
+                Cow::Owned(
+                    package_settings
+                        .clone()
+                        .merge(self.build_context.config_settings().clone()),
+                )
+            } else {
+                Cow::Borrowed(self.build_context.config_settings())
+            }
+        } else {
+            Cow::Borrowed(self.build_context.config_settings())
+        }
+    }
+
     /// Build a source distribution from a remote URL.
     async fn url<'data>(
         &self,
         source: &BuildableSource<'data>,
         url: &'data DisplaySafeUrl,
+        index: Option<&'data IndexUrl>,
         cache_shard: &CacheShard,
         subdirectory: Option<&'data Path>,
         ext: SourceDistExtension,
@@ -389,7 +421,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         // Fetch the revision for the source distribution.
         let revision = self
-            .url_revision(source, ext, url, cache_shard, hashes, client)
+            .url_revision(source, ext, url, index, cache_shard, hashes, client)
             .await?;

         // Before running the build, check that the hashes match.
@@ -407,11 +439,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         let source_dist_entry = cache_shard.entry(SOURCE);

         // If there are build settings, we need to scope to a cache shard.
-        let config_settings = self.build_context.config_settings();
+        let config_settings = self.config_settings_for(source.name());
         let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(config_settings))
+            cache_shard.shard(cache_digest(&config_settings))
         };

         // If the cache contains a compatible wheel, return it.
@@ -431,6 +463,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                     source,
                     ext,
                     url,
+                    index,
                     &source_dist_entry,
                     revision,
                     hashes,

@@ -494,6 +527,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         &self,
         source: &BuildableSource<'data>,
         url: &'data Url,
+        index: Option<&'data IndexUrl>,
         cache_shard: &CacheShard,
         subdirectory: Option<&'data Path>,
         ext: SourceDistExtension,

@@ -504,7 +538,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         // Fetch the revision for the source distribution.
         let revision = self
-            .url_revision(source, ext, url, cache_shard, hashes, client)
+            .url_revision(source, ext, url, index, cache_shard, hashes, client)
             .await?;

         // Before running the build, check that the hashes match.

@@ -561,6 +595,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                     source,
                     ext,
                     url,
+                    index,
                     &source_dist_entry,
                     revision,
                     hashes,

@@ -580,11 +615,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         }

         // If there are build settings, we need to scope to a cache shard.
-        let config_settings = self.build_context.config_settings();
+        let config_settings = self.config_settings_for(source.name());
         let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(config_settings))
+            cache_shard.shard(cache_digest(&config_settings))
         };

         // Otherwise, we either need to build the metadata.
@@ -672,18 +707,31 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         source: &BuildableSource<'_>,
         ext: SourceDistExtension,
         url: &Url,
+        index: Option<&IndexUrl>,
         cache_shard: &CacheShard,
         hashes: HashPolicy<'_>,
         client: &ManagedClient<'_>,
     ) -> Result<Revision, Error> {
         let cache_entry = cache_shard.entry(HTTP_REVISION);

         // Determine the cache control policy for the request.
         let cache_control = match client.unmanaged.connectivity() {
-            Connectivity::Online => CacheControl::from(
-                self.build_context
-                    .cache()
-                    .freshness(&cache_entry, source.name(), source.source_tree())
-                    .map_err(Error::CacheRead)?,
-            ),
+            Connectivity::Online => {
+                if let Some(header) = index.and_then(|index| {
+                    self.build_context
+                        .locations()
+                        .artifact_cache_control_for(index)
+                }) {
+                    CacheControl::Override(header)
+                } else {
+                    CacheControl::from(
+                        self.build_context
+                            .cache()
+                            .freshness(&cache_entry, source.name(), source.source_tree())
+                            .map_err(Error::CacheRead)?,
+                    )
+                }
+            }
             Connectivity::Offline => CacheControl::AllowStale,
         };
@@ -733,6 +781,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
             .skip_cache_with_retry(
                 Self::request(DisplaySafeUrl::from(url.clone()), client)?,
                 &cache_entry,
+                cache_control,
                 download,
             )
             .await

@@ -779,11 +828,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         let source_entry = cache_shard.entry(SOURCE);

         // If there are build settings, we need to scope to a cache shard.
-        let config_settings = self.build_context.config_settings();
+        let config_settings = self.config_settings_for(source.name());
         let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(config_settings))
+            cache_shard.shard(cache_digest(&config_settings))
         };

         // If the cache contains a compatible wheel, return it.
@@ -941,11 +990,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         }

         // If there are build settings, we need to scope to a cache shard.
-        let config_settings = self.build_context.config_settings();
+        let config_settings = self.config_settings_for(source.name());
         let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(config_settings))
+            cache_shard.shard(cache_digest(&config_settings))
         };

         // Otherwise, we need to build a wheel.

@@ -1060,7 +1109,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         let cache_shard = self.build_context.cache().shard(
             CacheBucket::SourceDistributions,
-            if resource.editable {
+            if resource.editable.unwrap_or(false) {
                 WheelCache::Editable(resource.url).root()
             } else {
                 WheelCache::Path(resource.url).root()

@@ -1083,11 +1132,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         let cache_shard = cache_shard.shard(revision.id());

         // If there are build settings, we need to scope to a cache shard.
-        let config_settings = self.build_context.config_settings();
+        let config_settings = self.config_settings_for(source.name());
         let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(config_settings))
+            cache_shard.shard(cache_digest(&config_settings))
         };

         // If the cache contains a compatible wheel, return it.
@@ -1173,7 +1222,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         let cache_shard = self.build_context.cache().shard(
             CacheBucket::SourceDistributions,
-            if resource.editable {
+            if resource.editable.unwrap_or(false) {
                 WheelCache::Editable(resource.url).root()
             } else {
                 WheelCache::Path(resource.url).root()

@@ -1271,11 +1320,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         }

         // If there are build settings, we need to scope to a cache shard.
-        let config_settings = self.build_context.config_settings();
+        let config_settings = self.config_settings_for(source.name());
         let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(config_settings))
+            cache_shard.shard(cache_digest(&config_settings))
         };

         // Otherwise, we need to build a wheel.

@@ -1476,11 +1525,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         let _lock = cache_shard.lock().await.map_err(Error::CacheWrite)?;

         // If there are build settings, we need to scope to a cache shard.
-        let config_settings = self.build_context.config_settings();
+        let config_settings = self.config_settings_for(source.name());
         let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(config_settings))
+            cache_shard.shard(cache_digest(&config_settings))
         };

         // If the cache contains a compatible wheel, return it.
@@ -1779,11 +1828,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         }

         // If there are build settings, we need to scope to a cache shard.
-        let config_settings = self.build_context.config_settings();
+        let config_settings = self.config_settings_for(source.name());
         let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(config_settings))
+            cache_shard.shard(cache_digest(&config_settings))
         };

         // Otherwise, we need to build a wheel.

@@ -2039,6 +2088,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         source: &BuildableSource<'_>,
         ext: SourceDistExtension,
         url: &Url,
+        index: Option<&IndexUrl>,
         entry: &CacheEntry,
         revision: Revision,
         hashes: HashPolicy<'_>,
@@ -2046,6 +2096,28 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
     ) -> Result<Revision, Error> {
         warn!("Re-downloading missing source distribution: {source}");
         let cache_entry = entry.shard().entry(HTTP_REVISION);

+        // Determine the cache control policy for the request.
+        let cache_control = match client.unmanaged.connectivity() {
+            Connectivity::Online => {
+                if let Some(header) = index.and_then(|index| {
+                    self.build_context
+                        .locations()
+                        .artifact_cache_control_for(index)
+                }) {
+                    CacheControl::Override(header)
+                } else {
+                    CacheControl::from(
+                        self.build_context
+                            .cache()
+                            .freshness(&cache_entry, source.name(), source.source_tree())
+                            .map_err(Error::CacheRead)?,
+                    )
+                }
+            }
+            Connectivity::Offline => CacheControl::AllowStale,
+        };
+
         let download = |response| {
             async {
                 // Take the union of the requested and existing hash algorithms.

@@ -2079,6 +2151,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
             .skip_cache_with_retry(
                 Self::request(DisplaySafeUrl::from(url.clone()), client)?,
                 &cache_entry,
+                cache_control,
                 download,
             )
             .await
@@ -84,6 +84,8 @@ pub async fn read_to_string_transcode(path: impl AsRef<Path>) -> std::io::Result
 /// junction at the same path.
 ///
+/// Note that because junctions are used, the source must be a directory.
+///
 /// Changes to this function should be reflected in [`create_symlink`].
 #[cfg(windows)]
 pub fn replace_symlink(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> std::io::Result<()> {
     // If the source is a file, we can't create a junction
@@ -138,6 +140,38 @@ pub fn replace_symlink(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> std::io:
     }
 }

+/// Create a symlink at `dst` pointing to `src`.
+///
+/// On Windows, this uses the `junction` crate to create a junction point.
+///
+/// Note that because junctions are used, the source must be a directory.
+///
+/// Changes to this function should be reflected in [`replace_symlink`].
+#[cfg(windows)]
+pub fn create_symlink(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> std::io::Result<()> {
+    // If the source is a file, we can't create a junction
+    if src.as_ref().is_file() {
+        return Err(std::io::Error::new(
+            std::io::ErrorKind::InvalidInput,
+            format!(
+                "Cannot create a junction for {}: is not a directory",
+                src.as_ref().display()
+            ),
+        ));
+    }
+
+    junction::create(
+        dunce::simplified(src.as_ref()),
+        dunce::simplified(dst.as_ref()),
+    )
+}
+
 /// Create a symlink at `dst` pointing to `src`.
 #[cfg(unix)]
 pub fn create_symlink(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> std::io::Result<()> {
     fs_err::os::unix::fs::symlink(src.as_ref(), dst.as_ref())
 }

 #[cfg(unix)]
 pub fn remove_symlink(path: impl AsRef<Path>) -> std::io::Result<()> {
     fs_err::remove_file(path.as_ref())
@@ -398,6 +398,12 @@ impl From<Box<Path>> for PortablePathBuf {
     }
 }

+impl<'a> From<&'a Path> for PortablePathBuf {
+    fn from(path: &'a Path) -> Self {
+        Box::<Path>::from(path).into()
+    }
+}
+
 #[cfg(feature = "serde")]
 impl serde::Serialize for PortablePathBuf {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
@@ -2,7 +2,7 @@ use std::panic::AssertUnwindSafe;
 use std::path::{Path, PathBuf};
 use std::process::Stdio;
 use std::time::Duration;
-use std::{io, panic};
+use std::{env, io, panic};

 use async_channel::{Receiver, SendError};
 use tempfile::tempdir_in;
@@ -20,7 +20,7 @@ use uv_warnings::warn_user;

 const COMPILEALL_SCRIPT: &str = include_str!("pip_compileall.py");
 /// This is longer than any compilation should ever take.
-const COMPILE_TIMEOUT: Duration = Duration::from_secs(60);
+const DEFAULT_COMPILE_TIMEOUT: Duration = Duration::from_secs(60);

 #[derive(Debug, Error)]
 pub enum CompileError {
@@ -55,6 +55,8 @@ pub enum CompileError {
     },
     #[error("Python startup timed out ({}s)", _0.as_secs_f32())]
     StartupTimeout(Duration),
+    #[error("Got invalid value from environment for {var}: {message}.")]
+    EnvironmentError { var: &'static str, message: String },
 }

 /// Bytecode compile all files in `dir` using a pool of Python interpreters running a Python script
@@ -88,6 +90,30 @@ pub async fn compile_tree(
     let tempdir = tempdir_in(cache).map_err(CompileError::TempFile)?;
     let pip_compileall_py = tempdir.path().join("pip_compileall.py");

+    let timeout: Option<Duration> = match env::var(EnvVars::UV_COMPILE_BYTECODE_TIMEOUT) {
+        Ok(value) => match value.as_str() {
+            "0" => None,
+            _ => match value.parse::<u64>().map(Duration::from_secs) {
+                Ok(duration) => Some(duration),
+                Err(_) => {
+                    return Err(CompileError::EnvironmentError {
+                        var: "UV_COMPILE_BYTECODE_TIMEOUT",
+                        message: format!("Expected an integer number of seconds, got \"{value}\""),
+                    });
+                }
+            },
+        },
+        Err(_) => Some(DEFAULT_COMPILE_TIMEOUT),
+    };
+    if let Some(duration) = timeout {
+        debug!(
+            "Using bytecode compilation timeout of {}s",
+            duration.as_secs()
+        );
+    } else {
+        debug!("Disabling bytecode compilation timeout");
+    }
+
     debug!("Starting {} bytecode compilation workers", worker_count);
     let mut worker_handles = Vec::new();
     for _ in 0..worker_count {
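The new `UV_COMPILE_BYTECODE_TIMEOUT` handling gives the variable three meanings: unset keeps the 60-second default, `0` disables the timeout entirely, and any other integer is a timeout in seconds; anything else is rejected. A minimal standalone sketch of that parse:

use std::env;
use std::time::Duration;

fn compile_timeout() -> Result<Option<Duration>, String> {
    const DEFAULT: Duration = Duration::from_secs(60);
    match env::var("UV_COMPILE_BYTECODE_TIMEOUT") {
        // Unset: fall back to the default timeout.
        Err(_) => Ok(Some(DEFAULT)),
        // `0`: disable the timeout entirely.
        Ok(value) if value == "0" => Ok(None),
        // Any other integer: a timeout in seconds; reject everything else.
        Ok(value) => value
            .parse::<u64>()
            .map(|secs| Some(Duration::from_secs(secs)))
            .map_err(|_| format!("Expected an integer number of seconds, got \"{value}\"")),
    }
}

fn main() {
    // With the variable unset, the default applies.
    println!("{:?}", compile_timeout()); // Ok(Some(60s))
}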
@@ -98,6 +124,7 @@ pub async fn compile_tree(
             python_executable.to_path_buf(),
             pip_compileall_py.clone(),
             receiver.clone(),
+            timeout,
         );

         // Spawn each worker on a dedicated thread.

@@ -189,6 +216,7 @@ async fn worker(
     interpreter: PathBuf,
     pip_compileall_py: PathBuf,
     receiver: Receiver<PathBuf>,
+    timeout: Option<Duration>,
 ) -> Result<(), CompileError> {
     fs_err::tokio::write(&pip_compileall_py, COMPILEALL_SCRIPT)
         .await
@@ -208,12 +236,17 @@ async fn worker(
         }
     };

     // Handle a broken `python` by using a timeout, one that's higher than any compilation
     // should ever take.
     let (mut bytecode_compiler, child_stdin, mut child_stdout, mut child_stderr) =
-        tokio::time::timeout(COMPILE_TIMEOUT, wait_until_ready)
-            .await
-            .map_err(|_| CompileError::StartupTimeout(COMPILE_TIMEOUT))??;
+        if let Some(duration) = timeout {
+            tokio::time::timeout(duration, wait_until_ready)
+                .await
+                .map_err(|_| CompileError::StartupTimeout(duration))??
+        } else {
+            wait_until_ready.await?
+        };

     let stderr_reader = tokio::task::spawn(async move {
         let mut child_stderr_collected: Vec<u8> = Vec::new();
@@ -223,7 +256,7 @@ async fn worker(
         Ok(child_stderr_collected)
     });

-    let result = worker_main_loop(receiver, child_stdin, &mut child_stdout).await;
+    let result = worker_main_loop(receiver, child_stdin, &mut child_stdout, timeout).await;
     // Reap the process to avoid zombies.
     let _ = bytecode_compiler.kill().await;

@@ -340,6 +373,7 @@ async fn worker_main_loop(
     receiver: Receiver<PathBuf>,
     mut child_stdin: ChildStdin,
     child_stdout: &mut BufReader<ChildStdout>,
+    timeout: Option<Duration>,
 ) -> Result<(), CompileError> {
     let mut out_line = String::new();
     while let Ok(source_file) = receiver.recv().await {

@@ -372,12 +406,16 @@ async fn worker_main_loop(
         // Handle a broken `python` by using a timeout, one that's higher than any compilation
         // should ever take.
-        tokio::time::timeout(COMPILE_TIMEOUT, python_handle)
-            .await
-            .map_err(|_| CompileError::CompileTimeout {
-                elapsed: COMPILE_TIMEOUT,
-                source_file: source_file.clone(),
-            })??;
+        if let Some(duration) = timeout {
+            tokio::time::timeout(duration, python_handle)
+                .await
+                .map_err(|_| CompileError::CompileTimeout {
+                    elapsed: duration,
+                    source_file: source_file.clone(),
+                })??;
+        } else {
+            python_handle.await?;
+        }

         // This is a sanity check, if we don't get the path back something has gone wrong, e.g.
         // we're not actually running a python interpreter.
@@ -4,7 +4,7 @@ use tracing::{debug, warn};

 use uv_cache::{Cache, CacheBucket, WheelCache};
 use uv_cache_info::Timestamp;
-use uv_configuration::{BuildOptions, ConfigSettings, Reinstall};
+use uv_configuration::{BuildOptions, ConfigSettings, PackageConfigSettings, Reinstall};
 use uv_distribution::{
     BuiltWheelIndex, HttpArchivePointer, LocalArchivePointer, RegistryWheelIndex,
 };

@@ -52,6 +52,7 @@ impl<'a> Planner<'a> {
         hasher: &HashStrategy,
         index_locations: &IndexLocations,
         config_settings: &ConfigSettings,
+        config_settings_package: &PackageConfigSettings,
         cache: &Cache,
         venv: &PythonEnvironment,
         tags: &Tags,

@@ -59,7 +60,13 @@ impl<'a> Planner<'a> {
         // Index all the already-downloaded wheels in the cache.
         let mut registry_index =
             RegistryWheelIndex::new(cache, tags, index_locations, hasher, config_settings);
-        let built_index = BuiltWheelIndex::new(cache, tags, hasher, config_settings);
+        let built_index = BuiltWheelIndex::new(
+            cache,
+            tags,
+            hasher,
+            config_settings,
+            config_settings_package,
+        );

         let mut cached = vec![];
         let mut remote = vec![];
@@ -241,7 +241,7 @@ impl RequirementSatisfaction {
             return Self::Mismatch;
         };

-        if *requested_editable != installed_editable.unwrap_or_default() {
+        if requested_editable != installed_editable {
             trace!(
                 "Editable mismatch: {:?} vs. {:?}",
                 *requested_editable,
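With `editable` now an `Option<bool>` on both sides, satisfaction checking compares the tri-state values directly instead of defaulting a missing installed value to `false`. A minimal sketch of the behavioral difference:

fn editable_matches(requested: Option<bool>, installed: Option<bool>) -> bool {
    // "Unspecified" (`None`) is now distinct from an explicit `Some(false)`.
    requested == installed
}

fn main() {
    assert!(editable_matches(None, None));
    // Under the old `unwrap_or_default` logic, an unspecified request and an
    // explicit `false` would have compared equal; now they mismatch.
    assert!(!editable_matches(None, Some(false)));
    assert!(!editable_matches(Some(true), Some(false)));
}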
@@ -1,5 +1,5 @@
 use std::fmt::{Display, Formatter};
-use std::path::{Path, PathBuf};
+use std::path::PathBuf;
 use std::str::FromStr;
 use std::sync::LazyLock;

@@ -98,17 +98,6 @@ pub struct PipGroupName {
     pub name: GroupName,
 }

-impl PipGroupName {
-    /// Gets the path to use, applying the default if it's missing
-    pub fn path(&self) -> &Path {
-        if let Some(path) = &self.path {
-            path
-        } else {
-            Path::new("pyproject.toml")
-        }
-    }
-}
-
 impl FromStr for PipGroupName {
     type Err = InvalidPipGroupError;
@@ -69,12 +69,20 @@ impl Display for OptionEntry {
 ///
 /// It extracts the options by calling the [`OptionsMetadata::record`] of a type implementing
 /// [`OptionsMetadata`].
-#[derive(Copy, Clone, Eq, PartialEq)]
+#[derive(Copy, Clone)]
 pub struct OptionSet {
     record: fn(&mut dyn Visit),
     doc: fn() -> Option<&'static str>,
 }

+impl PartialEq for OptionSet {
+    fn eq(&self, other: &Self) -> bool {
+        std::ptr::fn_addr_eq(self.record, other.record) && std::ptr::fn_addr_eq(self.doc, other.doc)
+    }
+}
+
+impl Eq for OptionSet {}
+
 impl OptionSet {
     pub fn of<T>() -> Self
     where
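Deriving `PartialEq` on a struct of function pointers compares the pointers with `==`, which trips Rust's `unpredictable_function_pointer_comparisons` lint; the manual impl above uses `std::ptr::fn_addr_eq` (stable since Rust 1.85) to make the address comparison explicit. A minimal sketch; note the compiler may merge or duplicate identical functions, so address equality is inherently approximate:

fn alpha() {
    println!("alpha");
}
fn beta() {
    println!("beta");
}

fn main() {
    let a: fn() = alpha;
    let b: fn() = beta;
    // Compares only the function addresses, not behavior.
    assert!(std::ptr::fn_addr_eq(a, a));
    assert!(!std::ptr::fn_addr_eq(a, b));
}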
@@ -11,7 +11,7 @@
 //! let marker = r#"requests [security,tests] >= 2.8.1, == 2.8.* ; python_version > "3.8""#;
 //! let dependency_specification = Requirement::<VerbatimUrl>::from_str(marker).unwrap();
 //! assert_eq!(dependency_specification.name.as_ref(), "requests");
-//! assert_eq!(dependency_specification.extras, vec![ExtraName::from_str("security").unwrap(), ExtraName::from_str("tests").unwrap()]);
+//! assert_eq!(dependency_specification.extras, vec![ExtraName::from_str("security").unwrap(), ExtraName::from_str("tests").unwrap()].into());
 //! ```

 #![warn(missing_docs)]

@@ -32,8 +32,8 @@ pub use marker::{
     CanonicalMarkerValueExtra, CanonicalMarkerValueString, CanonicalMarkerValueVersion,
     ContainsMarkerTree, ExtraMarkerTree, ExtraOperator, InMarkerTree, MarkerEnvironment,
     MarkerEnvironmentBuilder, MarkerExpression, MarkerOperator, MarkerTree, MarkerTreeContents,
-    MarkerTreeKind, MarkerValue, MarkerValueExtra, MarkerValueString, MarkerValueVersion,
-    MarkerWarningKind, StringMarkerTree, StringVersion, VersionMarkerTree,
+    MarkerTreeKind, MarkerValue, MarkerValueExtra, MarkerValueList, MarkerValueString,
+    MarkerValueVersion, MarkerWarningKind, StringMarkerTree, StringVersion, VersionMarkerTree,
 };
 pub use origin::RequirementOrigin;
 #[cfg(feature = "non-pep508-extensions")]
@@ -59,8 +59,10 @@ use uv_pep440::{Operator, Version, VersionSpecifier, release_specifier_to_range}

 use crate::marker::MarkerValueExtra;
 use crate::marker::lowering::{
-    CanonicalMarkerValueExtra, CanonicalMarkerValueString, CanonicalMarkerValueVersion,
+    CanonicalMarkerListPair, CanonicalMarkerValueExtra, CanonicalMarkerValueString,
+    CanonicalMarkerValueVersion,
 };
+use crate::marker::tree::ContainerOperator;
 use crate::{
     ExtraOperator, MarkerExpression, MarkerOperator, MarkerValueString, MarkerValueVersion,
 };
@@ -186,19 +188,19 @@ impl InternerGuard<'_> {
             MarkerExpression::VersionIn {
                 key,
                 versions,
-                negated,
+                operator,
             } => match key {
                 MarkerValueVersion::ImplementationVersion => (
                     Variable::Version(CanonicalMarkerValueVersion::ImplementationVersion),
-                    Edges::from_versions(&versions, negated),
+                    Edges::from_versions(&versions, operator),
                 ),
                 MarkerValueVersion::PythonFullVersion => (
                     Variable::Version(CanonicalMarkerValueVersion::PythonFullVersion),
-                    Edges::from_versions(&versions, negated),
+                    Edges::from_versions(&versions, operator),
                 ),
                 // Normalize `python_version` markers to `python_full_version` nodes.
                 MarkerValueVersion::PythonVersion => {
-                    match Edges::from_python_versions(versions, negated) {
+                    match Edges::from_python_versions(versions, operator) {
                         Ok(edges) => (
                             Variable::Version(CanonicalMarkerValueVersion::PythonFullVersion),
                             edges,
@@ -313,6 +315,10 @@ impl InternerGuard<'_> {
                 };
                 (Variable::String(key), Edges::from_string(operator, value))
             }
+            MarkerExpression::List { pair, operator } => (
+                Variable::List(pair),
+                Edges::from_bool(operator == ContainerOperator::In),
+            ),
             // A variable representing the existence or absence of a particular extra.
             MarkerExpression::Extra {
                 name: MarkerValueExtra::Extra(extra),

@@ -328,7 +334,7 @@ impl InternerGuard<'_> {
                 Variable::Extra(CanonicalMarkerValueExtra::Extra(extra)),
                 Edges::from_bool(false),
             ),
-            // Invalid extras are always `false`.
+            // Invalid `extra` names are always `false`.
             MarkerExpression::Extra {
                 name: MarkerValueExtra::Arbitrary(_),
                 ..
@@ -1046,6 +1052,12 @@ pub(crate) enum Variable {
     /// We keep extras at the leaves of the tree, so when simplifying extras we can
     /// trivially remove the leaves without having to reconstruct the entire tree.
     Extra(CanonicalMarkerValueExtra),
+    /// A variable representing a `<value> in <key>` or `<value> not in <key>`
+    /// expression, where the key is a list.
+    ///
+    /// We keep extras and groups at the leaves of the tree, so when simplifying extras we can
+    /// trivially remove the leaves without having to reconstruct the entire tree.
+    List(CanonicalMarkerListPair),
 }

 impl Variable {
@@ -1223,7 +1235,10 @@ impl Edges {
     /// Returns an [`Edges`] where values in the given range are `true`.
     ///
     /// Only for use when the `key` is a `PythonVersion`. Normalizes to `PythonFullVersion`.
-    fn from_python_versions(versions: Vec<Version>, negated: bool) -> Result<Edges, NodeId> {
+    fn from_python_versions(
+        versions: Vec<Version>,
+        operator: ContainerOperator,
+    ) -> Result<Edges, NodeId> {
         let mut range: Ranges<Version> = versions
             .into_iter()
             .map(|version| {

@@ -1234,7 +1249,7 @@ impl Edges {
             .flatten_ok()
             .collect::<Result<Ranges<_>, NodeId>>()?;

-        if negated {
+        if operator == ContainerOperator::NotIn {
             range = range.complement();
         }

@@ -1244,7 +1259,7 @@ impl Edges {
     }

     /// Returns an [`Edges`] where values in the given range are `true`.
-    fn from_versions(versions: &[Version], negated: bool) -> Edges {
+    fn from_versions(versions: &[Version], operator: ContainerOperator) -> Edges {
         let mut range: Ranges<Version> = versions
             .iter()
             .map(|version| {

@@ -1255,7 +1270,7 @@ impl Edges {
             })
             .collect();

-        if negated {
+        if operator == ContainerOperator::NotIn {
             range = range.complement();
         }
@@ -1,7 +1,8 @@
 use std::fmt::{Display, Formatter};

-use uv_normalize::ExtraName;
+use uv_normalize::{ExtraName, GroupName};

+use crate::marker::tree::MarkerValueList;
 use crate::{MarkerValueExtra, MarkerValueString, MarkerValueVersion};

 /// Those environment markers with a PEP 440 version as value such as `python_version`

@@ -128,7 +129,7 @@ impl Display for CanonicalMarkerValueString {
     }
 }

-/// The [`ExtraName`] value used in `extra` markers.
+/// The [`ExtraName`] value used in `extra` and `extras` markers.
 #[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
 pub enum CanonicalMarkerValueExtra {
     /// A valid [`ExtraName`].
@@ -159,3 +160,36 @@ impl Display for CanonicalMarkerValueExtra {
     }
 }

+/// A key-value pair for `<value> in <key>` or `<value> not in <key>`, where the key is a list.
+///
+/// Used for PEP 751 markers.
+#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
+pub enum CanonicalMarkerListPair {
+    /// A valid [`ExtraName`].
+    Extras(ExtraName),
+    /// A valid [`GroupName`].
+    DependencyGroup(GroupName),
+    /// For leniency, preserve invalid values.
+    Arbitrary { key: MarkerValueList, value: String },
+}
+
+impl CanonicalMarkerListPair {
+    /// The key (RHS) of the marker expression.
+    pub(crate) fn key(&self) -> MarkerValueList {
+        match self {
+            Self::Extras(_) => MarkerValueList::Extras,
+            Self::DependencyGroup(_) => MarkerValueList::DependencyGroups,
+            Self::Arbitrary { key, .. } => *key,
+        }
+    }
+
+    /// The value (LHS) of the marker expression.
+    pub(crate) fn value(&self) -> String {
+        match self {
+            Self::Extras(extra) => extra.to_string(),
+            Self::DependencyGroup(group) => group.to_string(),
+            Self::Arbitrary { value, .. } => value.clone(),
+        }
+    }
+}
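`CanonicalMarkerListPair` stores a PEP 751 list marker as a (key, value) pair, and the `Display` logic later in this commit renders it back as `'<value>' <op> <key>`. A minimal sketch of that round-trip, with plain strings standing in for the normalized name types:

enum ListKey {
    Extras,
    DependencyGroups,
}

fn display(value: &str, key: &ListKey, negated: bool) -> String {
    // The quoted value sits on the left, the list marker name on the right.
    let key = match key {
        ListKey::Extras => "extras",
        ListKey::DependencyGroups => "dependency_groups",
    };
    let op = if negated { "not in" } else { "in" };
    format!("'{value}' {op} {key}")
}

fn main() {
    assert_eq!(display("dev", &ListKey::DependencyGroups, false), "'dev' in dependency_groups");
    assert_eq!(display("security", &ListKey::Extras, true), "'security' not in extras");
}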
@@ -23,8 +23,8 @@ pub use lowering::{
 pub use tree::{
     ContainsMarkerTree, ExtraMarkerTree, ExtraOperator, InMarkerTree, MarkerExpression,
     MarkerOperator, MarkerTree, MarkerTreeContents, MarkerTreeDebugGraph, MarkerTreeKind,
-    MarkerValue, MarkerValueExtra, MarkerValueString, MarkerValueVersion, MarkerWarningKind,
-    StringMarkerTree, StringVersion, VersionMarkerTree,
+    MarkerValue, MarkerValueExtra, MarkerValueList, MarkerValueString, MarkerValueVersion,
+    MarkerWarningKind, StringMarkerTree, StringVersion, VersionMarkerTree,
 };

 /// `serde` helpers for [`MarkerTree`].
@@ -1,10 +1,12 @@
 use arcstr::ArcStr;
+use std::str::FromStr;
-use uv_normalize::ExtraName;
+use uv_normalize::{ExtraName, GroupName};
 use uv_pep440::{Version, VersionPattern, VersionSpecifier};

 use crate::cursor::Cursor;
 use crate::marker::MarkerValueExtra;
+use crate::marker::lowering::CanonicalMarkerListPair;
+use crate::marker::tree::{ContainerOperator, MarkerValueList};
 use crate::{
     ExtraOperator, MarkerExpression, MarkerOperator, MarkerTree, MarkerValue, MarkerValueString,
     MarkerValueVersion, MarkerWarningKind, Pep508Error, Pep508ErrorSource, Pep508Url, Reporter,
@@ -168,6 +170,7 @@ pub(crate) fn parse_marker_key_op_value<T: Pep508Url>(
     reporter: &mut impl Reporter,
 ) -> Result<Option<MarkerExpression>, Pep508Error<T>> {
     cursor.eat_whitespace();
+    let start = cursor.pos();
     let l_value = parse_marker_value(cursor, reporter)?;
     cursor.eat_whitespace();
     // "not in" and "in" must be preceded by whitespace. We must already have matched a whitespace

@@ -176,6 +179,7 @@ pub(crate) fn parse_marker_key_op_value<T: Pep508Url>(
     let operator = parse_marker_operator(cursor)?;
     cursor.eat_whitespace();
     let r_value = parse_marker_value(cursor, reporter)?;
+    let len = cursor.pos() - start;

     // Convert a `<marker_value> <marker_op> <marker_value>` expression into its
     // typed equivalent.

@@ -209,7 +213,8 @@ pub(crate) fn parse_marker_key_op_value<T: Pep508Url>(
             let value = match r_value {
                 MarkerValue::Extra
                 | MarkerValue::MarkerEnvVersion(_)
-                | MarkerValue::MarkerEnvString(_) => {
+                | MarkerValue::MarkerEnvString(_)
+                | MarkerValue::MarkerEnvList(_) => {
                     reporter.report(
                         MarkerWarningKind::MarkerMarkerComparison,
                         "Comparing two markers with each other doesn't make any sense,
@@ -237,11 +242,23 @@ pub(crate) fn parse_marker_key_op_value<T: Pep508Url>(
                     value,
                 })
             }
+            // `extras in "test"` or `dependency_groups not in "dev"` are invalid.
+            MarkerValue::MarkerEnvList(key) => {
+                return Err(Pep508Error {
+                    message: Pep508ErrorSource::String(format!(
+                        "The marker {key} must be on the right hand side of the expression"
+                    )),
+                    start,
+                    len,
+                    input: cursor.to_string(),
+                });
+            }
             // `extra == '...'`
             MarkerValue::Extra => {
                 let value = match r_value {
                     MarkerValue::MarkerEnvVersion(_)
                     | MarkerValue::MarkerEnvString(_)
+                    | MarkerValue::MarkerEnvList(_)
                     | MarkerValue::Extra => {
                         reporter.report(
                             MarkerWarningKind::ExtraInvalidComparison,
@@ -257,7 +274,7 @@ pub(crate) fn parse_marker_key_op_value<T: Pep508Url>(

                 parse_extra_expr(operator, &value, reporter)
             }
-            // This is either MarkerEnvVersion, MarkerEnvString or Extra inverted
+            // This is either MarkerEnvVersion, MarkerEnvString, Extra (inverted), or Extras
             MarkerValue::QuotedString(l_string) => {
                 match r_value {
                     // The only sound choice for this is `<quoted PEP 440 version> <version op>` <version key>
@@ -271,6 +288,54 @@ pub(crate) fn parse_marker_key_op_value<T: Pep508Url>(
                         operator: operator.invert(),
                         value: l_string,
                     }),
+                    // `"test" in extras` or `"dev" in dependency_groups`
+                    MarkerValue::MarkerEnvList(key) => {
+                        let operator =
+                            ContainerOperator::from_marker_operator(operator).ok_or_else(|| {
+                                Pep508Error {
+                                    message: Pep508ErrorSource::String(format!(
+                                        "The operator {operator} is not supported with the marker {key}, only the `in` and `not in` operators are supported"
+                                    )),
+                                    start,
+                                    len,
+                                    input: cursor.to_string(),
+                                }
+                            })?;
+                        let pair = match key {
+                            // `'...' in extras`
+                            MarkerValueList::Extras => match ExtraName::from_str(&l_string) {
+                                Ok(name) => CanonicalMarkerListPair::Extras(name),
+                                Err(err) => {
+                                    reporter.report(
+                                        MarkerWarningKind::ExtrasInvalidComparison,
+                                        format!("Expected extra name (found `{l_string}`): {err}"),
+                                    );
+                                    CanonicalMarkerListPair::Arbitrary {
+                                        key,
+                                        value: l_string.to_string(),
+                                    }
+                                }
+                            },
+                            // `'...' in dependency_groups`
+                            MarkerValueList::DependencyGroups => {
+                                match GroupName::from_str(&l_string) {
+                                    Ok(name) => CanonicalMarkerListPair::DependencyGroup(name),
+                                    Err(err) => {
+                                        reporter.report(
+                                            MarkerWarningKind::ExtrasInvalidComparison,
+                                            format!("Expected dependency group name (found `{l_string}`): {err}"),
+                                        );
+                                        CanonicalMarkerListPair::Arbitrary {
+                                            key,
+                                            value: l_string.to_string(),
+                                        }
+                                    }
+                                }
+                            }
+                        };
+
+                        Some(MarkerExpression::List { pair, operator })
+                    }
                     // `'...' == extra`
                     MarkerValue::Extra => parse_extra_expr(operator, &l_string, reporter),
                     // `'...' == '...'`, doesn't make much sense
@@ -319,10 +384,7 @@ fn parse_version_in_expr(
     value: &str,
     reporter: &mut impl Reporter,
 ) -> Option<MarkerExpression> {
-    if !matches!(operator, MarkerOperator::In | MarkerOperator::NotIn) {
-        return None;
-    }
-    let negated = matches!(operator, MarkerOperator::NotIn);
+    let operator = ContainerOperator::from_marker_operator(operator)?;

     let mut cursor = Cursor::new(value);
     let mut versions = Vec::new();

@@ -358,7 +420,7 @@ fn parse_version_in_expr(
     Some(MarkerExpression::VersionIn {
         key,
         versions,
-        negated,
+        operator,
     })
 }

@@ -491,8 +553,7 @@ fn parse_extra_expr(

     reporter.report(
         MarkerWarningKind::ExtraInvalidComparison,
-        "Comparing extra with something other than a quoted string is wrong,
-         will be ignored"
+        "Comparing `extra` with any operator other than `==` or `!=` is wrong and will be ignored"
             .to_string(),
     );
@@ -9,6 +9,7 @@ use version_ranges::Ranges;

 use uv_pep440::{Version, VersionSpecifier};

+use crate::marker::tree::ContainerOperator;
 use crate::{ExtraOperator, MarkerExpression, MarkerOperator, MarkerTree, MarkerTreeKind};

 /// Returns a simplified DNF expression for a given marker tree.

@@ -161,6 +162,22 @@ fn collect_dnf(
                 path.pop();
             }
         }
+        MarkerTreeKind::List(marker) => {
+            for (is_high, tree) in marker.children() {
+                let expr = MarkerExpression::List {
+                    pair: marker.pair().clone(),
+                    operator: if is_high {
+                        ContainerOperator::In
+                    } else {
+                        ContainerOperator::NotIn
+                    },
+                };
+
+                path.push(expr);
+                collect_dnf(tree, dnf, path);
+                path.pop();
+            }
+        }
         MarkerTreeKind::Extra(marker) => {
             for (value, tree) in marker.children() {
                 let operator = if value {
@@ -396,18 +413,18 @@ fn is_negation(left: &MarkerExpression, right: &MarkerExpression) -> bool {
         MarkerExpression::VersionIn {
             key,
             versions,
-            negated,
+            operator,
         } => {
             let MarkerExpression::VersionIn {
                 key: key2,
                 versions: versions2,
-                negated: negated2,
+                operator: operator2,
             } = right
             else {
                 return false;
             };

-            key == key2 && versions == versions2 && negated != negated2
+            key == key2 && versions == versions2 && operator != operator2
         }
         MarkerExpression::String {
             key,

@@ -440,5 +457,16 @@ fn is_negation(left: &MarkerExpression, right: &MarkerExpression) -> bool {

             name == name2 && operator.negate() == *operator2
         }
+        MarkerExpression::List { pair, operator } => {
+            let MarkerExpression::List {
+                pair: pair2,
+                operator: operator2,
+            } = right
+            else {
+                return false;
+            };
+
+            pair == pair2 && operator != operator2
+        }
     }
 }
@@ -9,18 +9,19 @@ use itertools::Itertools;
 use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
 use version_ranges::Ranges;

-use uv_normalize::ExtraName;
+use uv_normalize::{ExtraName, GroupName};
 use uv_pep440::{Version, VersionParseError, VersionSpecifier};

 use super::algebra::{Edges, INTERNER, NodeId, Variable};
 use super::simplify;
 use crate::cursor::Cursor;
 use crate::marker::lowering::{
-    CanonicalMarkerValueExtra, CanonicalMarkerValueString, CanonicalMarkerValueVersion,
+    CanonicalMarkerListPair, CanonicalMarkerValueString, CanonicalMarkerValueVersion,
 };
 use crate::marker::parse;
 use crate::{
-    MarkerEnvironment, Pep508Error, Pep508ErrorSource, Pep508Url, Reporter, TracingReporter,
+    CanonicalMarkerValueExtra, MarkerEnvironment, Pep508Error, Pep508ErrorSource, Pep508Url,
+    Reporter, TracingReporter,
 };

 /// Ways in which marker evaluation can fail
@@ -32,6 +33,12 @@ pub enum MarkerWarningKind {
     /// Doing an operation other than `==` and `!=` on a quoted string with `extra`, such as
     /// `extra > "perf"` or `extra == os_name`
     ExtraInvalidComparison,
+    /// Doing an operation other than `in` and `not in` on a quoted string with `extras`, such as
+    /// `extras > "perf"` or `extras == os_name`
+    ExtrasInvalidComparison,
+    /// Doing an operation other than `in` and `not in` on a quoted string with `dependency_groups`,
+    /// such as `dependency_groups > "perf"` or `dependency_groups == os_name`
+    DependencyGroupsInvalidComparison,
     /// Comparing a string valued marker and a string lexicographically, such as `"3.9" > "3.10"`
     LexicographicComparison,
     /// Comparing two markers, such as `os_name != sys_implementation`
@@ -119,6 +126,26 @@ impl Display for MarkerValueString {
     }
 }

+/// Those markers with exclusively `in` and `not in` operators.
+///
+/// Contains PEP 751 lockfile markers.
+#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
+pub enum MarkerValueList {
+    /// `extras`. This one is special because it's a list, and user-provided
+    Extras,
+    /// `dependency_groups`. This one is special because it's a list, and user-provided
+    DependencyGroups,
+}
+
+impl Display for MarkerValueList {
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        match self {
+            Self::Extras => f.write_str("extras"),
+            Self::DependencyGroups => f.write_str("dependency_groups"),
+        }
+    }
+}
+
 /// One of the predefined environment values
 ///
 /// <https://packaging.python.org/en/latest/specifications/dependency-specifiers/#environment-markers>
@@ -128,7 +155,9 @@ pub enum MarkerValue {
     MarkerEnvVersion(MarkerValueVersion),
     /// Those environment markers with an arbitrary string as value such as `sys_platform`
     MarkerEnvString(MarkerValueString),
-    /// `extra`. This one is special because it's a list and not env but user given
+    /// Those markers with exclusively `in` and `not in` operators
+    MarkerEnvList(MarkerValueList),
+    /// `extra`. This one is special because it's a list, and user-provided
     Extra,
     /// Not a constant, but a user given quoted string with a value inside such as '3.8' or "windows"
     QuotedString(ArcStr),

@@ -169,6 +198,8 @@ impl FromStr for MarkerValue {
             "python_version" => Self::MarkerEnvVersion(MarkerValueVersion::PythonVersion),
             "sys_platform" => Self::MarkerEnvString(MarkerValueString::SysPlatform),
             "sys.platform" => Self::MarkerEnvString(MarkerValueString::SysPlatformDeprecated),
+            "extras" => Self::MarkerEnvList(MarkerValueList::Extras),
+            "dependency_groups" => Self::MarkerEnvList(MarkerValueList::DependencyGroups),
             "extra" => Self::Extra,
             _ => return Err(format!("Invalid key: {s}")),
         };

@@ -181,6 +212,7 @@ impl Display for MarkerValue {
         match self {
             Self::MarkerEnvVersion(marker_value_version) => marker_value_version.fmt(f),
             Self::MarkerEnvString(marker_value_string) => marker_value_string.fmt(f),
+            Self::MarkerEnvList(marker_value_contains) => marker_value_contains.fmt(f),
             Self::Extra => f.write_str("extra"),
             Self::QuotedString(value) => write!(f, "'{value}'"),
         }
@@ -433,7 +465,7 @@ impl Deref for StringVersion {
     }
 }

-/// The [`ExtraName`] value used in `extra` markers.
+/// The [`ExtraName`] value used in `extra` and `extras` markers.
 #[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
 pub enum MarkerValueExtra {
     /// A valid [`ExtraName`].
@@ -492,7 +524,7 @@ pub enum MarkerExpression {
     VersionIn {
         key: MarkerValueVersion,
         versions: Vec<Version>,
-        negated: bool,
+        operator: ContainerOperator,
     },
     /// A string marker comparison, e.g. `sys_platform == '...'`.
     ///

@@ -502,10 +534,15 @@ pub enum MarkerExpression {
         operator: MarkerOperator,
         value: ArcStr,
     },
+    /// `'...' in <key>`, a PEP 751 expression.
+    List {
+        pair: CanonicalMarkerListPair,
+        operator: ContainerOperator,
+    },
     /// `extra <extra op> '...'` or `'...' <extra op> extra`.
     Extra {
-        operator: ExtraOperator,
         name: MarkerValueExtra,
+        operator: ExtraOperator,
     },
 }
@@ -514,10 +551,12 @@ pub(crate) enum MarkerExpressionKind {
     /// A version expression, e.g. `<version key> <version op> <quoted PEP 440 version>`.
     Version(MarkerValueVersion),
-    /// A version "in" expression, e.g. `<version key> in <quoted list of PEP 440 versions>`.
+    /// A version `in` expression, e.g. `<version key> in <quoted list of PEP 440 versions>`.
     VersionIn(MarkerValueVersion),
     /// A string marker comparison, e.g. `sys_platform == '...'`.
     String(MarkerValueString),
+    /// A list `in` or `not in` expression, e.g. `'...' in dependency_groups`.
+    List(MarkerValueList),
     /// An extra expression, e.g. `extra == '...'`.
     Extra,
 }
@@ -561,6 +600,37 @@ impl Display for ExtraOperator {
     }
 }

+/// The operator for a container expression, either 'in' or 'not in'.
+#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
+pub enum ContainerOperator {
+    /// `in`
+    In,
+    /// `not in`
+    NotIn,
+}
+
+impl ContainerOperator {
+    /// Creates a [`ContainerOperator`] from an equivalent [`MarkerOperator`].
+    ///
+    /// Returns `None` if the operator is not supported for containers.
+    pub(crate) fn from_marker_operator(operator: MarkerOperator) -> Option<ContainerOperator> {
+        match operator {
+            MarkerOperator::In => Some(ContainerOperator::In),
+            MarkerOperator::NotIn => Some(ContainerOperator::NotIn),
+            _ => None,
+        }
+    }
+}
+
+impl Display for ContainerOperator {
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        f.write_str(match self {
+            Self::In => "in",
+            Self::NotIn => "not in",
+        })
+    }
+}
+
 impl MarkerExpression {
     /// Parse a [`MarkerExpression`] from a string with the given reporter.
     pub fn parse_reporter(
@@ -599,6 +669,7 @@ impl MarkerExpression {
             MarkerExpression::Version { key, .. } => MarkerExpressionKind::Version(*key),
             MarkerExpression::VersionIn { key, .. } => MarkerExpressionKind::VersionIn(*key),
             MarkerExpression::String { key, .. } => MarkerExpressionKind::String(*key),
+            MarkerExpression::List { pair, .. } => MarkerExpressionKind::List(pair.key()),
             MarkerExpression::Extra { .. } => MarkerExpressionKind::Extra,
         }
     }
@@ -618,11 +689,10 @@ impl Display for MarkerExpression {
             MarkerExpression::VersionIn {
                 key,
                 versions,
-                negated,
+                operator,
             } => {
-                let op = if *negated { "not in" } else { "in" };
                 let versions = versions.iter().map(ToString::to_string).join(" ");
-                write!(f, "{key} {op} '{versions}'")
+                write!(f, "{key} {operator} '{versions}'")
             }
             MarkerExpression::String {
                 key,
@@ -638,6 +708,9 @@ impl Display for MarkerExpression {

                 write!(f, "{key} {operator} '{value}'")
             }
+            MarkerExpression::List { pair, operator } => {
+                write!(f, "'{}' {} {}", pair.value(), operator, pair.key())
+            }
             MarkerExpression::Extra { operator, name } => {
                 write!(f, "extra {operator} '{name}'")
             }
@@ -645,6 +718,51 @@ impl Display for MarkerExpression {
     }
 }

+/// The extra and dependency group names to use when evaluating a marker tree.
+#[derive(Debug, Copy, Clone)]
+enum ExtrasEnvironment<'a> {
+    /// E.g., `extra == '...'`
+    Extras(&'a [ExtraName]),
+    /// E.g., `'...' in extras` or `'...' in dependency_groups`
+    Pep751(&'a [ExtraName], &'a [GroupName]),
+}
+
+impl<'a> ExtrasEnvironment<'a> {
+    /// Creates a new [`ExtrasEnvironment`] for the given `extra` names.
+    fn from_extras(extras: &'a [ExtraName]) -> Self {
+        Self::Extras(extras)
+    }
+
+    /// Creates a new [`ExtrasEnvironment`] for the given PEP 751 `extras` and `dependency_groups`.
+    fn from_pep751(extras: &'a [ExtraName], dependency_groups: &'a [GroupName]) -> Self {
+        Self::Pep751(extras, dependency_groups)
+    }
+
+    /// Returns the `extra` names in this environment.
+    fn extra(&self) -> &[ExtraName] {
+        match self {
+            Self::Extras(extra) => extra,
+            Self::Pep751(..) => &[],
+        }
+    }
+
+    /// Returns the `extras` names in this environment, as in a PEP 751 lockfile.
+    fn extras(&self) -> &[ExtraName] {
+        match self {
+            Self::Extras(..) => &[],
+            Self::Pep751(extras, ..) => extras,
+        }
+    }
+
+    /// Returns the `dependency_groups` names in this environment, as in a PEP 751 lockfile.
+    fn dependency_groups(&self) -> &[GroupName] {
+        match self {
+            Self::Extras(..) => &[],
+            Self::Pep751(.., groups) => groups,
+        }
+    }
+}
+
 /// Represents one or more nested marker expressions with and/or/parentheses.
 ///
 /// Marker trees are canonical, meaning any two functionally equivalent markers
@ -852,6 +970,16 @@ impl MarkerTree {
|
|||
low: low.negate(self.0),
|
||||
})
|
||||
}
|
||||
Variable::List(key) => {
|
||||
let Edges::Boolean { low, high } = node.children else {
|
||||
unreachable!()
|
||||
};
|
||||
MarkerTreeKind::List(ListMarkerTree {
|
||||
pair: key,
|
||||
high: high.negate(self.0),
|
||||
low: low.negate(self.0),
|
||||
})
|
||||
}
|
||||
Variable::Extra(name) => {
|
||||
let Edges::Boolean { low, high } = node.children else {
|
||||
unreachable!()
|
||||
|
|
@ -872,7 +1000,27 @@ impl MarkerTree {
|
|||
|
||||
/// Does this marker apply in the given environment?
|
||||
pub fn evaluate(self, env: &MarkerEnvironment, extras: &[ExtraName]) -> bool {
|
||||
self.evaluate_reporter_impl(env, extras, &mut TracingReporter)
|
||||
self.evaluate_reporter_impl(
|
||||
env,
|
||||
ExtrasEnvironment::from_extras(extras),
|
||||
&mut TracingReporter,
|
||||
)
|
||||
}
|
||||
|
||||
/// Evaluate a marker in the context of a PEP 751 lockfile, which exposes several additional
|
||||
/// markers (`extras` and `dependency_groups`) that are not available in any other context,
|
||||
/// per the spec.
|
||||
pub fn evaluate_pep751(
|
||||
self,
|
||||
env: &MarkerEnvironment,
|
||||
extras: &[ExtraName],
|
||||
groups: &[GroupName],
|
||||
) -> bool {
|
||||
self.evaluate_reporter_impl(
|
||||
env,
|
||||
ExtrasEnvironment::from_pep751(extras, groups),
|
||||
&mut TracingReporter,
|
||||
)
|
||||
}
|
||||
|
||||
/// Evaluates this marker tree against an optional environment and a
|
||||
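A hedged sketch of the difference between the two entry points (illustrative only; it assumes `MarkerTree` implements `FromStr`, the `GroupName::new` constructor from `uv-normalize`, and some `env: MarkerEnvironment` in scope):

```rust
// `'dev' in dependency_groups` is a PEP 751-only marker.
let marker: MarkerTree = "'dev' in dependency_groups".parse()?;
let dev = GroupName::new("dev".to_string())?;

// In the lockfile context, `dependency_groups` is populated...
assert!(marker.evaluate_pep751(&env, &[], &[dev]));
// ...while the ordinary evaluation context exposes no such marker,
// so the expression cannot match.
assert!(!marker.evaluate(&env, &[]));
```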
@@ -889,7 +1037,11 @@ impl MarkerTree {
    ) -> bool {
        match env {
            None => self.evaluate_extras(extras),
            Some(env) => self.evaluate_reporter_impl(env, extras, &mut TracingReporter),
            Some(env) => self.evaluate_reporter_impl(
                env,
                ExtrasEnvironment::from_extras(extras),
                &mut TracingReporter,
            ),
        }
    }

@@ -901,13 +1053,13 @@ impl MarkerTree {
        extras: &[ExtraName],
        reporter: &mut impl Reporter,
    ) -> bool {
        self.evaluate_reporter_impl(env, extras, reporter)
        self.evaluate_reporter_impl(env, ExtrasEnvironment::from_extras(extras), reporter)
    }

    fn evaluate_reporter_impl(
        self,
        env: &MarkerEnvironment,
        extras: &[ExtraName],
        extras: ExtrasEnvironment,
        reporter: &mut impl Reporter,
    ) -> bool {
        match self.kind() {

@@ -959,7 +1111,21 @@ impl MarkerTree {
            }
            MarkerTreeKind::Extra(marker) => {
                return marker
                    .edge(extras.contains(marker.name().extra()))
                    .edge(extras.extra().contains(marker.name().extra()))
                    .evaluate_reporter_impl(env, extras, reporter);
            }
            MarkerTreeKind::List(marker) => {
                let edge = match marker.pair() {
                    CanonicalMarkerListPair::Extras(extra) => extras.extras().contains(extra),
                    CanonicalMarkerListPair::DependencyGroup(dependency_group) => {
                        extras.dependency_groups().contains(dependency_group)
                    }
                    // Invalid marker expression
                    CanonicalMarkerListPair::Arbitrary { .. } => return false,
                };

                return marker
                    .edge(edge)
                    .evaluate_reporter_impl(env, extras, reporter);
            }
        }

@@ -986,6 +1152,9 @@ impl MarkerTree {
            MarkerTreeKind::Contains(marker) => marker
                .children()
                .any(|(_, tree)| tree.evaluate_extras(extras)),
            MarkerTreeKind::List(marker) => marker
                .children()
                .any(|(_, tree)| tree.evaluate_extras(extras)),
            MarkerTreeKind::Extra(marker) => marker
                .edge(extras.contains(marker.name().extra()))
                .evaluate_extras(extras),

@@ -1216,6 +1385,11 @@ impl MarkerTree {
                    imp(tree, f);
                }
            }
            MarkerTreeKind::List(kind) => {
                for (_, tree) in kind.children() {
                    imp(tree, f);
                }
            }
            MarkerTreeKind::Extra(kind) => {
                if kind.low.is_false() {
                    f(MarkerOperator::Equal, kind.name().extra());

@@ -1333,6 +1507,21 @@ impl MarkerTree {
                write!(f, "{} not in {} -> ", kind.value(), kind.key())?;
                kind.edge(false).fmt_graph(f, level + 1)?;
            }
            MarkerTreeKind::List(kind) => {
                writeln!(f)?;
                for _ in 0..level {
                    write!(f, "  ")?;
                }
                write!(f, "{} in {} -> ", kind.value(), kind.key())?;
                kind.edge(true).fmt_graph(f, level + 1)?;

                writeln!(f)?;
                for _ in 0..level {
                    write!(f, "  ")?;
                }
                write!(f, "{} not in {} -> ", kind.value(), kind.key())?;
                kind.edge(false).fmt_graph(f, level + 1)?;
            }
            MarkerTreeKind::Extra(kind) => {
                writeln!(f)?;
                for _ in 0..level {

@@ -1417,7 +1606,9 @@ pub enum MarkerTreeKind<'a> {
    In(InMarkerTree<'a>),
    /// A string expression with the `contains` operator.
    Contains(ContainsMarkerTree<'a>),
    /// A string expression.
    /// An `in` or `not in` expression.
    List(ListMarkerTree<'a>),
    /// An extra expression (e.g., `extra == 'dev'`).
    Extra(ExtraMarkerTree<'a>),
}

@@ -1593,6 +1784,59 @@ impl Ord for ContainsMarkerTree<'_> {
    }
}

#[derive(PartialEq, Eq, Clone, Debug)]
pub struct ListMarkerTree<'a> {
    // No separate canonical type, the type is already canonical.
    pair: &'a CanonicalMarkerListPair,
    high: NodeId,
    low: NodeId,
}

impl ListMarkerTree<'_> {
    /// The key-value pair for this expression
    pub fn pair(&self) -> &CanonicalMarkerListPair {
        self.pair
    }

    /// The key (RHS) for this expression.
    pub fn key(&self) -> MarkerValueList {
        self.pair.key()
    }

    /// The value (LHS) for this expression.
    pub fn value(&self) -> String {
        self.pair.value()
    }

    /// The edges of this node, corresponding to the boolean evaluation of the expression.
    pub fn children(&self) -> impl Iterator<Item = (bool, MarkerTree)> {
        [(true, MarkerTree(self.high)), (false, MarkerTree(self.low))].into_iter()
    }

    /// Returns the subtree associated with the given edge value.
    pub fn edge(&self, value: bool) -> MarkerTree {
        if value {
            MarkerTree(self.high)
        } else {
            MarkerTree(self.low)
        }
    }
}

impl PartialOrd for ListMarkerTree<'_> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for ListMarkerTree<'_> {
    fn cmp(&self, other: &Self) -> Ordering {
        self.pair()
            .cmp(other.pair())
            .then_with(|| self.children().cmp(other.children()))
    }
}

/// A node representing the existence or absence of a given extra, such as `extra == 'bar'`.
#[derive(PartialEq, Eq, Clone, Debug)]
pub struct ExtraMarkerTree<'a> {
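The two edges of a `ListMarkerTree` node partition evaluation: `edge(true)` is the subtree taken when the membership test holds. A small sketch of the invariant relating `children` and `edge` (my reading of the definitions above, not from the commit; assumes `MarkerTree: PartialEq`):

```rust
fn walk(list: &ListMarkerTree) {
    for (value, subtree) in list.children() {
        // By construction, `children()` yields exactly `edge(true)` then `edge(false)`.
        debug_assert_eq!(subtree, list.edge(value));
    }
}
```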
@@ -1745,7 +1989,7 @@ mod test {
            implementation_name: "",
            implementation_version: "3.7",
            os_name: "linux",
            platform_machine: "",
            platform_machine: "x86_64",
            platform_python_implementation: "",
            platform_release: "",
            platform_system: "",
@@ -58,6 +58,49 @@ impl VerbatimUrl {
        })
    }

    /// Create a [`VerbatimUrl`] from a path or a URL.
    ///
    /// If no root directory is provided, relative paths are resolved against the current working
    /// directory.
    #[cfg(feature = "non-pep508-extensions")] // PEP 508 arguably only allows absolute file URLs.
    pub fn from_url_or_path(
        input: &str,
        root_dir: Option<&Path>,
    ) -> Result<Self, VerbatimUrlError> {
        let url = match split_scheme(input) {
            Some((scheme, ..)) => {
                match Scheme::parse(scheme) {
                    Some(_) => {
                        // Ex) `https://pypi.org/simple`
                        Self::parse_url(input)?
                    }
                    None => {
                        // Ex) `C:\Users\user\index`
                        if let Some(root_dir) = root_dir {
                            Self::from_path(input, root_dir)?
                        } else {
                            let absolute_path = std::path::absolute(input).map_err(|err| {
                                VerbatimUrlError::Absolute(input.to_string(), err)
                            })?;
                            Self::from_absolute_path(absolute_path)?
                        }
                    }
                }
            }
            None => {
                // Ex) `/Users/user/index`
                if let Some(root_dir) = root_dir {
                    Self::from_path(input, root_dir)?
                } else {
                    let absolute_path = std::path::absolute(input)
                        .map_err(|err| VerbatimUrlError::Absolute(input.to_string(), err))?;
                    Self::from_absolute_path(absolute_path)?
                }
            }
        };
        Ok(url.with_given(input))
    }

    /// Parse a URL from an absolute or relative path.
    #[cfg(feature = "non-pep508-extensions")] // PEP 508 arguably only allows absolute file URLs.
    pub fn from_path(
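A hedged usage sketch of the dispatch above (requires the `non-pep508-extensions` feature; the inputs are illustrative):

```rust
use std::path::Path;

fn examples() -> Result<(), VerbatimUrlError> {
    // A recognized scheme parses as a URL.
    let _index = VerbatimUrl::from_url_or_path("https://pypi.org/simple", None)?;
    // A relative path resolves against the provided root directory.
    let _rel = VerbatimUrl::from_url_or_path("./index", Some(Path::new("/project")))?;
    // `C:\Users\user\index` splits to a "scheme" of `C`, which `Scheme::parse`
    // rejects, so it is treated as a path rather than a URL.
    let _win = VerbatimUrl::from_url_or_path(r"C:\Users\user\index", None)?;
    Ok(())
}
```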
@@ -362,6 +405,10 @@ pub enum VerbatimUrlError {
    #[error("path could not be normalized: {0}")]
    Normalization(PathBuf, #[source] std::io::Error),

    /// Received a path that could not be converted to an absolute path.
    #[error("path could not be converted to an absolute path: {0}")]
    Absolute(String, #[source] std::io::Error),

    /// Received a path that could not be normalized.
    #[cfg(not(feature = "non-pep508-extensions"))]
    #[error("Not a URL (missing scheme): {0}")]

@@ -771,7 +771,7 @@ mod tests {
    /// A reference list can be generated with:
    /// ```text
    /// $ python -c "from packaging import tags; [print(tag) for tag in tags.platform_tags()]"`
    /// ````
    /// ```
    #[test]
    fn test_platform_tags_manylinux() {
        let tags = compatible_tags(&Platform::new(

@@ -19,9 +19,9 @@ pub struct PyProjectToml {

impl PyProjectToml {
    pub fn from_toml(toml: &str) -> Result<Self, MetadataError> {
        let pyproject_toml: toml_edit::ImDocument<_> = toml_edit::ImDocument::from_str(toml)
        let pyproject_toml = toml_edit::Document::from_str(toml)
            .map_err(MetadataError::InvalidPyprojectTomlSyntax)?;
        let pyproject_toml: Self = PyProjectToml::deserialize(pyproject_toml.into_deserializer())
        let pyproject_toml = PyProjectToml::deserialize(pyproject_toml.into_deserializer())
            .map_err(MetadataError::InvalidPyprojectTomlSchema)?;
        Ok(pyproject_toml)
    }

@@ -86,8 +86,8 @@ impl UnnamedRequirementUrl for VerbatimParsedUrl {
            ParsedUrl::Directory(ParsedDirectoryUrl {
                url,
                install_path,
                editable: false,
                r#virtual: false,
                editable: None,
                r#virtual: None,
            })
        } else {
            ParsedUrl::Path(ParsedPathUrl {

@@ -118,8 +118,8 @@ impl UnnamedRequirementUrl for VerbatimParsedUrl {
            ParsedUrl::Directory(ParsedDirectoryUrl {
                url,
                install_path,
                editable: false,
                r#virtual: false,
                editable: None,
                r#virtual: None,
            })
        } else {
            ParsedUrl::Path(ParsedPathUrl {

@@ -187,7 +187,10 @@ impl ParsedUrl {
    pub fn is_editable(&self) -> bool {
        matches!(
            self,
            Self::Directory(ParsedDirectoryUrl { editable: true, .. })
            Self::Directory(ParsedDirectoryUrl {
                editable: Some(true),
                ..
            })
        )
    }
}
@@ -226,16 +229,18 @@ pub struct ParsedDirectoryUrl {
    pub url: DisplaySafeUrl,
    /// The absolute path to the distribution which we use for installing.
    pub install_path: Box<Path>,
    pub editable: bool,
    pub r#virtual: bool,
    /// Whether the project at the given URL should be installed in editable mode.
    pub editable: Option<bool>,
    /// Whether the project at the given URL should be treated as a virtual package.
    pub r#virtual: Option<bool>,
}

impl ParsedDirectoryUrl {
    /// Construct a [`ParsedDirectoryUrl`] from a path requirement source.
    pub fn from_source(
        install_path: Box<Path>,
        editable: Option<bool>,
        r#virtual: Option<bool>,
        url: DisplaySafeUrl,
    ) -> Self {
        Self {
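The switch from `bool` to `Option<bool>` makes "unspecified" representable, so an explicit `--no-editable` can be told apart from the flag simply being absent. A sketch of the intended reading (my interpretation, not stated in the diff):

```rust
// None               -> not specified by the requirement source; a default
//                       may be applied downstream.
// Some(true)/Some(false) -> explicitly requested by the source.
fn effective_editable(url: &ParsedDirectoryUrl) -> bool {
    url.editable.unwrap_or(false)
}
```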
@@ -399,8 +404,8 @@ impl TryFrom<DisplaySafeUrl> for ParsedUrl {
            Ok(Self::Directory(ParsedDirectoryUrl {
                url,
                install_path: path.into_boxed_path(),
                editable: false,
                r#virtual: false,
                editable: None,
                r#virtual: None,
            }))
        } else {
            Ok(Self::Path(ParsedPathUrl {

@@ -445,7 +450,7 @@ impl From<&ParsedDirectoryUrl> for DirectUrl {
        Self::LocalDirectory {
            url: value.url.to_string(),
            dir_info: DirInfo {
                editable: value.editable.then_some(true),
                editable: value.editable,
            },
            subdirectory: None,
        }

File diff suppressed because it is too large

@@ -53,8 +53,7 @@ import re
from dataclasses import asdict, dataclass, field
from enum import StrEnum
from pathlib import Path
from typing import Generator, Iterable, NamedTuple, Self
from urllib.parse import unquote
from typing import Any, Generator, Iterable, NamedTuple, Self

import httpx

@@ -255,8 +254,7 @@ class CPythonFinder(Finder):
            # Sort the assets to ensure deterministic results
            row["assets"].sort(key=lambda asset: asset["browser_download_url"])
            for asset in row["assets"]:
                url = asset["browser_download_url"]
                download = self._parse_download_url(url)
                download = self._parse_download_asset(asset)
                if download is None:
                    continue
                if (

@@ -305,6 +303,9 @@ class CPythonFinder(Finder):
        """Fetch the checksums for the given downloads."""
        checksum_urls = set()
        for download in downloads:
            # Skip the newer releases where we got the hash from the GitHub API
            if download.sha256:
                continue
            release_base_url = download.url.rsplit("/", maxsplit=1)[0]
            checksum_url = release_base_url + "/SHA256SUMS"
            checksum_urls.add(checksum_url)

@@ -343,16 +344,23 @@ class CPythonFinder(Finder):
                checksums[filename] = checksum

        for download in downloads:
            if download.sha256:
                continue
            download.sha256 = checksums.get(download.filename)

    def _parse_download_url(self, url: str) -> PythonDownload | None:
        """Parse an indygreg download URL into a PythonDownload object."""
    def _parse_download_asset(self, asset: dict[str, Any]) -> PythonDownload | None:
        """Parse a python-build-standalone download asset into a PythonDownload object."""
        url = asset["browser_download_url"]
        # Ex)
        # https://github.com/astral-sh/python-build-standalone/releases/download/20240107/cpython-3.12.1%2B20240107-aarch64-unknown-linux-gnu-lto-full.tar.zst
        if url.endswith(".sha256"):
            return None
        filename = unquote(url.rsplit("/", maxsplit=1)[-1])
        release = int(url.rsplit("/")[-2])
        filename = asset["name"]
        sha256 = None
        # On older versions, GitHub didn't backfill the digest.
        if digest := asset["digest"]:
            sha256 = digest.removeprefix("sha256:")

        match = self._filename_re.match(filename) or self._legacy_filename_re.match(
            filename

@@ -391,6 +399,7 @@ class CPythonFinder(Finder):
            url=url,
            build_options=build_options,
            variant=variant,
            sha256=sha256,
        )

    def _normalize_triple(self, triple: str) -> PlatformTriple | None:

@@ -598,6 +607,9 @@ class GraalPyFinder(Finder):
            platform = self._normalize_os(m.group(1))
            arch = self._normalize_arch(m.group(2))
            libc = "gnu" if platform == "linux" else "none"
            sha256 = None
            if digest := asset["digest"]:
                sha256 = digest.removeprefix("sha256:")
            download = PythonDownload(
                release=0,
                version=python_version,

@@ -610,6 +622,7 @@ class GraalPyFinder(Finder):
                implementation=self.implementation,
                filename=asset["name"],
                url=url,
                sha256=sha256,
            )
            # Only keep the latest GraalPy version of each arch/platform
            if (python_version, arch, platform) not in results:

@@ -624,6 +637,7 @@ class GraalPyFinder(Finder):
        return self.PLATFORM_MAPPING.get(os, os)

    async def _fetch_checksums(self, downloads: list[PythonDownload], n: int) -> None:
        downloads = list(filter(lambda d: not d.sha256, downloads))
        for idx, batch in enumerate(batched(downloads, n)):
            logging.info("Fetching GraalPy checksums: %d/%d", idx * n, len(downloads))
            checksum_requests = []
@@ -446,7 +446,16 @@ fn python_executables_from_installed<'a>(
        .flatten();

    match preference {
        PythonPreference::OnlyManaged => Box::new(from_managed_installations),
        PythonPreference::OnlyManaged => {
            // TODO(zanieb): Ideally, we'd create "fake" managed installation directories for tests,
            // but for now... we'll just include the test interpreters which are always on the
            // search path.
            if std::env::var(uv_static::EnvVars::UV_INTERNAL__TEST_PYTHON_MANAGED).is_ok() {
                Box::new(from_managed_installations.chain(from_search_path))
            } else {
                Box::new(from_managed_installations)
            }
        }
        PythonPreference::Managed => Box::new(
            from_managed_installations
                .chain(from_search_path)

@@ -730,6 +739,9 @@ fn python_interpreters<'a>(
                false
            }
        })
        .filter_ok(move |(source, interpreter)| {
            satisfies_python_preference(*source, interpreter, preference)
        })
}

/// Lazily convert Python executables into interpreters.

@@ -857,6 +869,93 @@ fn source_satisfies_environment_preference(
    }
}

/// Returns true if a Python interpreter matches the [`PythonPreference`].
pub fn satisfies_python_preference(
    source: PythonSource,
    interpreter: &Interpreter,
    preference: PythonPreference,
) -> bool {
    // If the source is "explicit", we will not apply the Python preference, e.g., if the user has
    // activated a virtual environment, we should always allow it. We may want to invalidate the
    // environment in some cases, like in projects, but we can't distinguish between explicit
    // requests for a different Python preference or a persistent preference in a configuration file
    // which would result in overly aggressive invalidation.
    let is_explicit = match source {
        PythonSource::ProvidedPath
        | PythonSource::ParentInterpreter
        | PythonSource::ActiveEnvironment
        | PythonSource::CondaPrefix => true,
        PythonSource::Managed
        | PythonSource::DiscoveredEnvironment
        | PythonSource::SearchPath
        | PythonSource::SearchPathFirst
        | PythonSource::Registry
        | PythonSource::MicrosoftStore
        | PythonSource::BaseCondaPrefix => false,
    };

    match preference {
        PythonPreference::OnlyManaged => {
            // Perform a fast check using the source before querying the interpreter
            if matches!(source, PythonSource::Managed) || interpreter.is_managed() {
                true
            } else {
                if is_explicit {
                    debug!(
                        "Allowing unmanaged Python interpreter at `{}` (in conflict with the `python-preference`) since it is from source: {source}",
                        interpreter.sys_executable().display()
                    );
                    true
                } else {
                    debug!(
                        "Ignoring Python interpreter at `{}`: only managed interpreters allowed",
                        interpreter.sys_executable().display()
                    );
                    false
                }
            }
        }
        // If not "only" a kind, any interpreter is okay
        PythonPreference::Managed | PythonPreference::System => true,
        PythonPreference::OnlySystem => {
            let is_system = match source {
                // A managed interpreter is never a system interpreter
                PythonSource::Managed => false,
                // We can't be sure if this is a system interpreter without checking
                PythonSource::ProvidedPath
                | PythonSource::ParentInterpreter
                | PythonSource::ActiveEnvironment
                | PythonSource::CondaPrefix
                | PythonSource::DiscoveredEnvironment
                | PythonSource::SearchPath
                | PythonSource::SearchPathFirst
                | PythonSource::Registry
                | PythonSource::BaseCondaPrefix => !interpreter.is_managed(),
                // Managed interpreters should never be found in the store
                PythonSource::MicrosoftStore => true,
            };

            if is_system {
                true
            } else {
                if is_explicit {
                    debug!(
                        "Allowing managed Python interpreter at `{}` (in conflict with the `python-preference`) since it is from source: {source}",
                        interpreter.sys_executable().display()
                    );
                    true
                } else {
                    debug!(
                        "Ignoring Python interpreter at `{}`: only system interpreters allowed",
                        interpreter.sys_executable().display()
                    );
                    false
                }
            }
        }
    }
}

/// Check if an encountered error is critical and should stop discovery.
///
/// Returns false when an error could be due to a faulty Python installation and we should continue searching for a working one.
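The new predicate composes directly with discovery, mirroring the `.filter_ok` call added above. A hedged sketch of that shape (the function name `filter_by_preference` is hypothetical):

```rust
// Keep only discovered `(source, interpreter)` pairs that are compatible
// with the user's `python-preference`; explicit sources always pass.
fn filter_by_preference<'a>(
    candidates: impl Iterator<Item = (PythonSource, &'a Interpreter)> + 'a,
    preference: PythonPreference,
) -> impl Iterator<Item = (PythonSource, &'a Interpreter)> + 'a {
    candidates.filter(move |(source, interpreter)| {
        satisfies_python_preference(*source, interpreter, preference)
    })
}
```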
@@ -884,6 +983,14 @@ impl Error {
                );
                false
            }
            #[cfg(windows)]
            InterpreterError::CorruptWindowsPackage { path, err } => {
                debug!(
                    "Skipping bad interpreter at {} from {source}: {err}",
                    path.display()
                );
                false
            }
            InterpreterError::NotFound(path)
            | InterpreterError::BrokenSymlink(BrokenSymlink { path, .. }) => {
                // If the interpreter is from an active, valid virtual environment, we should

@@ -2804,6 +2911,18 @@ impl PythonPreference {
        }
    }

    /// Return the canonical name.
    // TODO(zanieb): This should be a `Display` impl and we should have a different view for
    // the sources
    pub fn canonical_name(&self) -> &'static str {
        match self {
            Self::OnlyManaged => "only managed",
            Self::Managed => "prefer managed",
            Self::System => "prefer system",
            Self::OnlySystem => "only system",
        }
    }
}

impl fmt::Display for PythonPreference {

@@ -158,8 +158,7 @@ impl PythonEnvironment {
        let installation = match find_python_installation(
            request,
            preference,
            // Ignore managed installations when looking for environments
            PythonPreference::OnlySystem,
            PythonPreference::default(),
            cache,
            preview,
        )? {

@@ -174,7 +173,7 @@ impl PythonEnvironment {
    /// N.B. This function also works for system Python environments and users depend on this.
    pub fn from_root(root: impl AsRef<Path>, cache: &Cache) -> Result<Self, Error> {
        debug!(
            "Checking for Python environment at `{}`",
            "Checking for Python environment at: `{}`",
            root.as_ref().user_display()
        );
        match root.as_ref().try_exists() {

@@ -34,6 +34,9 @@ use crate::{
    VirtualEnvironment,
};

#[cfg(windows)]
use windows_sys::Win32::Foundation::{APPMODEL_ERROR_NO_PACKAGE, ERROR_CANT_ACCESS_FILE};

/// A Python executable and its associated platform markers.
#[derive(Debug, Clone)]
pub struct Interpreter {

@@ -268,15 +271,28 @@ impl Interpreter {
    ///
    /// Returns `false` if we cannot determine the path of the uv managed Python interpreters.
    pub fn is_managed(&self) -> bool {
        if let Ok(test_managed) =
            std::env::var(uv_static::EnvVars::UV_INTERNAL__TEST_PYTHON_MANAGED)
        {
            // During testing, we collect interpreters into an artificial search path and need to
            // be able to mock whether an interpreter is managed or not.
            return test_managed.split_ascii_whitespace().any(|item| {
                let version = <PythonVersion as std::str::FromStr>::from_str(item).expect(
                    "`UV_INTERNAL__TEST_PYTHON_MANAGED` items should be valid Python versions",
                );
                if version.patch().is_some() {
                    version.version() == self.python_version()
                } else {
                    (version.major(), version.minor()) == self.python_tuple()
                }
            });
        }

        let Ok(installations) = ManagedPythonInstallations::from_settings(None) else {
            return false;
        };

        installations
            .find_all()
            .into_iter()
            .flatten()
            .any(|install| install.path() == self.sys_base_prefix)
        self.sys_base_prefix.starts_with(installations.root())
    }

    /// Returns `Some` if the environment is externally managed, optionally including an error
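The managed check collapses from an enumeration of all installations to a single prefix test against the managed-installations root. A restatement of the new check in isolation (illustrative paths in the comments are assumptions):

```rust
use std::path::Path;

// e.g. managed_root     = ~/.local/share/uv/python
//      sys_base_prefix  = ~/.local/share/uv/python/cpython-3.12-... -> managed
//      sys_base_prefix  = /usr                                      -> not managed
fn is_under_managed_root(sys_base_prefix: &Path, managed_root: &Path) -> bool {
    sys_base_prefix.starts_with(managed_root)
}
```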
@@ -760,6 +776,13 @@ pub enum Error {
        #[source]
        err: io::Error,
    },
    #[cfg(windows)]
    #[error("Failed to query Python interpreter at `{path}`")]
    CorruptWindowsPackage {
        path: PathBuf,
        #[source]
        err: io::Error,
    },
    #[error("{0}")]
    UnexpectedResponse(UnexpectedResponseError),
    #[error("{0}")]

@@ -872,10 +895,23 @@ impl InterpreterInfo {
            .arg("-c")
            .arg(script)
            .output()
            .map_err(|err| Error::SpawnFailed {
            .map_err(
                |err| match err.raw_os_error().and_then(|code| u32::try_from(code).ok()) {
                    // These error codes are returned if the Python interpreter is a corrupt MSIX
                    // package, which we want to differentiate from a typical spawn failure.
                    #[cfg(windows)]
                    Some(APPMODEL_ERROR_NO_PACKAGE | ERROR_CANT_ACCESS_FILE) => {
                        Error::CorruptWindowsPackage {
                            path: interpreter.to_path_buf(),
                            err,
                })?;
                        }
                    }
                    _ => Error::SpawnFailed {
                        path: interpreter.to_path_buf(),
                        err,
                    },
                },
            )?;

        if !output.status.success() {
            let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string();

@@ -8,7 +8,7 @@ use uv_static::EnvVars;
pub use crate::discovery::{
    EnvironmentPreference, Error as DiscoveryError, PythonDownloads, PythonNotFound,
    PythonPreference, PythonRequest, PythonSource, PythonVariant, VersionRequest,
    find_python_installations,
    find_python_installations, satisfies_python_preference,
};
pub use crate::downloads::PlatformRequest;
pub use crate::environment::{InvalidEnvironmentKind, PythonEnvironment};

@@ -847,7 +847,7 @@ fn executable_path_from_base(
/// Create a link to a managed Python executable.
///
/// If the file already exists at the link path, an error will be returned.
pub fn create_link_to_executable(link: &Path, executable: PathBuf) -> Result<(), Error> {
pub fn create_link_to_executable(link: &Path, executable: &Path) -> Result<(), Error> {
    let link_parent = link.parent().ok_or(Error::NoExecutableDirectory)?;
    fs_err::create_dir_all(link_parent).map_err(|err| Error::ExecutableDirectory {
        to: link_parent.to_path_buf(),

@@ -856,20 +856,20 @@ pub fn create_link_to_executable(link: &Path, executable: PathBuf) -> Result<(),

    if cfg!(unix) {
        // Note this will never copy on Unix — we use it here to allow compilation on Windows
        match symlink_or_copy_file(&executable, link) {
        match symlink_or_copy_file(executable, link) {
            Ok(()) => Ok(()),
            Err(err) if err.kind() == io::ErrorKind::NotFound => {
                Err(Error::MissingExecutable(executable.clone()))
                Err(Error::MissingExecutable(executable.to_path_buf()))
            }
            Err(err) => Err(Error::LinkExecutable {
                from: executable,
                from: executable.to_path_buf(),
                to: link.to_path_buf(),
                err,
            }),
        }
    } else if cfg!(windows) {
        // TODO(zanieb): Install GUI launchers as well
        let launcher = windows_python_launcher(&executable, false)?;
        let launcher = windows_python_launcher(executable, false)?;

        // OK to use `std::fs` here, `fs_err` does not support `File::create_new` and we attach
        // error context anyway

@@ -878,7 +878,7 @@ pub fn create_link_to_executable(link: &Path, executable: PathBuf) -> Result<(),
        std::fs::File::create_new(link)
            .and_then(|mut file| file.write_all(launcher.as_ref()))
            .map_err(|err| Error::LinkExecutable {
                from: executable,
                from: executable.to_path_buf(),
                to: link.to_path_buf(),
                err,
            })

@@ -5,6 +5,8 @@ use std::ops::Deref;
use std::{fmt, str::FromStr};
use thiserror::Error;

use uv_static::EnvVars;

#[derive(Error, Debug)]
pub enum Error {
    #[error("Unknown operating system: {0}")]

@@ -15,6 +17,8 @@ pub enum Error {
    UnknownLibc(String),
    #[error("Unsupported variant `{0}` for architecture `{1}`")]
    UnsupportedVariant(String, String),
    #[error(transparent)]
    LibcDetectionError(#[from] LibcDetectionError),
}

/// Architecture variants, e.g., with support for different instruction sets

@@ -95,22 +99,32 @@ pub enum Libc {
}

impl Libc {
    pub(crate) fn from_env() -> Result<Self, LibcDetectionError> {
    pub(crate) fn from_env() -> Result<Self, Error> {
        match std::env::consts::OS {
            "linux" => Ok(Self::Some(match detect_linux_libc()? {
            "linux" => {
                if let Ok(libc) = std::env::var(EnvVars::UV_LIBC) {
                    if !libc.is_empty() {
                        return Self::from_str(&libc);
                    }
                }

                Ok(Self::Some(match detect_linux_libc()? {
                    LibcVersion::Manylinux { .. } => match std::env::consts::ARCH {
                        // Checks if the CPU supports hardware floating-point operations.
                        // Depending on the result, it selects either the `gnueabihf` (hard-float) or `gnueabi` (soft-float) environment.
                        // download-metadata.json only includes armv7.
                        "arm" | "armv5te" | "armv7" => match detect_hardware_floating_point_support() {
                        "arm" | "armv5te" | "armv7" => {
                            match detect_hardware_floating_point_support() {
                                Ok(true) => target_lexicon::Environment::Gnueabihf,
                                Ok(false) => target_lexicon::Environment::Gnueabi,
                                Err(_) => target_lexicon::Environment::Gnu,
                            },
                            }
                        }
                        _ => target_lexicon::Environment::Gnu,
                    },
                    LibcVersion::Musllinux { .. } => target_lexicon::Environment::Musl,
                })),
                }))
            }
            "windows" | "macos" => Ok(Self::None),
            // Use `None` on platforms without explicit support.
            _ => Ok(Self::None),
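On Linux, `Libc::from_env` now consults the `UV_LIBC` environment variable before falling back to detection. A hedged restatement of that precedence (the helper name `resolve_libc` is hypothetical; it assumes `Libc: FromStr` with `Err = Error`, as the diff's `Self::from_str` call implies):

```rust
use std::str::FromStr;

/// 1. A non-empty `UV_LIBC` wins (e.g., `UV_LIBC=musl`);
/// 2. otherwise fall through to `detect_linux_libc()` via `Libc::from_env`.
fn resolve_libc(env_override: Option<&str>) -> Result<Libc, Error> {
    match env_override {
        Some(value) if !value.is_empty() => Libc::from_str(value),
        _ => Libc::from_env(),
    }
}
```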
@@ -1,7 +1,7 @@
//! DO NOT EDIT
//!
//! Generated with `cargo run dev generate-sysconfig-metadata`
//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250708/cpython-unix/targets.yml>
//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250712/cpython-unix/targets.yml>
//!
#![allow(clippy::all)]
#![cfg_attr(any(), rustfmt::skip)]

@@ -217,6 +217,19 @@ impl PythonVersionFile {
        }
    }

    /// Create a new representation of a global Python version file.
    ///
    /// Returns [`None`] if the user configuration directory cannot be determined.
    pub fn global() -> Option<Self> {
        let path = user_uv_config_dir()?.join(PYTHON_VERSION_FILENAME);
        Some(Self::new(path))
    }

    /// Returns `true` if the version file is a global version file.
    pub fn is_global(&self) -> bool {
        PythonVersionFile::global().is_some_and(|global| self.path() == global.path())
    }

    /// Return the first request declared in the file, if any.
    pub fn version(&self) -> Option<&PythonRequest> {
        self.versions.first()
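A short hedged sketch of the new accessors (illustrative only; assumes `PythonVersionFile::path()` returns a `&Path`, as its use in `is_global` suggests):

```rust
// Resolve the global pin, if the user configuration directory is known,
// then confirm that the resolved file identifies itself as global.
if let Some(global) = PythonVersionFile::global() {
    println!("global Python pin: {}", global.path().display());
    assert!(global.is_global());
}
```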
@@ -260,6 +273,9 @@ impl PythonVersionFile {
    /// Update the version file on the file system.
    pub async fn write(&self) -> Result<(), std::io::Error> {
        debug!("Writing Python versions to `{}`", self.path.display());
        if let Some(parent) = self.path.parent() {
            fs_err::tokio::create_dir_all(parent).await?;
        }
        fs::tokio::write(
            &self.path,
            self.versions

@@ -3,6 +3,7 @@
use crate::managed::ManagedPythonInstallation;
use crate::platform::Arch;
use crate::{COMPANY_DISPLAY_NAME, COMPANY_KEY, PythonInstallationKey, PythonVersion};
use anyhow::anyhow;
use std::cmp::Ordering;
use std::collections::HashSet;
use std::path::PathBuf;

@@ -129,12 +130,13 @@ fn read_registry_entry(company: &str, tag: &str, tag_key: &Key) -> Option<Window
pub enum ManagedPep514Error {
    #[error("Windows has an unknown pointer width for arch: `{_0}`")]
    InvalidPointerSize(Arch),
    #[error("Failed to write registry entry: {0}")]
    WriteError(#[from] windows_result::Error),
}

/// Register a managed Python installation in the Windows registry following PEP 514.
pub fn create_registry_entry(
    installation: &ManagedPythonInstallation,
    errors: &mut Vec<(PythonInstallationKey, anyhow::Error)>,
) -> Result<(), ManagedPep514Error> {
    let pointer_width = match installation.key().arch().family().pointer_width() {
        Ok(PointerWidth::U32) => 32,

@@ -146,9 +148,7 @@ pub fn create_registry_entry(
        }
    };

    if let Err(err) = write_registry_entry(installation, pointer_width) {
        errors.push((installation.key().clone(), err.into()));
    }
    write_registry_entry(installation, pointer_width)?;

    Ok(())
}

@@ -239,8 +239,7 @@ pub fn remove_registry_entry<'a>(
        } else {
            errors.push((
                installation.key().clone(),
                anyhow::Error::new(err)
                    .context("Failed to clear registry entries under HKCU:\\{python_entry}"),
                anyhow!("Failed to clear registry entries under HKCU:\\{python_entry}: {err}"),
            ));
        }
    }

@@ -269,6 +268,9 @@ pub fn remove_orphan_registry_entries(installations: &[ManagedPythonInstallation
    // Separate assignment since `keys()` creates a borrow.
    let subkeys = match key.keys() {
        Ok(subkeys) => subkeys,
        Err(err) if err.code() == ERROR_NOT_FOUND => {
            return;
        }
        Err(err) => {
            // TODO(konsti): We don't have an installation key here.
            warn_user_once!("Failed to list subkeys of HKCU:\\{astral_key}: {err}");

@@ -282,6 +284,9 @@ pub fn remove_orphan_registry_entries(installations: &[ManagedPythonInstallation
        let python_entry = format!("{astral_key}\\{subkey}");
        debug!("Removing orphan registry key HKCU:\\{}", python_entry);
        if let Err(err) = CURRENT_USER.remove_tree(&python_entry) {
            if err.code() == ERROR_NOT_FOUND {
                continue;
            }
            // TODO(konsti): We don't have an installation key here.
            warn_user_once!("Failed to remove orphan registry key HKCU:\\{python_entry}: {err}");
        }

@@ -2064,8 +2064,10 @@ mod test {
                    fragment: None,
                },
                install_path: "/foo/bar",
                editable: true,
                virtual: false,
                editable: Some(
                    true,
                ),
                virtual: None,
            },
        ),
        verbatim: VerbatimUrl {
@@ -90,7 +90,7 @@ impl RequirementsTxtRequirement {
            version_or_url: Some(uv_pep508::VersionOrUrl::Url(VerbatimParsedUrl {
                verbatim: url.verbatim,
                parsed_url: ParsedUrl::Directory(ParsedDirectoryUrl {
                    editable: true,
                    editable: Some(true),
                    ..parsed_url
                }),
            })),

@@ -115,7 +115,7 @@ impl RequirementsTxtRequirement {
            url: VerbatimParsedUrl {
                verbatim: requirement.url.verbatim,
                parsed_url: ParsedUrl::Directory(ParsedDirectoryUrl {
                    editable: true,
                    editable: Some(true),
                    ..parsed_url
                }),
            },

@@ -146,8 +146,8 @@ fn unquote_open_escape(acc: &mut String, cursor: &mut std::iter::Enumerate<std::
///
/// # Examples
///
/// ```
/// assert_eq!(r_shquote::unquote("foobar").unwrap(), "foobar");
/// ```no_build
/// assert_eq!(unquote("foobar").unwrap(), "foobar");
/// ```
pub(crate) fn unquote(source: &str) -> Result<Option<String>, UnquoteError> {
    // If the string does not contain any single-quotes, double-quotes, or escape sequences, it
@@ -22,8 +22,8 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/scripts/packages/black_editable",
                    editable: false,
                    virtual: false,
                    editable: None,
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -72,8 +72,8 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/scripts/packages/black_editable",
                    editable: false,
                    virtual: false,
                    editable: None,
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -126,8 +126,8 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "/scripts/packages/black_editable",
                    editable: false,
                    virtual: false,
                    editable: None,
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -176,8 +176,8 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/scripts/packages/black editable",
                    editable: false,
                    virtual: false,
                    editable: None,
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -226,8 +226,8 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/scripts/packages/black editable",
                    editable: false,
                    virtual: false,
                    editable: None,
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -276,8 +276,8 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/scripts/packages/black editable",
                    editable: false,
                    virtual: false,
                    editable: None,
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -24,8 +24,10 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/editable",
                    editable: true,
                    virtual: false,
                    editable: Some(
                        true,
                    ),
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -81,8 +83,10 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/editable",
                    editable: true,
                    virtual: false,
                    editable: Some(
                        true,
                    ),
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -138,8 +142,10 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/editable",
                    editable: true,
                    virtual: false,
                    editable: Some(
                        true,
                    ),
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -195,8 +201,10 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/editable",
                    editable: true,
                    virtual: false,
                    editable: Some(
                        true,
                    ),
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -252,8 +260,10 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/editable",
                    editable: true,
                    virtual: false,
                    editable: Some(
                        true,
                    ),
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -302,8 +312,10 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/editable[d",
                    editable: true,
                    virtual: false,
                    editable: Some(
                        true,
                    ),
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -352,8 +364,10 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/editable",
                    editable: true,
                    virtual: false,
                    editable: Some(
                        true,
                    ),
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -402,8 +416,10 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/editable",
                    editable: true,
                    virtual: false,
                    editable: Some(
                        true,
                    ),
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -22,8 +22,8 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/scripts/packages/black_editable",
                    editable: false,
                    virtual: false,
                    editable: None,
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -72,8 +72,8 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/scripts/packages/black_editable",
                    editable: false,
                    virtual: false,
                    editable: None,
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -126,8 +126,8 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/scripts/packages/black_editable",
                    editable: false,
                    virtual: false,
                    editable: None,
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -176,8 +176,8 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/scripts/packages/black editable",
                    editable: false,
                    virtual: false,
                    editable: None,
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -226,8 +226,8 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/scripts/packages/black editable",
                    editable: false,
                    virtual: false,
                    editable: None,
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -276,8 +276,8 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/scripts/packages/black editable",
                    editable: false,
                    virtual: false,
                    editable: None,
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -24,8 +24,10 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/editable",
                    editable: true,
                    virtual: false,
                    editable: Some(
                        true,
                    ),
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -81,8 +83,10 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/editable",
                    editable: true,
                    virtual: false,
                    editable: Some(
                        true,
                    ),
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -138,8 +142,10 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/editable",
                    editable: true,
                    virtual: false,
                    editable: Some(
                        true,
                    ),
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -195,8 +201,10 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/editable",
                    editable: true,
                    virtual: false,
                    editable: Some(
                        true,
                    ),
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -252,8 +260,10 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/editable",
                    editable: true,
                    virtual: false,
                    editable: Some(
                        true,
                    ),
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -302,8 +312,10 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/editable[d",
                    editable: true,
                    virtual: false,
                    editable: Some(
                        true,
                    ),
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -352,8 +364,10 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/editable",
                    editable: true,
                    virtual: false,
                    editable: Some(
                        true,
                    ),
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {

@@ -402,8 +416,10 @@ RequirementsTxt {
                        fragment: None,
                    },
                    install_path: "<REQUIREMENTS_DIR>/editable",
                    editable: true,
                    virtual: false,
                    editable: Some(
                        true,
                    ),
                    virtual: None,
                },
            ),
            verbatim: VerbatimUrl {
@@ -31,6 +31,9 @@ pub enum Error {
    #[error(transparent)]
    WheelFilename(#[from] uv_distribution_filename::WheelFilenameError),

    #[error("Failed to construct HTTP client")]
    ClientError(#[source] anyhow::Error),

    #[error(transparent)]
    Io(#[from] std::io::Error),
}

@@ -154,7 +154,7 @@ impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> {
        let source = SourceUrl::Directory(DirectorySourceUrl {
            url: &url,
            install_path: Cow::Borrowed(source_tree),
            editable: false,
            editable: None,
        });

        // Determine the hash policy. Since we don't have a package name, we perform a

@@ -273,13 +273,13 @@ impl RequirementsSource {
    pub fn allows_extras(&self) -> bool {
        matches!(
            self,
            Self::PyprojectToml(_) | Self::SetupPy(_) | Self::SetupCfg(_)
            Self::PylockToml(_) | Self::PyprojectToml(_) | Self::SetupPy(_) | Self::SetupCfg(_)
        )
    }

    /// Returns `true` if the source allows groups to be specified.
    pub fn allows_groups(&self) -> bool {
        matches!(self, Self::PyprojectToml(_))
        matches!(self, Self::PylockToml(_) | Self::PyprojectToml(_))
    }
}

@@ -43,7 +43,7 @@ use uv_distribution_types::{
    UnresolvedRequirementSpecification,
};
use uv_fs::{CWD, Simplified};
use uv_normalize::{ExtraName, GroupName, PackageName};
use uv_normalize::{ExtraName, PackageName, PipGroupName};
use uv_requirements_txt::{RequirementsTxt, RequirementsTxtRequirement};
use uv_warnings::warn_user;
use uv_workspace::pyproject::PyProjectToml;

@@ -215,7 +215,7 @@ impl RequirementsSpecification {
        requirements: &[RequirementsSource],
        constraints: &[RequirementsSource],
        overrides: &[RequirementsSource],
        groups: BTreeMap<PathBuf, Vec<GroupName>>,
        groups: Option<&GroupsSpecification>,
        client_builder: &BaseClientBuilder<'_>,
    ) -> Result<Self> {
        let mut spec = Self::default();

@@ -250,10 +250,13 @@ impl RequirementsSpecification {

        // If we have a `pylock.toml`, don't allow additional requirements, constraints, or
        // overrides.
        if requirements
            .iter()
            .any(|source| matches!(source, RequirementsSource::PylockToml(..)))
        {
        if let Some(pylock_toml) = requirements.iter().find_map(|source| {
            if let RequirementsSource::PylockToml(path) = source {
                Some(path)
            } else {
                None
            }
        }) {
            if requirements
                .iter()
                .any(|source| !matches!(source, RequirementsSource::PylockToml(..)))

@@ -272,24 +275,55 @@ impl RequirementsSpecification {
                    "Cannot specify constraints with a `pylock.toml` file"
                ));
            }
            if !groups.is_empty() {

            // If we have a `pylock.toml`, disallow specifying paths for groups; instead, require
            // that all groups refer to the `pylock.toml` file.
            if let Some(groups) = groups {
                let mut names = Vec::new();
                for group in &groups.groups {
                    if group.path.is_some() {
                        return Err(anyhow::anyhow!(
                            "Cannot specify groups with a `pylock.toml` file"
                            "Cannot specify paths for groups with a `pylock.toml` file; all groups must refer to the `pylock.toml` file"
                        ));
                    }
                    names.push(group.name.clone());
                }

            // Resolve sources into specifications so we know their `source_tree`.
            let mut requirement_sources = Vec::new();
            for source in requirements {
                let source = Self::from_source(source, client_builder).await?;
                requirement_sources.push(source);
                if !names.is_empty() {
                    spec.groups.insert(
                        pylock_toml.clone(),
                        DependencyGroups::from_args(
                            false,
                            false,
                            false,
                            Vec::new(),
                            Vec::new(),
                            false,
                            names,
                            false,
                        ),
                    );
                }
            }
        } else if let Some(groups) = groups {
            // pip `--group` flags specify their own sources, which we need to process here.
            // First, we collect all groups by their path.
            let mut groups_by_path = BTreeMap::new();
            for group in &groups.groups {
                // If there's no path provided, expect a pyproject.toml in the project-dir
                // (Which is typically the current working directory, matching pip's behaviour)
                let pyproject_path = group
                    .path
                    .clone()
                    .unwrap_or_else(|| groups.root.join("pyproject.toml"));
                groups_by_path
                    .entry(pyproject_path)
                    .or_insert_with(Vec::new)
                    .push(group.name.clone());
            }

        // pip `--group` flags specify their own sources, which we need to process here
        if !groups.is_empty() {
            let mut group_specs = BTreeMap::new();
            for (path, groups) in groups {
            for (path, groups) in groups_by_path {
                let group_spec = DependencyGroups::from_args(
                    false,
                    false,

@@ -305,6 +339,13 @@ impl RequirementsSpecification {
            spec.groups = group_specs;
        }

        // Resolve sources into specifications so we know their `source_tree`.
        let mut requirement_sources = Vec::new();
        for source in requirements {
            let source = Self::from_source(source, client_builder).await?;
            requirement_sources.push(source);
        }

        // Read all requirements, and keep track of all requirements _and_ constraints.
        // A `requirements.txt` can contain a `-c constraints.txt` directive within it, so reading
        // a requirements file can also add constraints.

@@ -426,7 +467,7 @@ impl RequirementsSpecification {
        requirements: &[RequirementsSource],
        client_builder: &BaseClientBuilder<'_>,
    ) -> Result<Self> {
        Self::from_sources(requirements, &[], &[], BTreeMap::default(), client_builder).await
        Self::from_sources(requirements, &[], &[], None, client_builder).await
    }

    /// Initialize a [`RequirementsSpecification`] from a list of [`Requirement`].
@@ -485,3 +526,12 @@ impl RequirementsSpecification {
        self.requirements.is_empty() && self.source_trees.is_empty() && self.overrides.is_empty()
    }
}

#[derive(Debug, Default, Clone)]
pub struct GroupsSpecification {
    /// The path to the project root, relative to which the default `pyproject.toml` file is
    /// located.
    pub root: PathBuf,
    /// The enabled groups.
    pub groups: Vec<PipGroupName>,
}
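A hedged sketch of how the new struct is populated and threaded through (the literal values are illustrative; a real caller would parse `PipGroupName`s from repeated `--group` flags):

```rust
use std::path::PathBuf;

// Groups collected from pip-style `--group` flags, resolved relative to the
// project root when no explicit pyproject.toml path is given.
let groups = GroupsSpecification {
    root: PathBuf::from("."),
    groups: vec![/* PipGroupName values, e.g. from `--group dev` */],
};
// Then passed as `Some(&groups)` to `RequirementsSpecification::from_sources`,
// or `None` when no groups were requested.
```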
|
|
@ -186,13 +186,13 @@ pub struct PylockToml {
|
|||
lock_version: Version,
|
||||
created_by: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
requires_python: Option<RequiresPython>,
|
||||
pub requires_python: Option<RequiresPython>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
extras: Vec<ExtraName>,
|
||||
pub extras: Vec<ExtraName>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
dependency_groups: Vec<GroupName>,
|
||||
pub dependency_groups: Vec<GroupName>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
default_groups: Vec<GroupName>,
|
||||
pub default_groups: Vec<GroupName>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
pub packages: Vec<PylockTomlPackage>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
|
|
@ -500,7 +500,7 @@ impl<'lock> PylockToml {
|
|||
.unwrap_or_else(|_| dist.install_path.clone());
|
||||
package.directory = Some(PylockTomlDirectory {
|
||||
path: PortablePathBuf::from(path),
|
||||
editable: if dist.editable { Some(true) } else { None },
|
||||
editable: dist.editable,
|
||||
subdirectory: None,
|
||||
});
|
||||
}
|
||||
|
|
@ -737,7 +737,7 @@ impl<'lock> PylockToml {
|
|||
),
|
||||
editable: match editable {
|
||||
EditableMode::NonEditable => None,
|
||||
EditableMode::Editable => Some(sdist.editable),
|
||||
EditableMode::Editable => sdist.editable,
|
||||
},
|
||||
subdirectory: None,
|
||||
}),
|
||||
|
|
@ -966,9 +966,12 @@ impl<'lock> PylockToml {
|
|||
self,
|
||||
install_path: &Path,
|
||||
markers: &MarkerEnvironment,
|
||||
extras: &[ExtraName],
|
||||
groups: &[GroupName],
|
||||
tags: &Tags,
|
||||
build_options: &BuildOptions,
|
||||
) -> Result<Resolution, PylockTomlError> {
|
||||
// Convert the extras and dependency groups specifications to a concrete environment.
|
||||
let mut graph =
|
||||
petgraph::graph::DiGraph::with_capacity(self.packages.len(), self.packages.len());
|
||||
|
||||
|
|
@ -977,7 +980,7 @@ impl<'lock> PylockToml {
|
|||
|
||||
for package in self.packages {
|
||||
// Omit packages that aren't relevant to the current environment.
|
||||
if !package.marker.evaluate(markers, &[]) {
|
||||
if !package.marker.evaluate_pep751(markers, extras, groups) {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
|
@ -1152,7 +1155,7 @@ impl<'lock> PylockToml {
|
|||
};
|
||||
|
||||
let index = graph.add_node(dist);
|
||||
graph.add_edge(root, index, Edge::Prod(package.marker));
|
||||
graph.add_edge(root, index, Edge::Prod);
|
||||
}
|
||||
|
||||
Ok(Resolution::new(graph))
|
||||
|
|
@ -1394,8 +1397,8 @@ impl PylockTomlDirectory {
|
|||
Ok(DirectorySourceDist {
|
||||
name: name.clone(),
|
||||
install_path: path.into_boxed_path(),
|
||||
editable: self.editable.unwrap_or(false),
|
||||
r#virtual: false,
|
||||
editable: self.editable,
|
||||
r#virtual: Some(false),
|
||||
url,
|
||||
})
|
||||
}
|
||||
|
|
|
|||
|
|
@ -13,7 +13,6 @@ use uv_configuration::ExtrasSpecificationWithDefaults;
|
|||
use uv_configuration::{BuildOptions, DependencyGroupsWithDefaults, InstallOptions};
|
||||
use uv_distribution_types::{Edge, Node, Resolution, ResolvedDist};
|
||||
use uv_normalize::{ExtraName, GroupName, PackageName};
|
||||
use uv_pep508::MarkerTree;
|
||||
use uv_platform_tags::Tags;
|
||||
use uv_pypi_types::ResolverMarkerEnvironment;
|
||||
|
||||
|
|
@ -113,7 +112,7 @@ pub trait Installable<'lock> {
|
|||
inverse.insert(&dist.id, index);
|
||||
|
||||
// Add an edge from the root.
|
||||
petgraph.add_edge(root, index, Edge::Prod(MarkerTree::TRUE));
|
||||
petgraph.add_edge(root, index, Edge::Prod);
|
||||
|
||||
// Push the package onto the queue.
|
||||
roots.push((dist, index));
|
||||
|
|
@@ -189,7 +188,7 @@ pub trait Installable<'lock> {
                         // a specific marker environment and set of extras/groups.
                         // So at this point, we know the extras/groups have been
                         // satisfied, so we can safely drop the conflict marker.
-                        Edge::Dev(group.clone(), dep.complexified_marker.pep508()),
+                        Edge::Dev(group.clone()),
                     );

                     // Push its dependencies on the queue.
@@ -231,7 +230,7 @@ pub trait Installable<'lock> {
                 inverse.insert(&dist.id, index);

                 // Add the edge.
-                petgraph.add_edge(root, index, Edge::Prod(dependency.marker));
+                petgraph.add_edge(root, index, Edge::Prod);

                 // Push its dependencies on the queue.
                 if seen.insert((&dist.id, None)) {
@@ -300,7 +299,7 @@ pub trait Installable<'lock> {
                 };

                 // Add the edge.
-                petgraph.add_edge(root, index, Edge::Dev(group.clone(), dependency.marker));
+                petgraph.add_edge(root, index, Edge::Dev(group.clone()));

                 // Push its dependencies on the queue.
                 if seen.insert((&dist.id, None)) {
@@ -484,9 +483,9 @@ pub trait Installable<'lock> {
                     index,
                     dep_index,
                     if let Some(extra) = extra {
-                        Edge::Optional(extra.clone(), dep.complexified_marker.pep508())
+                        Edge::Optional(extra.clone())
                     } else {
-                        Edge::Prod(dep.complexified_marker.pep508())
+                        Edge::Prod
                     },
                 );

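Every `Edge` constructor in these hunks loses its marker payload: the graph is built for a single, already-evaluated environment, so per-edge PEP 508 markers carry no information. Inferred from the call sites alone (the real definition lives in `uv_distribution_types` and may differ), the slimmed-down enum looks roughly like this:

    // Stand-ins for uv_normalize types, to keep the sketch self-contained.
    type ExtraName = String;
    type GroupName = String;

    // Shape inferred from the call sites in this diff; illustrative only.
    enum Edge {
        Prod,
        Optional(ExtraName),
        Dev(GroupName),
    }

    fn describe(edge: &Edge) -> String {
        match edge {
            Edge::Prod => "production dependency".to_string(),
            Edge::Optional(extra) => format!("enabled by extra `{extra}`"),
            Edge::Dev(group) => format!("enabled by group `{group}`"),
        }
    }

    fn main() {
        assert_eq!(describe(&Edge::Optional("cli".into())), "enabled by extra `cli`");
    }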
@@ -1256,6 +1256,7 @@ impl Lock {
         root: &Path,
         packages: &BTreeMap<PackageName, WorkspaceMember>,
         members: &[PackageName],
+        required_members: &BTreeSet<PackageName>,
         requirements: &[Requirement],
         constraints: &[Requirement],
         overrides: &[Requirement],
@@ -1283,7 +1284,10 @@ impl Lock {
         // Validate that the member sources have not changed (e.g., that they've switched from
         // virtual to non-virtual or vice versa).
         for (name, member) in packages {
-            let expected = !member.pyproject_toml().is_package();
+            // We don't require a build system if the workspace member is a dependency.
+            let expected = !member
+                .pyproject_toml()
+                .is_package(!required_members.contains(name));
             let actual = self
                 .find_by_name(name)
                 .ok()
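The validation now distinguishes members that other packages actually depend on (`required_members`) from purely structural workspace members: a member nothing depends on may omit a build system without being flagged as a virtual-vs-package mismatch. A heavily simplified sketch of that check, under the assumption that `is_package` takes a "require a build system" flag as the call site suggests (`PyprojectToml` and `expected_virtual` here are stand-ins, not uv's types):

    use std::collections::BTreeSet;

    struct PyprojectToml {
        has_build_system: bool,
    }

    impl PyprojectToml {
        // Assumed semantics: with the flag set, a missing build system
        // means "not a package"; dependencies get a pass.
        fn is_package(&self, require_build_system: bool) -> bool {
            !require_build_system || self.has_build_system
        }
    }

    fn expected_virtual(toml: &PyprojectToml, name: &str, required: &BTreeSet<&str>) -> bool {
        !toml.is_package(!required.contains(name))
    }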
@@ -2397,8 +2401,8 @@ impl Package {
                 name: self.id.name.clone(),
                 url: verbatim_url(&install_path, &self.id)?,
                 install_path: install_path.into_boxed_path(),
-                editable: false,
-                r#virtual: false,
+                editable: Some(false),
+                r#virtual: Some(false),
             };
             uv_distribution_types::SourceDist::Directory(dir_dist)
         }
@@ -2408,8 +2412,8 @@ impl Package {
                 name: self.id.name.clone(),
                 url: verbatim_url(&install_path, &self.id)?,
                 install_path: install_path.into_boxed_path(),
-                editable: true,
-                r#virtual: false,
+                editable: Some(true),
+                r#virtual: Some(false),
             };
             uv_distribution_types::SourceDist::Directory(dir_dist)
         }
@@ -2419,8 +2423,8 @@ impl Package {
                 name: self.id.name.clone(),
                 url: verbatim_url(&install_path, &self.id)?,
                 install_path: install_path.into_boxed_path(),
-                editable: false,
-                r#virtual: true,
+                editable: Some(false),
+                r#virtual: Some(true),
             };
             uv_distribution_types::SourceDist::Directory(dir_dist)
         }
@@ -3251,9 +3255,9 @@ impl Source {
         let path = relative_to(&directory_dist.install_path, root)
             .or_else(|_| std::path::absolute(&directory_dist.install_path))
             .map_err(LockErrorKind::DistributionRelativePath)?;
-        if directory_dist.editable {
+        if directory_dist.editable.unwrap_or(false) {
             Ok(Source::Editable(path.into_boxed_path()))
-        } else if directory_dist.r#virtual {
+        } else if directory_dist.r#virtual.unwrap_or(false) {
             Ok(Source::Virtual(path.into_boxed_path()))
         } else {
             Ok(Source::Directory(path.into_boxed_path()))
@@ -4801,8 +4805,8 @@ fn normalize_requirement(
         marker: requires_python.simplify_markers(requirement.marker),
         source: RequirementSource::Directory {
             install_path,
-            editable,
-            r#virtual,
+            editable: Some(editable.unwrap_or(false)),
+            r#virtual: Some(r#virtual.unwrap_or(false)),
             url,
         },
         origin: None,
@@ -54,6 +54,11 @@ pub(crate) fn requires_python(tree: MarkerTree) -> Option<RequiresPythonRange> {
                 collect_python_markers(tree, markers, range);
             }
         }
+        MarkerTreeKind::List(marker) => {
+            for (_, tree) in marker.children() {
+                collect_python_markers(tree, markers, range);
+            }
+        }
     }
 }

|
|||
|
|
@ -698,6 +698,11 @@ impl ResolverOutput {
|
|||
add_marker_params_from_tree(tree, set);
|
||||
}
|
||||
}
|
||||
MarkerTreeKind::List(marker) => {
|
||||
for (_, tree) in marker.children() {
|
||||
add_marker_params_from_tree(tree, set);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
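Both hunks above add the same missing match arm: children of a `MarkerTreeKind::List` node must be visited like those of any other compound node, otherwise values nested inside a list marker would presumably be skipped during collection. A generic sketch of the recursion shape (a simplified tree, not uv's `MarkerTree`):

    // Simplified stand-in for a marker tree with a compound variant.
    enum Tree {
        Leaf(&'static str),
        List(Vec<Tree>),
    }

    fn collect<'a>(tree: &'a Tree, out: &mut Vec<&'a str>) {
        match tree {
            Tree::Leaf(name) => out.push(name),
            // The fix in both hunks: recurse into each child of a list node.
            Tree::List(children) => {
                for child in children {
                    collect(child, out);
                }
            }
        }
    }

    fn main() {
        let tree = Tree::List(vec![Tree::Leaf("python_version"), Tree::Leaf("os_name")]);
        let mut out = Vec::new();
        collect(&tree, &mut out);
        assert_eq!(out, ["python_version", "os_name"]);
    }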
@@ -894,16 +899,11 @@ impl From<ResolverOutput> for uv_distribution_types::Resolution {
         // Re-add the edges to the reduced graph.
         for edge in graph.edge_indices() {
             let (source, target) = graph.edge_endpoints(edge).unwrap();
-            // OK to ignore conflicting marker because we've asserted
-            // above that we aren't in universal mode. If we aren't in
-            // universal mode, then there can be no conflicts since
-            // conflicts imply forks and forks imply universal mode.
-            let marker = graph[edge].pep508();

             match (&graph[source], &graph[target]) {
                 (ResolutionGraphNode::Root, ResolutionGraphNode::Dist(target_dist)) => {
                     let target = inverse[&target_dist.name()];
-                    transformed.update_edge(root, target, Edge::Prod(marker));
+                    transformed.update_edge(root, target, Edge::Prod);
                 }
                 (
                     ResolutionGraphNode::Dist(source_dist),
@@ -913,11 +913,11 @@ impl From<ResolverOutput> for uv_distribution_types::Resolution {
                     let target = inverse[&target_dist.name()];

                     let edge = if let Some(extra) = source_dist.extra.as_ref() {
-                        Edge::Optional(extra.clone(), marker)
+                        Edge::Optional(extra.clone())
                     } else if let Some(dev) = source_dist.dev.as_ref() {
-                        Edge::Dev(dev.clone(), marker)
+                        Edge::Dev(dev.clone())
                     } else {
-                        Edge::Prod(marker)
+                        Edge::Prod
                     };

                     transformed.add_edge(source, target, edge);
@@ -620,6 +620,7 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
             &state.python_requirement,
+            &state.pubgrub,
         )?;

         match forked_deps {
             ForkedDependencies::Unavailable(reason) => {
                 // Then here, if we get a reason that we consider unrecoverable, we should
@@ -63,9 +63,9 @@ impl Urls {
             verbatim: _,
         } = package_url
         {
-            if !*editable {
+            if editable.is_none() {
                 debug!("Allowing an editable variant of {}", &package_url.verbatim);
-                *editable = true;
+                *editable = Some(true);
             }
         }
     }
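With `editable` now an `Option`, the upgrade only fires when the flag is unspecified; an explicit `Some(false)` is respected rather than silently overridden. The mutation pattern in isolation:

    // Promote an unspecified flag to editable, leaving explicit choices alone.
    fn allow_editable(editable: &mut Option<bool>) {
        if editable.is_none() {
            *editable = Some(true);
        }
    }

    fn main() {
        let mut unspecified = None;
        allow_editable(&mut unspecified);
        assert_eq!(unspecified, Some(true));

        let mut explicit = Some(false);
        allow_editable(&mut explicit);
        assert_eq!(explicit, Some(false));
    }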
@@ -201,8 +201,9 @@ fn same_resource(a: &ParsedUrl, b: &ParsedUrl, git: &GitResolver) -> bool {
                 || is_same_file(&a.install_path, &b.install_path).unwrap_or(false)
         }
         (ParsedUrl::Directory(a), ParsedUrl::Directory(b)) => {
-            a.install_path == b.install_path
-                || is_same_file(&a.install_path, &b.install_path).unwrap_or(false)
+            (a.install_path == b.install_path
+                || is_same_file(&a.install_path, &b.install_path).unwrap_or(false))
+                && a.editable.is_none_or(|a| b.editable.is_none_or(|b| a == b))
         }
         _ => false,
     }
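The extra clause makes two directory URLs distinct only when both explicitly specify `editable` and disagree; an unspecified side stays compatible with anything. `Option::is_none_or` (stable since Rust 1.82) expresses that neatly:

    // Compatible unless both sides are explicit and differ.
    fn editable_compatible(a: Option<bool>, b: Option<bool>) -> bool {
        a.is_none_or(|a| b.is_none_or(|b| a == b))
    }

    fn main() {
        assert!(editable_compatible(None, Some(true)));
        assert!(editable_compatible(Some(true), Some(true)));
        assert!(!editable_compatible(Some(true), Some(false)));
    }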
Some files were not shown because too many files have changed in this diff.