mirror of https://github.com/astral-sh/uv
Merge branch 'main' into zb/fix-python-dir
commit 7c8a5d3efe
@@ -1,4 +1,4 @@
 [profile.default]
 # Mark tests that take longer than 10s as slow.
-# Terminate after 90s as a stop-gap measure to terminate on deadlock.
-slow-timeout = { period = "10s", terminate-after = 9 }
+# Terminate after 120s as a stop-gap measure to terminate on deadlock.
+slow-timeout = { period = "10s", terminate-after = 12 }
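(Note: nextest's `terminate-after` counts slow-timeout periods, so the old setting hard-killed a hung test after 9 x 10s = 90s and the new one after 12 x 10s = 120s, matching the updated comment.)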

@@ -54,7 +54,7 @@ jobs:
       - name: "Prep README.md"
         run: python scripts/transform_readme.py --target pypi
       - name: "Build sdist"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           command: sdist
           args: --out dist
@@ -74,7 +74,7 @@ jobs:

       # uv-build
       - name: "Build sdist uv-build"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           command: sdist
           args: --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
@@ -103,7 +103,7 @@ jobs:

       # uv
       - name: "Build wheels - x86_64"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: x86_64
           args: --release --locked --out dist --features self-update
@@ -133,7 +133,7 @@ jobs:

       # uv-build
       - name: "Build wheels uv-build - x86_64"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: x86_64
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
@@ -157,7 +157,7 @@ jobs:

       # uv
       - name: "Build wheels - aarch64"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: aarch64
           args: --release --locked --out dist --features self-update
@@ -193,7 +193,7 @@ jobs:

       # uv-build
       - name: "Build wheels uv-build - aarch64"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: aarch64
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
@@ -231,7 +231,7 @@ jobs:

       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           args: --release --locked --out dist --features self-update,windows-gui-bin
@@ -267,7 +267,7 @@ jobs:

       # uv-build
       - name: "Build wheels uv-build"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
@@ -303,7 +303,7 @@ jobs:

       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.target }}
           # Generally, we try to build in a target docker container. In this case however, a
@@ -368,7 +368,7 @@ jobs:

       # uv-build
       - name: "Build wheels uv-build"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.target }}
           manylinux: auto
@@ -412,7 +412,7 @@ jobs:

       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           # On `aarch64`, use `manylinux: 2_28`; otherwise, use `manylinux: auto`.
@@ -461,7 +461,7 @@ jobs:

       # uv-build
       - name: "Build wheels uv-build"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           # On `aarch64`, use `manylinux: 2_28`; otherwise, use `manylinux: auto`.
@@ -509,7 +509,7 @@ jobs:

       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           manylinux: auto
@@ -561,7 +561,7 @@ jobs:

       # uv-build
       - name: "Build wheels uv-build"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           manylinux: auto
@@ -614,7 +614,7 @@ jobs:

       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           manylinux: auto
@@ -671,7 +671,7 @@ jobs:

       # uv-build
       - name: "Build wheels uv-build"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           manylinux: auto
@@ -712,13 +712,13 @@ jobs:

       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}
           args: --release --locked --out dist --features self-update
-      - uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2
+      - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
         name: "Test wheel"
         with:
           arch: ${{ matrix.platform.arch }}
@@ -761,13 +761,13 @@ jobs:

       # uv-build
       - name: "Build wheels uv-build"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
-      - uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2
+      - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
         name: "Test wheel uv-build"
         with:
           arch: ${{ matrix.platform.arch }}
@@ -807,7 +807,7 @@ jobs:

       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.target }}
           manylinux: musllinux_1_1
@@ -854,7 +854,7 @@ jobs:

       # uv-build
       - name: "Build wheels uv-build"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.target }}
           manylinux: musllinux_1_1
@@ -901,7 +901,7 @@ jobs:

       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           manylinux: musllinux_1_1
@@ -966,7 +966,7 @@ jobs:

       # uv-build
       - name: "Build wheels"
-        uses: PyO3/maturin-action@44479ae1b6b1a57f561e03add8832e62c185eb17 # v1.48.1
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
           target: ${{ matrix.platform.target }}
           manylinux: musllinux_1_1
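(Note: throughout these workflow diffs, third-party actions are pinned to full commit SHAs, with the human-readable release recorded in a trailing comment; each version bump therefore changes both the SHA and the comment.)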
@@ -1,11 +1,19 @@
-# Build and publish a Docker image.
+# Build and publish Docker images.
 #
-# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a local
-# artifacts job within `cargo-dist`.
+# Uses Depot for multi-platform builds. Includes both a `uv` base image, which
+# is just the binary in a scratch image, and a set of extra, common images with
+# the uv binary installed.
 #
-# TODO(charlie): Ideally, the publish step would happen as a publish job within `cargo-dist`, but
-# sharing the built image as an artifact between jobs is challenging.
-name: "Build Docker image"
+# Images are built on all runs.
+#
+# On release, assumed to run as a subworkflow of .github/workflows/release.yml;
+# specifically, as a local artifacts job within `cargo-dist`. In this case,
+# images are published based on the `plan`.
+#
+# TODO(charlie): Ideally, the publish step would happen as a publish job within
+# `cargo-dist`, but sharing the built image as an artifact between jobs is
+# challenging.
+name: "Docker images"

 on:
   workflow_call:
@@ -29,35 +37,67 @@ on:
       - .github/workflows/build-docker.yml

 env:
-  UV_BASE_IMG: ghcr.io/${{ github.repository_owner }}/uv
+  UV_GHCR_IMAGE: ghcr.io/${{ github.repository_owner }}/uv
+  UV_DOCKERHUB_IMAGE: docker.io/astral/uv

 jobs:
-  docker-build:
-    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
-    name: Build Docker image (ghcr.io/astral-sh/uv) for ${{ matrix.platform }}
+  docker-plan:
+    name: plan
     runs-on: ubuntu-latest
+    outputs:
+      login: ${{ steps.plan.outputs.login }}
+      push: ${{ steps.plan.outputs.push }}
+      tag: ${{ steps.plan.outputs.tag }}
+      action: ${{ steps.plan.outputs.action }}
+    steps:
+      - name: Set push variable
+        env:
+          DRY_RUN: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
+          TAG: ${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag }}
+          IS_LOCAL_PR: ${{ github.event.pull_request.head.repo.full_name == 'astral-sh/uv' }}
+        id: plan
+        run: |
+          if [ "${{ env.DRY_RUN }}" == "false" ]; then
+            echo "login=true" >> "$GITHUB_OUTPUT"
+            echo "push=true" >> "$GITHUB_OUTPUT"
+            echo "tag=${{ env.TAG }}" >> "$GITHUB_OUTPUT"
+            echo "action=build and publish" >> "$GITHUB_OUTPUT"
+          else
+            echo "login=${{ env.IS_LOCAL_PR }}" >> "$GITHUB_OUTPUT"
+            echo "push=false" >> "$GITHUB_OUTPUT"
+            echo "tag=dry-run" >> "$GITHUB_OUTPUT"
+            echo "action=build" >> "$GITHUB_OUTPUT"
+          fi
+
+  docker-publish-base:
+    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
+    name: ${{ needs.docker-plan.outputs.action }} uv
+    needs:
+      - docker-plan
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      id-token: write # for Depot OIDC and GHCR signing
+      packages: write # for GHCR image pushes
+      attestations: write # for GHCR attestations
     environment:
-      name: release
-    strategy:
-      fail-fast: false
-      matrix:
-        platform:
-          - linux/amd64
-          - linux/arm64
+      name: ${{ needs.docker-plan.outputs.push == 'true' && 'release' || '' }}
+    outputs:
+      image-tags: ${{ steps.meta.outputs.tags }}
+      image-annotations: ${{ steps.meta.outputs.annotations }}
+      image-digest: ${{ steps.build.outputs.digest }}
+      image-version: ${{ steps.meta.outputs.version }}
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           submodules: recursive

-      # Login to DockerHub first, to avoid rate-limiting
+      # Login to DockerHub (when not pushing, it's to avoid rate-limiting)
       - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
-        # PRs from forks don't have access to secrets, disable this step in that case.
-        if: ${{ github.event.pull_request.head.repo.full_name == 'astral-sh/uv' }}
+        if: ${{ needs.docker-plan.outputs.login == 'true' }}
         with:
-          username: astralshbot
-          password: ${{ secrets.DOCKERHUB_TOKEN_RO }}
-
-      - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+          username: ${{ needs.docker-plan.outputs.push == 'true' && 'astral' || 'astralshbot' }}
+          password: ${{ needs.docker-plan.outputs.push == 'true' && secrets.DOCKERHUB_TOKEN_RW || secrets.DOCKERHUB_TOKEN_RO }}

       - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
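[Editor's note: the `docker-plan` job above drives every downstream job through step outputs. Below is a minimal, locally runnable sketch of the same logic; the DRY_RUN, TAG, and IS_LOCAL_PR defaults are hypothetical stand-ins for the workflow's evaluated `inputs.plan` expressions.]

    #!/usr/bin/env bash
    # Stand-ins for the workflow's evaluated expressions:
    DRY_RUN="${DRY_RUN:-true}"           # inputs.plan == '' || announcement_tag_is_implicit
    TAG="${TAG:-}"                       # fromJson(inputs.plan).announcement_tag
    IS_LOCAL_PR="${IS_LOCAL_PR:-false}"  # PR branch lives in astral-sh/uv (not a fork)
    GITHUB_OUTPUT="${GITHUB_OUTPUT:-/dev/stdout}"  # provided by the Actions runner

    if [ "$DRY_RUN" == "false" ]; then
      # Release run: log in, push, and tag with the announced version.
      echo "login=true" >> "$GITHUB_OUTPUT"
      echo "push=true" >> "$GITHUB_OUTPUT"
      echo "tag=$TAG" >> "$GITHUB_OUTPUT"
      echo "action=build and publish" >> "$GITHUB_OUTPUT"
    else
      # Dry run: only log in for same-repo PRs (forks cannot read secrets); never push.
      echo "login=$IS_LOCAL_PR" >> "$GITHUB_OUTPUT"
      echo "push=false" >> "$GITHUB_OUTPUT"
      echo "tag=dry-run" >> "$GITHUB_OUTPUT"
      echo "action=build" >> "$GITHUB_OUTPUT"
    fi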

@@ -65,13 +105,15 @@ jobs:
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

+      - uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5
+
       - name: Check tag consistency
-        if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
+        if: ${{ needs.docker-plan.outputs.push == 'true' }}
         run: |
           version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
-          if [ "${{ fromJson(inputs.plan).announcement_tag }}" != "${version}" ]; then
+          if [ "${{ needs.docker-plan.outputs.tag }}" != "${version}" ]; then
             echo "The input tag does not match the version from pyproject.toml:" >&2
-            echo "${{ fromJson(inputs.plan).announcement_tag }}" >&2
+            echo "${{ needs.docker-plan.outputs.tag }}" >&2
             echo "${version}" >&2
             exit 1
           else
@@ -81,107 +123,50 @@ jobs:
       - name: Extract metadata (tags, labels) for Docker
         id: meta
         uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
+        env:
+          DOCKER_METADATA_ANNOTATIONS_LEVELS: index
         with:
-          images: ${{ env.UV_BASE_IMG }}
+          images: |
+            ${{ env.UV_GHCR_IMAGE }}
+            ${{ env.UV_DOCKERHUB_IMAGE }}
           # Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name
           tags: |
-            type=raw,value=dry-run,enable=${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
-            type=pep440,pattern={{ version }},value=${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag || 'dry-run' }},enable=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
+            type=raw,value=dry-run,enable=${{ needs.docker-plan.outputs.push == 'false' }}
+            type=pep440,pattern={{ version }},value=${{ needs.docker-plan.outputs.tag }},enable=${{ needs.docker-plan.outputs.push }}
+            type=pep440,pattern={{ major }}.{{ minor }},value=${{ needs.docker-plan.outputs.tag }},enable=${{ needs.docker-plan.outputs.push }}

-      - name: Normalize Platform Pair (replace / with -)
-        run: |
-          platform=${{ matrix.platform }}
-          echo "PLATFORM_TUPLE=${platform//\//-}" >> $GITHUB_ENV
-
       # Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
       - name: Build and push by digest
         id: build
-        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
+        uses: depot/build-push-action@2583627a84956d07561420dcc1d0eb1f2af3fac0 # v1.15.0
         with:
+          project: 7hd4vdzmw5 # astral-sh/uv
           context: .
-          platforms: ${{ matrix.platform }}
-          cache-from: type=gha,scope=uv-${{ env.PLATFORM_TUPLE }}
-          cache-to: type=gha,mode=min,scope=uv-${{ env.PLATFORM_TUPLE }}
+          platforms: linux/amd64,linux/arm64
+          push: ${{ needs.docker-plan.outputs.push }}
+          tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
-          outputs: type=image,name=${{ env.UV_BASE_IMG }},push-by-digest=true,name-canonical=true,push=${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
+          # TODO(zanieb): Annotations are not supported by Depot yet and are ignored
+          annotations: ${{ steps.meta.outputs.annotations }}

-      - name: Export digests
-        run: |
-          mkdir -p /tmp/digests
-          digest="${{ steps.build.outputs.digest }}"
-          touch "/tmp/digests/${digest#sha256:}"
-
-      - name: Upload digests
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+      - name: Generate artifact attestation for base image
+        if: ${{ needs.docker-plan.outputs.push == 'true' }}
+        uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
         with:
-          name: digests-${{ env.PLATFORM_TUPLE }}
-          path: /tmp/digests/*
-          if-no-files-found: error
-          retention-days: 1
-
-  docker-publish:
-    name: Publish Docker image (ghcr.io/astral-sh/uv)
-    runs-on: ubuntu-latest
-    environment:
-      name: release
-    needs:
-      - docker-build
-    if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
-    steps:
-      # Login to DockerHub first, to avoid rate-limiting
-      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
-        with:
-          username: astralshbot
-          password: ${{ secrets.DOCKERHUB_TOKEN_RO }}
-
-      - name: Download digests
-        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
-        with:
-          path: /tmp/digests
-          pattern: digests-*
-          merge-multiple: true
-
-      - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
-
-      - name: Extract metadata (tags, labels) for Docker
-        id: meta
-        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
-        with:
-          images: ${{ env.UV_BASE_IMG }}
-          # Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
-          tags: |
-            type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
-            type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
-
-      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
-        with:
-          registry: ghcr.io
-          username: ${{ github.repository_owner }}
-          password: ${{ secrets.GITHUB_TOKEN }}
-
-      # Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
-      - name: Create manifest list and push
-        working-directory: /tmp/digests
-        # The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
-        # The printf will expand the base image with the `<UV_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
-        # The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <UV_BASE_IMG>@sha256:<sha256_1> <UV_BASE_IMG>@sha256:<sha256_2> ...`
-        run: |
-          docker buildx imagetools create \
-            $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
-            $(printf '${{ env.UV_BASE_IMG }}@sha256:%s ' *)
+          subject-name: ${{ env.UV_GHCR_IMAGE }}
+          subject-digest: ${{ steps.build.outputs.digest }}

   docker-publish-extra:
-    name: Publish additional Docker image based on ${{ matrix.image-mapping }}
+    name: ${{ needs.docker-plan.outputs.action }} ${{ matrix.image-mapping }}
     runs-on: ubuntu-latest
     environment:
-      name: release
+      name: ${{ needs.docker-plan.outputs.push == 'true' && 'release' || '' }}
     needs:
-      - docker-publish
-    if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
+      - docker-plan
+      - docker-publish-base
     permissions:
-      packages: write
-      attestations: write # needed to push image attestations to the Github attestation store
-      id-token: write # needed for signing the images with GitHub OIDC Token
+      id-token: write # for Depot OIDC and GHCR signing
+      packages: write # for GHCR image pushes
+      attestations: write # for GHCR attestations
     strategy:
       fail-fast: false
       matrix:
@@ -213,13 +198,12 @@ jobs:
           - python:3.9-slim-bookworm,python3.9-bookworm-slim
-          - python:3.8-slim-bookworm,python3.8-bookworm-slim
     steps:
-      # Login to DockerHub first, to avoid rate-limiting
+      # Login to DockerHub (when not pushing, it's to avoid rate-limiting)
       - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        if: ${{ needs.docker-plan.outputs.login == 'true' }}
         with:
-          username: astralshbot
-          password: ${{ secrets.DOCKERHUB_TOKEN_RO }}
-
-      - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
+          username: ${{ needs.docker-plan.outputs.push == 'true' && 'astral' || 'astralshbot' }}
+          password: ${{ needs.docker-plan.outputs.push == 'true' && secrets.DOCKERHUB_TOKEN_RW || secrets.DOCKERHUB_TOKEN_RO }}

       - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
@@ -227,6 +211,8 @@ jobs:
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

+      - uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5
+
       - name: Generate Dynamic Dockerfile Tags
         shell: bash
         run: |
@@ -238,7 +224,8 @@ jobs:
           # Generate Dockerfile content
           cat <<EOF > Dockerfile
           FROM ${BASE_IMAGE}
-          COPY --from=${{ env.UV_BASE_IMG }}:latest /uv /uvx /usr/local/bin/
+          COPY --from=${{ env.UV_GHCR_IMAGE }}:latest /uv /uvx /usr/local/bin/
           ENV UV_TOOL_BIN_DIR="/usr/local/bin"
           ENTRYPOINT []
           CMD ["/usr/local/bin/uv"]
           EOF
@@ -249,17 +236,14 @@ jobs:
           # Loop through all base tags and append its docker metadata pattern to the list
           # Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
           IFS=','; for TAG in ${BASE_TAGS}; do
-            TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${{ fromJson(inputs.plan).announcement_tag }}\n"
-            TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${{ fromJson(inputs.plan).announcement_tag }}\n"
+            TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${{ needs.docker-plan.outputs.tag }}\n"
+            TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${{ needs.docker-plan.outputs.tag }}\n"
             TAG_PATTERNS="${TAG_PATTERNS}type=raw,value=${TAG}\n"
           done

           # Remove the trailing newline from the pattern list
           TAG_PATTERNS="${TAG_PATTERNS%\\n}"

-          # Export image cache name
-          echo "IMAGE_REF=${BASE_IMAGE//:/-}" >> $GITHUB_ENV
-
           # Export tag patterns using the multiline env var syntax
           {
             echo "TAG_PATTERNS<<EOF"
@@ -274,7 +258,9 @@ jobs:
         env:
           DOCKER_METADATA_ANNOTATIONS_LEVELS: index
         with:
-          images: ${{ env.UV_BASE_IMG }}
+          images: |
+            ${{ env.UV_GHCR_IMAGE }}
+            ${{ env.UV_DOCKERHUB_IMAGE }}
           flavor: |
             latest=false
           tags: |
@@ -282,67 +268,84 @@ jobs:

       - name: Build and push
         id: build-and-push
-        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
+        uses: depot/build-push-action@2583627a84956d07561420dcc1d0eb1f2af3fac0 # v1.15.0
         with:
           context: .
+          project: 7hd4vdzmw5 # astral-sh/uv
           platforms: linux/amd64,linux/arm64
-          # We do not really need to cache here as the Dockerfile is tiny
-          #cache-from: type=gha,scope=uv-${{ env.IMAGE_REF }}
-          #cache-to: type=gha,mode=min,scope=uv-${{ env.IMAGE_REF }}
-          push: true
+          push: ${{ needs.docker-plan.outputs.push }}
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
+          # TODO(zanieb): Annotations are not supported by Depot yet and are ignored
+          annotations: ${{ steps.meta.outputs.annotations }}

       - name: Generate artifact attestation
+        if: ${{ needs.docker-plan.outputs.push == 'true' }}
         uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
         with:
-          subject-name: ${{ env.UV_BASE_IMG }}
+          subject-name: ${{ env.UV_GHCR_IMAGE }}
           subject-digest: ${{ steps.build-and-push.outputs.digest }}
           # push-to-registry is explicitly not enabled to maintain full control over the top image

-  # This is effectively a duplicate of `docker-publish` to make https://github.com/astral-sh/uv/pkgs/container/uv
-  # show the uv base image first since GitHub always shows the last updated image digests
-  # This works by annotating the original digests (previously non-annotated) which triggers an update to ghcr.io
-  docker-republish:
-    name: Annotate Docker image (ghcr.io/astral-sh/uv)
+      # Push annotations manually.
+      # See `docker-annotate-base` for details.
+      - name: Add annotations to images
+        if: ${{ needs.docker-plan.outputs.push == 'true' }}
+        env:
+          IMAGES: "${{ env.UV_GHCR_IMAGE }} ${{ env.UV_DOCKERHUB_IMAGE }}"
+          DIGEST: ${{ steps.build-and-push.outputs.digest }}
+          TAGS: ${{ steps.meta.outputs.tags }}
+          ANNOTATIONS: ${{ steps.meta.outputs.annotations }}
+        run: |
+          set -x
+          readarray -t lines <<< "$ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done
+          for image in $IMAGES; do
+            readarray -t lines < <(grep "^${image}:" <<< "$TAGS"); tags=(); for line in "${lines[@]}"; do tags+=(-t "$line"); done
+            docker buildx imagetools create \
+              "${annotations[@]}" \
+              "${tags[@]}" \
+              "${image}@${DIGEST}"
+          done
+
+      # See `docker-annotate-base` for details.
+      - name: Export manifest digest
+        id: manifest-digest
+        if: ${{ needs.docker-plan.outputs.push == 'true' }}
+        env:
+          IMAGE: ${{ env.UV_GHCR_IMAGE }}
+          VERSION: ${{ steps.meta.outputs.version }}
+        run: |
+          digest="$(
+            docker buildx imagetools inspect \
+              "${IMAGE}:${VERSION}" \
+              --format '{{json .Manifest}}' \
+              | jq -r '.digest'
+          )"
+          echo "digest=${digest}" >> "$GITHUB_OUTPUT"
+
+      # See `docker-annotate-base` for details.
+      - name: Generate artifact attestation
+        if: ${{ needs.docker-plan.outputs.push == 'true' }}
+        uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
+        with:
+          subject-name: ${{ env.UV_GHCR_IMAGE }}
+          subject-digest: ${{ steps.manifest-digest.outputs.digest }}
+
+  # Annotate the base image
+  docker-annotate-base:
+    name: annotate uv
     runs-on: ubuntu-latest
     environment:
-      name: release
+      name: ${{ needs.docker-plan.outputs.push == 'true' && 'release' || '' }}
     needs:
+      - docker-plan
+      - docker-publish-base
       - docker-publish-extra
-    if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
     permissions:
       packages: write
       attestations: write # needed to push image attestations to the Github attestation store
       id-token: write # needed for signing the images with GitHub OIDC Token
+    if: ${{ needs.docker-plan.outputs.push == 'true' }}
    steps:
       # Login to DockerHub first, to avoid rate-limiting
       - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
-          username: astralshbot
-          password: ${{ secrets.DOCKERHUB_TOKEN_RO }}
-
-      - name: Download digests
-        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
-        with:
-          path: /tmp/digests
-          pattern: digests-*
-          merge-multiple: true
-
-      - uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0
-
-      - name: Extract metadata (tags, labels) for Docker
-        id: meta
-        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
-        env:
-          DOCKER_METADATA_ANNOTATIONS_LEVELS: index
-        with:
-          images: ${{ env.UV_BASE_IMG }}
-          # Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
-          tags: |
-            type=pep440,pattern={{ version }},value=${{ fromJson(inputs.plan).announcement_tag }}
-            type=pep440,pattern={{ major }}.{{ minor }},value=${{ fromJson(inputs.plan).announcement_tag }}
+          username: astral
+          password: ${{ secrets.DOCKERHUB_TOKEN_RW }}

       - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
@@ -350,22 +353,37 @@ jobs:
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

-      # Adapted from https://docs.docker.com/build/ci/github-actions/multi-platform/
-      - name: Create manifest list and push
-        working-directory: /tmp/digests
+      # Depot doesn't support annotating images, so we need to do so manually
+      # afterwards. Mutating the manifest is desirable regardless, because we
+      # want to bump the base image to appear at the top of the list on GHCR.
+      # However, once annotation support is added to Depot, this step can be
+      # minimized to just touch the GHCR manifest.
+      - name: Add annotations to images
+        env:
+          IMAGES: "${{ env.UV_GHCR_IMAGE }} ${{ env.UV_DOCKERHUB_IMAGE }}"
+          DIGEST: ${{ needs.docker-publish-base.outputs.image-digest }}
+          TAGS: ${{ needs.docker-publish-base.outputs.image-tags }}
+          ANNOTATIONS: ${{ needs.docker-publish-base.outputs.image-annotations }}
         # The readarray part is used to make sure the quoting and special characters are preserved on expansion (e.g. spaces)
-        # The jq command expands the docker/metadata json "tags" array entry to `-t tag1 -t tag2 ...` for each tag in the array
-        # The printf will expand the base image with the `<UV_BASE_IMG>@sha256:<sha256> ...` for each sha256 in the directory
-        # The final command becomes `docker buildx imagetools create -t tag1 -t tag2 ... <UV_BASE_IMG>@sha256:<sha256_1> <UV_BASE_IMG>@sha256:<sha256_2> ...`
+        # The final command becomes `docker buildx imagetools create --annotation 'index:foo=1' --annotation 'index:bar=2' ... -t tag1 -t tag2 ... <IMG>@sha256:<sha256>`
         run: |
-          readarray -t lines <<< "$DOCKER_METADATA_OUTPUT_ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done
+          set -x
+          readarray -t lines <<< "$ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done
+          for image in $IMAGES; do
+            readarray -t lines < <(grep "^${image}:" <<< "$TAGS"); tags=(); for line in "${lines[@]}"; do tags+=(-t "$line"); done
            docker buildx imagetools create \
              "${annotations[@]}" \
-            $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
-            $(printf '${{ env.UV_BASE_IMG }}@sha256:%s ' *)
+              "${tags[@]}" \
+              "${image}@${DIGEST}"
+          done

-      - name: Share manifest digest
+      # Now that we've modified the manifest, we need to attest it again.
+      # Note we only generate an attestation for GHCR.
+      - name: Export manifest digest
         id: manifest-digest
+        env:
+          IMAGE: ${{ env.UV_GHCR_IMAGE }}
+          VERSION: ${{ needs.docker-publish-base.outputs.image-version }}
         # To sign the manifest, we need its digest. Unfortunately "docker
         # buildx imagetools create" does not (yet) have a clean way of sharing
         # the digest of the manifest it creates (see docker/buildx#2407), so
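[Editor's note: the readarray idiom used in the annotation steps above is easy to misread: it converts a newline-separated string into repeated CLI flags while preserving spaces inside each entry. A minimal sketch; the annotation values below are hypothetical.]

    #!/usr/bin/env bash
    # Hypothetical docker/metadata-action output (newline-separated entries):
    ANNOTATIONS=$'index:org.opencontainers.image.version=0.7.0\nindex:org.opencontainers.image.description=An extremely fast Python package manager'

    # Split on newlines only (not spaces), then build an argument array.
    readarray -t lines <<< "$ANNOTATIONS"
    annotations=()
    for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done

    # "${annotations[@]}" now expands to one properly quoted
    # `--annotation <entry>` pair per line, suitable for passing to
    # `docker buildx imagetools create`.
    printf '%q ' "${annotations[@]}"; echo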

@@ -377,7 +395,7 @@ jobs:
         run: |
           digest="$(
             docker buildx imagetools inspect \
-              "${UV_BASE_IMG}:${DOCKER_METADATA_OUTPUT_VERSION}" \
+              "${IMAGE}:${VERSION}" \
               --format '{{json .Manifest}}' \
               | jq -r '.digest'
           )"
@@ -386,6 +404,5 @@ jobs:
       - name: Generate artifact attestation
         uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
         with:
-          subject-name: ${{ env.UV_BASE_IMG }}
+          subject-name: ${{ env.UV_GHCR_IMAGE }}
           subject-digest: ${{ steps.manifest-digest.outputs.digest }}
-          # push-to-registry is explicitly not enabled to maintain full control over the top image

@@ -14,8 +14,9 @@ env:
   CARGO_INCREMENTAL: 0
   CARGO_NET_RETRY: 10
   CARGO_TERM_COLOR: always
-  RUSTUP_MAX_RETRIES: 10
+  PYTHON_VERSION: "3.12"
+  RUSTUP_MAX_RETRIES: 10
   RUST_BACKTRACE: 1

 jobs:
   determine_changes:
@@ -81,7 +82,7 @@ jobs:
         run: rustup component add rustfmt

       - name: "Install uv"
-        uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
+        uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1

       - name: "rustfmt"
         run: cargo fmt --all --check
@@ -125,11 +126,11 @@ jobs:
     name: "cargo clippy | ubuntu"
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
           save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Check uv_build dependencies"
-        uses: EmbarkStudios/cargo-deny-action@34899fc7ba81ca6268d5947a7a16b4649013fea1 # v2.0.11
+        uses: EmbarkStudios/cargo-deny-action@30f817c6f72275c6d54dc744fbca09ebc958599f # v2.0.12
         with:
           command: check bans
           manifest-path: crates/uv-build/Cargo.toml
@@ -155,7 +156,7 @@ jobs:
         run: |
           Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse

-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
           workspaces: ${{ env.UV_WORKSPACE }}

@@ -174,7 +175,7 @@ jobs:
     name: "cargo dev generate-all"
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
           save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Generate all"
@@ -187,7 +188,7 @@ jobs:
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - name: "Install cargo shear"
-        uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
+        uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
         with:
           tool: cargo-shear
       - run: cargo shear
@@ -207,21 +208,24 @@ jobs:

       - uses: rui314/setup-mold@v1

-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: "Install Rust toolchain"
         run: rustup show

-      - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
+      - uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
       - name: "Install required Python versions"
         run: uv python install

       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
+        uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
         with:
           tool: cargo-nextest

       - name: "Cargo test"
+        env:
+          # Retry more than default to reduce flakes in CI
+          UV_HTTP_RETRIES: 5
         run: |
           cargo nextest run \
             --features python-patch \
@@ -231,7 +235,8 @@ jobs:
   cargo-test-macos:
     timeout-minutes: 15
     needs: determine_changes
-    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
+    # Only run macOS tests on main without opt-in
+    if: ${{ contains(github.event.pull_request.labels.*.name, 'test:macos') || github.ref == 'refs/heads/main' }}
     runs-on: macos-latest-xlarge # github-macos-14-aarch64-6
     name: "cargo test | macos"
     steps:
@@ -239,21 +244,24 @@ jobs:

       - uses: rui314/setup-mold@v1

-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: "Install Rust toolchain"
         run: rustup show

-      - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
+      - uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
       - name: "Install required Python versions"
         run: uv python install

       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
+        uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
         with:
           tool: cargo-nextest

       - name: "Cargo test"
+        env:
+          # Retry more than default to reduce flakes in CI
+          UV_HTTP_RETRIES: 5
         run: |
           cargo nextest run \
             --no-default-features \
@@ -265,7 +273,7 @@ jobs:
     timeout-minutes: 15
     needs: determine_changes
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
-    runs-on: github-windows-2025-x86_64-16
+    runs-on: depot-windows-2022-16
     name: "cargo test | windows"
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -278,11 +286,11 @@ jobs:
         run: |
           Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse

-      - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
+      - uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
       - name: "Install required Python versions"
         run: uv python install

-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
           workspaces: ${{ env.UV_WORKSPACE }}

@@ -291,29 +299,15 @@ jobs:
         run: rustup show

       - name: "Install cargo nextest"
-        uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
+        uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
         with:
           tool: cargo-nextest

-      # Get crash dumps to debug the `exit_code: -1073741819` failures
-      - name: Configure crash dumps
-        if: runner.os == 'Windows'
-        shell: powershell
-        run: |
-          $dumps = "$env:GITHUB_WORKSPACE\dumps"
-          New-Item -Path $dumps -ItemType Directory -Force
-
-          # https://github.com/microsoft/terminal/wiki/Troubleshooting-Tips#capture-automatically
-          $reg = "HKLM:\SOFTWARE\Microsoft\Windows\Windows Error Reporting\LocalDumps"
-          New-Item -Path $reg -Force | Out-Null
-          Set-ItemProperty -Path $reg -Name "DumpFolder" -Value $dumps
-          Set-ItemProperty -Path $reg -Name "DumpType" -Value 2
-
       - name: "Cargo test"
-        id: test
-        continue-on-error: true
         working-directory: ${{ env.UV_WORKSPACE }}
         env:
+          # Retry more than default to reduce flakes in CI
+          UV_HTTP_RETRIES: 5
           # Avoid permission errors during concurrent tests
           # See https://github.com/astral-sh/uv/issues/6940
           UV_LINK_MODE: copy
@@ -325,42 +319,6 @@ jobs:
             --workspace \
             --status-level skip --failure-output immediate-final --no-fail-fast -j 20 --final-status-level slow

-      # Get crash dumps to debug the `exit_code: -1073741819` failures (contd.)
-      - name: Analyze crashes
-        if: steps.test.outcome == 'failure'
-        shell: powershell
-        run: |
-          $dumps = Get-ChildItem "$env:GITHUB_WORKSPACE\dumps\*.dmp" -ErrorAction SilentlyContinue
-          if (!$dumps) { exit 0 }
-
-          Write-Host "Found $($dumps.Count) crash dump(s)"
-
-          # Download cdb if needed
-          $cdb = "C:\Program Files (x86)\Windows Kits\10\Debuggers\x64\cdb.exe"
-          if (!(Test-Path $cdb)) {
-            # https://github.com/microsoft/react-native-windows/blob/f1570a5ef1c4fc1e78d0a0ad5af848ab91a4061c/vnext/Scripts/Analyze-Crash.ps1#L44-L56
-            Invoke-WebRequest "https://go.microsoft.com/fwlink/?linkid=2173743" -OutFile "$env:TEMP\sdk.exe"
-            Start-Process "$env:TEMP\sdk.exe" -ArgumentList "/features OptionId.WindowsDesktopDebuggers /quiet" -Wait
-          }
-
-          # Analyze each dump
-          foreach ($dump in $dumps) {
-            Write-Host "`n=== $($dump.Name) ==="
-            & $cdb -z $dump -c "!analyze -v; .ecxr; k; q" 2>&1 | Select-String -Pattern "(ExceptionCode:|SYMBOL_NAME:|IMAGE_NAME:|STACK_TEXT:)" -Context 0,2
-          }
-
-      - name: Upload crash dumps
-        if: steps.test.outcome == 'failure'
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: crash-dumps-${{ github.run_number }}
-          path: dumps/*.dmp
-          if-no-files-found: ignore
-
-      - name: Fail if tests failed
-        if: steps.test.outcome == 'failure'
-        run: exit 1
-
   # Separate jobs for the nightly crate
   windows-trampoline-check:
     timeout-minutes: 15
@@ -383,7 +341,7 @@ jobs:
         run: |
           Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse

-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
           workspaces: ${{ env.UV_WORKSPACE }}/crates/uv-trampoline

@@ -394,7 +352,7 @@ jobs:
           rustup component add rust-src --target ${{ matrix.target-arch }}-pc-windows-msvc

       - name: "Install cargo-bloat"
-        uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
+        uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
         with:
           tool: cargo-bloat

@@ -439,7 +397,7 @@ jobs:
       - name: Copy Git Repo to Dev Drive
         run: |
           Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
           workspaces: ${{ env.UV_WORKSPACE }}/crates/uv-trampoline
       - name: "Install Rust toolchain"
@@ -481,7 +439,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           fetch-depth: 0
-      - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
+      - uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
       - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
       - name: "Add SSH key"
         if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
@@ -494,7 +452,7 @@ jobs:

       - name: "Build docs (insiders)"
         if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
-        run: uvx --with-requirements docs/requirements.txt mkdocs build --strict -f mkdocs.insiders.yml
+        run: uvx --with-requirements docs/requirements-insiders.txt mkdocs build --strict -f mkdocs.insiders.yml

   build-binary-linux-libc:
     timeout-minutes: 10
@@ -507,7 +465,7 @@ jobs:

       - uses: rui314/setup-mold@v1

-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: "Build"
         run: cargo build
@@ -521,6 +479,31 @@ jobs:
             ./target/debug/uvx
           retention-days: 1

+  build-binary-linux-aarch64:
+    timeout-minutes: 10
+    needs: determine_changes
+    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
+    runs-on: github-ubuntu-24.04-aarch64-4
+    name: "build binary | linux aarch64"
+    steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+      - uses: rui314/setup-mold@v1
+
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+
+      - name: "Build"
+        run: cargo build
+
+      - name: "Upload binary"
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        with:
+          name: uv-linux-aarch64-${{ github.sha }}
+          path: |
+            ./target/debug/uv
+            ./target/debug/uvx
+          retention-days: 1
+
   build-binary-linux-musl:
     timeout-minutes: 10
     needs: determine_changes
@@ -537,7 +520,7 @@ jobs:
           sudo apt-get install musl-tools
           rustup target add x86_64-unknown-linux-musl

-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: "Build"
         run: cargo build --target x86_64-unknown-linux-musl --bin uv --bin uvx
@@ -562,7 +545,7 @@ jobs:

       - uses: rui314/setup-mold@v1

-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
       - name: "Build"
         run: cargo build --bin uv --bin uvx

@@ -586,7 +569,7 @@ jobs:

       - uses: rui314/setup-mold@v1

-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
       - name: "Build"
         run: cargo build --bin uv --bin uvx

@@ -616,7 +599,7 @@ jobs:
         run: |
           Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse

-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
           workspaces: ${{ env.UV_WORKSPACE }}

@@ -651,7 +634,7 @@ jobs:
         run: |
           Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse

-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
           workspaces: ${{ env.UV_WORKSPACE }}

@@ -671,8 +654,8 @@ jobs:
             ${{ env.UV_WORKSPACE }}/target/aarch64-pc-windows-msvc/debug/uvx.exe
           retention-days: 1

-  cargo-build-msrv:
-    name: "cargo build (msrv)"
+  build-binary-msrv:
+    name: "build binary | msrv"
     needs: determine_changes
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
     runs-on: github-ubuntu-24.04-x86_64-8
@@ -688,7 +671,7 @@ jobs:
         run: rustup default ${{ steps.msrv.outputs.value }}
       - name: "Install mold"
         uses: rui314/setup-mold@v1
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
       - run: cargo +${{ steps.msrv.outputs.value }} build
       - run: ./target/debug/uv --version

@@ -701,7 +684,7 @@ jobs:

     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
       - name: "Cross build"
         run: |
           # Install cross from `freebsd-firecracker`
@@ -712,7 +695,7 @@ jobs:
           cross build --target x86_64-unknown-freebsd

       - name: Test in Firecracker VM
-        uses: acj/freebsd-firecracker-action@6c57bda7113c2f137ef00d54512d61ae9d64365b # v0.5.0
+        uses: acj/freebsd-firecracker-action@136ca0bce2adade21e526ceb07db643ad23dd2dd # v0.5.1
         with:
           verbose: false
           checkout: false
@@ -821,6 +804,33 @@ jobs:
           eval "$(./uv generate-shell-completion bash)"
           eval "$(./uvx --generate-shell-completion bash)"

+  smoke-test-linux-aarch64:
+    timeout-minutes: 10
+    needs: build-binary-linux-aarch64
+    name: "smoke test | linux aarch64"
+    runs-on: github-ubuntu-24.04-aarch64-2
+    steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+      - name: "Download binary"
+        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+        with:
+          name: uv-linux-aarch64-${{ github.sha }}
+
+      - name: "Prepare binary"
+        run: |
+          chmod +x ./uv
+          chmod +x ./uvx
+
+      - name: "Smoke test"
+        run: |
+          ./uv run scripts/smoke-test
+
+      - name: "Test shell completions"
+        run: |
+          eval "$(./uv generate-shell-completion bash)"
+          eval "$(./uvx --generate-shell-completion bash)"
+
   smoke-test-linux-musl:
     timeout-minutes: 10
     needs: build-binary-linux-musl
@@ -903,7 +913,7 @@ jobs:
     timeout-minutes: 10
     needs: build-binary-windows-aarch64
     name: "smoke test | windows aarch64"
-    runs-on: github-windows-11-aarch64-4
+    runs-on: windows-11-arm
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

@@ -1032,7 +1042,7 @@ jobs:

       - name: "Create a virtual environment (uv)"
         run: |
-          ./uv venv -p 3.13t --managed-python
+          ./uv venv -c -p 3.13t --managed-python

       - name: "Check version (uv)"
         run: |
@@ -1051,6 +1061,96 @@ jobs:
           ./uv run python -c ""
           ./uv run -p 3.13t python -c ""

+  integration-test-windows-aarch64-implicit:
+    timeout-minutes: 10
+    needs: build-binary-windows-aarch64
+    name: "integration test | aarch64 windows implicit"
+    runs-on: windows-11-arm
+
+    steps:
+      - name: "Download binary"
+        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+        with:
+          name: uv-windows-aarch64-${{ github.sha }}
+
+      - name: "Install Python via uv (implicitly select x64)"
+        run: |
+          ./uv python install -v 3.13
+
+      - name: "Create a virtual environment (stdlib)"
+        run: |
+          & (./uv python find 3.13) -m venv .venv
+
+      - name: "Check version (stdlib)"
+        run: |
+          .venv/Scripts/python --version
+
+      - name: "Create a virtual environment (uv)"
+        run: |
+          ./uv venv -c -p 3.13 --managed-python
+
+      - name: "Check version (uv)"
+        run: |
+          .venv/Scripts/python --version
+
+      - name: "Check is x64"
+        run: |
+          .venv/Scripts/python -c "import sys; exit(1) if 'AMD64' not in sys.version else exit(0)"
+
+      - name: "Check install"
+        run: |
+          ./uv pip install -v anyio
+
+      - name: "Check uv run"
+        run: |
+          ./uv run python -c ""
+          ./uv run -p 3.13 python -c ""
+
+  integration-test-windows-aarch64-explicit:
+    timeout-minutes: 10
+    needs: build-binary-windows-aarch64
+    name: "integration test | aarch64 windows explicit"
+    runs-on: windows-11-arm
+
+    steps:
+      - name: "Download binary"
+        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+        with:
+          name: uv-windows-aarch64-${{ github.sha }}
+
+      - name: "Install Python via uv (explicitly select aarch64)"
+        run: |
+          ./uv python install -v cpython-3.13-windows-aarch64-none
+
+      - name: "Create a virtual environment (stdlib)"
+        run: |
+          & (./uv python find 3.13) -m venv .venv
+
+      - name: "Check version (stdlib)"
+        run: |
+          .venv/Scripts/python --version
+
+      - name: "Create a virtual environment (uv)"
+        run: |
+          ./uv venv -c -p 3.13 --managed-python
+
+      - name: "Check version (uv)"
+        run: |
+          .venv/Scripts/python --version
+
+      - name: "Check is NOT x64"
+        run: |
+          .venv/Scripts/python -c "import sys; exit(1) if 'AMD64' in sys.version else exit(0)"
+
+      - name: "Check install"
+        run: |
+          ./uv pip install -v anyio
+
+      - name: "Check uv run"
+        run: |
+          ./uv run python -c ""
+          ./uv run -p 3.13 python -c ""
+
   integration-test-pypy-linux:
     timeout-minutes: 10
     needs: build-binary-linux-libc
@@ -1467,6 +1567,90 @@ jobs:
           done <<< "${CHANGED_FILES}"
           echo "code_any_changed=${CODE_CHANGED}" >> "${GITHUB_OUTPUT}"

+  integration-test-registries:
+    timeout-minutes: 10
+    needs: build-binary-linux-libc
+    name: "integration test | registries"
+    runs-on: ubuntu-latest
+    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event.pull_request.head.repo.fork != true }}
+    environment: uv-test-registries
+    env:
+      PYTHON_VERSION: 3.12
+    steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          fetch-depth: 0
+
+      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+        with:
+          python-version: "${{ env.PYTHON_VERSION }}"
+
+      - name: "Download binary"
+        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
+        with:
+          name: uv-linux-libc-${{ github.sha }}
+
+      - name: "Prepare binary"
+        run: chmod +x ./uv
+
+      - name: "Configure AWS credentials"
+        uses: aws-actions/configure-aws-credentials@a159d7bb5354cf786f855f2f5d1d8d768d9a08d1
+        with:
+          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          aws-region: us-east-1
+
+      - name: "Get AWS CodeArtifact token"
+        run: |
+          UV_TEST_AWS_TOKEN=$(aws codeartifact get-authorization-token \
+            --domain tests \
+            --domain-owner ${{ secrets.AWS_ACCOUNT_ID }} \
+            --region us-east-1 \
+            --query authorizationToken \
+            --output text)
+          echo "::add-mask::$UV_TEST_AWS_TOKEN"
+          echo "UV_TEST_AWS_TOKEN=$UV_TEST_AWS_TOKEN" >> $GITHUB_ENV
+
+      - name: "Authenticate with GCP"
+        id: "auth"
+        uses: "google-github-actions/auth@140bb5113ffb6b65a7e9b937a81fa96cf5064462"
+        with:
+          credentials_json: "${{ secrets.GCP_SERVICE_ACCOUNT_KEY }}"
+
+      - name: "Set up GCP SDK"
+        uses: "google-github-actions/setup-gcloud@6a7c903a70c8625ed6700fa299f5ddb4ca6022e9"
+
+      - name: "Get GCP Artifact Registry token"
+        id: get_token
+        run: |
+          UV_TEST_GCP_TOKEN=$(gcloud auth print-access-token)
+          echo "::add-mask::$UV_TEST_GCP_TOKEN"
+          echo "UV_TEST_GCP_TOKEN=$UV_TEST_GCP_TOKEN" >> $GITHUB_ENV
+
+      - name: "Run registry tests"
+        run: ./uv run -p ${{ env.PYTHON_VERSION }} scripts/registries-test.py --uv ./uv --color always --all
+        env:
+          RUST_LOG: uv=debug
+          UV_TEST_ARTIFACTORY_TOKEN: ${{ secrets.UV_TEST_ARTIFACTORY_TOKEN }}
+          UV_TEST_ARTIFACTORY_URL: ${{ secrets.UV_TEST_ARTIFACTORY_URL }}
+          UV_TEST_ARTIFACTORY_USERNAME: ${{ secrets.UV_TEST_ARTIFACTORY_USERNAME }}
+          UV_TEST_AWS_URL: ${{ secrets.UV_TEST_AWS_URL }}
+          UV_TEST_AWS_USERNAME: aws
+          UV_TEST_AZURE_TOKEN: ${{ secrets.UV_TEST_AZURE_TOKEN }}
+          UV_TEST_AZURE_URL: ${{ secrets.UV_TEST_AZURE_URL }}
+          UV_TEST_AZURE_USERNAME: dummy
+          UV_TEST_CLOUDSMITH_TOKEN: ${{ secrets.UV_TEST_CLOUDSMITH_TOKEN }}
+          UV_TEST_CLOUDSMITH_URL: ${{ secrets.UV_TEST_CLOUDSMITH_URL }}
+          UV_TEST_CLOUDSMITH_USERNAME: ${{ secrets.UV_TEST_CLOUDSMITH_USERNAME }}
+          UV_TEST_GCP_URL: ${{ secrets.UV_TEST_GCP_URL }}
+          UV_TEST_GCP_USERNAME: oauth2accesstoken
+          UV_TEST_GEMFURY_TOKEN: ${{ secrets.UV_TEST_GEMFURY_TOKEN }}
+          UV_TEST_GEMFURY_URL: ${{ secrets.UV_TEST_GEMFURY_URL }}
+          UV_TEST_GEMFURY_USERNAME: ${{ secrets.UV_TEST_GEMFURY_USERNAME }}
+          UV_TEST_GITLAB_TOKEN: ${{ secrets.UV_TEST_GITLAB_TOKEN }}
+          UV_TEST_GITLAB_URL: ${{ secrets.UV_TEST_GITLAB_URL }}
+          UV_TEST_GITLAB_USERNAME: token
+
   integration-test-publish:
     timeout-minutes: 20
     needs: integration-test-publish-changed
|
@ -1574,14 +1758,14 @@ jobs:
          ./uv run --no-project python -c "from built_by_uv import greet; print(greet())"

          # Test both `build_wheel` and `build_sdist` through uv
          ./uv venv -v
          ./uv venv -c -v
          ./uv build -v --force-pep517 scripts/packages/built-by-uv --find-links crates/uv-build/dist --offline
          ./uv pip install -v scripts/packages/built-by-uv/dist/*.tar.gz --find-links crates/uv-build/dist --offline --no-deps
          ./uv run --no-project python -c "from built_by_uv import greet; print(greet())"

          # Test both `build_wheel` and `build_sdist` through the official `build`
          rm -rf scripts/packages/built-by-uv/dist/
          ./uv venv -v
          ./uv venv -c -v
          ./uv pip install build
          # Add the uv binary to PATH for `build` to find
          PATH="$(pwd):$PATH" UV_OFFLINE=1 UV_FIND_LINKS=crates/uv-build/dist ./uv run --no-project python -m build -v --installer uv scripts/packages/built-by-uv
@ -1785,6 +1969,10 @@ jobs:
- name: "Print Python path"
  run: echo $(which python3)

# Needed for building Pydantic
- name: "Install build tools"
  run: dnf install -y gcc

- name: "Validate global Python install"
  run: python3 scripts/check_system_python.py --uv ./uv
@ -2039,7 +2227,7 @@ jobs:
  timeout-minutes: 10
  needs: build-binary-windows-aarch64
  name: "check system | x86-64 python3.13 on windows aarch64"
  runs-on: github-windows-11-aarch64-4
  runs-on: windows-11-arm
  steps:
    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@ -2057,6 +2245,28 @@ jobs:
    - name: "Validate global Python install"
      run: py -3.13 ./scripts/check_system_python.py --uv ./uv.exe

system-test-windows-aarch64-aarch64-python-313:
  timeout-minutes: 10
  needs: build-binary-windows-aarch64
  name: "check system | aarch64 python3.13 on windows aarch64"
  runs-on: windows-11-arm
  steps:
    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

    - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
      with:
        python-version: "3.13"
        architecture: "arm64"
        allow-prereleases: true

    - name: "Download binary"
      uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
      with:
        name: uv-windows-aarch64-${{ github.sha }}

    - name: "Validate global Python install"
      run: py -3.13-arm64 ./scripts/check_system_python.py --uv ./uv.exe

# Test our PEP 514 integration that installs Python into the Windows registry.
system-test-windows-registry:
  timeout-minutes: 10
@ -2259,6 +2469,9 @@ jobs:
- name: "Print Python path"
  run: echo $(which python3)

- name: Install build tools
  run: yum install -y gcc

- name: "Validate global Python install"
  run: python3 scripts/check_system_python.py --uv ./uv
@ -2304,13 +2517,13 @@ jobs:
- name: "Checkout Branch"
  uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

- name: "Install Rust toolchain"
  run: rustup show

- name: "Install codspeed"
  uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
  uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
  with:
    tool: cargo-codspeed

@ -2326,7 +2539,7 @@ jobs:
  run: cargo codspeed build --profile profiling --features codspeed -p uv-bench

- name: "Run benchmarks"
  uses: CodSpeedHQ/action@0010eb0ca6e89b80c88e8edaaa07cfe5f3e6664d # v3.5.0
  uses: CodSpeedHQ/action@c28fe9fbe7d57a3da1b7834ae3761c1d8217612d # v3.7.0
  with:
    run: cargo codspeed run
    token: ${{ secrets.CODSPEED_TOKEN }}

@ -2341,13 +2554,13 @@ jobs:
- name: "Checkout Branch"
  uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

- uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2.7.8
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

- name: "Install Rust toolchain"
  run: rustup show

- name: "Install codspeed"
  uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
  uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
  with:
    tool: cargo-codspeed

@ -2363,7 +2576,7 @@ jobs:
  run: cargo codspeed build --profile profiling --features codspeed -p uv-bench

- name: "Run benchmarks"
  uses: CodSpeedHQ/action@0010eb0ca6e89b80c88e8edaaa07cfe5f3e6664d # v3.5.0
  uses: CodSpeedHQ/action@c28fe9fbe7d57a3da1b7834ae3761c1d8217612d # v3.7.0
  with:
    run: cargo codspeed run
    token: ${{ secrets.CODSPEED_TOKEN }}
@ -22,14 +22,12 @@ jobs:
  id-token: write
steps:
  - name: "Install uv"
    uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
    uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
  - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
    with:
      pattern: wheels_uv-*
      path: wheels_uv
      merge-multiple: true
  - name: Remove wheels unsupported by PyPI
    run: rm wheels_uv/*riscv*
  - name: Publish to PyPI
    run: uv publish -v wheels_uv/*
@ -43,13 +41,11 @@ jobs:
  id-token: write
steps:
  - name: "Install uv"
    uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
    uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
  - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
    with:
      pattern: wheels_uv_build-*
      path: wheels_uv_build
      merge-multiple: true
  - name: Remove wheels unsupported by PyPI
    run: rm wheels_uv_build/*riscv*
  - name: Publish to PyPI
    run: uv publish -v wheels_uv_build/*
@ -69,7 +69,7 @@ jobs:
  # we specify bash to get pipefail; it guards against the `curl` command
  # failing. otherwise `sh` won't catch that `curl` returned non-0
  shell: bash
  run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.4/cargo-dist-installer.sh | sh"
  run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.7-prerelease.1/cargo-dist-installer.sh | sh"
- name: Cache dist
  uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
  with:
@ -1,13 +1,43 @@
# Configures a drive for testing in CI.
#
# When using standard GitHub Actions runners, a `D:` drive is present and has
# similar or better performance characteristics than a ReFS dev drive. Sometimes
# using a larger runner is still more performant (e.g., when running the test
# suite) and we need to create a dev drive. This script automatically configures
# the appropriate drive.
#
# When using GitHub Actions' "larger runners", the `D:` drive is not present and
# we create a DevDrive mount on `C:`. This is purported to be more performant
# than an ReFS drive, though we did not see a change when we switched over.
#
# When using Depot runners, the underlying infrastructure is EC2, which does not
# support Hyper-V. The `New-VHD` commandlet only works with Hyper-V, but we can
# create a ReFS drive using `diskpart` and `format` directly. We cannot use a
# DevDrive, as that also requires Hyper-V. The Depot runners use `D:` already,
# so we must check if it's a Depot runner first, and we use `V:` as the target
# instead.

# When not using a GitHub Actions "larger runner", the `D:` drive is present and
# has similar or better performance characteristics than a ReFS dev drive.
# Sometimes using a larger runner is still more performant (e.g., when running
# the test suite) and we need to create a dev drive. This script automatically
# configures the appropriate drive.

# Note that `Get-PSDrive` alone is not sufficient because the drive letter is assigned.
if (Test-Path "D:\") {
if ($env:DEPOT_RUNNER -eq "1") {
    Write-Output "DEPOT_RUNNER detected, setting up custom dev drive..."

    # Create VHD and configure drive using diskpart
    $vhdPath = "C:\uv_dev_drive.vhdx"
    @"
create vdisk file="$vhdPath" maximum=20480 type=expandable
attach vdisk
create partition primary
active
assign letter=V
"@ | diskpart

    # Format the drive as ReFS
    format V: /fs:ReFS /q /y
    $Drive = "V:"

    Write-Output "Custom dev drive created at $Drive"
} elseif (Test-Path "D:\") {
    # Note `Get-PSDrive` is not sufficient because the drive letter is assigned.
    Write-Output "Using existing drive at D:"
    $Drive = "D:"
} else {
@ -55,10 +85,8 @@ Write-Output `
    "DEV_DRIVE=$($Drive)" `
    "TMP=$($Tmp)" `
    "TEMP=$($Tmp)" `
    "UV_INTERNAL__TEST_DIR=$($Tmp)" `
    "RUSTUP_HOME=$($Drive)/.rustup" `
    "CARGO_HOME=$($Drive)/.cargo" `
    "UV_WORKSPACE=$($Drive)/uv" `
    "PATH=$($Drive)/.cargo/bin;$env:PATH" `
    >> $env:GITHUB_ENV
@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
steps:
  - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
  - uses: astral-sh/setup-uv@f0ec1fc3b38f5e7cd731bb6ce540c5af426746bb # v6.1.0
  - uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
    with:
      version: "latest"
      enable-cache: true
@ -12,7 +12,7 @@ repos:
- id: validate-pyproject

- repo: https://github.com/crate-ci/typos
  rev: v1.33.1
  rev: v1.34.0
  hooks:
    - id: typos

@ -42,7 +42,7 @@ repos:
  types_or: [yaml, json5]

- repo: https://github.com/astral-sh/ruff-pre-commit
  rev: v0.11.13
  rev: v0.12.4
  hooks:
    - id: ruff-format
    - id: ruff
@ -6,7 +6,6 @@
3.8.20
# The following are required for packse scenarios
3.9.20
3.9.18
3.9.12
# The following is needed for `==3.13` request tests
3.13.0

CHANGELOG.md
@ -3,459 +3,230 @@
<!-- prettier-ignore-start -->

## 0.7.13
## 0.8.3

### Python

- Add Python 3.14.0b2
- Add Python 3.13.5
- Fix stability of `uuid.getnode` on 3.13
- Add CPython 3.14.0rc1

See the
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250612)
for more details.
See the [`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250723) for more details.

### Enhancements

- Download versions in `uv python pin` if not found ([#13946](https://github.com/astral-sh/uv/pull/13946))
- Use TTY detection to determine if SIGINT forwarding is enabled ([#13925](https://github.com/astral-sh/uv/pull/13925))
- Avoid fetching an exact, cached Git commit, even if it isn't locked ([#13748](https://github.com/astral-sh/uv/pull/13748))
- Add `zstd` and `deflate` to `Accept-Encoding` ([#13982](https://github.com/astral-sh/uv/pull/13982))
- Build binaries for riscv64 ([#12688](https://github.com/astral-sh/uv/pull/12688))
- Allow non-standard entrypoint names in `uv_build` ([#14867](https://github.com/astral-sh/uv/pull/14867))
- Publish riscv64 wheels to PyPI ([#14852](https://github.com/astral-sh/uv/pull/14852))

### Bug fixes

- Check if relative URL is valid directory before treating as index ([#13917](https://github.com/astral-sh/uv/pull/13917))
- Ignore Python discovery errors during `uv python pin` ([#13944](https://github.com/astral-sh/uv/pull/13944))
- Do not allow `uv add --group ... --script` ([#13997](https://github.com/astral-sh/uv/pull/13997))

### Preview changes

- Build backend: Support namespace packages ([#13833](https://github.com/astral-sh/uv/pull/13833))
- Avoid writing redacted credentials to tool receipt ([#14855](https://github.com/astral-sh/uv/pull/14855))
- Respect `--with` versions over base environment versions ([#14863](https://github.com/astral-sh/uv/pull/14863))
- Respect credentials from all defined indexes ([#14858](https://github.com/astral-sh/uv/pull/14858))
- Fix missed stabilization of removal of registry entry during Python uninstall ([#14859](https://github.com/astral-sh/uv/pull/14859))
- Improve concurrency safety of Python downloads into cache ([#14846](https://github.com/astral-sh/uv/pull/14846))

### Documentation

- Add 3.14 to the supported platform reference ([#13990](https://github.com/astral-sh/uv/pull/13990))
- Add an `llms.txt` to uv ([#13929](https://github.com/astral-sh/uv/pull/13929))
- Add supported macOS version to the platform reference ([#13993](https://github.com/astral-sh/uv/pull/13993))
- Update platform support reference to include Python implementation list ([#13991](https://github.com/astral-sh/uv/pull/13991))
- Update pytorch.md ([#13899](https://github.com/astral-sh/uv/pull/13899))
- Update the CLI help and reference to include references to the Python bin directory ([#13978](https://github.com/astral-sh/uv/pull/13978))
- Fix typos in `uv_build` reference documentation ([#14853](https://github.com/astral-sh/uv/pull/14853))
- Move the "Cargo" install method further down in docs ([#14842](https://github.com/astral-sh/uv/pull/14842))
## 0.7.12
## 0.8.2

### Enhancements

- Add `uv python pin --rm` to remove `.python-version` pins ([#13860](https://github.com/astral-sh/uv/pull/13860))
- Don't hint at versions removed by `excluded-newer` ([#13884](https://github.com/astral-sh/uv/pull/13884))
- Add hint to use `tool.uv.environments` on resolution error ([#13455](https://github.com/astral-sh/uv/pull/13455))
- Add hint to use `tool.uv.required-environments` on resolution error ([#13575](https://github.com/astral-sh/uv/pull/13575))
- Improve `python pin` error messages ([#13862](https://github.com/astral-sh/uv/pull/13862))

### Bug fixes

- Lock environments during `uv sync`, `uv add` and `uv remove` to prevent race conditions ([#13869](https://github.com/astral-sh/uv/pull/13869))
- Add `--no-editable` to `uv export` for `pylock.toml` ([#13852](https://github.com/astral-sh/uv/pull/13852))

### Documentation

- List `.gitignore` in project init files ([#13855](https://github.com/astral-sh/uv/pull/13855))
- Move the pip interface documentation into the concepts section ([#13841](https://github.com/astral-sh/uv/pull/13841))
- Remove the configuration section in favor of concepts / reference ([#13842](https://github.com/astral-sh/uv/pull/13842))
- Update Git and GitHub Actions docs to mention `gh auth login` ([#13850](https://github.com/astral-sh/uv/pull/13850))

### Preview

- Fix directory glob traversal fallback preventing exclusion of all files ([#13882](https://github.com/astral-sh/uv/pull/13882))

## 0.7.11

### Python

- Add Python 3.14.0b1
- Add Python 3.13.4
- Add Python 3.12.11
- Add Python 3.11.13
- Add Python 3.10.18
- Add Python 3.9.23

### Enhancements

- Add Pyodide support ([#12731](https://github.com/astral-sh/uv/pull/12731))
- Better error message for version specifier with missing operator ([#13803](https://github.com/astral-sh/uv/pull/13803))

### Bug fixes

- Downgrade `reqwest` and `hyper-util` to resolve connection reset errors over IPv6 ([#13835](https://github.com/astral-sh/uv/pull/13835))
- Prefer `uv`'s binary's version when checking if it's up to date ([#13840](https://github.com/astral-sh/uv/pull/13840))

### Documentation

- Use "terminal driver" instead of "shell" in `SIGINT` docs ([#13787](https://github.com/astral-sh/uv/pull/13787))

## 0.7.10

### Enhancements

- Add `--show-extras` to `uv tool list` ([#13783](https://github.com/astral-sh/uv/pull/13783))
- Add dynamically generated sysconfig replacement mappings ([#13441](https://github.com/astral-sh/uv/pull/13441))
- Add data locations to install wheel logs ([#13797](https://github.com/astral-sh/uv/pull/13797))

### Bug fixes

- Avoid redaction of placeholder `git` username when using SSH authentication ([#13799](https://github.com/astral-sh/uv/pull/13799))
- Propagate credentials to files on devpi indexes ending in `/+simple` ([#13743](https://github.com/astral-sh/uv/pull/13743))
- Restore retention of credentials for direct URLs in `uv export` ([#13809](https://github.com/astral-sh/uv/pull/13809))

## 0.7.9

### Python

The changes reverted in [0.7.8](#078) have been restored.

See the
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250529)
for more details.

### Enhancements

- Improve obfuscation of credentials in URLs ([#13560](https://github.com/astral-sh/uv/pull/13560))
- Allow running non-default Python implementations via `uvx` ([#13583](https://github.com/astral-sh/uv/pull/13583))
- Add `uvw` as alias for `uv` without console window on Windows ([#11786](https://github.com/astral-sh/uv/pull/11786))
- Allow discovery of x86-64 managed Python builds on macOS ([#13722](https://github.com/astral-sh/uv/pull/13722))
- Differentiate between implicit vs explicit architecture requests ([#13723](https://github.com/astral-sh/uv/pull/13723))
- Implement ordering for Python architectures to prefer native installations ([#13709](https://github.com/astral-sh/uv/pull/13709))
- Only show the first match per platform (and architecture) by default in `uv python list` ([#13721](https://github.com/astral-sh/uv/pull/13721))
- Write the path of the parent environment to an `extends-environment` key in the `pyvenv.cfg` file of an ephemeral environment ([#13598](https://github.com/astral-sh/uv/pull/13598))
- Improve the error message when libc cannot be found, e.g., when using the distroless containers ([#13549](https://github.com/astral-sh/uv/pull/13549))

### Performance

- Avoid rendering info log level ([#13642](https://github.com/astral-sh/uv/pull/13642))
- Improve performance of `uv-python` crate's manylinux submodule ([#11131](https://github.com/astral-sh/uv/pull/11131))
- Optimize `Version` display ([#13643](https://github.com/astral-sh/uv/pull/13643))
- Reduce number of reference-checks for `uv cache clean` ([#13669](https://github.com/astral-sh/uv/pull/13669))

### Bug fixes

- Avoid reinstalling dependency group members with `--all-packages` ([#13678](https://github.com/astral-sh/uv/pull/13678))
- Don't fail direct URL hash checking with dependency metadata ([#13736](https://github.com/astral-sh/uv/pull/13736))
- Exit early on `self update` if global `--offline` is set ([#13663](https://github.com/astral-sh/uv/pull/13663))
- Fix cases where the uv lock is incorrectly marked as out of date ([#13635](https://github.com/astral-sh/uv/pull/13635))
- Include pre-release versions in `uv python install --reinstall` ([#13645](https://github.com/astral-sh/uv/pull/13645))
- Set `LC_ALL=C` for git when checking git worktree ([#13637](https://github.com/astral-sh/uv/pull/13637))
- Avoid rejecting Windows paths for remote Python download JSON targets ([#13625](https://github.com/astral-sh/uv/pull/13625))

### Preview

- Add `uv add --bounds` to configure version constraints ([#12946](https://github.com/astral-sh/uv/pull/12946))

### Documentation

- Add documentation about Python versions to Tools concept page ([#7673](https://github.com/astral-sh/uv/pull/7673))
- Add example of enabling Dependabot ([#13692](https://github.com/astral-sh/uv/pull/13692))
- Fix `exclude-newer` date format for persistent configuration files ([#13706](https://github.com/astral-sh/uv/pull/13706))
- Quote versions variables in GitLab documentation ([#13679](https://github.com/astral-sh/uv/pull/13679))
- Update Dependabot support status ([#13690](https://github.com/astral-sh/uv/pull/13690))
- Explicitly specify to add a new repo entry to the repos list item in the `.pre-commit-config.yaml` ([#10243](https://github.com/astral-sh/uv/pull/10243))
- Add integration with marimo guide ([#13691](https://github.com/astral-sh/uv/pull/13691))
- Add pronunciation to README ([#5336](https://github.com/astral-sh/uv/pull/5336))
## 0.7.8

### Python

We are reverting most of our Python changes from `uv 0.7.6` and `uv 0.7.7` due to
a miscompilation that makes the Python interpreter behave incorrectly, resulting
in spurious type-errors involving `str`. This issue seems to be isolated to
x86_64 Linux, and affected at least Python 3.12, 3.13, and 3.14.

The following changes that were introduced in those versions of uv are temporarily
being reverted while we test and deploy a proper fix for the miscompilation:

- Add Python 3.14 on musl
- free-threaded Python on musl
- Add Python 3.14.0a7
- Statically link `libpython` into the interpreter on Linux for a significant performance boost

See [the issue for details](https://github.com/astral-sh/uv/issues/13610).

### Documentation

- Remove misleading line in pin documentation ([#13611](https://github.com/astral-sh/uv/pull/13611))

## 0.7.7

### Python

- Work around third-party packages that (incorrectly) assume the interpreter is dynamically linking libpython
- Allow the experimental JIT to be enabled at runtime on Python 3.13 and 3.14 on macOS on aarch64 aka Apple Silicon

See the
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250521)
for more details.

### Bug fixes

- Make `uv version` lock and sync ([#13317](https://github.com/astral-sh/uv/pull/13317))
- Fix references to `ldd` in diagnostics to correctly refer to `ld.so` ([#13552](https://github.com/astral-sh/uv/pull/13552))

### Documentation

- Clarify adding SSH Git dependencies ([#13534](https://github.com/astral-sh/uv/pull/13534))

## 0.7.6

### Python

- Add Python 3.14 on musl
- Add free-threaded Python on musl
- Add Python 3.14.0a7
- Statically link `libpython` into the interpreter on Linux for a significant performance boost

See the
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250517)
for more details.

### Enhancements

- Improve compatibility of `VIRTUAL_ENV_PROMPT` value ([#13501](https://github.com/astral-sh/uv/pull/13501))
- Bump MSRV to 1.85 and Edition 2024 ([#13516](https://github.com/astral-sh/uv/pull/13516))

### Bug fixes

- Respect default extras in uv remove ([#13380](https://github.com/astral-sh/uv/pull/13380))

### Documentation

- Fix PowerShell code blocks ([#13511](https://github.com/astral-sh/uv/pull/13511))

## 0.7.5

### Bug fixes

- Support case-sensitive module discovery in the build backend ([#13468](https://github.com/astral-sh/uv/pull/13468))
- Bump Simple cache bucket to v16 ([#13498](https://github.com/astral-sh/uv/pull/13498))
- Don't error when the script is too short for the buffer ([#13488](https://github.com/astral-sh/uv/pull/13488))
- Add missing word in "script not supported" error ([#13483](https://github.com/astral-sh/uv/pull/13483))

## 0.7.4

### Enhancements

- Add more context to external errors ([#13351](https://github.com/astral-sh/uv/pull/13351))
- Align indentation of long arguments ([#13394](https://github.com/astral-sh/uv/pull/13394))
- Preserve order of dependencies which are sorted naively ([#13334](https://github.com/astral-sh/uv/pull/13334))
- Align progress bars by largest name length ([#13266](https://github.com/astral-sh/uv/pull/13266))
- Reinstall local packages in `uv add` ([#13462](https://github.com/astral-sh/uv/pull/13462))
- Rename `--raw-sources` to `--raw` ([#13348](https://github.com/astral-sh/uv/pull/13348))
- Show 'Downgraded' when `self update` is used to install an older version ([#13340](https://github.com/astral-sh/uv/pull/13340))
- Suggest `uv self update` if required uv version is newer ([#13305](https://github.com/astral-sh/uv/pull/13305))
- Add 3.14 beta images to uv Docker images ([#13390](https://github.com/astral-sh/uv/pull/13390))
- Add comma after "i.e." in Conda environment error ([#13423](https://github.com/astral-sh/uv/pull/13423))
- Be more precise in unpinned packages warning ([#13426](https://github.com/astral-sh/uv/pull/13426))
- Fix detection of sorted dependencies when include-group is used ([#13354](https://github.com/astral-sh/uv/pull/13354))
- Fix display of HTTP responses in trace logs for retry of errors ([#13339](https://github.com/astral-sh/uv/pull/13339))
- Log skip reasons during Python installation key interpreter match checks ([#13472](https://github.com/astral-sh/uv/pull/13472))
- Redact credentials when displaying URLs ([#13333](https://github.com/astral-sh/uv/pull/13333))

### Bug fixes

- Avoid erroring on `pylock.toml` dependency entries ([#13384](https://github.com/astral-sh/uv/pull/13384))
- Avoid panics for cannot-be-a-base URLs ([#13406](https://github.com/astral-sh/uv/pull/13406))
- Ensure cached realm credentials are applied if no password is found for index URL ([#13463](https://github.com/astral-sh/uv/pull/13463))
- Fix `.tgz` parsing to respect true extension ([#13382](https://github.com/astral-sh/uv/pull/13382))
- Fix double self-dependency ([#13366](https://github.com/astral-sh/uv/pull/13366))
- Reject `pylock.toml` in `uv add -r` ([#13421](https://github.com/astral-sh/uv/pull/13421))
- Retain dot-separated wheel tags during cache prune ([#13379](https://github.com/astral-sh/uv/pull/13379))
- Retain trailing comments after PEP 723 metadata block ([#13460](https://github.com/astral-sh/uv/pull/13460))

### Documentation

- Use "export" instead of "install" in `uv export` arguments ([#13430](https://github.com/astral-sh/uv/pull/13430))
- Remove extra newline ([#13461](https://github.com/astral-sh/uv/pull/13461))

### Preview features

- Build backend: Normalize glob paths ([#13465](https://github.com/astral-sh/uv/pull/13465))

## 0.7.3

### Enhancements

- Add `--dry-run` support to `uv self update` ([#9829](https://github.com/astral-sh/uv/pull/9829))
- Add `--show-with` to `uv tool list` to list packages included by `--with` ([#13264](https://github.com/astral-sh/uv/pull/13264))
- De-duplicate fetched index URLs ([#13205](https://github.com/astral-sh/uv/pull/13205))
- Support more zip compression formats: bzip2, lzma, xz, zstd ([#13285](https://github.com/astral-sh/uv/pull/13285))
- Add support for downloading GraalPy ([#13172](https://github.com/astral-sh/uv/pull/13172))
- Improve error message when a virtual environment Python symlink is broken ([#12168](https://github.com/astral-sh/uv/pull/12168))
- Use `fs_err` for paths in symlinking errors ([#13303](https://github.com/astral-sh/uv/pull/13303))
- Minify and embed managed Python JSON at compile time ([#12967](https://github.com/astral-sh/uv/pull/12967))

### Preview features

- Build backend: Make preview default and add configuration docs ([#12804](https://github.com/astral-sh/uv/pull/12804))
- Build backend: Allow escaping in globs ([#13313](https://github.com/astral-sh/uv/pull/13313))
- Build backend: Make builds reproducible across operating systems ([#13171](https://github.com/astral-sh/uv/pull/13171))
- Add derivation chains for dependency errors ([#14824](https://github.com/astral-sh/uv/pull/14824))

### Configuration

- Add `python-downloads-json-url` option for `uv.toml` to configure custom Python installations via JSON URL ([#12974](https://github.com/astral-sh/uv/pull/12974))
- Add `UV_INIT_BUILD_BACKEND` ([#14821](https://github.com/astral-sh/uv/pull/14821))

### Bug fixes

- Check nested IO errors for retries ([#13260](https://github.com/astral-sh/uv/pull/13260))
- Accept `musllinux_1_0` as a valid platform tag ([#13289](https://github.com/astral-sh/uv/pull/13289))
- Fix discovery of pre-release managed Python versions in range requests ([#13330](https://github.com/astral-sh/uv/pull/13330))
- Respect locked script preferences in `uv run --with` ([#13283](https://github.com/astral-sh/uv/pull/13283))
- Retry streaming downloads on broken pipe errors ([#13281](https://github.com/astral-sh/uv/pull/13281))
- Treat already-installed base environment packages as preferences in `uv run --with` ([#13284](https://github.com/astral-sh/uv/pull/13284))
- Avoid enumerating sources in errors for path Python requests ([#13335](https://github.com/astral-sh/uv/pull/13335))
- Avoid re-creating virtual environment with `--no-sync` ([#13287](https://github.com/astral-sh/uv/pull/13287))
- Avoid reading files in the environment bin that are not entrypoints ([#14830](https://github.com/astral-sh/uv/pull/14830))
- Avoid removing empty directories when constructing virtual environments ([#14822](https://github.com/astral-sh/uv/pull/14822))
- Preserve index URL priority order when writing to pyproject.toml ([#14831](https://github.com/astral-sh/uv/pull/14831))

### Rust API

- Expose `tls_built_in_root_certs` for client ([#14816](https://github.com/astral-sh/uv/pull/14816))

### Documentation

- Remove outdated description of index strategy ([#13326](https://github.com/astral-sh/uv/pull/13326))
- Update "Viewing the version" docs ([#13241](https://github.com/astral-sh/uv/pull/13241))
- Archive the 0.7.x changelog ([#14819](https://github.com/astral-sh/uv/pull/14819))
## 0.7.2
## 0.8.1

### Enhancements

- Improve trace log for retryable errors ([#13228](https://github.com/astral-sh/uv/pull/13228))
- Use "error" instead of "warning" for self-update message ([#13229](https://github.com/astral-sh/uv/pull/13229))
- Error when `uv version` is used with project-specific flags but no project is found ([#13203](https://github.com/astral-sh/uv/pull/13203))
- Add support for `HF_TOKEN` ([#14797](https://github.com/astral-sh/uv/pull/14797))
- Allow `--config-settings-package` to apply configuration settings at the package level ([#14573](https://github.com/astral-sh/uv/pull/14573))
- Create (e.g.) `python3.13t` executables in `uv venv` ([#14764](https://github.com/astral-sh/uv/pull/14764))
- Disallow writing symlinks outside the source distribution target directory ([#12259](https://github.com/astral-sh/uv/pull/12259))
- Elide traceback when `python -m uv` is interrupted with Ctrl-C on Windows ([#14715](https://github.com/astral-sh/uv/pull/14715))
- Match `--bounds` formatting for `uv_build` bounds in `uv init` ([#14731](https://github.com/astral-sh/uv/pull/14731))
- Support `extras` and `dependency_groups` markers in PEP 508 grammar ([#14753](https://github.com/astral-sh/uv/pull/14753))
- Support `extras` and `dependency_groups` markers on `uv pip install` and `uv pip sync` ([#14755](https://github.com/astral-sh/uv/pull/14755))
- Add hint to use `uv self version` when `uv version` cannot find a project ([#14738](https://github.com/astral-sh/uv/pull/14738))
- Improve error reporting when removing Python versions from the Windows registry ([#14722](https://github.com/astral-sh/uv/pull/14722))
- Make warnings about masked `[tool.uv]` fields more precise ([#14325](https://github.com/astral-sh/uv/pull/14325))

### Preview features

- Emit JSON output in `uv sync` with `--quiet` ([#14810](https://github.com/astral-sh/uv/pull/14810))

### Bug fixes

- Fix incorrect virtual environment invalidation for pre-release Python versions ([#13234](https://github.com/astral-sh/uv/pull/13234))
- Fix patching of `clang` in managed Python sysconfig ([#13237](https://github.com/astral-sh/uv/pull/13237))
- Respect `--project` in `uv version` ([#13230](https://github.com/astral-sh/uv/pull/13230))
- Allow removal of virtual environments with missing interpreters ([#14812](https://github.com/astral-sh/uv/pull/14812))
- Apply `Cache-Control` overrides to response, not request headers ([#14736](https://github.com/astral-sh/uv/pull/14736))
- Copy entry points into ephemeral environments to ensure layers are respected ([#14790](https://github.com/astral-sh/uv/pull/14790))
- Workaround Jupyter Lab application directory discovery in ephemeral environments ([#14790](https://github.com/astral-sh/uv/pull/14790))
- Enforce `requires-python` in `pylock.toml` ([#14787](https://github.com/astral-sh/uv/pull/14787))
- Fix kebab casing of `README` variants in build backend ([#14762](https://github.com/astral-sh/uv/pull/14762))
- Improve concurrency resilience of removing Python versions from the Windows registry ([#14717](https://github.com/astral-sh/uv/pull/14717))
- Retry HTTP requests on invalid data errors ([#14703](https://github.com/astral-sh/uv/pull/14703))
- Update virtual environment removal to delete `pyvenv.cfg` last ([#14808](https://github.com/astral-sh/uv/pull/14808))
- Error on unknown fields in `dependency-metadata` ([#14801](https://github.com/astral-sh/uv/pull/14801))

## 0.7.1
### Documentation

### Enhancement
- Recommend installing `setup-uv` after `setup-python` in GitHub Actions integration guide ([#14741](https://github.com/astral-sh/uv/pull/14741))
- Clarify which portions of `requires-python` behavior are consistent with pip ([#14752](https://github.com/astral-sh/uv/pull/14752))

- Add support for BLAKE2b-256 ([#13204](https://github.com/astral-sh/uv/pull/13204))
## 0.8.0

### Bugfix
Since we released uv [0.7.0](https://github.com/astral-sh/uv/releases/tag/0.7.0) in April, we've accumulated various changes that improve correctness and user experience, but could break some workflows. This release contains those changes; many have been marked as breaking out of an abundance of caution. We expect most users to be able to upgrade without making changes.

- Revert fix handling of authentication when encountering redirects ([#13215](https://github.com/astral-sh/uv/pull/13215))

## 0.7.0

This release contains various changes that improve correctness and user experience, but could break some workflows; many changes have been marked as breaking out of an abundance of caution. We expect most users to be able to upgrade without making changes.
This release also includes the stabilization of a couple `uv python install` features, which have been available under preview since late last year.

### Breaking changes

- **Update `uv version` to display and update project versions ([#12349](https://github.com/astral-sh/uv/pull/12349))**
- **Install Python executables into a directory on the `PATH` ([#14626](https://github.com/astral-sh/uv/pull/14626))**

Previously, `uv version` displayed uv's version. Now, `uv version` will display or update the project's version. This interface was [heavily requested](https://github.com/astral-sh/uv/issues/6298) and, after much consideration, we decided that transitioning the top-level command was the best option.
`uv python install` now installs a versioned Python executable (e.g., `python3.13`) into a directory on the `PATH` (e.g., `~/.local/bin`) by default. This behavior has been available under the `--preview` flag since [Oct 2024](https://github.com/astral-sh/uv/pull/8458). This change should not be breaking unless it shadows a Python executable elsewhere on the `PATH`.

Here's a brief example:
To install unversioned executables, i.e., `python3` and `python`, use the `--default` flag. The `--default` flag has also been in preview, but is not stabilized in this release.

```console
$ uv init example
Initialized project `example` at `./example`
$ cd example
$ uv version
example 0.1.0
$ uv version --bump major
example 0.1.0 => 1.0.0
$ uv version --short
1.0.0
```

Note that these executables point to the base Python installation and only include the standard library. That means they will not include dependencies from your current project (use `uv run python` instead) and you cannot install packages into their environment (use `uvx --with <package> python` instead).

If used outside of a project, uv will fall back to showing its own version still:
As with tool installation, the target directory respects common variables like `XDG_BIN_HOME` and can be overridden with a `UV_PYTHON_BIN_DIR` variable.

```console
$ uv version
warning: failed to read project: No `pyproject.toml` found in current directory or any parent directory
running `uv self version` for compatibility with old `uv version` command.
this fallback will be removed soon, pass `--preview` to make this an error.

uv 0.7.0 (4433f41c9 2025-04-29)
```

You can opt out of this behavior with `uv python install --no-bin` or `UV_PYTHON_INSTALL_BIN=0`.

See the [documentation on installing Python executables](https://docs.astral.sh/uv/concepts/python-versions/#installing-python-executables) for more details.
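For illustration, a minimal sketch of the new default; the install directory shown is the common `~/.local/bin` case and will vary by platform:

```console
$ uv python install 3.13
$ which python3.13
/home/user/.local/bin/python3.13
$ uv python install 3.13 --no-bin   # opt out of placing executables on the PATH
```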
- **Register Python versions with the Windows Registry ([#14625](https://github.com/astral-sh/uv/pull/14625))**

As described in the warning, `--preview` can be used to error instead:
`uv python install` now registers the installed Python version with the Windows Registry as specified by [PEP 514](https://peps.python.org/pep-0514/). This allows using uv installed Python versions via the `py` launcher. This behavior has been available under the `--preview` flag since [Jan 2025](https://github.com/astral-sh/uv/pull/10634). This change should not be breaking, as using the uv Python versions with `py` requires explicit opt in.

```console
$ uv version --preview
error: No `pyproject.toml` found in current directory or any parent directory
```

You can opt out of this behavior with `uv python install --no-registry` or `UV_PYTHON_INSTALL_REGISTRY=0`.
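As a sketch of the flow on Windows (the exact launcher selector for uv-managed versions is not shown; `py --list` reports what is actually registered):

```console
$ uv python install 3.13                  # registers the version per PEP 514
$ py --list                               # uv-managed interpreters now appear here
$ uv python install 3.13 --no-registry    # opt out of the registry integration
```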
- **Prompt before removing an existing directory in `uv venv` ([#14309](https://github.com/astral-sh/uv/pull/14309))**

The previous functionality of `uv version` was moved to `uv self version`.
- **Avoid fallback to subsequent indexes on authentication failure ([#12805](https://github.com/astral-sh/uv/pull/12805))**

Previously, `uv venv` would remove an existing virtual environment without confirmation. While this is consistent with the behavior of project commands (e.g., `uv sync`), it's surprising to users that are using imperative workflows (i.e., `uv pip`). Now, `uv venv` will prompt for confirmation before removing an existing virtual environment. **If not in an interactive context, uv will still remove the virtual environment for backwards compatibility. However, this behavior is likely to change in a future release.**

When using the `first-index` strategy (the default), uv will stop searching indexes for a package once it is found on a single index. Previously, uv considered a package as "missing" from an index during authentication failures, such as an HTTP 401 or HTTP 403 (normally, missing packages are represented by an HTTP 404). This behavior was motivated by unusual responses from some package indexes, but reduces the safety of uv's index strategy when authentication fails. Now, uv will consider an authentication failure as a stop-point when searching for a package across indexes. The `index.ignore-error-codes` option can be used to recover the existing behavior, e.g.:
The behavior for other commands (e.g., `uv sync`) is unchanged.

You can opt out of this behavior by setting `UV_VENV_CLEAR=1` or passing the `--clear` flag.
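A hypothetical interactive session (the prompt wording is illustrative):

```console
$ uv venv                    # asks for confirmation if `.venv` already exists
$ uv venv --clear            # replaces the environment without prompting
$ UV_VENV_CLEAR=1 uv venv    # same, via the environment variable
```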
- **Validate that discovered interpreters meet the Python preference ([#7934](https://github.com/astral-sh/uv/pull/7934))**

uv allows opting out of its managed Python versions with the `--no-managed-python` and `python-preference` options.

Previously, uv would not enforce this option for Python interpreters discovered on the `PATH`. For example, if a symlink to a managed Python interpreter was created, uv would allow it to be used even if `--no-managed-python` was provided. Now, uv ignores Python interpreters that do not match the Python preference *unless* they are in an active virtual environment or are explicitly requested, e.g., with `--python /path/to/python3.13`.

Similarly, uv would previously not invalidate existing project environments if they did not match the Python preference. Now, uv will invalidate and recreate project environments when the Python preference changes.

You can opt out of this behavior by providing the explicit path to the Python interpreter, or by providing `--managed-python` / `--no-managed-python` matching the interpreter you want.

- **Install dependencies without build systems when they are `path` sources ([#14413](https://github.com/astral-sh/uv/pull/14413))**

When working on a project, uv uses the [presence of a build system](https://docs.astral.sh/uv/concepts/projects/config/#build-systems) to determine if it should be built and installed into the environment. However, when a project is a dependency of another project, it can be surprising for the dependency to be missing from the environment.

Previously, uv would not build and install dependencies with [`path` sources](https://docs.astral.sh/uv/concepts/projects/dependencies/#path) unless they declared a build system or set `tool.uv.package = true`. Now, dependencies with `path` sources are built and installed regardless of the presence of a build system. If a build system is not present, the `setuptools.build_meta:__legacy__` backend will be used (per [PEP 517](https://peps.python.org/pep-0517/#source-trees)).

You can opt out of this behavior by setting `package = false` in the source declaration, e.g.:

```toml
[[tool.uv.index]]
name = "pytorch"
url = "https://download.pytorch.org/whl/cpu"
ignore-error-codes = [401, 403]
```

```toml
[tool.uv.sources]
foo = { path = "./foo", package = false }
```

Since PyTorch's indexes always return a HTTP 403 for missing packages, uv special-cases indexes on the `pytorch.org` domain to ignore that error code by default.

- **Require the command in `uvx <name>` to be available in the Python environment ([#11603](https://github.com/astral-sh/uv/pull/11603))**

Or, by setting `tool.uv.package = false` in the dependent `pyproject.toml`.

Previously, `uvx` would attempt to execute a command even if it was not provided by a Python package. For example, if we presume `foo` is an empty Python package which provides no command, `uvx foo` would invoke the `foo` command on the `PATH` (if present). Now, uv will error early if the `foo` executable is not provided by the requested Python package. This check is not enforced when `--from` is used, so patterns like `uvx --from foo bash -c "..."` are still valid. uv also still allows `uvx foo` where the `foo` executable is provided by a dependency of `foo` instead of `foo` itself, as this is fairly common for packages which depend on a dedicated package for their command-line interface.
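For illustration, with the hypothetical package `foo` from the paragraph above:

```console
$ uvx foo                                  # now errors early instead of running
                                           # an unrelated `foo` from the PATH
$ uvx --from foo python -c "import foo"    # `--from` skips the check
```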
- **Use index URL instead of package URL for keyring credential lookups ([#12651](https://github.com/astral-sh/uv/pull/12651))**

See the documentation on [virtual dependencies](https://docs.astral.sh/uv/concepts/projects/dependencies/#virtual-dependencies) for details.
- **Install dependencies without build systems when they are workspace members ([#14663](https://github.com/astral-sh/uv/pull/14663))**

When determining credentials for querying a package URL, uv previously sent the full URL to the `keyring` command. However, some keyring plugins expect to receive the *index URL* (which is usually a parent of the package URL). Now, uv requests credentials for the index URL instead. This behavior matches `pip`.
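For illustration, with a hypothetical private index at `https://example.com/simple/`, the lookup now resembles:

```console
# now: credentials are requested for the index URL
$ keyring get https://example.com/simple/ username
# previously: the full package URL was sent, e.g.
$ keyring get https://example.com/simple/foo/ username
```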
- **Remove `--version` from subcommands ([#13108](https://github.com/astral-sh/uv/pull/13108))**

As described above for dependencies with `path` sources, uv previously would not build and install workspace members that did not declare a build system. Now, uv will build and install workspace members that are a dependency of *another* workspace member regardless of the presence of a build system. The behavior is unchanged for workspace members that are not included in the `project.dependencies`, `project.optional-dependencies`, or `dependency-groups` tables of another workspace member.

Previously, uv allowed the `--version` flag on arbitrary subcommands, e.g., `uv run --version`. However, the `--version` flag is useful for other operations since uv is a package manager. Consequently, we've removed the `--version` flag from subcommands — it is only available as `uv --version`.
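For illustration:

```console
$ uv --version       # still supported
$ uv run --version   # now rejected rather than printing uv's version
```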
- **Omit Python 3.7 downloads from managed versions ([#13022](https://github.com/astral-sh/uv/pull/13022))**

You can opt out of this behavior by setting `tool.uv.package = false` in the workspace member's `pyproject.toml`.

Python 3.7 is EOL and not formally supported by uv; however, Python 3.7 was previously available for download on a subset of platforms.
- **Reject non-PEP 751 TOML files in install, compile, and export commands ([#13120](https://github.com/astral-sh/uv/pull/13120), [#13119](https://github.com/astral-sh/uv/pull/13119))**

See the documentation on [virtual dependencies](https://docs.astral.sh/uv/concepts/projects/dependencies/#virtual-dependencies) for details.
- **Bump `--python-platform linux` to `manylinux_2_28` ([#14300](https://github.com/astral-sh/uv/pull/14300))**

Previously, uv treated arbitrary `.toml` files passed to commands (e.g., `uv pip install -r foo.toml` or `uv pip compile -o foo.toml`) as `requirements.txt`-formatted files. Now, uv will error instead. If using PEP 751 lockfiles, use the standardized format for custom names instead, e.g., `pylock.foo.toml`.
- **Ignore arbitrary Python requests in version files ([#12909](https://github.com/astral-sh/uv/pull/12909))**

uv allows performing [platform-specific resolution](https://docs.astral.sh/uv/concepts/resolution/#platform-specific-resolution) for explicit targets and provides short aliases, e.g., `linux`, for common targets.

uv allows arbitrary strings to be used for Python version requests, in which they are treated as an executable name to search for in the `PATH`. However, using this form of request in `.python-version` files is non-standard and conflicts with `pyenv-virtualenv` which writes environment names to `.python-version` files. In this release, uv will now ignore requests that are arbitrary strings when found in `.python-version` files.
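For illustration, with a hypothetical `pyenv-virtualenv` environment name in the version file:

```console
$ cat .python-version
my-app-env
$ uv run python -V   # the arbitrary name is now ignored rather than searched
                     # for as an executable on the PATH
```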
- **Error on unknown dependency object specifiers ([#12811](https://github.com/astral-sh/uv/pull/12811))**

Previously, the default target for `--python-platform linux` was `manylinux_2_17`, which is compatible with most Linux distributions from 2014 or newer. We now default to `manylinux_2_28`, which is compatible with most Linux distributions from 2019 or newer. This change follows the lead of other tools, such as `cibuildwheel`, which changed their default to `manylinux_2_28` in [Mar 2025](https://github.com/pypa/cibuildwheel/pull/2330).

The `[dependency-groups]` entries can include "object specifiers", e.g. `set-phasers-to = ...` in:
This change only affects users requesting a specific target platform. Otherwise, uv detects the `manylinux` target from your local glibc version.

```toml
[dependency-groups]
foo = ["pyparsing"]
bar = [{set-phasers-to = "stun"}]
```

You can opt out of this behavior by using `--python-platform x86_64-manylinux_2_17` instead.
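For illustration:

```console
$ uv pip compile requirements.in --python-platform linux                   # now targets manylinux_2_28
$ uv pip compile requirements.in --python-platform x86_64-manylinux_2_17   # restores the previous default
```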
- **Remove `uv version` fallback ([#14161](https://github.com/astral-sh/uv/pull/14161))**

However, the only current spec-compliant object specifier is `include-group`. Previously, uv would ignore unknown object specifiers. Now, uv will error.
- **Make `--frozen` and `--no-sources` conflicting options ([#12671](https://github.com/astral-sh/uv/pull/12671))**

In [Apr 2025](https://github.com/astral-sh/uv/pull/12349), uv changed the `uv version` command to an interface for viewing and updating the version of the current project. However, when outside a project, `uv version` would continue to display uv's version for backwards compatibility. Now, when used outside of a project, `uv version` will fail.

Using `--no-sources` always requires a new resolution and `--frozen` will always fail when used with it. Now, this conflict is encoded in the CLI options for clarity.
- **Treat empty `UV_PYTHON_INSTALL_DIR` and `UV_TOOL_DIR` as unset ([#12907](https://github.com/astral-sh/uv/pull/12907), [#12905](https://github.com/astral-sh/uv/pull/12905))**

You cannot opt out of this behavior. Use `uv self version` instead.
- **Require `--global` for removal of the global Python pin ([#14169](https://github.com/astral-sh/uv/pull/14169))**

Previously, these variables were treated as set to the current working directory when set to an empty string. Now, uv will ignore these variables when empty. This matches uv's behavior for other environment variables which configure directories.
Previously, `uv python pin --rm` would allow you to remove the global Python pin without opt in. Now, uv requires the `--global` flag to remove the global Python pin.

### Enhancements

You cannot opt out of this behavior. Use the `--global` flag instead.
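For illustration:

```console
$ uv python pin --rm             # removes a project-level `.python-version` pin
$ uv python pin --rm --global    # now required to remove the global pin
```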
|
||||
- **Support conflicting editable settings across groups ([#14197](https://github.com/astral-sh/uv/pull/14197))**
|
||||
|
||||
- Disallow mixing requirements across PyTorch indexes ([#13179](https://github.com/astral-sh/uv/pull/13179))
|
||||
- Add optional managed Python archive download cache ([#12175](https://github.com/astral-sh/uv/pull/12175))
|
||||
- Add `poetry-core` as a `uv init` build backend option ([#12781](https://github.com/astral-sh/uv/pull/12781))
|
||||
- Show tag hints when failing to find a compatible wheel in `pylock.toml` ([#13136](https://github.com/astral-sh/uv/pull/13136))
|
||||
- Report Python versions in `pyvenv.cfg` version mismatch ([#13027](https://github.com/astral-sh/uv/pull/13027))
|
||||
Previously, uv would always treat a package as editable if any requirement requested it as editable. However, this prevented users from declaring `path` sources that toggled the `editable` setting across dependency groups. Now, uv allows declaring different `editable` values for conflicting groups. However, if a project includes a path dependency twice, once with `editable = true` and once without any editable annotation, those are now considered conflicting, and uv will exit with an error.
|
||||
|
||||
### Bug fixes
|
||||
You cannot opt out of this behavior. Use consistent `editable` settings or [mark groups as conflicting](https://docs.astral.sh/uv/concepts/projects/config/#conflicting-dependencies).

- **Make `uv_build` the default build backend in `uv init` ([#14661](https://github.com/astral-sh/uv/pull/14661))**

  The uv build backend (`uv_build`) was [stabilized in uv 0.7.19](https://github.com/astral-sh/uv/releases/tag/0.7.19). Now, it is the default build backend for `uv init --package` and `uv init --lib`. Previously, `hatchling` was the default build backend. A build backend is still not used without opt-in in `uv init`, but we expect to change this in a future release.

  You can opt out of this behavior with `uv init --build-backend hatchling`.

- **Set default `UV_TOOL_BIN_DIR` on Docker images ([#13391](https://github.com/astral-sh/uv/pull/13391))**

  Previously, `UV_TOOL_BIN_DIR` was not set in Docker images, which meant that `uv tool install` did not install tools into a directory on the `PATH` without additional configuration. Now, `UV_TOOL_BIN_DIR` is set to `/usr/local/bin` in all Docker-derived images.

  When the default image user is overridden (e.g., `USER <UID>`) with a less privileged user, this may cause `uv tool install` to fail.

  You can opt out of this behavior by setting an alternative `UV_TOOL_BIN_DIR`.

- **Update `--check` to return an exit code of 1 ([#14167](https://github.com/astral-sh/uv/pull/14167))**

  uv uses an exit code of 1 to indicate a "successful failure" and an exit code of 2 to indicate an "error".

  Previously, `uv lock --check` and `uv sync --check` would exit with a code of 2 when the lockfile or environment were outdated. Now, uv will exit with a code of 1.

  You cannot opt out of this behavior.
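
  For example, a CI script can now distinguish an out-of-date lockfile (a "successful failure") from an actual error by exit code; a minimal sketch, assuming the lockfile is outdated:

  ```console
  $ uv lock --check
  $ echo $?
  1
  ```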

- **Use an ephemeral environment for `uv run --with` invocations ([#14447](https://github.com/astral-sh/uv/pull/14447))**

  When using `uv run --with`, uv layers the requirements requested using `--with` into another virtual environment and caches it. Previously, uv would invoke the Python interpreter in this layered environment. However, this allows poisoning the cached environment and introduces race conditions for concurrent invocations. Now, uv will layer *another* empty virtual environment on top of the cached environment and invoke the Python interpreter there. This should only cause breakage in cases where the environment is being inspected at runtime.

  You cannot opt out of this behavior.
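
  The change is observable when inspecting the environment at runtime; a sketch (the `--with` package is arbitrary) in which `sys.prefix` now reports the ephemeral layer rather than the cached `--with` environment:

  ```console
  $ uv run --with rich python -c "import sys; print(sys.prefix)"
  ```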

- **Restructure the `uv venv` command output and exit codes ([#14546](https://github.com/astral-sh/uv/pull/14546))**

  Previously, uv used `miette` to format the `uv venv` output. However, this was inconsistent with most of the uv CLI. Now, the output is a little different and the exit code has switched from 1 to 2 for some error cases.

  You cannot opt out of this behavior.

- **Default to `--workspace` when adding subdirectories ([#14529](https://github.com/astral-sh/uv/pull/14529))**

  When using `uv add` to add a subdirectory in a workspace, uv now defaults to adding the target as a workspace member.

  You can opt out of this behavior by providing `--no-workspace`.
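
  For example, with a hypothetical subdirectory `packages/child` inside the workspace:

  ```console
  $ uv add ./packages/child                 # added as a workspace member
  $ uv add --no-workspace ./packages/child  # added as a plain path dependency
  ```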

- **Add missing validations for disallowed `uv.toml` fields ([#14322](https://github.com/astral-sh/uv/pull/14322))**

  uv does not allow some settings in the `uv.toml`. Previously, some settings were silently ignored when present in the `uv.toml`. Now, uv will error.

  You cannot opt out of this behavior. Use `--no-config` or remove the invalid settings.

### Configuration

- Add support for toggling Python bin and registry install options via env vars ([#14662](https://github.com/astral-sh/uv/pull/14662))

## 0.7.x

See [changelogs/0.7.x](./changelogs/0.7.x.md)

## 0.6.x

@@ -165,6 +165,13 @@ After making changes to the documentation, format the markdown files with:

```
npx prettier --prose-wrap always --write "**/*.md"
```

Note that the command above requires Node.js and npm to be installed on your system. As an alternative, you can run this command using Docker:

```console
$ docker run --rm -v .:/src/ -w /src/ node:alpine npx prettier --prose-wrap always --write "**/*.md"
```

## Releases

Releases can only be performed by Astral team members.

File diff suppressed because it is too large.

Cargo.toml

@@ -12,7 +12,7 @@ resolver = "2"

[workspace.package]
edition = "2024"
rust-version = "1.85"
rust-version = "1.86"
homepage = "https://pypi.org/project/uv/"
documentation = "https://pypi.org/project/uv/"
repository = "https://github.com/astral-sh/uv"

@@ -75,12 +75,13 @@ uv-workspace = { path = "crates/uv-workspace" }

anstream = { version = "0.6.15" }
anyhow = { version = "1.0.89" }
arcstr = { version = "1.2.0" }
astral-tokio-tar = { version = "0.5.1" }
arrayvec = { version = "0.7.6" }
astral-tokio-tar = { version = "0.5.2" }
async-channel = { version = "2.3.1" }
async-compression = { version = "0.4.12", features = ["bzip2", "gzip", "xz", "zstd"] }
async-trait = { version = "0.1.82" }
async_http_range_reader = { version = "0.9.1" }
async_zip = { git = "https://github.com/charliermarsh/rs-async-zip", rev = "c909fda63fcafe4af496a07bfda28a5aae97e58d", features = ["bzip2", "deflate", "lzma", "tokio", "xz", "zstd"] }
async_zip = { git = "https://github.com/astral-sh/rs-async-zip", rev = "c909fda63fcafe4af496a07bfda28a5aae97e58d", features = ["bzip2", "deflate", "lzma", "tokio", "xz", "zstd"] }
axoupdater = { version = "0.9.0", default-features = false }
backon = { version = "1.3.0" }
base64 = { version = "0.22.1" }

@@ -92,7 +93,7 @@ cargo-util = { version = "0.2.14" }

clap = { version = "4.5.17", features = ["derive", "env", "string", "wrap_help"] }
clap_complete_command = { version = "0.6.1" }
configparser = { version = "3.1.0" }
console = { version = "0.15.11", default-features = false }
console = { version = "0.16.0", default-features = false, features = ["std"] }
csv = { version = "1.3.0" }
ctrlc = { version = "3.4.5" }
dashmap = { version = "6.1.0" }

@@ -116,7 +117,7 @@ home = { version = "0.5.9" }

html-escape = { version = "0.2.13" }
http = { version = "1.1.0" }
indexmap = { version = "2.5.0" }
indicatif = { version = "0.17.8" }
indicatif = { version = "0.18.0" }
indoc = { version = "2.0.5" }
itertools = { version = "0.14.0" }
jiff = { version = "0.2.0", features = ["serde"] }

@@ -142,7 +143,7 @@ ref-cast = { version = "1.0.24" }

reflink-copy = { version = "0.1.19" }
regex = { version = "1.10.6" }
regex-automata = { version = "0.4.8", default-features = false, features = ["dfa-build", "dfa-search", "perf", "std", "syntax"] }
reqwest = { version = "=0.12.15", default-features = false, features = ["json", "gzip", "deflate", "zstd", "stream", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart", "http2", "blocking"] }
reqwest = { version = "0.12.22", default-features = false, features = ["json", "gzip", "deflate", "zstd", "stream", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart", "http2", "blocking"] }
reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8", features = ["multipart"] }
reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8" }
rkyv = { version = "0.8.8", features = ["bytecheck"] }

@@ -151,7 +152,7 @@ rust-netrc = { version = "0.1.2" }

rustc-hash = { version = "2.0.0" }
rustix = { version = "1.0.0", default-features = false, features = ["fs", "std"] }
same-file = { version = "1.0.6" }
schemars = { version = "0.8.21", features = ["url"] }
schemars = { version = "1.0.0", features = ["url2"] }
seahash = { version = "4.1.0" }
self-replace = { version = "1.5.0" }
serde = { version = "1.0.210", features = ["derive", "rc"] }

@@ -171,8 +172,8 @@ tl = { git = "https://github.com/astral-sh/tl.git", rev = "6e25b2ee2513d75385101

tokio = { version = "1.40.0", features = ["fs", "io-util", "macros", "process", "rt", "signal", "sync"] }
tokio-stream = { version = "0.1.16" }
tokio-util = { version = "0.7.12", features = ["compat", "io"] }
toml = { version = "0.8.19" }
toml_edit = { version = "0.22.21", features = ["serde"] }
toml = { version = "0.9.2", features = ["fast_hash"] }
toml_edit = { version = "0.23.2", features = ["serde"] }
tracing = { version = "0.1.40" }
tracing-durations-export = { version = "0.3.0", features = ["plot"] }
tracing-subscriber = { version = "0.3.18", features = ["env-filter", "json", "registry"] }

@@ -184,15 +185,32 @@ url = { version = "2.5.2", features = ["serde"] }

version-ranges = { git = "https://github.com/astral-sh/pubgrub", rev = "06ec5a5f59ffaeb6cf5079c6cb184467da06c9db" }
walkdir = { version = "2.5.0" }
which = { version = "8.0.0", features = ["regex"] }
windows = { version = "0.59.0", features = ["Win32_Storage_FileSystem"] }
windows = { version = "0.59.0", features = ["Win32_Globalization", "Win32_System_Console", "Win32_System_Kernel", "Win32_System_Diagnostics_Debug", "Win32_Storage_FileSystem"] }
windows-core = { version = "0.59.0" }
windows-registry = { version = "0.5.0" }
windows-result = { version = "0.3.0" }
windows-sys = { version = "0.59.0", features = ["Win32_Foundation", "Win32_Security", "Win32_Storage_FileSystem", "Win32_System_Ioctl", "Win32_System_IO", "Win32_System_Registry"] }
wiremock = { git = "https://github.com/astral-sh/wiremock-rs", rev = "b79b69f62521df9f83a54e866432397562eae789" }
wiremock = { version = "0.6.4" }
xz2 = { version = "0.1.7" }
zip = { version = "2.2.3", default-features = false, features = ["deflate", "zstd", "bzip2", "lzma", "xz"] }

# dev-dependencies
assert_cmd = { version = "2.0.16" }
assert_fs = { version = "1.1.2" }
byteorder = { version = "1.5.0" }
filetime = { version = "0.2.25" }
http-body-util = { version = "0.1.2" }
hyper = { version = "1.4.1", features = ["server", "http1"] }
hyper-util = { version = "0.1.8", features = ["tokio"] }
ignore = { version = "0.4.23" }
insta = { version = "1.40.0", features = ["json", "filters", "redactions"] }
predicates = { version = "3.1.2" }
similar = { version = "2.6.0" }
temp-env = { version = "0.3.6" }
test-case = { version = "3.3.1" }
test-log = { version = "0.2.16", features = ["trace"], default-features = false }
whoami = { version = "1.6.0" }

[workspace.metadata.cargo-shear]
ignored = ["flate2", "xz2"]

@@ -214,6 +232,7 @@ missing_panics_doc = "allow"

module_name_repetitions = "allow"
must_use_candidate = "allow"
similar_names = "allow"
struct_excessive_bools = "allow"
too_many_arguments = "allow"
too_many_lines = "allow"
used_underscore_binding = "allow"

@@ -296,89 +315,6 @@ codegen-units = 1

[profile.dist]
inherits = "release"

# Config for 'dist'
[workspace.metadata.dist]
# The preferred dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.28.4"
# make a package being included in our releases opt-in instead of opt-out
dist = false
# CI backends to support
ci = "github"
# The installers to generate for each app
installers = ["shell", "powershell"]
# The archive format to use for windows builds (defaults .zip)
windows-archive = ".zip"
# The archive format to use for non-windows builds (defaults .tar.xz)
unix-archive = ".tar.gz"
# Target platforms to build apps for (Rust target-triple syntax)
targets = [
    "aarch64-apple-darwin",
    "aarch64-pc-windows-msvc",
    "aarch64-unknown-linux-gnu",
    "aarch64-unknown-linux-musl",
    "arm-unknown-linux-musleabihf",
    "armv7-unknown-linux-gnueabihf",
    "armv7-unknown-linux-musleabihf",
    "i686-pc-windows-msvc",
    "i686-unknown-linux-gnu",
    "i686-unknown-linux-musl",
    "powerpc64-unknown-linux-gnu",
    "powerpc64le-unknown-linux-gnu",
    "riscv64gc-unknown-linux-gnu",
    "s390x-unknown-linux-gnu",
    "x86_64-apple-darwin",
    "x86_64-pc-windows-msvc",
    "x86_64-unknown-linux-gnu",
    "x86_64-unknown-linux-musl",
]
# Whether to auto-include files like READMEs, LICENSEs, and CHANGELOGs (default true)
auto-includes = false
# Whether dist should create a Github Release or use an existing draft
create-release = true
# Which actions to run on pull requests
pr-run-mode = "plan"
# Whether CI should trigger releases with dispatches instead of tag pushes
dispatch-releases = true
# Which phase dist should use to create the GitHub release
github-release = "announce"
# Whether CI should include auto-generated code to build local artifacts
build-local-artifacts = false
# Local artifacts jobs to run in CI
local-artifacts-jobs = ["./build-binaries", "./build-docker"]
# Publish jobs to run in CI
publish-jobs = ["./publish-pypi"]
# Post-announce jobs to run in CI
post-announce-jobs = ["./publish-docs"]
# Custom permissions for GitHub Jobs
github-custom-job-permissions = { "build-docker" = { packages = "write", contents = "read", id-token = "write", attestations = "write" } }
# Whether to install an updater program
install-updater = false
# Path that installers should place binaries in
install-path = ["$XDG_BIN_HOME/", "$XDG_DATA_HOME/../bin", "~/.local/bin"]

[workspace.metadata.dist.github-custom-runners]
global = "depot-ubuntu-latest-4"

[workspace.metadata.dist.min-glibc-version]
# Override glibc version for specific target triplets.
aarch64-unknown-linux-gnu = "2.28"
riscv64gc-unknown-linux-gnu = "2.31"
# Override all remaining glibc versions.
"*" = "2.17"

[workspace.metadata.dist.github-action-commits]
"actions/checkout" = "11bd71901bbe5b1630ceea73d27597364c9af683" # v4
"actions/upload-artifact" = "6027e3dd177782cd8ab9af838c04fd81a07f1d47" # v4.6.2
"actions/download-artifact" = "d3f86a106a0bac45b974a628896c90dbdf5c8093" # v4.3.0
"actions/attest-build-provenance" = "c074443f1aee8d4aeeae555aebba3282517141b2" # v2.2.3

[patch.crates-io]
reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8" }
reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8" }

[workspace.metadata.dist.binaries]
"*" = ["uv", "uvx"]
# Add "uvw" binary for Windows targets
aarch64-pc-windows-msvc = ["uv", "uvx", "uvw"]
i686-pc-windows-msvc = ["uv", "uvx", "uvw"]
x86_64-pc-windows-msvc = ["uv", "uvx", "uvw"]

changelogs/0.6.x.md

@@ -1,3 +1,5 @@
# Changelog 0.6.x

## 0.6.0

There have been 31 releases and 1135 pull requests since

changelogs/0.7.x.md

@@ -0,0 +1,995 @@
# Changelog 0.7.x

## 0.7.0

This release contains various changes that improve correctness and user experience, but could break some workflows; many changes have been marked as breaking out of an abundance of caution. We expect most users to be able to upgrade without making changes.

### Breaking changes

- **Update `uv version` to display and update project versions ([#12349](https://github.com/astral-sh/uv/pull/12349))**

  Previously, `uv version` displayed uv's version. Now, `uv version` will display or update the project's version. This interface was [heavily requested](https://github.com/astral-sh/uv/issues/6298) and, after much consideration, we decided that transitioning the top-level command was the best option.

  Here's a brief example:

  ```console
  $ uv init example
  Initialized project `example` at `./example`
  $ cd example
  $ uv version
  example 0.1.0
  $ uv version --bump major
  example 0.1.0 => 1.0.0
  $ uv version --short
  1.0.0
  ```

  If used outside of a project, uv will still fall back to showing its own version:

  ```console
  $ uv version
  warning: failed to read project: No `pyproject.toml` found in current directory or any parent directory
  running `uv self version` for compatibility with old `uv version` command.
  this fallback will be removed soon, pass `--preview` to make this an error.

  uv 0.7.0 (4433f41c9 2025-04-29)
  ```

  As described in the warning, `--preview` can be used to error instead:

  ```console
  $ uv version --preview
  error: No `pyproject.toml` found in current directory or any parent directory
  ```

  The previous functionality of `uv version` was moved to `uv self version`.

- **Avoid fallback to subsequent indexes on authentication failure ([#12805](https://github.com/astral-sh/uv/pull/12805))**

  When using the `first-index` strategy (the default), uv will stop searching indexes for a package once it is found on a single index. Previously, uv considered a package as "missing" from an index during authentication failures, such as an HTTP 401 or HTTP 403 (normally, missing packages are represented by an HTTP 404). This behavior was motivated by unusual responses from some package indexes, but reduces the safety of uv's index strategy when authentication fails. Now, uv will consider an authentication failure as a stop-point when searching for a package across indexes. The `index.ignore-error-codes` option can be used to recover the existing behavior, e.g.:

  ```toml
  [[tool.uv.index]]
  name = "pytorch"
  url = "https://download.pytorch.org/whl/cpu"
  ignore-error-codes = [401, 403]
  ```

  Since PyTorch's indexes always return an HTTP 403 for missing packages, uv special-cases indexes on the `pytorch.org` domain to ignore that error code by default.

- **Require the command in `uvx <name>` to be available in the Python environment ([#11603](https://github.com/astral-sh/uv/pull/11603))**

  Previously, `uvx` would attempt to execute a command even if it was not provided by a Python package. For example, if we presume `foo` is an empty Python package which provides no command, `uvx foo` would invoke the `foo` command on the `PATH` (if present). Now, uv will error early if the `foo` executable is not provided by the requested Python package. This check is not enforced when `--from` is used, so patterns like `uvx --from foo bash -c "..."` are still valid. uv also still allows `uvx foo` where the `foo` executable is provided by a dependency of `foo` instead of `foo` itself, as this is fairly common for packages which depend on a dedicated package for their command-line interface.

- **Use index URL instead of package URL for keyring credential lookups ([#12651](https://github.com/astral-sh/uv/pull/12651))**

  When determining credentials for querying a package URL, uv previously sent the full URL to the `keyring` command. However, some keyring plugins expect to receive the _index URL_ (which is usually a parent of the package URL). Now, uv requests credentials for the index URL instead. This behavior matches `pip`.

- **Remove `--version` from subcommands ([#13108](https://github.com/astral-sh/uv/pull/13108))**

  Previously, uv allowed the `--version` flag on arbitrary subcommands, e.g., `uv run --version`. However, the `--version` flag is useful for other operations since uv is a package manager. Consequently, we've removed the `--version` flag from subcommands — it is only available as `uv --version`.

- **Omit Python 3.7 downloads from managed versions ([#13022](https://github.com/astral-sh/uv/pull/13022))**

  Python 3.7 is EOL and not formally supported by uv; however, Python 3.7 was previously available for download on a subset of platforms.

- **Reject non-PEP 751 TOML files in install, compile, and export commands ([#13120](https://github.com/astral-sh/uv/pull/13120), [#13119](https://github.com/astral-sh/uv/pull/13119))**

  Previously, uv treated arbitrary `.toml` files passed to commands (e.g., `uv pip install -r foo.toml` or `uv pip compile -o foo.toml`) as `requirements.txt`-formatted files. Now, uv will error instead. If using PEP 751 lockfiles, use the standardized format for custom names instead, e.g., `pylock.foo.toml`.

- **Ignore arbitrary Python requests in version files ([#12909](https://github.com/astral-sh/uv/pull/12909))**

  uv allows arbitrary strings to be used for Python version requests, in which case they are treated as an executable name to search for in the `PATH`. However, using this form of request in `.python-version` files is non-standard and conflicts with `pyenv-virtualenv`, which writes environment names to `.python-version` files. In this release, uv will now ignore requests that are arbitrary strings when found in `.python-version` files.
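
  For example, a `.python-version` file containing an environment name written by `pyenv-virtualenv` (the name here is hypothetical) is now ignored rather than treated as an executable to search for on the `PATH`:

  ```
  my-virtualenv-name
  ```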

- **Error on unknown dependency object specifiers ([#12811](https://github.com/astral-sh/uv/pull/12811))**

  The `[dependency-groups]` entries can include "object specifiers", e.g. `set-phasers-to = ...` in:

  ```toml
  [dependency-groups]
  foo = ["pyparsing"]
  bar = [{set-phasers-to = "stun"}]
  ```

  However, the only current spec-compliant object specifier is `include-group`. Previously, uv would ignore unknown object specifiers. Now, uv will error.

- **Make `--frozen` and `--no-sources` conflicting options ([#12671](https://github.com/astral-sh/uv/pull/12671))**

  Using `--no-sources` always requires a new resolution, and `--frozen` will always fail when used with it. Now, this conflict is encoded in the CLI options for clarity.

- **Treat empty `UV_PYTHON_INSTALL_DIR` and `UV_TOOL_DIR` as unset ([#12907](https://github.com/astral-sh/uv/pull/12907), [#12905](https://github.com/astral-sh/uv/pull/12905))**

  Previously, these variables were treated as set to the current working directory when set to an empty string. Now, uv will ignore these variables when empty. This matches uv's behavior for other environment variables which configure directories.
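
  For example, invoking uv with the variable set to an empty string (a sketch) now behaves the same as leaving it unset:

  ```console
  $ UV_TOOL_DIR= uv tool install ruff  # empty value is ignored; the default tool directory is used
  ```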

### Enhancements

- Disallow mixing requirements across PyTorch indexes ([#13179](https://github.com/astral-sh/uv/pull/13179))
- Add optional managed Python archive download cache ([#12175](https://github.com/astral-sh/uv/pull/12175))
- Add `poetry-core` as a `uv init` build backend option ([#12781](https://github.com/astral-sh/uv/pull/12781))
- Show tag hints when failing to find a compatible wheel in `pylock.toml` ([#13136](https://github.com/astral-sh/uv/pull/13136))
- Report Python versions in `pyvenv.cfg` version mismatch ([#13027](https://github.com/astral-sh/uv/pull/13027))

### Bug fixes

- Avoid erroring on omitted wheel-only packages in `pylock.toml` ([#13132](https://github.com/astral-sh/uv/pull/13132))
- Fix display name for `uvx --version` ([#13109](https://github.com/astral-sh/uv/pull/13109))
- Restore handling of authentication when encountering redirects ([#13050](https://github.com/astral-sh/uv/pull/13050))
- Respect build options (`--no-binary` et al.) in `pylock.toml` ([#13134](https://github.com/astral-sh/uv/pull/13134))
- Use `upload-time` rather than `upload_time` in `uv.lock` ([#13176](https://github.com/astral-sh/uv/pull/13176))

### Documentation

- Change `fish` completions to overwrite (`>`) instead of append (`>>`) ([#13130](https://github.com/astral-sh/uv/pull/13130))
- Add `pylock.toml` mentions where relevant ([#13115](https://github.com/astral-sh/uv/pull/13115))
- Add ROCm example to the PyTorch guide ([#13200](https://github.com/astral-sh/uv/pull/13200))
- Upgrade PyTorch guide to CUDA 12.8 and PyTorch 2.7 ([#13199](https://github.com/astral-sh/uv/pull/13199))
## 0.7.1

### Enhancement

- Add support for BLAKE2b-256 ([#13204](https://github.com/astral-sh/uv/pull/13204))

### Bugfix

- Revert "fix handling of authentication when encountering redirects" ([#13215](https://github.com/astral-sh/uv/pull/13215))

## 0.7.2

### Enhancements

- Improve trace log for retryable errors ([#13228](https://github.com/astral-sh/uv/pull/13228))
- Use "error" instead of "warning" for self-update message ([#13229](https://github.com/astral-sh/uv/pull/13229))
- Error when `uv version` is used with project-specific flags but no project is found ([#13203](https://github.com/astral-sh/uv/pull/13203))

### Bug fixes

- Fix incorrect virtual environment invalidation for pre-release Python versions ([#13234](https://github.com/astral-sh/uv/pull/13234))
- Fix patching of `clang` in managed Python sysconfig ([#13237](https://github.com/astral-sh/uv/pull/13237))
- Respect `--project` in `uv version` ([#13230](https://github.com/astral-sh/uv/pull/13230))

## 0.7.3

### Enhancements

- Add `--dry-run` support to `uv self update` ([#9829](https://github.com/astral-sh/uv/pull/9829))
- Add `--show-with` to `uv tool list` to list packages included by `--with` ([#13264](https://github.com/astral-sh/uv/pull/13264))
- De-duplicate fetched index URLs ([#13205](https://github.com/astral-sh/uv/pull/13205))
- Support more zip compression formats: bzip2, lzma, xz, zstd ([#13285](https://github.com/astral-sh/uv/pull/13285))
- Add support for downloading GraalPy ([#13172](https://github.com/astral-sh/uv/pull/13172))
- Improve error message when a virtual environment Python symlink is broken ([#12168](https://github.com/astral-sh/uv/pull/12168))
- Use `fs_err` for paths in symlinking errors ([#13303](https://github.com/astral-sh/uv/pull/13303))
- Minify and embed managed Python JSON at compile time ([#12967](https://github.com/astral-sh/uv/pull/12967))

### Preview features

- Build backend: Make preview default and add configuration docs ([#12804](https://github.com/astral-sh/uv/pull/12804))
- Build backend: Allow escaping in globs ([#13313](https://github.com/astral-sh/uv/pull/13313))
- Build backend: Make builds reproducible across operating systems ([#13171](https://github.com/astral-sh/uv/pull/13171))

### Configuration

- Add `python-downloads-json-url` option for `uv.toml` to configure custom Python installations via JSON URL ([#12974](https://github.com/astral-sh/uv/pull/12974))

### Bug fixes

- Check nested IO errors for retries ([#13260](https://github.com/astral-sh/uv/pull/13260))
- Accept `musllinux_1_0` as a valid platform tag ([#13289](https://github.com/astral-sh/uv/pull/13289))
- Fix discovery of pre-release managed Python versions in range requests ([#13330](https://github.com/astral-sh/uv/pull/13330))
- Respect locked script preferences in `uv run --with` ([#13283](https://github.com/astral-sh/uv/pull/13283))
- Retry streaming downloads on broken pipe errors ([#13281](https://github.com/astral-sh/uv/pull/13281))
- Treat already-installed base environment packages as preferences in `uv run --with` ([#13284](https://github.com/astral-sh/uv/pull/13284))
- Avoid enumerating sources in errors for path Python requests ([#13335](https://github.com/astral-sh/uv/pull/13335))
- Avoid re-creating virtual environment with `--no-sync` ([#13287](https://github.com/astral-sh/uv/pull/13287))

### Documentation

- Remove outdated description of index strategy ([#13326](https://github.com/astral-sh/uv/pull/13326))
- Update "Viewing the version" docs ([#13241](https://github.com/astral-sh/uv/pull/13241))
## 0.7.4

### Enhancements

- Add more context to external errors ([#13351](https://github.com/astral-sh/uv/pull/13351))
- Align indentation of long arguments ([#13394](https://github.com/astral-sh/uv/pull/13394))
- Preserve order of dependencies which are sorted naively ([#13334](https://github.com/astral-sh/uv/pull/13334))
- Align progress bars by largest name length ([#13266](https://github.com/astral-sh/uv/pull/13266))
- Reinstall local packages in `uv add` ([#13462](https://github.com/astral-sh/uv/pull/13462))
- Rename `--raw-sources` to `--raw` ([#13348](https://github.com/astral-sh/uv/pull/13348))
- Show 'Downgraded' when `self update` is used to install an older version ([#13340](https://github.com/astral-sh/uv/pull/13340))
- Suggest `uv self update` if required uv version is newer ([#13305](https://github.com/astral-sh/uv/pull/13305))
- Add 3.14 beta images to uv Docker images ([#13390](https://github.com/astral-sh/uv/pull/13390))
- Add comma after "i.e." in Conda environment error ([#13423](https://github.com/astral-sh/uv/pull/13423))
- Be more precise in unpinned packages warning ([#13426](https://github.com/astral-sh/uv/pull/13426))
- Fix detection of sorted dependencies when include-group is used ([#13354](https://github.com/astral-sh/uv/pull/13354))
- Fix display of HTTP responses in trace logs for retry of errors ([#13339](https://github.com/astral-sh/uv/pull/13339))
- Log skip reasons during Python installation key interpreter match checks ([#13472](https://github.com/astral-sh/uv/pull/13472))
- Redact credentials when displaying URLs ([#13333](https://github.com/astral-sh/uv/pull/13333))

### Bug fixes

- Avoid erroring on `pylock.toml` dependency entries ([#13384](https://github.com/astral-sh/uv/pull/13384))
- Avoid panics for cannot-be-a-base URLs ([#13406](https://github.com/astral-sh/uv/pull/13406))
- Ensure cached realm credentials are applied if no password is found for index URL ([#13463](https://github.com/astral-sh/uv/pull/13463))
- Fix `.tgz` parsing to respect true extension ([#13382](https://github.com/astral-sh/uv/pull/13382))
- Fix double self-dependency ([#13366](https://github.com/astral-sh/uv/pull/13366))
- Reject `pylock.toml` in `uv add -r` ([#13421](https://github.com/astral-sh/uv/pull/13421))
- Retain dot-separated wheel tags during cache prune ([#13379](https://github.com/astral-sh/uv/pull/13379))
- Retain trailing comments after PEP 723 metadata block ([#13460](https://github.com/astral-sh/uv/pull/13460))

### Documentation

- Use "export" instead of "install" in `uv export` arguments ([#13430](https://github.com/astral-sh/uv/pull/13430))
- Remove extra newline ([#13461](https://github.com/astral-sh/uv/pull/13461))

### Preview features

- Build backend: Normalize glob paths ([#13465](https://github.com/astral-sh/uv/pull/13465))

## 0.7.5

### Bug fixes

- Support case-sensitive module discovery in the build backend ([#13468](https://github.com/astral-sh/uv/pull/13468))
- Bump Simple cache bucket to v16 ([#13498](https://github.com/astral-sh/uv/pull/13498))
- Don't error when the script is too short for the buffer ([#13488](https://github.com/astral-sh/uv/pull/13488))
- Add missing word in "script not supported" error ([#13483](https://github.com/astral-sh/uv/pull/13483))
## 0.7.6

### Python

- Add Python 3.14 on musl
- Add free-threaded Python on musl
- Add Python 3.14.0a7
- Statically link `libpython` into the interpreter on Linux for a significant performance boost

See the [`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250517) for more details.

### Enhancements

- Improve compatibility of `VIRTUAL_ENV_PROMPT` value ([#13501](https://github.com/astral-sh/uv/pull/13501))
- Bump MSRV to 1.85 and Edition 2024 ([#13516](https://github.com/astral-sh/uv/pull/13516))

### Bug fixes

- Respect default extras in `uv remove` ([#13380](https://github.com/astral-sh/uv/pull/13380))

### Documentation

- Fix PowerShell code blocks ([#13511](https://github.com/astral-sh/uv/pull/13511))

## 0.7.7

### Python

- Work around third-party packages that (incorrectly) assume the interpreter is dynamically linking libpython
- Allow the experimental JIT to be enabled at runtime on Python 3.13 and 3.14 on macOS on aarch64 (Apple Silicon)

See the [`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250521) for more details.

### Bug fixes

- Make `uv version` lock and sync ([#13317](https://github.com/astral-sh/uv/pull/13317))
- Fix references to `ldd` in diagnostics to correctly refer to `ld.so` ([#13552](https://github.com/astral-sh/uv/pull/13552))

### Documentation

- Clarify adding SSH Git dependencies ([#13534](https://github.com/astral-sh/uv/pull/13534))

## 0.7.8

### Python

We are reverting most of our Python changes from `uv 0.7.6` and `uv 0.7.7` due to a miscompilation that makes the Python interpreter behave incorrectly, resulting in spurious type errors involving `str`. This issue seems to be isolated to x86_64 Linux, and affected at least Python 3.12, 3.13, and 3.14.

The following changes that were introduced in those versions of uv are temporarily being reverted while we test and deploy a proper fix for the miscompilation:

- Add Python 3.14 on musl
- Add free-threaded Python on musl
- Add Python 3.14.0a7
- Statically link `libpython` into the interpreter on Linux for a significant performance boost

See [the issue for details](https://github.com/astral-sh/uv/issues/13610).

### Documentation

- Remove misleading line in pin documentation ([#13611](https://github.com/astral-sh/uv/pull/13611))
## 0.7.9

### Python

The changes reverted in [0.7.8](#078) have been restored.

See the [`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250529) for more details.

### Enhancements

- Improve obfuscation of credentials in URLs ([#13560](https://github.com/astral-sh/uv/pull/13560))
- Allow running non-default Python implementations via `uvx` ([#13583](https://github.com/astral-sh/uv/pull/13583))
- Add `uvw` as alias for `uv` without console window on Windows ([#11786](https://github.com/astral-sh/uv/pull/11786))
- Allow discovery of x86-64 managed Python builds on macOS ([#13722](https://github.com/astral-sh/uv/pull/13722))
- Differentiate between implicit vs explicit architecture requests ([#13723](https://github.com/astral-sh/uv/pull/13723))
- Implement ordering for Python architectures to prefer native installations ([#13709](https://github.com/astral-sh/uv/pull/13709))
- Only show the first match per platform (and architecture) by default in `uv python list` ([#13721](https://github.com/astral-sh/uv/pull/13721))
- Write the path of the parent environment to an `extends-environment` key in the `pyvenv.cfg` file of an ephemeral environment ([#13598](https://github.com/astral-sh/uv/pull/13598))
- Improve the error message when libc cannot be found, e.g., when using the distroless containers ([#13549](https://github.com/astral-sh/uv/pull/13549))

### Performance

- Avoid rendering info log level ([#13642](https://github.com/astral-sh/uv/pull/13642))
- Improve performance of `uv-python` crate's manylinux submodule ([#11131](https://github.com/astral-sh/uv/pull/11131))
- Optimize `Version` display ([#13643](https://github.com/astral-sh/uv/pull/13643))
- Reduce number of reference-checks for `uv cache clean` ([#13669](https://github.com/astral-sh/uv/pull/13669))

### Bug fixes

- Avoid reinstalling dependency group members with `--all-packages` ([#13678](https://github.com/astral-sh/uv/pull/13678))
- Don't fail direct URL hash checking with dependency metadata ([#13736](https://github.com/astral-sh/uv/pull/13736))
- Exit early on `self update` if global `--offline` is set ([#13663](https://github.com/astral-sh/uv/pull/13663))
- Fix cases where the uv lock is incorrectly marked as out of date ([#13635](https://github.com/astral-sh/uv/pull/13635))
- Include pre-release versions in `uv python install --reinstall` ([#13645](https://github.com/astral-sh/uv/pull/13645))
- Set `LC_ALL=C` for git when checking git worktree ([#13637](https://github.com/astral-sh/uv/pull/13637))
- Avoid rejecting Windows paths for remote Python download JSON targets ([#13625](https://github.com/astral-sh/uv/pull/13625))

### Preview

- Add `uv add --bounds` to configure version constraints ([#12946](https://github.com/astral-sh/uv/pull/12946))

### Documentation

- Add documentation about Python versions to Tools concept page ([#7673](https://github.com/astral-sh/uv/pull/7673))
- Add example of enabling Dependabot ([#13692](https://github.com/astral-sh/uv/pull/13692))
- Fix `exclude-newer` date format for persistent configuration files ([#13706](https://github.com/astral-sh/uv/pull/13706))
- Quote versions variables in GitLab documentation ([#13679](https://github.com/astral-sh/uv/pull/13679))
- Update Dependabot support status ([#13690](https://github.com/astral-sh/uv/pull/13690))
- Explicitly specify to add a new repo entry to the repos list item in the `.pre-commit-config.yaml` ([#10243](https://github.com/astral-sh/uv/pull/10243))
- Add integration with marimo guide ([#13691](https://github.com/astral-sh/uv/pull/13691))
- Add pronunciation to README ([#5336](https://github.com/astral-sh/uv/pull/5336))

## 0.7.10

### Enhancements

- Add `--show-extras` to `uv tool list` ([#13783](https://github.com/astral-sh/uv/pull/13783))
- Add dynamically generated sysconfig replacement mappings ([#13441](https://github.com/astral-sh/uv/pull/13441))
- Add data locations to install wheel logs ([#13797](https://github.com/astral-sh/uv/pull/13797))

### Bug fixes

- Avoid redaction of placeholder `git` username when using SSH authentication ([#13799](https://github.com/astral-sh/uv/pull/13799))
- Propagate credentials to files on devpi indexes ending in `/+simple` ([#13743](https://github.com/astral-sh/uv/pull/13743))
- Restore retention of credentials for direct URLs in `uv export` ([#13809](https://github.com/astral-sh/uv/pull/13809))
## 0.7.11

### Python

- Add Python 3.14.0b1
- Add Python 3.13.4
- Add Python 3.12.11
- Add Python 3.11.13
- Add Python 3.10.18
- Add Python 3.9.23

### Enhancements

- Add Pyodide support ([#12731](https://github.com/astral-sh/uv/pull/12731))
- Better error message for version specifier with missing operator ([#13803](https://github.com/astral-sh/uv/pull/13803))

### Bug fixes

- Downgrade `reqwest` and `hyper-util` to resolve connection reset errors over IPv6 ([#13835](https://github.com/astral-sh/uv/pull/13835))
- Prefer the `uv` binary's version when checking if it's up to date ([#13840](https://github.com/astral-sh/uv/pull/13840))

### Documentation

- Use "terminal driver" instead of "shell" in `SIGINT` docs ([#13787](https://github.com/astral-sh/uv/pull/13787))

## 0.7.12

### Enhancements

- Add `uv python pin --rm` to remove `.python-version` pins ([#13860](https://github.com/astral-sh/uv/pull/13860))
- Don't hint at versions removed by `exclude-newer` ([#13884](https://github.com/astral-sh/uv/pull/13884))
- Add hint to use `tool.uv.environments` on resolution error ([#13455](https://github.com/astral-sh/uv/pull/13455))
- Add hint to use `tool.uv.required-environments` on resolution error ([#13575](https://github.com/astral-sh/uv/pull/13575))
- Improve `python pin` error messages ([#13862](https://github.com/astral-sh/uv/pull/13862))

### Bug fixes

- Lock environments during `uv sync`, `uv add` and `uv remove` to prevent race conditions ([#13869](https://github.com/astral-sh/uv/pull/13869))
- Add `--no-editable` to `uv export` for `pylock.toml` ([#13852](https://github.com/astral-sh/uv/pull/13852))

### Documentation

- List `.gitignore` in project init files ([#13855](https://github.com/astral-sh/uv/pull/13855))
- Move the pip interface documentation into the concepts section ([#13841](https://github.com/astral-sh/uv/pull/13841))
- Remove the configuration section in favor of concepts / reference ([#13842](https://github.com/astral-sh/uv/pull/13842))
- Update Git and GitHub Actions docs to mention `gh auth login` ([#13850](https://github.com/astral-sh/uv/pull/13850))

### Preview

- Fix directory glob traversal fallback preventing exclusion of all files ([#13882](https://github.com/astral-sh/uv/pull/13882))
## 0.7.13

### Python

- Add Python 3.14.0b2
- Add Python 3.13.5
- Fix stability of `uuid.getnode` on 3.13

See the [`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250612) for more details.

### Enhancements

- Download versions in `uv python pin` if not found ([#13946](https://github.com/astral-sh/uv/pull/13946))
- Use TTY detection to determine if SIGINT forwarding is enabled ([#13925](https://github.com/astral-sh/uv/pull/13925))
- Avoid fetching an exact, cached Git commit, even if it isn't locked ([#13748](https://github.com/astral-sh/uv/pull/13748))
- Add `zstd` and `deflate` to `Accept-Encoding` ([#13982](https://github.com/astral-sh/uv/pull/13982))
- Build binaries for riscv64 ([#12688](https://github.com/astral-sh/uv/pull/12688))

### Bug fixes

- Check if relative URL is valid directory before treating as index ([#13917](https://github.com/astral-sh/uv/pull/13917))
- Ignore Python discovery errors during `uv python pin` ([#13944](https://github.com/astral-sh/uv/pull/13944))
- Do not allow `uv add --group ... --script` ([#13997](https://github.com/astral-sh/uv/pull/13997))

### Preview changes

- Build backend: Support namespace packages ([#13833](https://github.com/astral-sh/uv/pull/13833))

### Documentation

- Add 3.14 to the supported platform reference ([#13990](https://github.com/astral-sh/uv/pull/13990))
- Add an `llms.txt` to uv ([#13929](https://github.com/astral-sh/uv/pull/13929))
- Add supported macOS version to the platform reference ([#13993](https://github.com/astral-sh/uv/pull/13993))
- Update platform support reference to include Python implementation list ([#13991](https://github.com/astral-sh/uv/pull/13991))
- Update pytorch.md ([#13899](https://github.com/astral-sh/uv/pull/13899))
- Update the CLI help and reference to include references to the Python bin directory ([#13978](https://github.com/astral-sh/uv/pull/13978))
## 0.7.14

### Enhancements

- Add XPU to `--torch-backend` ([#14172](https://github.com/astral-sh/uv/pull/14172))
- Add ROCm backends to `--torch-backend` ([#14120](https://github.com/astral-sh/uv/pull/14120))
- Remove preview label from `--torch-backend` ([#14119](https://github.com/astral-sh/uv/pull/14119))
- Add `[tool.uv.dependency-groups].mygroup.requires-python` ([#13735](https://github.com/astral-sh/uv/pull/13735)); a sketch follows this list
- Add auto-detection for AMD GPUs ([#14176](https://github.com/astral-sh/uv/pull/14176))
- Show retries for HTTP status code errors ([#13897](https://github.com/astral-sh/uv/pull/13897))
- Support transparent Python patch version upgrades ([#13954](https://github.com/astral-sh/uv/pull/13954))
- Warn on empty index directory ([#13940](https://github.com/astral-sh/uv/pull/13940))
- Publish to DockerHub ([#14088](https://github.com/astral-sh/uv/pull/14088))
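
A sketch of the `[tool.uv.dependency-groups]` setting named above (the group name and version bound are illustrative):

```toml
[dependency-groups]
dev = ["pytest"]

[tool.uv.dependency-groups]
# Require a newer Python only when the `dev` group is enabled.
dev = { requires-python = ">=3.12" }
```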

### Performance

- Make cold resolves about 10% faster ([#14035](https://github.com/astral-sh/uv/pull/14035))

### Bug fixes

- Don't use walrus operator in interpreter query script ([#14108](https://github.com/astral-sh/uv/pull/14108))
- Fix handling of changes to `requires-python` ([#14076](https://github.com/astral-sh/uv/pull/14076))
- Fix implied `platform_machine` marker for `win_amd64` platform tag ([#14041](https://github.com/astral-sh/uv/pull/14041))
- Only update existing symlink directories on preview uninstall ([#14179](https://github.com/astral-sh/uv/pull/14179))
- Serialize Python requests for tools as canonicalized strings ([#14109](https://github.com/astral-sh/uv/pull/14109))
- Support netrc and same-origin credential propagation on index redirects ([#14126](https://github.com/astral-sh/uv/pull/14126))
- Support reading `dependency-groups` from pyproject.tomls with no `[project]` ([#13742](https://github.com/astral-sh/uv/pull/13742))
- Handle an existing shebang in `uv init --script` ([#14141](https://github.com/astral-sh/uv/pull/14141))
- Prevent concurrent updates of the environment in `uv run` ([#14153](https://github.com/astral-sh/uv/pull/14153))
- Filter managed Python distributions by platform before querying when included in request ([#13936](https://github.com/astral-sh/uv/pull/13936))

### Documentation

- Replace cuda124 with cuda128 ([#14168](https://github.com/astral-sh/uv/pull/14168))
- Document the way member sources shadow workspace sources ([#14136](https://github.com/astral-sh/uv/pull/14136))
- Sync documented PyTorch integration index for CUDA and ROCm versions from PyTorch website ([#14100](https://github.com/astral-sh/uv/pull/14100))
## 0.7.15

### Enhancements

- Consistently use `Ordering::Relaxed` for standalone atomic use cases ([#14190](https://github.com/astral-sh/uv/pull/14190))
- Warn on ambiguous relative paths for `--index` ([#14152](https://github.com/astral-sh/uv/pull/14152))
- Skip GitHub fast path when rate-limited ([#13033](https://github.com/astral-sh/uv/pull/13033))
- Preserve newlines in `schema.json` descriptions ([#13693](https://github.com/astral-sh/uv/pull/13693))

### Bug fixes

- Add check for using minor version link when creating a venv on Windows ([#14252](https://github.com/astral-sh/uv/pull/14252))
- Strip query parameters when parsing source URL ([#14224](https://github.com/astral-sh/uv/pull/14224))

### Documentation

- Add a link to PyPI FAQ to clarify what a per-project token is ([#14242](https://github.com/astral-sh/uv/pull/14242))

### Preview features

- Allow symlinks in the build backend ([#14212](https://github.com/astral-sh/uv/pull/14212))

## 0.7.16

### Python

- Add Python 3.14.0b3

See the [`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250626) for more details.

### Enhancements

- Include path or URL when failing to convert in lockfile ([#14292](https://github.com/astral-sh/uv/pull/14292))
- Warn when `~=` is used as a Python version specifier without a patch version ([#14008](https://github.com/astral-sh/uv/pull/14008))
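
The warning exists because, under PEP 440 semantics, `~=` without a patch version only constrains the major version; a sketch of the two spellings:

```toml
[project]
# ~=3.12 means >=3.12, <4.0 -- likely looser than intended
requires-python = "~=3.12"
# ~=3.12.0 means >=3.12.0, <3.13 -- pins the minor version
# requires-python = "~=3.12.0"
```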

### Preview features

- Ensure preview default Python installs are upgradeable ([#14261](https://github.com/astral-sh/uv/pull/14261))

### Performance

- Share workspace cache between lock and sync operations ([#14321](https://github.com/astral-sh/uv/pull/14321))

### Bug fixes

- Allow local indexes to reference remote files ([#14294](https://github.com/astral-sh/uv/pull/14294))
- Avoid rendering desugared prefix matches in error messages ([#14195](https://github.com/astral-sh/uv/pull/14195))
- Avoid using path URL for workspace Git dependencies in `requirements.txt` ([#14288](https://github.com/astral-sh/uv/pull/14288))
- Normalize index URLs to remove trailing slash ([#14245](https://github.com/astral-sh/uv/pull/14245))
- Respect URL-encoded credentials in redirect location ([#14315](https://github.com/astral-sh/uv/pull/14315))
- Lock the source tree when running setuptools, to protect concurrent builds ([#14174](https://github.com/astral-sh/uv/pull/14174))

### Documentation

- Note that GCP Artifact Registry download URLs must have `/simple` component ([#14251](https://github.com/astral-sh/uv/pull/14251))

## 0.7.17

### Bug fixes

- Apply build constraints when resolving `--with` dependencies ([#14340](https://github.com/astral-sh/uv/pull/14340))
- Drop trailing slashes when converting index URL from URL ([#14346](https://github.com/astral-sh/uv/pull/14346))
- Ignore `UV_PYTHON_CACHE_DIR` when empty ([#14336](https://github.com/astral-sh/uv/pull/14336))
- Fix error message ordering for `pyvenv.cfg` version conflict ([#14329](https://github.com/astral-sh/uv/pull/14329))

## 0.7.18

### Python

- Added arm64 Windows Python 3.11, 3.12, 3.13, and 3.14. These are not downloaded by default, since x86-64 Python has broader ecosystem support on Windows. However, they can be requested with `cpython-<version>-windows-aarch64`.
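
For example, to request one of these builds explicitly (the version here is chosen for illustration):

```console
$ uv python install cpython-3.13-windows-aarch64
```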

See the [python-build-standalone release](https://github.com/astral-sh/python-build-standalone/releases/tag/20250630) for more details.

### Enhancements

- Keep track of retries in `ManagedPythonDownload::fetch_with_retry` ([#14378](https://github.com/astral-sh/uv/pull/14378))
- Reuse build (virtual) environments across resolution and installation ([#14338](https://github.com/astral-sh/uv/pull/14338))
- Improve trace message for cached Python interpreter query ([#14328](https://github.com/astral-sh/uv/pull/14328))
- Use parsed URLs for conflicting URL error message ([#14380](https://github.com/astral-sh/uv/pull/14380))

### Preview features

- Ignore invalid build backend settings when not building ([#14372](https://github.com/astral-sh/uv/pull/14372))

### Bug fixes

- Fix equals-star and tilde-equals with `python_version` and `python_full_version` ([#14271](https://github.com/astral-sh/uv/pull/14271))
- Include the canonical path in the interpreter query cache key ([#14331](https://github.com/astral-sh/uv/pull/14331))
- Only drop build directories on program exit ([#14304](https://github.com/astral-sh/uv/pull/14304))
- Error instead of panic on conflict between global and subcommand flags ([#14368](https://github.com/astral-sh/uv/pull/14368))
- Consistently normalize trailing slashes on URLs with no path segments ([#14349](https://github.com/astral-sh/uv/pull/14349))

### Documentation

- Add instructions for publishing to JFrog's Artifactory ([#14253](https://github.com/astral-sh/uv/pull/14253))
- Edits to the build backend documentation ([#14376](https://github.com/astral-sh/uv/pull/14376))
## 0.7.19
|
||||
|
||||
The **[uv build backend](https://docs.astral.sh/uv/concepts/build-backend/) is now stable**, and
|
||||
considered ready for production use.
|
||||
|
||||
The uv build backend is a great choice for pure Python projects. It has reasonable defaults, with
|
||||
the goal of requiring zero configuration for most users, but provides flexible configuration to
|
||||
accommodate most Python project structures. It integrates tightly with uv, to improve messaging and
|
||||
user experience. It validates project metadata and structures, preventing common mistakes. And,
|
||||
finally, it's very fast — `uv sync` on a new project (from `uv init`) is 10-30x faster than with
|
||||
other build backends.
|
||||
|
||||
To use uv as a build backend in an existing project, add `uv_build` to the `[build-system]` section
|
||||
in your `pyproject.toml`:
|
||||
|
||||
```toml
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.7.19,<0.8.0"]
|
||||
build-backend = "uv_build"
|
||||
```
|
||||
|
||||
In a future release, it will replace `hatchling` as the default in `uv init`. As before, uv will
|
||||
remain compatible with all standards-compliant build backends.
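
For reference, a complete minimal `pyproject.toml` for a package using the uv build backend might
look like the following sketch (the project name and version here are illustrative):

```toml
[project]
name = "example-package"   # illustrative name
version = "0.1.0"

[build-system]
requires = ["uv_build>=0.7.19,<0.8.0"]
build-backend = "uv_build"
```

With the default settings, the backend would then expect the module to live at
`src/example_package/__init__.py`.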

### Python

- Add PGO distributions of Python for aarch64 Linux, which are optimized for better performance

  See the
  [python-build-standalone release](https://github.com/astral-sh/python-build-standalone/releases/tag/20250702)
  for more details.

### Enhancements

- Ignore Python patch version for `--universal` pip compile
  ([#14405](https://github.com/astral-sh/uv/pull/14405))
- Update the tilde version specifier warning to include more context
  ([#14335](https://github.com/astral-sh/uv/pull/14335))
- Clarify behavior and hint on tool install when no executables are available
  ([#14423](https://github.com/astral-sh/uv/pull/14423))

### Bug fixes

- Make project and interpreter lock acquisition non-fatal
  ([#14404](https://github.com/astral-sh/uv/pull/14404))
- Include `sys.prefix` in cached environment keys to avoid `--with` collisions across projects
  ([#14403](https://github.com/astral-sh/uv/pull/14403))

### Documentation

- Add a migration guide from pip to uv projects
  ([#12382](https://github.com/astral-sh/uv/pull/12382))

## 0.7.20

### Python

- Add Python 3.14.0b4
- Add zstd support to Python 3.14 on Unix (it was already available on Windows)
- Add PyPy 7.3.20 (for Python 3.11.13)

  See the [PyPy](https://pypy.org/posts/2025/07/pypy-v7320-release.html) and
  [`python-build-standalone`](https://github.com/astral-sh/python-build-standalone/releases/tag/20250708)
  release notes for more details.

### Enhancements

- Add `--workspace` flag to `uv add` ([#14496](https://github.com/astral-sh/uv/pull/14496))
- Add auto-detection for Intel GPUs ([#14386](https://github.com/astral-sh/uv/pull/14386))
- Drop trailing arguments when writing shebangs
  ([#14519](https://github.com/astral-sh/uv/pull/14519))
- Add debug message when skipping Python downloads
  ([#14509](https://github.com/astral-sh/uv/pull/14509))
- Add support for declaring multiple modules in namespace packages
  ([#14460](https://github.com/astral-sh/uv/pull/14460))
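
As a sketch of the new multiple-module declaration (the module names are illustrative, mirroring
the tests added alongside this change):

```toml
[tool.uv.build-backend]
# One regular module plus two modules sharing a namespace. Each module needs an
# `__init__.py` under `src/`, while the shared `simple_namespace` parent must not have one.
module-name = ["foo", "simple_namespace.part_a", "simple_namespace.part_b"]
```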

### Bug fixes

- Revert normalization of trailing slashes on index URLs
  ([#14511](https://github.com/astral-sh/uv/pull/14511))
- Fix forced resolution with all extras in `uv version`
  ([#14434](https://github.com/astral-sh/uv/pull/14434))
- Fix handling of pre-releases in preferences ([#14498](https://github.com/astral-sh/uv/pull/14498))
- Remove transparent variants in `uv-extract` to enable retries
  ([#14450](https://github.com/astral-sh/uv/pull/14450))

### Rust API

- Add method to get packages involved in a `NoSolutionError`
  ([#14457](https://github.com/astral-sh/uv/pull/14457))
- Make `ErrorTree` for `NoSolutionError` public
  ([#14444](https://github.com/astral-sh/uv/pull/14444))

### Documentation

- Finish incomplete sentence in pip migration guide
  ([#14432](https://github.com/astral-sh/uv/pull/14432))
- Remove `cache-dependency-glob` examples for `setup-uv`
  ([#14493](https://github.com/astral-sh/uv/pull/14493))
- Remove `uv pip sync` suggestion with `pyproject.toml`
  ([#14510](https://github.com/astral-sh/uv/pull/14510))
- Update documentation for GitHub to use `setup-uv@v6`
  ([#14490](https://github.com/astral-sh/uv/pull/14490))

## 0.7.21

### Python

- Restore the SQLite `fts4`, `fts5`, `rtree`, and `geopoly` extensions on macOS and Linux

  See the
  [`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250712)
  for more details.

### Enhancements

- Add `--python-platform` to `uv sync` ([#14320](https://github.com/astral-sh/uv/pull/14320))
- Support pre-releases in `uv version --bump` ([#13578](https://github.com/astral-sh/uv/pull/13578))
- Add `-w` shorthand for `--with` ([#14530](https://github.com/astral-sh/uv/pull/14530))
- Add an exception handler on Windows to display information on crash
  ([#14582](https://github.com/astral-sh/uv/pull/14582))
- Add hint when Python downloads are disabled ([#14522](https://github.com/astral-sh/uv/pull/14522))
- Add `UV_HTTP_RETRIES` to customize retry counts
  ([#14544](https://github.com/astral-sh/uv/pull/14544))
- Follow leaf symlinks matched by globs in `cache-key`
  ([#13438](https://github.com/astral-sh/uv/pull/13438))
- Support parent path components (`..`) in globs in `cache-key`
  ([#13469](https://github.com/astral-sh/uv/pull/13469))
- Improve `cache-key` performance ([#13469](https://github.com/astral-sh/uv/pull/13469))
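
The `cache-key` items above refer to the `tool.uv.cache-keys` setting; a sketch of a configuration
exercising the new glob behavior (the paths are illustrative):

```toml
[tool.uv]
# Rebuild when these files change. Globs may now contain parent path
# components (`..`), and leaf symlinks matched by globs are followed.
cache-keys = [{ file = "pyproject.toml" }, { file = "../shared-configs/**/*.toml" }]
```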

### Preview features

- Add `uv sync --output-format json` ([#13689](https://github.com/astral-sh/uv/pull/13689))

### Bug fixes

- Do not re-resolve with a new Python version in `uv tool` if it is incompatible with `--python`
  ([#14606](https://github.com/astral-sh/uv/pull/14606))

### Documentation

- Document how to nest dependency groups with `include-group` (see the sketch after this list)
  ([#14539](https://github.com/astral-sh/uv/pull/14539))
- Fix repeated word in Pyodide doc ([#14554](https://github.com/astral-sh/uv/pull/14554))
- Update CONTRIBUTING.md with instructions to format Markdown files via Docker
  ([#14246](https://github.com/astral-sh/uv/pull/14246))
- Fix version number for `setup-python` ([#14533](https://github.com/astral-sh/uv/pull/14533))
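
For the `include-group` nesting mentioned above, a minimal sketch (the group names are
illustrative):

```toml
[dependency-groups]
test = ["pytest"]
lint = ["ruff"]
# `dev` composes the other groups instead of repeating their members.
dev = [{ include-group = "test" }, { include-group = "lint" }]
```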

## 0.7.22

### Python

- Upgrade GraalPy to 24.2.2

  See the [GraalPy release notes](https://github.com/oracle/graalpython/releases/tag/graal-24.2.2)
  for more details.

### Configuration

- Add `UV_COMPILE_BYTECODE_TIMEOUT` environment variable
  ([#14369](https://github.com/astral-sh/uv/pull/14369))
- Allow users to override index `cache-control` headers
  ([#14620](https://github.com/astral-sh/uv/pull/14620))
- Add `UV_LIBC` to override libc selection in multi-libc environments
  ([#14646](https://github.com/astral-sh/uv/pull/14646))

### Bug fixes

- Fix `--all-arches` when paired with `--only-downloads`
  ([#14629](https://github.com/astral-sh/uv/pull/14629))
- Skip Windows Python interpreters that return a broken MSIX package code
  ([#14636](https://github.com/astral-sh/uv/pull/14636))
- Warn on invalid `uv.toml` when provided via direct path
  ([#14653](https://github.com/astral-sh/uv/pull/14653))
- Improve async signal safety in Windows exception handler
  ([#14619](https://github.com/astral-sh/uv/pull/14619))

### Documentation

- Mention the `revision` in the lockfile versioning doc
  ([#14634](https://github.com/astral-sh/uv/pull/14634))
- Move "Conflicting dependencies" to the "Resolution" page
  ([#14633](https://github.com/astral-sh/uv/pull/14633))
- Rename "Dependency specifiers" section to exclude PEP 508 reference
  ([#14631](https://github.com/astral-sh/uv/pull/14631))
- Suggest `uv cache clean` prior to `--reinstall`
  ([#14659](https://github.com/astral-sh/uv/pull/14659))

### Preview features

- Make preview Python registration on Windows non-fatal
  ([#14614](https://github.com/astral-sh/uv/pull/14614))
- Update preview installation of Python executables to be non-fatal
  ([#14612](https://github.com/astral-sh/uv/pull/14612))
- Add `uv python update-shell` ([#14627](https://github.com/astral-sh/uv/pull/14627))

@@ -6,6 +6,8 @@ doc-valid-idents = [
     "GraalPy",
     "ReFS",
     "PyTorch",
+    "ROCm",
+    "XPU",
     ".." # Include the defaults
 ]

@@ -35,7 +37,7 @@ disallowed-methods = [
     "std::fs::soft_link",
     "std::fs::symlink_metadata",
     "std::fs::write",
-    "std::os::unix::fs::symlink",
-    "std::os::windows::fs::symlink_dir",
-    "std::os::windows::fs::symlink_file",
+    { path = "std::os::unix::fs::symlink", allow-invalid = true },
+    { path = "std::os::windows::fs::symlink_dir", allow-invalid = true },
+    { path = "std::os::windows::fs::symlink_file", allow-invalid = true },
 ]

@@ -33,8 +33,8 @@ tracing = { workspace = true }
 url = { workspace = true }

 [dev-dependencies]
-insta = { version = "1.40.0" }
+insta = { workspace = true }
 tempfile = { workspace = true }
-test-log = { version = "0.2.16", features = ["trace"], default-features = false }
+test-log = { workspace = true }
 tokio = { workspace = true }
 wiremock = { workspace = true }

@@ -86,7 +86,7 @@ impl Indexes {
         Self(FxHashSet::default())
     }

-    /// Create a new [`AuthIndexUrls`] from an iterator of [`AuthIndexUrl`]s.
+    /// Create a new [`Indexes`] instance from an iterator of [`Index`]s.
     pub fn from_indexes(urls: impl IntoIterator<Item = Index>) -> Self {
         let mut index_urls = Self::new();
         for url in urls {

@@ -15,6 +15,7 @@ mod credentials;
 mod index;
 mod keyring;
 mod middleware;
+mod providers;
 mod realm;

 // TODO(zanieb): Consider passing a cache explicitly throughout

@@ -7,6 +7,7 @@ use reqwest::{Request, Response};
 use reqwest_middleware::{Error, Middleware, Next};
 use tracing::{debug, trace, warn};

+use crate::providers::HuggingFaceProvider;
 use crate::{
     CREDENTIALS_CACHE, CredentialsCache, KeyringProvider,
     cache::FetchUrl,

@@ -457,9 +458,8 @@ impl AuthMiddleware {
             Some(credentials)
         };

-        return self
-            .complete_request(credentials, request, extensions, next, auth_policy)
-            .await;
+        self.complete_request(credentials, request, extensions, next, auth_policy)
+            .await
     }

     /// Fetch credentials for a URL.

@@ -503,6 +503,13 @@ impl AuthMiddleware {
             return credentials;
         }

+        // Support for known providers, like Hugging Face.
+        if let Some(credentials) = HuggingFaceProvider::credentials_for(url).map(Arc::new) {
+            debug!("Found Hugging Face credentials for {url}");
+            self.cache().fetches.done(key, Some(credentials.clone()));
+            return Some(credentials);
+        }
+
         // Netrc support based on: <https://github.com/gribouille/netrc>.
         let credentials = if let Some(credentials) = self.netrc.get().and_then(|netrc| {
             debug!("Checking netrc for credentials for {url}");

@@ -0,0 +1,49 @@
+use std::sync::LazyLock;
+use tracing::debug;
+use url::Url;
+
+use uv_static::EnvVars;
+
+use crate::Credentials;
+use crate::realm::{Realm, RealmRef};
+
+/// The [`Realm`] for the Hugging Face platform.
+static HUGGING_FACE_REALM: LazyLock<Realm> = LazyLock::new(|| {
+    let url = Url::parse("https://huggingface.co").expect("Failed to parse Hugging Face URL");
+    Realm::from(&url)
+});
+
+/// The authentication token for the Hugging Face platform, if set.
+static HUGGING_FACE_TOKEN: LazyLock<Option<Vec<u8>>> = LazyLock::new(|| {
+    // Extract the Hugging Face token from the environment variable, if it exists.
+    let hf_token = std::env::var(EnvVars::HF_TOKEN)
+        .ok()
+        .map(String::into_bytes)
+        .filter(|token| !token.is_empty())?;
+
+    if std::env::var_os(EnvVars::UV_NO_HF_TOKEN).is_some() {
+        debug!("Ignoring Hugging Face token from environment due to `UV_NO_HF_TOKEN`");
+        return None;
+    }
+
+    debug!("Found Hugging Face token in environment");
+    Some(hf_token)
+});
+
+/// A provider for authentication credentials for the Hugging Face platform.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub(crate) struct HuggingFaceProvider;
+
+impl HuggingFaceProvider {
+    /// Returns the credentials for the Hugging Face platform, if available.
+    pub(crate) fn credentials_for(url: &Url) -> Option<Credentials> {
+        if RealmRef::from(url) == *HUGGING_FACE_REALM {
+            if let Some(token) = HUGGING_FACE_TOKEN.as_ref() {
+                return Some(Credentials::Bearer {
+                    token: token.clone(),
+                });
+            }
+        }
+        None
+    }
+}

@@ -1,5 +1,5 @@
+use std::hash::{Hash, Hasher};
 use std::{fmt::Display, fmt::Formatter};

 use url::Url;
 use uv_small_str::SmallString;

@@ -22,7 +22,7 @@ use uv_small_str::SmallString;
 // The port is only allowed to differ if it matches the "default port" for the scheme.
 // However, `url` (and therefore `reqwest`) sets the `port` to `None` if it matches the default port
 // so we do not need any special handling here.
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone)]
 pub(crate) struct Realm {
     scheme: SmallString,
     host: Option<SmallString>,

@@ -59,6 +59,76 @@ impl Display for Realm {
     }
 }

+impl PartialEq for Realm {
+    fn eq(&self, other: &Self) -> bool {
+        RealmRef::from(self) == RealmRef::from(other)
+    }
+}
+
+impl Eq for Realm {}
+
+impl Hash for Realm {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        RealmRef::from(self).hash(state);
+    }
+}
+
+/// A reference to a [`Realm`] that can be used for zero-allocation comparisons.
+#[derive(Debug, Copy, Clone)]
+pub(crate) struct RealmRef<'a> {
+    scheme: &'a str,
+    host: Option<&'a str>,
+    port: Option<u16>,
+}
+
+impl<'a> From<&'a Url> for RealmRef<'a> {
+    fn from(url: &'a Url) -> Self {
+        Self {
+            scheme: url.scheme(),
+            host: url.host_str(),
+            port: url.port(),
+        }
+    }
+}
+
+impl PartialEq for RealmRef<'_> {
+    fn eq(&self, other: &Self) -> bool {
+        self.scheme == other.scheme && self.host == other.host && self.port == other.port
+    }
+}
+
+impl Eq for RealmRef<'_> {}
+
+impl Hash for RealmRef<'_> {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.scheme.hash(state);
+        self.host.hash(state);
+        self.port.hash(state);
+    }
+}
+
+impl<'a> PartialEq<RealmRef<'a>> for Realm {
+    fn eq(&self, rhs: &RealmRef<'a>) -> bool {
+        RealmRef::from(self) == *rhs
+    }
+}
+
+impl PartialEq<Realm> for RealmRef<'_> {
+    fn eq(&self, rhs: &Realm) -> bool {
+        *self == RealmRef::from(rhs)
+    }
+}
+
+impl<'a> From<&'a Realm> for RealmRef<'a> {
+    fn from(realm: &'a Realm) -> Self {
+        Self {
+            scheme: &realm.scheme,
+            host: realm.host.as_deref(),
+            port: realm.port,
+        }
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use url::{ParseError, Url};

@@ -18,11 +18,6 @@ workspace = true
 doctest = false
 bench = false

-[[bench]]
-name = "distribution-filename"
-path = "benches/distribution_filename.rs"
-harness = false
-
 [[bench]]
 name = "uv"
 path = "benches/uv.rs"

@@ -34,7 +29,6 @@ uv-client = { workspace = true }
 uv-configuration = { workspace = true }
 uv-dispatch = { workspace = true }
 uv-distribution = { workspace = true }
-uv-distribution-filename = { workspace = true }
 uv-distribution-types = { workspace = true }
 uv-extract = { workspace = true, optional = true }
 uv-install-wheel = { workspace = true }

@@ -48,8 +42,10 @@ uv-types = { workspace = true }
 uv-workspace = { workspace = true }

 anyhow = { workspace = true }
-codspeed-criterion-compat = { version = "2.7.2", default-features = false, optional = true }
-criterion = { version = "0.6.0", default-features = false, features = ["async_tokio"] }
+codspeed-criterion-compat = { version = "3.0.2", default-features = false, optional = true }
+criterion = { version = "0.6.0", default-features = false, features = [
+    "async_tokio",
+] }
 jiff = { workspace = true }
 tokio = { workspace = true }

@@ -1,168 +0,0 @@
-use std::str::FromStr;
-
-use uv_bench::criterion::{
-    BenchmarkId, Criterion, Throughput, criterion_group, criterion_main, measurement::WallTime,
-};
-use uv_distribution_filename::WheelFilename;
-use uv_platform_tags::{AbiTag, LanguageTag, PlatformTag, Tags};
-
-/// A set of platform tags extracted from burntsushi's Archlinux workstation.
-/// We could just re-create these via `Tags::from_env`, but those might differ
-/// depending on the platform. This way, we always use the same data. It also
-/// lets us assert tag compatibility regardless of where the benchmarks run.
-const PLATFORM_TAGS: &[(&str, &str, &str)] = include!("../inputs/platform_tags.rs");
-
-/// A set of wheel names used in the benchmarks below. We pick short and long
-/// names, as well as compatible and not-compatibles (with `PLATFORM_TAGS`)
-/// names.
-///
-/// The tuple is (name, filename, compatible) where `name` is a descriptive
-/// name for humans used in the benchmark definition. And `filename` is the
-/// actual wheel filename we want to benchmark operation on. And `compatible`
-/// indicates whether the tags in the wheel filename are expected to be
-/// compatible with the tags in `PLATFORM_TAGS`.
-const WHEEL_NAMES: &[(&str, &str, bool)] = &[
-    // This tests a case with a very short name that *is* compatible with
-    // PLATFORM_TAGS. It only uses one tag for each component (one Python
-    // version, one ABI and one platform).
-    (
-        "flyte-short-compatible",
-        "ipython-2.1.0-py3-none-any.whl",
-        true,
-    ),
-    // This tests a case with a long name that is *not* compatible. That
-    // is, all platform tags need to be checked against the tags in the
-    // wheel filename. This is essentially the worst possible practical
-    // case.
-    (
-        "flyte-long-incompatible",
-        "protobuf-3.5.2.post1-cp36-cp36m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl",
-        false,
-    ),
-    // This tests a case with a long name that *is* compatible. We
-    // expect this to be (on average) quicker because the compatibility
-    // check stops as soon as a positive match is found. (Where as the
-    // incompatible case needs to check all tags.)
-    (
-        "flyte-long-compatible",
-        "coverage-6.6.0b1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
-        true,
-    ),
-];
-
-/// A list of names that are candidates for wheel filenames but will ultimately
-/// fail to parse.
-const INVALID_WHEEL_NAMES: &[(&str, &str)] = &[
-    ("flyte-short-extension", "mock-5.1.0.tar.gz"),
-    (
-        "flyte-long-extension",
-        "Pillow-5.4.0.dev0-py3.7-macosx-10.13-x86_64.egg",
-    ),
-];
-
-/// Benchmarks the construction of platform tags.
-///
-/// This only happens ~once per program startup. Originally, construction was
-/// trivial. But to speed up `WheelFilename::is_compatible`, we added some
-/// extra processing. We thus expect construction to become slower, but we
-/// write a benchmark to ensure it is still "reasonable."
-fn benchmark_build_platform_tags(c: &mut Criterion<WallTime>) {
-    let tags: Vec<(LanguageTag, AbiTag, PlatformTag)> = PLATFORM_TAGS
-        .iter()
-        .map(|&(py, abi, plat)| {
-            (
-                LanguageTag::from_str(py).unwrap(),
-                AbiTag::from_str(abi).unwrap(),
-                PlatformTag::from_str(plat).unwrap(),
-            )
-        })
-        .collect();
-
-    let mut group = c.benchmark_group("build_platform_tags");
-    group.bench_function(BenchmarkId::from_parameter("burntsushi-archlinux"), |b| {
-        b.iter(|| std::hint::black_box(Tags::new(tags.clone())));
-    });
-    group.finish();
-}
-
-/// Benchmarks `WheelFilename::from_str`. This has been observed to take some
-/// non-trivial time in profiling (although, at time of writing, not as much
-/// as tag compatibility). In the process of optimizing tag compatibility,
-/// we tweaked wheel filename parsing. This benchmark was therefore added to
-/// ensure we didn't regress here.
-fn benchmark_wheelname_parsing(c: &mut Criterion<WallTime>) {
-    let mut group = c.benchmark_group("wheelname_parsing");
-    for (name, filename, _) in WHEEL_NAMES.iter().copied() {
-        let len = u64::try_from(filename.len()).expect("length fits in u64");
-        group.throughput(Throughput::Bytes(len));
-        group.bench_function(BenchmarkId::from_parameter(name), |b| {
-            b.iter(|| {
-                filename
-                    .parse::<WheelFilename>()
-                    .expect("valid wheel filename");
-            });
-        });
-    }
-    group.finish();
-}
-
-/// Benchmarks `WheelFilename::from_str` when it fails. This routine is called
-/// on every filename in a package's metadata. A non-trivial portion of which
-/// are not wheel filenames. Ensuring that the error path is fast is thus
-/// probably a good idea.
-fn benchmark_wheelname_parsing_failure(c: &mut Criterion<WallTime>) {
-    let mut group = c.benchmark_group("wheelname_parsing_failure");
-    for (name, filename) in INVALID_WHEEL_NAMES.iter().copied() {
-        let len = u64::try_from(filename.len()).expect("length fits in u64");
-        group.throughput(Throughput::Bytes(len));
-        group.bench_function(BenchmarkId::from_parameter(name), |b| {
-            b.iter(|| {
-                filename
-                    .parse::<WheelFilename>()
-                    .expect_err("invalid wheel filename");
-            });
-        });
-    }
-    group.finish();
-}
-
-/// Benchmarks the `WheelFilename::is_compatible` routine. This was revealed
-/// to be the #1 bottleneck in the resolver. The main issue was that the
-/// set of platform tags (generated once) is quite large, and the original
-/// implementation did an exhaustive search over each of them for each tag in
-/// the wheel filename.
-fn benchmark_wheelname_tag_compatibility(c: &mut Criterion<WallTime>) {
-    let tags: Vec<(LanguageTag, AbiTag, PlatformTag)> = PLATFORM_TAGS
-        .iter()
-        .map(|&(py, abi, plat)| {
-            (
-                LanguageTag::from_str(py).unwrap(),
-                AbiTag::from_str(abi).unwrap(),
-                PlatformTag::from_str(plat).unwrap(),
-            )
-        })
-        .collect();
-    let tags = Tags::new(tags);
-
-    let mut group = c.benchmark_group("wheelname_tag_compatibility");
-    for (name, filename, expected) in WHEEL_NAMES.iter().copied() {
-        let wheelname: WheelFilename = filename.parse().expect("valid wheel filename");
-        let len = u64::try_from(filename.len()).expect("length fits in u64");
-        group.throughput(Throughput::Bytes(len));
-        group.bench_function(BenchmarkId::from_parameter(name), |b| {
-            b.iter(|| {
-                assert_eq!(expected, wheelname.is_compatible(&tags));
-            });
-        });
-    }
-    group.finish();
-}
-
-criterion_group!(
-    uv_distribution_filename,
-    benchmark_build_platform_tags,
-    benchmark_wheelname_parsing,
-    benchmark_wheelname_parsing_failure,
-    benchmark_wheelname_tag_compatibility,
-);
-criterion_main!(uv_distribution_filename);

@@ -86,8 +86,8 @@ mod resolver {
     use uv_cache::Cache;
     use uv_client::RegistryClient;
     use uv_configuration::{
-        BuildOptions, Concurrency, ConfigSettings, Constraints, IndexStrategy, PreviewMode,
-        SourceStrategy,
+        BuildOptions, Concurrency, ConfigSettings, Constraints, IndexStrategy,
+        PackageConfigSettings, Preview, SourceStrategy,
     };
     use uv_dispatch::{BuildDispatch, SharedState};
     use uv_distribution::DistributionDatabase;

@@ -144,6 +144,7 @@ mod resolver {
         let build_options = BuildOptions::default();
         let concurrency = Concurrency::default();
         let config_settings = ConfigSettings::default();
+        let config_settings_package = PackageConfigSettings::default();
         let exclude_newer = Some(
             jiff::civil::date(2024, 9, 1)
                 .to_zoned(jiff::tz::TimeZone::UTC)

@@ -184,6 +185,7 @@ mod resolver {
             state,
             IndexStrategy::default(),
             &config_settings,
+            &config_settings_package,
             build_isolation,
             LinkMode::default(),
             &build_options,

@@ -192,7 +194,7 @@ mod resolver {
             sources,
             workspace_cache,
             concurrency,
-            PreviewMode::Enabled,
+            Preview::default(),
         );

         let markers = if universal {

@@ -31,6 +31,7 @@ flate2 = { workspace = true, default-features = false }
 fs-err = { workspace = true }
 globset = { workspace = true }
 itertools = { workspace = true }
+rustc-hash = { workspace = true }
 schemars = { workspace = true, optional = true }
 serde = { workspace = true }
 sha2 = { workspace = true }

@@ -55,5 +56,6 @@ schemars = ["dep:schemars", "uv-pypi-types/schemars"]

 [dev-dependencies]
 indoc = { workspace = true }
-insta = { version = "1.40.0", features = ["filters"] }
+insta = { workspace = true }
+regex = { workspace = true }
 tempfile = { workspace = true }

@@ -9,12 +9,12 @@ pub use settings::{BuildBackendSettings, WheelDataIncludes};
 pub use source_dist::{build_source_dist, list_source_dist};
 pub use wheel::{build_editable, build_wheel, list_wheel, metadata};

-use std::fs::FileType;
 use std::io;
 use std::path::{Path, PathBuf};
 use std::str::FromStr;
 use thiserror::Error;
 use tracing::debug;
+use walkdir::DirEntry;

 use uv_fs::Simplified;
 use uv_globfilter::PortableGlobError;

@@ -22,6 +22,7 @@ use uv_normalize::PackageName;
 use uv_pypi_types::{Identifier, IdentifierParseError};

 use crate::metadata::ValidationError;
+use crate::settings::ModuleName;

 #[derive(Debug, Error)]
 pub enum Error {

@@ -54,8 +55,6 @@ pub enum Error {
         #[source]
         err: walkdir::Error,
     },
-    #[error("Unsupported file type {:?}: `{}`", _1, _0.user_display())]
-    UnsupportedFileType(PathBuf, FileType),
     #[error("Failed to write wheel zip archive")]
     Zip(#[from] zip::result::ZipError),
     #[error("Failed to write RECORD file")]

@@ -86,6 +85,16 @@ trait DirectoryWriter {
     /// Files added through the method are considered generated when listing included files.
     fn write_bytes(&mut self, path: &str, bytes: &[u8]) -> Result<(), Error>;

+    /// Add the file or directory to the path.
+    fn write_dir_entry(&mut self, entry: &DirEntry, target_path: &str) -> Result<(), Error> {
+        if entry.file_type().is_dir() {
+            self.write_directory(target_path)?;
+        } else {
+            self.write_file(target_path, entry.path())?;
+        }
+        Ok(())
+    }
+
     /// Add a local file.
     fn write_file(&mut self, path: &str, file: &Path) -> Result<(), Error>;

@@ -176,7 +185,7 @@ fn check_metadata_directory(
     Ok(())
 }

-/// Returns the source root and the module path with the `__init__.py[i]` below to it while
+/// Returns the source root and the module path(s) with the `__init__.py[i]` below to it while
 /// checking the project layout and names.
 ///
 /// Some target platforms have case-sensitive filesystems, while others have case-insensitive
|
|||
/// dist-info-normalization, the rules are lowercasing, replacing `.` with `_` and
|
||||
/// replace `-` with `_`. Since `.` and `-` are not allowed in identifiers, we can use a string
|
||||
/// comparison with the module name.
|
||||
///
|
||||
/// While we recommend one module per package, it is possible to declare a list of modules.
|
||||
fn find_roots(
|
||||
source_tree: &Path,
|
||||
pyproject_toml: &PyProjectToml,
|
||||
relative_module_root: &Path,
|
||||
module_name: Option<&str>,
|
||||
module_name: Option<&ModuleName>,
|
||||
namespace: bool,
|
||||
) -> Result<(PathBuf, PathBuf), Error> {
|
||||
) -> Result<(PathBuf, Vec<PathBuf>), Error> {
|
||||
let relative_module_root = uv_fs::normalize_path(relative_module_root);
|
||||
let src_root = source_tree.join(&relative_module_root);
|
||||
if !src_root.starts_with(source_tree) {
|
||||
|
|

@@ -207,22 +218,45 @@ fn find_roots(

     if namespace {
         // `namespace = true` disables module structure checks.
-        let module_relative = if let Some(module_name) = module_name {
-            module_name.split('.').collect::<PathBuf>()
+        let modules_relative = if let Some(module_name) = module_name {
+            match module_name {
+                ModuleName::Name(name) => {
+                    vec![name.split('.').collect::<PathBuf>()]
+                }
+                ModuleName::Names(names) => names
+                    .iter()
+                    .map(|name| name.split('.').collect::<PathBuf>())
+                    .collect(),
+            }
         } else {
-            PathBuf::from(pyproject_toml.name().as_dist_info_name().to_string())
+            vec![PathBuf::from(
+                pyproject_toml.name().as_dist_info_name().to_string(),
+            )]
         };
-        debug!("Namespace module path: {}", module_relative.user_display());
-        return Ok((src_root, module_relative));
+        for module_relative in &modules_relative {
+            debug!("Namespace module path: {}", module_relative.user_display());
+        }
+        return Ok((src_root, modules_relative));
     }

-    let module_relative = if let Some(module_name) = module_name {
-        module_path_from_module_name(&src_root, module_name)?
+    let modules_relative = if let Some(module_name) = module_name {
+        match module_name {
+            ModuleName::Name(name) => vec![module_path_from_module_name(&src_root, name)?],
+            ModuleName::Names(names) => names
+                .iter()
+                .map(|name| module_path_from_module_name(&src_root, name))
+                .collect::<Result<_, _>>()?,
+        }
     } else {
-        find_module_path_from_package_name(&src_root, pyproject_toml.name())?
+        vec![find_module_path_from_package_name(
+            &src_root,
+            pyproject_toml.name(),
+        )?]
     };
-    debug!("Module path: {}", module_relative.user_display());
-    Ok((src_root, module_relative))
+    for module_relative in &modules_relative {
+        debug!("Module path: {}", module_relative.user_display());
+    }
+    Ok((src_root, modules_relative))
 }

 /// Infer stubs packages from package name alone.

@@ -321,6 +355,7 @@ mod tests {
     use indoc::indoc;
     use insta::assert_snapshot;
     use itertools::Itertools;
+    use regex::Regex;
     use sha2::Digest;
     use std::io::{BufReader, Read};
     use std::iter;

@@ -328,6 +363,8 @@ mod tests {
     use uv_distribution_filename::{SourceDistFilename, WheelFilename};
     use uv_fs::{copy_dir_all, relative_to};

+    const MOCK_UV_VERSION: &str = "1.0.0+test";
+
     fn format_err(err: &Error) -> String {
         let context = iter::successors(std::error::Error::source(&err), |&err| err.source())
             .map(|err| format!("  Caused by: {err}"))

@@ -354,19 +391,19 @@ mod tests {
     fn build(source_root: &Path, dist: &Path) -> Result<BuildResults, Error> {
         // Build a direct wheel, capture all its properties to compare it with the indirect wheel
         // latest and remove it since it has the same filename as the indirect wheel.
-        let (_name, direct_wheel_list_files) = list_wheel(source_root, "1.0.0+test")?;
-        let direct_wheel_filename = build_wheel(source_root, dist, None, "1.0.0+test")?;
+        let (_name, direct_wheel_list_files) = list_wheel(source_root, MOCK_UV_VERSION)?;
+        let direct_wheel_filename = build_wheel(source_root, dist, None, MOCK_UV_VERSION)?;
         let direct_wheel_path = dist.join(direct_wheel_filename.to_string());
         let direct_wheel_contents = wheel_contents(&direct_wheel_path);
         let direct_wheel_hash = sha2::Sha256::digest(fs_err::read(&direct_wheel_path)?);
         fs_err::remove_file(&direct_wheel_path)?;

         // Build a source distribution.
-        let (_name, source_dist_list_files) = list_source_dist(source_root, "1.0.0+test")?;
+        let (_name, source_dist_list_files) = list_source_dist(source_root, MOCK_UV_VERSION)?;
         // TODO(konsti): This should run in the unpacked source dist tempdir, but we need to
         // normalize the path.
-        let (_name, wheel_list_files) = list_wheel(source_root, "1.0.0+test")?;
-        let source_dist_filename = build_source_dist(source_root, dist, "1.0.0+test")?;
+        let (_name, wheel_list_files) = list_wheel(source_root, MOCK_UV_VERSION)?;
+        let source_dist_filename = build_source_dist(source_root, dist, MOCK_UV_VERSION)?;
         let source_dist_path = dist.join(source_dist_filename.to_string());
         let source_dist_contents = sdist_contents(&source_dist_path);

@@ -380,7 +417,7 @@ mod tests {
             source_dist_filename.name.as_dist_info_name(),
             source_dist_filename.version
         ));
-        let wheel_filename = build_wheel(&sdist_top_level_directory, dist, None, "1.0.0+test")?;
+        let wheel_filename = build_wheel(&sdist_top_level_directory, dist, None, MOCK_UV_VERSION)?;
         let wheel_contents = wheel_contents(&dist.join(wheel_filename.to_string()));

         // Check that direct and indirect wheels are identical.

@@ -402,6 +439,15 @@ mod tests {
         })
     }

+    fn build_err(source_root: &Path) -> String {
+        let dist = TempDir::new().unwrap();
+        let build_err = build(source_root, dist.path()).unwrap_err();
+        let err_message: String = format_err(&build_err)
+            .replace(&source_root.user_display().to_string(), "[TEMP_PATH]")
+            .replace('\\', "/");
+        err_message
+    }
+
     fn sdist_contents(source_dist_path: &Path) -> Vec<String> {
         let sdist_reader = BufReader::new(File::open(source_dist_path).unwrap());
         let mut source_dist = tar::Archive::new(GzDecoder::new(sdist_reader));

@@ -472,14 +518,14 @@ mod tests {
         ] {
             copy_dir_all(built_by_uv.join(dir), src.path().join(dir)).unwrap();
         }
-        for dir in [
+        for filename in [
             "pyproject.toml",
             "README.md",
             "uv.lock",
             "LICENSE-APACHE",
             "LICENSE-MIT",
         ] {
-            fs_err::copy(built_by_uv.join(dir), src.path().join(dir)).unwrap();
+            fs_err::copy(built_by_uv.join(filename), src.path().join(filename)).unwrap();
         }

         // Clear executable bit on Unix to build the same archive between Unix and Windows.

@@ -496,6 +542,14 @@ mod tests {
             fs_err::set_permissions(&path, perms).unwrap();
         }

+        // Redact the uv_build version to keep the hash stable across releases
+        let pyproject_toml = fs_err::read_to_string(src.path().join("pyproject.toml")).unwrap();
+        let current_requires =
+            Regex::new(r#"requires = \["uv_build>=[0-9.]+,<[0-9.]+"\]"#).unwrap();
+        let mocked_requires = r#"requires = ["uv_build>=1,<2"]"#;
+        let pyproject_toml = current_requires.replace(pyproject_toml.as_str(), mocked_requires);
+        fs_err::write(src.path().join("pyproject.toml"), pyproject_toml.as_bytes()).unwrap();
+
         // Add some files to be excluded
         let module_root = src.path().join("src").join("built_by_uv");
         fs_err::create_dir_all(module_root.join("__pycache__")).unwrap();

@@ -514,7 +568,7 @@ mod tests {
         // Check that the source dist is reproducible across platforms.
         assert_snapshot!(
             format!("{:x}", sha2::Sha256::digest(fs_err::read(&source_dist_path).unwrap())),
-            @"dab46bcc4d66960a11cfdc19604512a8e1a3241a67536f7e962166760e9c575c"
+            @"871d1f859140721b67cbeaca074e7a2740c88c38028d0509eba87d1285f1da9e"
         );
         // Check both the files we report and the actual files
         assert_snapshot!(format_file_list(build.source_dist_list_files, src.path()), @r"

@@ -626,7 +680,7 @@ mod tests {
             license = { file = "license.txt" }

             [build-system]
-            requires = ["uv_build>=0.5.15,<0.6"]
+            requires = ["uv_build>=0.5.15,<0.6.0"]
             build-backend = "uv_build"
             "#
         },

@@ -694,7 +748,7 @@ mod tests {
             version = "1.0.0"

             [build-system]
-            requires = ["uv_build>=0.5.15,<0.6"]
+            requires = ["uv_build>=0.5.15,<0.6.0"]
             build-backend = "uv_build"
             "#
         },

@@ -758,7 +812,7 @@ mod tests {
             version = "1.0.0"

             [build-system]
-            requires = ["uv_build>=0.5.15,<0.6"]
+            requires = ["uv_build>=0.5.15,<0.6.0"]
             build-backend = "uv_build"

             [tool.uv.build-backend]

@@ -800,7 +854,7 @@ mod tests {
             version = "1.0.0"

             [build-system]
-            requires = ["uv_build>=0.5.15,<0.6"]
+            requires = ["uv_build>=0.5.15,<0.6.0"]
             build-backend = "uv_build"

             [tool.uv.build-backend]

@@ -825,7 +879,7 @@ mod tests {
             version = "1.0.0"

             [build-system]
-            requires = ["uv_build>=0.5.15,<0.6"]
+            requires = ["uv_build>=0.5.15,<0.6.0"]
             build-backend = "uv_build"

             [tool.uv.build-backend]

@@ -874,7 +928,7 @@ mod tests {
             version = "1.0.0"

             [build-system]
-            requires = ["uv_build>=0.5.15,<0.6"]
+            requires = ["uv_build>=0.5.15,<0.6.0"]
             build-backend = "uv_build"

             [tool.uv.build-backend]

@@ -905,7 +959,7 @@ mod tests {
             version = "1.0.0"

             [build-system]
-            requires = ["uv_build>=0.5.15,<0.6"]
+            requires = ["uv_build>=0.5.15,<0.6.0"]
             build-backend = "uv_build"
             "#
         };

@@ -956,7 +1010,7 @@ mod tests {
             version = "1.0.0"

             [build-system]
-            requires = ["uv_build>=0.5.15,<0.6"]
+            requires = ["uv_build>=0.5.15,<0.6.0"]
             build-backend = "uv_build"

             [tool.uv.build-backend]

@@ -982,7 +1036,7 @@ mod tests {
             module-name = "simple_namespace.part"

             [build-system]
-            requires = ["uv_build>=0.5.15,<0.6"]
+            requires = ["uv_build>=0.5.15,<0.6.0"]
             build-backend = "uv_build"
             "#
         };

@@ -990,13 +1044,8 @@ mod tests {
         fs_err::create_dir_all(src.path().join("src").join("simple_namespace").join("part"))
             .unwrap();

-        let dist = TempDir::new().unwrap();
-        let build_err = build(src.path(), dist.path()).unwrap_err();
-        let err_message = format_err(&build_err)
-            .replace(&src.path().user_display().to_string(), "[TEMP_PATH]")
-            .replace('\\', "/");
         assert_snapshot!(
-            err_message,
+            build_err(src.path()),
             @"Expected a Python module at: `[TEMP_PATH]/src/simple_namespace/part/__init__.py`"
         );

@@ -1017,16 +1066,13 @@ mod tests {
             .join("simple_namespace")
             .join("__init__.py");
         File::create(&bogus_init_py).unwrap();
-        let build_err = build(src.path(), dist.path()).unwrap_err();
-        let err_message = format_err(&build_err)
-            .replace(&src.path().user_display().to_string(), "[TEMP_PATH]")
-            .replace('\\', "/");
         assert_snapshot!(
-            err_message,
+            build_err(src.path()),
             @"For namespace packages, `__init__.py[i]` is not allowed in parent directory: `[TEMP_PATH]/src/simple_namespace`"
         );
         fs_err::remove_file(bogus_init_py).unwrap();

+        let dist = TempDir::new().unwrap();
         let build1 = build(src.path(), dist.path()).unwrap();
         assert_snapshot!(build1.source_dist_contents.join("\n"), @r"
             simple_namespace_part-1.0.0/

@@ -1058,7 +1104,7 @@ mod tests {
             namespace = true

             [build-system]
-            requires = ["uv_build>=0.5.15,<0.6"]
+            requires = ["uv_build>=0.5.15,<0.6.0"]
             build-backend = "uv_build"
             "#
         };

@@ -1081,7 +1127,7 @@ mod tests {
             namespace = true

             [build-system]
-            requires = ["uv_build>=0.5.15,<0.6"]
+            requires = ["uv_build>=0.5.15,<0.6.0"]
             build-backend = "uv_build"
             "#
         };

@@ -1142,7 +1188,7 @@ mod tests {
             namespace = true

             [build-system]
-            requires = ["uv_build>=0.5.15,<0.6"]
+            requires = ["uv_build>=0.5.15,<0.6.0"]
             build-backend = "uv_build"
             "#
         };

@@ -1165,7 +1211,7 @@ mod tests {
             module-name = "cloud-stubs.db.schema"

             [build-system]
-            requires = ["uv_build>=0.5.15,<0.6"]
+            requires = ["uv_build>=0.5.15,<0.6.0"]
             build-backend = "uv_build"
             "#
         };

@@ -1201,4 +1247,117 @@ mod tests {
         cloud_db_schema_stubs-1.0.0.dist-info/WHEEL
         ");
     }
+
+    /// A package with multiple modules, one a regular module and two namespace modules.
+    #[test]
+    fn multiple_module_names() {
+        let src = TempDir::new().unwrap();
+        let pyproject_toml = indoc! {r#"
+            [project]
+            name = "simple-namespace-part"
+            version = "1.0.0"
+
+            [tool.uv.build-backend]
+            module-name = ["foo", "simple_namespace.part_a", "simple_namespace.part_b"]
+
+            [build-system]
+            requires = ["uv_build>=0.5.15,<0.6.0"]
+            build-backend = "uv_build"
+            "#
+        };
+        fs_err::write(src.path().join("pyproject.toml"), pyproject_toml).unwrap();
+        fs_err::create_dir_all(src.path().join("src").join("foo")).unwrap();
+        fs_err::create_dir_all(
+            src.path()
+                .join("src")
+                .join("simple_namespace")
+                .join("part_a"),
+        )
+        .unwrap();
+        fs_err::create_dir_all(
+            src.path()
+                .join("src")
+                .join("simple_namespace")
+                .join("part_b"),
+        )
+        .unwrap();
+
+        // Most of these checks exist in other tests too, but we want to ensure that they apply
+        // with multiple modules too.
+
+        // The first module is missing an `__init__.py`.
+        assert_snapshot!(
+            build_err(src.path()),
+            @"Expected a Python module at: `[TEMP_PATH]/src/foo/__init__.py`"
+        );
+
+        // Create the first correct `__init__.py` file
+        File::create(src.path().join("src").join("foo").join("__init__.py")).unwrap();
+
+        // The second module, a namespace, is missing an `__init__.py`.
+        assert_snapshot!(
+            build_err(src.path()),
+            @"Expected a Python module at: `[TEMP_PATH]/src/simple_namespace/part_a/__init__.py`"
+        );
+
+        // Create the other two correct `__init__.py` files
+        File::create(
+            src.path()
+                .join("src")
+                .join("simple_namespace")
+                .join("part_a")
+                .join("__init__.py"),
+        )
+        .unwrap();
+        File::create(
+            src.path()
+                .join("src")
+                .join("simple_namespace")
+                .join("part_b")
+                .join("__init__.py"),
+        )
+        .unwrap();
+
+        // For the second module, a namespace, there must not be an `__init__.py` here.
+        let bogus_init_py = src
+            .path()
+            .join("src")
+            .join("simple_namespace")
+            .join("__init__.py");
+        File::create(&bogus_init_py).unwrap();
+        assert_snapshot!(
+            build_err(src.path()),
+            @"For namespace packages, `__init__.py[i]` is not allowed in parent directory: `[TEMP_PATH]/src/simple_namespace`"
+        );
+        fs_err::remove_file(bogus_init_py).unwrap();
+
+        let dist = TempDir::new().unwrap();
+        let build = build(src.path(), dist.path()).unwrap();
+        assert_snapshot!(build.source_dist_contents.join("\n"), @r"
+        simple_namespace_part-1.0.0/
+        simple_namespace_part-1.0.0/PKG-INFO
+        simple_namespace_part-1.0.0/pyproject.toml
+        simple_namespace_part-1.0.0/src
+        simple_namespace_part-1.0.0/src/foo
+        simple_namespace_part-1.0.0/src/foo/__init__.py
+        simple_namespace_part-1.0.0/src/simple_namespace
+        simple_namespace_part-1.0.0/src/simple_namespace/part_a
+        simple_namespace_part-1.0.0/src/simple_namespace/part_a/__init__.py
+        simple_namespace_part-1.0.0/src/simple_namespace/part_b
+        simple_namespace_part-1.0.0/src/simple_namespace/part_b/__init__.py
+        ");
+        assert_snapshot!(build.wheel_contents.join("\n"), @r"
+        foo/
+        foo/__init__.py
+        simple_namespace/
+        simple_namespace/part_a/
+        simple_namespace/part_a/__init__.py
+        simple_namespace/part_b/
+        simple_namespace/part_b/__init__.py
+        simple_namespace_part-1.0.0.dist-info/
+        simple_namespace_part-1.0.0.dist-info/METADATA
+        simple_namespace_part-1.0.0.dist-info/RECORD
+        simple_namespace_part-1.0.0.dist-info/WHEEL
+        ");
+    }
 }

@@ -7,7 +7,7 @@ use std::str::FromStr;

 use itertools::Itertools;
 use serde::Deserialize;
-use tracing::{debug, trace};
+use tracing::{debug, trace, warn};
 use version_ranges::Ranges;
 use walkdir::WalkDir;

@@ -54,10 +54,6 @@ pub enum ValidationError {
         "Entrypoint groups must consist of letters and numbers separated by dots, invalid group: `{0}`"
     )]
     InvalidGroup(String),
-    #[error(
-        "Entrypoint names must consist of letters, numbers, dots, underscores and dashes; invalid name: `{0}`"
-    )]
-    InvalidName(String),
     #[error("Use `project.scripts` instead of `project.entry-points.console_scripts`")]
     ReservedScripts,
     #[error("Use `project.gui-scripts` instead of `project.entry-points.gui_scripts`")]

@@ -171,7 +167,7 @@ impl PyProjectToml {
     ///
     /// ```toml
     /// [build-system]
-    /// requires = ["uv_build>=0.4.15,<5"]
+    /// requires = ["uv_build>=0.4.15,<0.5.0"]
     /// build-backend = "uv_build"
     /// ```
     pub fn check_build_system(&self, uv_version: &str) -> Vec<String> {

@@ -620,12 +616,14 @@ impl PyProjectToml {

         let _ = writeln!(writer, "[{group}]");
         for (name, object_reference) in entries {
             // More strict than the spec, we enforce the recommendation
             if !name
                 .chars()
                 .all(|c| c.is_alphanumeric() || c == '.' || c == '-' || c == '_')
             {
-                return Err(ValidationError::InvalidName(name.to_string()));
+                warn!(
+                    "Entrypoint names should consist of letters, numbers, dots, underscores and \
+                    dashes; non-compliant name: `{name}`"
+                );
             }

             // TODO(konsti): Validate that the object references are valid Python identifiers.

@@ -703,7 +701,7 @@ struct Project {
 /// The optional `project.readme` key in a pyproject.toml as specified in
 /// <https://packaging.python.org/en/latest/specifications/pyproject-toml/#readme>.
 #[derive(Deserialize, Debug, Clone)]
-#[serde(untagged, rename_all = "kebab-case")]
+#[serde(untagged, rename_all_fields = "kebab-case")]
 pub(crate) enum Readme {
     /// Relative path to the README.
     String(PathBuf),

@@ -713,7 +711,7 @@ pub(crate) enum Readme {
         content_type: String,
         charset: Option<String>,
     },
-    /// The full description of the project as inline value.
+    /// The full description of the project as an inline value.
     Text {
         text: String,
         content_type: String,

@@ -826,7 +824,7 @@ mod tests {
             {payload}

             [build-system]
-            requires = ["uv_build>=0.4.15,<5"]
+            requires = ["uv_build>=0.4.15,<0.5.0"]
             build-backend = "uv_build"
             "#
         }

@@ -909,7 +907,7 @@ mod tests {
             foo-bar = "foo:bar"

             [build-system]
-            requires = ["uv_build>=0.4.15,<5"]
+            requires = ["uv_build>=0.4.15,<0.5.0"]
             build-backend = "uv_build"
             "#
         };

@@ -965,6 +963,65 @@ mod tests {
         "###);
     }

+    #[test]
+    fn readme() {
+        let temp_dir = TempDir::new().unwrap();
+
+        fs_err::write(
+            temp_dir.path().join("Readme.md"),
+            indoc! {r"
+                # Foo
+
+                This is the foo library.
+            "},
+        )
+        .unwrap();
+
+        fs_err::write(
+            temp_dir.path().join("License.txt"),
+            indoc! {r#"
+                THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+                INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+                PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+                HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
+                CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
+                OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+            "#},
+        )
+        .unwrap();
+
+        let contents = indoc! {r#"
+            # See https://github.com/pypa/sampleproject/blob/main/pyproject.toml for another example
+
+            [project]
+            name = "hello-world"
+            version = "0.1.0"
+            description = "A Python package"
+            readme = { file = "Readme.md", content-type = "text/markdown" }
+            requires_python = ">=3.12"
+
+            [build-system]
+            requires = ["uv_build>=0.4.15,<0.5"]
+            build-backend = "uv_build"
+            "#
+        };
+
+        let pyproject_toml = PyProjectToml::parse(contents).unwrap();
+        let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();
+
+        assert_snapshot!(metadata.core_metadata_format(), @r"
+        Metadata-Version: 2.3
+        Name: hello-world
+        Version: 0.1.0
+        Summary: A Python package
+        Description-Content-Type: text/markdown
+
+        # Foo
+
+        This is the foo library.
+        ");
+    }
+
     #[test]
     fn self_extras() {
         let temp_dir = TempDir::new().unwrap();

@@ -1036,7 +1093,7 @@ mod tests {
             foo-bar = "foo:bar"

             [build-system]
-            requires = ["uv_build>=0.4.15,<5"]
+            requires = ["uv_build>=0.4.15,<0.5.0"]
             build-backend = "uv_build"
             "#
         };

@@ -1104,7 +1161,7 @@ mod tests {
         let contents = extend_project("");
         let pyproject_toml = PyProjectToml::parse(&contents).unwrap();
         assert_snapshot!(
-            pyproject_toml.check_build_system("1.0.0+test").join("\n"),
+            pyproject_toml.check_build_system("0.4.15+test").join("\n"),
             @""
         );
     }

@@ -1135,7 +1192,7 @@ mod tests {
             version = "0.1.0"

             [build-system]
-            requires = ["uv_build>=0.4.15,<5", "wheel"]
+            requires = ["uv_build>=0.4.15,<0.5.0", "wheel"]
             build-backend = "uv_build"
             "#};
         let pyproject_toml = PyProjectToml::parse(contents).unwrap();

@@ -1171,7 +1228,7 @@ mod tests {
             version = "0.1.0"

             [build-system]
-            requires = ["uv_build>=0.4.15,<5"]
+            requires = ["uv_build>=0.4.15,<0.5.0"]
             build-backend = "setuptools"
             "#};
         let pyproject_toml = PyProjectToml::parse(contents).unwrap();

@@ -1344,16 +1401,6 @@ mod tests {
         assert_snapshot!(script_error(&contents), @"Entrypoint groups must consist of letters and numbers separated by dots, invalid group: `a@b`");
     }

-    #[test]
-    fn invalid_entry_point_name() {
-        let contents = extend_project(indoc! {r#"
-            [project.scripts]
-            "a@b" = "bar"
-        "#
-        });
-        assert_snapshot!(script_error(&contents), @"Entrypoint names must consist of letters, numbers, dots, underscores and dashes; invalid name: `a@b`");
-    }
-
     #[test]
     fn invalid_entry_point_conflict_scripts() {
         let contents = extend_project(indoc! {r#"

@@ -4,10 +4,6 @@ use uv_macros::OptionsMetadata;

 /// Settings for the uv build backend (`uv_build`).
 ///
-/// !!! note
-///
-///     The uv build backend is currently in preview and may change in any future release.
-///
 /// Note that those settings only apply when using the `uv_build` backend, other build backends
 /// (such as hatchling) have their own configuration.
 ///

@@ -38,15 +34,19 @@ pub struct BuildBackendSettings {
     /// For namespace packages with a single module, the path can be dotted, e.g., `foo.bar` or
     /// `foo-stubs.bar`.
     ///
+    /// For namespace packages with multiple modules, the path can be a list, e.g.,
+    /// `["foo", "bar"]`. We recommend using a single module per package, splitting multiple
+    /// packages into a workspace.
+    ///
     /// Note that using this option runs the risk of creating two packages with different names but
     /// the same module names. Installing such packages together leads to unspecified behavior,
     /// often with corrupted files or directory trees.
     #[option(
         default = r#"None"#,
-        value_type = "str",
+        value_type = "str | list[str]",
         example = r#"module-name = "sklearn""#
     )]
-    pub module_name: Option<String>,
+    pub module_name: Option<ModuleName>,

     /// Glob expressions which files and directories to additionally include in the source
     /// distribution.

@@ -155,7 +155,7 @@ pub struct BuildBackendSettings {
     /// with this package as build requirement use the include directory to find additional header
     /// files.
     /// - `purelib` and `platlib`: Installed to the `site-packages` directory. It is not recommended
-    ///   to uses these two options.
+    ///   to use these two options.
     // TODO(konsti): We should show a flat example instead.
     // ```toml
     // [tool.uv.build-backend.data]

@@ -165,7 +165,7 @@ pub struct BuildBackendSettings {
     #[option(
         default = r#"{}"#,
         value_type = "dict[str, str]",
-        example = r#"data = { "headers": "include/headers", "scripts": "bin" }"#
+        example = r#"data = { headers = "include/headers", scripts = "bin" }"#
     )]
     pub data: WheelDataIncludes,
 }

@@ -185,6 +185,17 @@ impl Default for BuildBackendSettings {
     }
 }

+/// Whether to include a single module or multiple modules.
+#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)]
+#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
+#[serde(untagged)]
+pub enum ModuleName {
+    /// A single module name.
+    Name(String),
+    /// Multiple module names, which are all included.
+    Names(Vec<String>),
+}
+
 /// Data includes for wheels.
 ///
 /// See `BuildBackendSettings::data`.

@@ -68,13 +68,14 @@ fn source_dist_matcher(
     includes.push(globset::escape("pyproject.toml"));

     // Check that the source tree contains a module.
-    let (src_root, module_relative) = find_roots(
+    let (src_root, modules_relative) = find_roots(
         source_tree,
         pyproject_toml,
         &settings.module_root,
-        settings.module_name.as_deref(),
+        settings.module_name.as_ref(),
         settings.namespace,
     )?;
+    for module_relative in modules_relative {
     // The wheel must not include any files included by the source distribution (at least until we
     // have files generated in the source dist -> wheel build step).
     let import_path = uv_fs::normalize_path(
@ -84,6 +85,7 @@ fn source_dist_matcher(
|
|||
.portable_display()
|
||||
.to_string();
|
||||
includes.push(format!("{}/**", globset::escape(&import_path)));
|
||||
}
|
||||
for include in includes {
|
||||
let glob = PortableGlobParser::Uv
|
||||
.parse(&include)
|
||||
|
|
@ -250,32 +252,16 @@ fn write_source_dist(
|
|||
.expect("walkdir starts with root");
|
||||
|
||||
if !include_matcher.match_path(relative) || exclude_matcher.is_match(relative) {
|
||||
trace!("Excluding: `{}`", relative.user_display());
|
||||
trace!("Excluding from sdist: `{}`", relative.user_display());
|
||||
continue;
|
||||
}
|
||||
|
||||
debug!("Including {}", relative.user_display());
|
||||
if entry.file_type().is_dir() {
|
||||
writer.write_directory(
|
||||
&Path::new(&top_level)
|
||||
let entry_path = Path::new(&top_level)
|
||||
.join(relative)
|
||||
.portable_display()
|
||||
.to_string(),
|
||||
)?;
|
||||
} else if entry.file_type().is_file() {
|
||||
writer.write_file(
|
||||
&Path::new(&top_level)
|
||||
.join(relative)
|
||||
.portable_display()
|
||||
.to_string(),
|
||||
entry.path(),
|
||||
)?;
|
||||
} else {
|
||||
return Err(Error::UnsupportedFileType(
|
||||
relative.to_path_buf(),
|
||||
entry.file_type(),
|
||||
));
|
||||
}
|
||||
.to_string();
|
||||
debug!("Adding to sdist: {}", relative.user_display());
|
||||
writer.write_dir_entry(&entry, &entry_path)?;
|
||||
}
|
||||
debug!("Visited {files_visited} files for source dist build");
@ -1,6 +1,7 @@
use fs_err::File;
use globset::{GlobSet, GlobSetBuilder};
use itertools::Itertools;
use rustc_hash::FxHashSet;
use sha2::{Digest, Sha256};
use std::io::{BufReader, Read, Write};
use std::path::{Path, PathBuf};

@ -127,19 +128,24 @@ fn write_wheel(
        source_tree,
        pyproject_toml,
        &settings.module_root,
        settings.module_name.as_deref(),
        settings.module_name.as_ref(),
        settings.namespace,
    )?;

    let mut files_visited = 0;
    let mut prefix_directories = FxHashSet::default();
    for module_relative in module_relative {
        // For convenience, have directories for the whole tree in the wheel
        for ancestor in module_relative.ancestors().skip(1) {
            if ancestor == Path::new("") {
                continue;
            }
            // Avoid duplicate directories in the zip.
            if prefix_directories.insert(ancestor.to_path_buf()) {
                wheel_writer.write_directory(&ancestor.portable_display().to_string())?;
            }
        }

    let mut files_visited = 0;
        for entry in WalkDir::new(src_root.join(module_relative))
            .sort_by_file_name()
            .into_iter()

@ -164,7 +170,7 @@ fn write_wheel(
            .path()
            .strip_prefix(source_tree)
            .expect("walkdir starts with root");
        let wheel_path = entry
        let entry_path = entry
            .path()
            .strip_prefix(&src_root)
            .expect("walkdir starts with root");

@ -172,20 +178,10 @@ fn write_wheel(
            trace!("Excluding from module: `{}`", match_path.user_display());
            continue;
        }
        let wheel_path = wheel_path.portable_display().to_string();

        debug!("Adding to wheel: `{wheel_path}`");

        if entry.file_type().is_dir() {
            wheel_writer.write_directory(&wheel_path)?;
        } else if entry.file_type().is_file() {
            wheel_writer.write_file(&wheel_path, entry.path())?;
        } else {
            // TODO(konsti): We may want to support symlinks, there is support for installing them.
            return Err(Error::UnsupportedFileType(
                entry.path().to_path_buf(),
                entry.file_type(),
            ));
        let entry_path = entry_path.portable_display().to_string();
        debug!("Adding to wheel: {entry_path}");
        wheel_writer.write_dir_entry(&entry, &entry_path)?;
        }
    }
    debug!("Visited {files_visited} files for wheel build");
@ -280,7 +276,7 @@ pub fn build_editable(
        source_tree,
        &pyproject_toml,
        &settings.module_root,
        settings.module_name.as_deref(),
        settings.module_name.as_ref(),
        settings.namespace,
    )?;

@ -519,23 +515,12 @@ fn wheel_subdir_from_globs(
            continue;
        }

        let relative_licenses = Path::new(target)
        let license_path = Path::new(target)
            .join(relative)
            .portable_display()
            .to_string();

        if entry.file_type().is_dir() {
            wheel_writer.write_directory(&relative_licenses)?;
        } else if entry.file_type().is_file() {
            debug!("Adding {} file: `{}`", globs_field, relative.user_display());
            wheel_writer.write_file(&relative_licenses, entry.path())?;
        } else {
            // TODO(konsti): We may want to support symlinks, there is support for installing them.
            return Err(Error::UnsupportedFileType(
                entry.path().to_path_buf(),
                entry.file_type(),
            ));
        }
        debug!("Adding for {}: `{}`", globs_field, relative.user_display());
        wheel_writer.write_dir_entry(&entry, &license_path)?;
    }
    Ok(())
}

@ -17,6 +17,7 @@ doctest = false
workspace = true

[dependencies]
uv-cache-key = { workspace = true }
uv-configuration = { workspace = true }
uv-distribution = { workspace = true }
uv-distribution-types = { workspace = true }

@ -47,4 +48,4 @@ tracing = { workspace = true }
rustc-hash = { workspace = true }

[dev-dependencies]
insta = { version = "1.40.0" }
insta = { workspace = true }

@ -19,17 +19,20 @@ use fs_err as fs;
use indoc::formatdoc;
use itertools::Itertools;
use rustc_hash::FxHashMap;
use serde::de::{IntoDeserializer, SeqAccess, Visitor, value};
use serde::{Deserialize, Deserializer, de};
use serde::de::{self, IntoDeserializer, SeqAccess, Visitor, value};
use serde::{Deserialize, Deserializer};
use tempfile::TempDir;
use tokio::io::AsyncBufReadExt;
use tokio::process::Command;
use tokio::sync::{Mutex, Semaphore};
use tracing::{Instrument, debug, info_span, instrument};
use tracing::{Instrument, debug, info_span, instrument, warn};

use uv_cache_key::cache_digest;
use uv_configuration::Preview;
use uv_configuration::{BuildKind, BuildOutput, ConfigSettings, SourceStrategy};
use uv_distribution::BuildRequires;
use uv_distribution_types::{IndexLocations, Requirement, Resolution};
use uv_fs::LockedFile;
use uv_fs::{PythonExt, Simplified};
use uv_pep440::Version;
use uv_pep508::PackageName;

@ -200,6 +203,11 @@ impl Pep517Backend {
            {import}
        "#, backend_path = backend_path_encoded}
    }

    fn is_setuptools(&self) -> bool {
        // either `setuptools.build_meta` or `setuptools.build_meta:__legacy__`
        self.backend.split(':').next() == Some("setuptools.build_meta")
    }
}
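The backend string is an import path with an optional `:object` suffix, so splitting on `:` and comparing the module part is enough to catch both setuptools entry points. A quick sketch of the check in isolation (as a free function rather than a method, for illustration):

    fn is_setuptools(backend: &str) -> bool {
        // either `setuptools.build_meta` or `setuptools.build_meta:__legacy__`
        backend.split(':').next() == Some("setuptools.build_meta")
    }

    fn main() {
        assert!(is_setuptools("setuptools.build_meta"));
        assert!(is_setuptools("setuptools.build_meta:__legacy__"));
        assert!(!is_setuptools("hatchling.build"));
        println!("ok");
    }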
/// Uses an [`Rc`] internally, clone freely.

@ -278,6 +286,7 @@ impl SourceBuild {
        mut environment_variables: FxHashMap<OsString, OsString>,
        level: BuildOutput,
        concurrent_builds: usize,
        preview: Preview,
    ) -> Result<Self, Error> {
        let temp_dir = build_context.cache().venv_dir()?;

@ -322,9 +331,11 @@ impl SourceBuild {
            interpreter.clone(),
            uv_virtualenv::Prompt::None,
            false,
            uv_virtualenv::OnExisting::Remove,
            false,
            false,
            false,
            preview,
        )?
    };

@ -430,6 +441,31 @@ impl SourceBuild {
        })
    }

    /// Acquire a lock on the source tree, if necessary.
    async fn acquire_lock(&self) -> Result<Option<LockedFile>, Error> {
        // Depending on the command, setuptools puts `*.egg-info`, `build/`, and `dist/` in the
        // source tree, and concurrent invocations of setuptools using the same source dir can
        // stomp on each other. We need to lock something to fix that, but we don't want to dump a
        // `.lock` file into the source tree that the user will need to .gitignore. Take a global
        // proxy lock instead.
        let mut source_tree_lock = None;
        if self.pep517_backend.is_setuptools() {
            debug!("Locking the source tree for setuptools");
            let canonical_source_path = self.source_tree.canonicalize()?;
            let lock_path = env::temp_dir().join(format!(
                "uv-setuptools-{}.lock",
                cache_digest(&canonical_source_path)
            ));
            source_tree_lock = LockedFile::acquire(lock_path, self.source_tree.to_string_lossy())
                .await
                .inspect_err(|err| {
                    warn!("Failed to acquire build lock: {err}");
                })
                .ok();
        }
        Ok(source_tree_lock)
    }
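The "global proxy lock" works by mapping each source tree to a stable file name in the system temp directory, so no lock file ever lands in the project itself. A minimal sketch of that mapping, using a standard-library hash as a stand-in for uv's `cache_digest`:

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};
    use std::path::{Path, PathBuf};

    fn lock_path_for(source_tree: &Path) -> std::io::Result<PathBuf> {
        // Canonicalize first so `./pkg` and its absolute path share one lock.
        let canonical = source_tree.canonicalize()?;
        let mut hasher = DefaultHasher::new();
        canonical.hash(&mut hasher);
        Ok(std::env::temp_dir().join(format!("uv-setuptools-{}.lock", hasher.finish())))
    }

Two concurrent builds of the same tree then contend on the same temp-dir file, while unrelated trees never block each other.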

    async fn get_resolved_requirements(
        build_context: &impl BuildContext,
        source_build_context: SourceBuildContext,

@ -475,11 +511,9 @@ impl SourceBuild {
    ) -> Result<(Pep517Backend, Option<Project>), Box<Error>> {
        match fs::read_to_string(source_tree.join("pyproject.toml")) {
            Ok(toml) => {
                let pyproject_toml: toml_edit::ImDocument<_> =
                    toml_edit::ImDocument::from_str(&toml)
                let pyproject_toml = toml_edit::Document::from_str(&toml)
                    .map_err(Error::InvalidPyprojectTomlSyntax)?;
                let pyproject_toml: PyProjectToml =
                    PyProjectToml::deserialize(pyproject_toml.into_deserializer())
                let pyproject_toml = PyProjectToml::deserialize(pyproject_toml.into_deserializer())
                    .map_err(Error::InvalidPyprojectTomlSchema)?;

                let backend = if let Some(build_system) = pyproject_toml.build_system {

@ -600,6 +634,9 @@ impl SourceBuild {
            return Ok(Some(metadata_dir.clone()));
        }

        // Lock the source tree, if necessary.
        let _lock = self.acquire_lock().await?;

        // Hatch allows for highly dynamic customization of metadata via hooks. In such cases, Hatch
        // can't uphold the PEP 517 contract, in that the metadata Hatch would return by
        // `prepare_metadata_for_build_wheel` isn't guaranteed to match that of the built wheel.

@ -712,16 +749,15 @@ impl SourceBuild {
    pub async fn build(&self, wheel_dir: &Path) -> Result<String, Error> {
        // The build scripts run with the extracted root as cwd, so they need the absolute path.
        let wheel_dir = std::path::absolute(wheel_dir)?;
        let filename = self.pep517_build(&wheel_dir, &self.pep517_backend).await?;
        let filename = self.pep517_build(&wheel_dir).await?;
        Ok(filename)
    }

    /// Perform a PEP 517 build for a wheel or source distribution (sdist).
    async fn pep517_build(
        &self,
        output_dir: &Path,
        pep517_backend: &Pep517Backend,
    ) -> Result<String, Error> {
    async fn pep517_build(&self, output_dir: &Path) -> Result<String, Error> {
        // Lock the source tree, if necessary.
        let _lock = self.acquire_lock().await?;

        // Write the hook output to a file so that we can read it back reliably.
        let outfile = self
            .temp_dir

@ -733,7 +769,7 @@ impl SourceBuild {
            BuildKind::Sdist => {
                debug!(
                    r#"Calling `{}.build_{}("{}", {})`"#,
                    pep517_backend.backend,
                    self.pep517_backend.backend,
                    self.build_kind,
                    output_dir.escape_for_python(),
                    self.config_settings.escape_for_python(),

@ -746,7 +782,7 @@ impl SourceBuild {
                    with open("{}", "w") as fp:
                        fp.write(sdist_filename)
                    "#,
                    pep517_backend.backend_import(),
                    self.pep517_backend.backend_import(),
                    self.build_kind,
                    output_dir.escape_for_python(),
                    self.config_settings.escape_for_python(),

@ -762,7 +798,7 @@ impl SourceBuild {
                });
                debug!(
                    r#"Calling `{}.build_{}("{}", {}, {})`"#,
                    pep517_backend.backend,
                    self.pep517_backend.backend,
                    self.build_kind,
                    output_dir.escape_for_python(),
                    self.config_settings.escape_for_python(),

@ -776,7 +812,7 @@ impl SourceBuild {
                    with open("{}", "w") as fp:
                        fp.write(wheel_filename)
                    "#,
                    pep517_backend.backend_import(),
                    self.pep517_backend.backend_import(),
                    self.build_kind,
                    output_dir.escape_for_python(),
                    self.config_settings.escape_for_python(),

@ -806,7 +842,7 @@ impl SourceBuild {
            return Err(Error::from_command_output(
                format!(
                    "Call to `{}.build_{}` failed",
                    pep517_backend.backend, self.build_kind
                    self.pep517_backend.backend, self.build_kind
                ),
                &output,
                self.level,

@ -821,7 +857,7 @@ impl SourceBuild {
            return Err(Error::from_command_output(
                format!(
                    "Call to `{}.build_{}` failed",
                    pep517_backend.backend, self.build_kind
                    self.pep517_backend.backend, self.build_kind
                ),
                &output,
                self.level,

@ -1,6 +1,6 @@
[package]
name = "uv-build"
version = "0.7.13"
version = "0.8.3"
edition.workspace = true
rust-version.workspace = true
homepage.workspace = true

@ -1,6 +1,6 @@
[project]
name = "uv-build"
version = "0.7.13"
version = "0.8.3"
description = "The uv build backend"
authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
requires-python = ">=3.8"

@ -0,0 +1,2 @@
# It is important to retain compatibility with old versions in the build backend
target-version = "py37"

@ -24,3 +24,7 @@ thiserror = { workspace = true }
toml = { workspace = true }
tracing = { workspace = true }
walkdir = { workspace = true }

[dev-dependencies]
anyhow = { workspace = true }
tempfile = { workspace = true }

@ -7,6 +7,7 @@ use serde::Deserialize;
use tracing::{debug, warn};

use crate::git_info::{Commit, Tags};
use crate::glob::cluster_globs;
use crate::timestamp::Timestamp;

#[derive(Debug, thiserror::Error)]

@ -212,9 +213,13 @@ impl CacheInfo {
            }
        }

        // If we have any globs, process them in a single pass.
        // If we have any globs, first cluster them using LCP and then do a single pass on each group.
        if !globs.is_empty() {
            let walker = globwalk::GlobWalkerBuilder::from_patterns(directory, &globs)
            for (glob_base, glob_patterns) in cluster_globs(&globs) {
                let walker = globwalk::GlobWalkerBuilder::from_patterns(
                    directory.join(glob_base),
                    &glob_patterns,
                )
                .file_type(globwalk::FileType::FILE | globwalk::FileType::SYMLINK)
                .build()?;
                for entry in walker {

@ -225,23 +230,38 @@ impl CacheInfo {
                        continue;
                    }
                };
                let metadata = match entry.metadata() {
                let metadata = if entry.path_is_symlink() {
                    // resolve symlinks for leaf entries without following symlinks while globbing
                    match fs_err::metadata(entry.path()) {
                        Ok(metadata) => metadata,
                        Err(err) => {
                            warn!("Failed to resolve symlink for glob entry: {err}");
                            continue;
                        }
                    }
                } else {
                    match entry.metadata() {
                        Ok(metadata) => metadata,
                        Err(err) => {
                            warn!("Failed to read metadata for glob entry: {err}");
                            continue;
                        }
                    }
                };
                if !metadata.is_file() {
                    if !entry.path_is_symlink() {
                        // don't warn if it was a symlink - it may legitimately resolve to a directory
                        warn!(
                            "Expected file for cache key, but found directory: `{}`",
                            entry.path().display()
                        );
                    }
                    continue;
                }
                timestamp = max(timestamp, Some(Timestamp::from_metadata(&metadata)));
            }
        }
    }

        debug!(
            "Computed cache info: {timestamp:?}, {commit:?}, {tags:?}, {env:?}, {directories:?}"

@ -340,3 +360,71 @@ enum DirectoryTimestamp {
    Timestamp(Timestamp),
    Inode(u64),
}

#[cfg(all(test, unix))]
mod tests_unix {
    use anyhow::Result;

    use super::{CacheInfo, Timestamp};

    #[test]
    fn test_cache_info_symlink_resolve() -> Result<()> {
        let dir = tempfile::tempdir()?;
        let dir = dir.path().join("dir");
        fs_err::create_dir_all(&dir)?;

        let write_manifest = |cache_key: &str| {
            fs_err::write(
                dir.join("pyproject.toml"),
                format!(
                    r#"
                    [tool.uv]
                    cache-keys = [
                        "{cache_key}"
                    ]
                    "#
                ),
            )
        };

        let touch = |path: &str| -> Result<_> {
            let path = dir.join(path);
            fs_err::create_dir_all(path.parent().unwrap())?;
            fs_err::write(&path, "")?;
            Ok(Timestamp::from_metadata(&path.metadata()?))
        };

        let cache_timestamp = || -> Result<_> { Ok(CacheInfo::from_directory(&dir)?.timestamp) };

        write_manifest("x/**")?;
        assert_eq!(cache_timestamp()?, None);
        let y = touch("x/y")?;
        assert_eq!(cache_timestamp()?, Some(y));
        let z = touch("x/z")?;
        assert_eq!(cache_timestamp()?, Some(z));

        // leaf entry symlink should be resolved
        let a = touch("../a")?;
        fs_err::os::unix::fs::symlink(dir.join("../a"), dir.join("x/a"))?;
        assert_eq!(cache_timestamp()?, Some(a));

        // symlink directories should not be followed while globbing
        let c = touch("../b/c")?;
        fs_err::os::unix::fs::symlink(dir.join("../b"), dir.join("x/b"))?;
        assert_eq!(cache_timestamp()?, Some(a));

        // no globs, should work as expected
        write_manifest("x/y")?;
        assert_eq!(cache_timestamp()?, Some(y));
        write_manifest("x/a")?;
        assert_eq!(cache_timestamp()?, Some(a));
        write_manifest("x/b/c")?;
        assert_eq!(cache_timestamp()?, Some(c));

        // symlink pointing to a directory
        write_manifest("x/*b*")?;
        assert_eq!(cache_timestamp()?, None);

        Ok(())
    }
}

@ -0,0 +1,318 @@
use std::{
    collections::BTreeMap,
    path::{Component, Components, Path, PathBuf},
};

/// Check if a component of the path looks like it may be a glob pattern.
///
/// Note: this function is being used when splitting a glob pattern into the longest possible
/// base and the glob remainder (scanning through components until we hit the first component
/// for which this function returns true). It is acceptable for this function to return
/// false positives (e.g. patterns like 'foo[bar' or 'foo{bar') in which case correctness
/// will not be affected but efficiency might be (because we'll traverse more than we should),
/// however it should not return false negatives.
fn is_glob_like(part: Component) -> bool {
    matches!(part, Component::Normal(_))
        && part.as_os_str().to_str().is_some_and(|part| {
            ["*", "{", "}", "?", "[", "]"]
                .into_iter()
                .any(|c| part.contains(c))
        })
}
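In other words, only `Normal` components are ever considered, and the check is a simple substring scan over the glob metacharacters. A couple of illustrative assertions (a self-contained sketch; the real function is private to this module):

    use std::path::{Component, Path};

    fn is_glob_like(part: Component) -> bool {
        matches!(part, Component::Normal(_))
            && part.as_os_str().to_str().is_some_and(|part| {
                ["*", "{", "}", "?", "[", "]"].into_iter().any(|c| part.contains(c))
            })
    }

    fn main() {
        let first = |s: &'static str| Path::new(s).components().next().unwrap();
        assert!(is_glob_like(first("*.rs")));
        assert!(is_glob_like(first("{a,b}")));
        assert!(!is_glob_like(first("src")));
        // `..` is a `ParentDir` component, not `Normal`, so it is never glob-like.
        assert!(!is_glob_like(first("..")));
    }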

#[derive(Debug, Default, Clone, PartialEq, Eq)]
struct GlobParts {
    base: PathBuf,
    pattern: PathBuf,
}

/// Split a glob into longest possible base + shortest possible glob pattern.
fn split_glob(pattern: impl AsRef<str>) -> GlobParts {
    let pattern: &Path = pattern.as_ref().as_ref();

    let mut glob = GlobParts::default();
    let mut globbing = false;
    let mut last = None;

    for part in pattern.components() {
        if let Some(last) = last {
            if last != Component::CurDir {
                if globbing {
                    glob.pattern.push(last);
                } else {
                    glob.base.push(last);
                }
            }
        }
        if !globbing {
            globbing = is_glob_like(part);
        }
        // we don't know if this part is the last one, defer handling it by one iteration
        last = Some(part);
    }

    if let Some(last) = last {
        // defer handling the last component to prevent draining entire pattern into base
        if globbing || matches!(last, Component::Normal(_)) {
            glob.pattern.push(last);
        } else {
            glob.base.push(last);
        }
    }
    glob
}
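Read together, `is_glob_like` and `split_glob` guarantee that the returned base is glob-free and that a trailing plain component stays in the pattern rather than draining into the base. A short worked example of the split (runnable alongside this module; the same cases appear in `test_split_glob` below):

    fn demo() {
        use std::path::PathBuf;

        // "docs/important" contains no metacharacters, so it becomes the base;
        // the first glob-like component starts the pattern.
        let parts = split_glob("docs/important/*.{doc,xls}");
        assert_eq!(parts.base, PathBuf::from("docs/important"));
        assert_eq!(parts.pattern, PathBuf::from("*.{doc,xls}"));

        // A trailing plain file name stays in the pattern, never the base,
        // so the base always names a directory that can be walked.
        let parts = split_glob("a/b");
        assert_eq!(parts.base, PathBuf::from("a"));
        assert_eq!(parts.pattern, PathBuf::from("b"));
    }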

/// Classic trie with edges being path components and values being glob patterns.
#[derive(Default)]
struct Trie<'a> {
    children: BTreeMap<Component<'a>, Trie<'a>>,
    patterns: Vec<&'a Path>,
}

impl<'a> Trie<'a> {
    fn insert(&mut self, mut components: Components<'a>, pattern: &'a Path) {
        if let Some(part) = components.next() {
            self.children
                .entry(part)
                .or_default()
                .insert(components, pattern);
        } else {
            self.patterns.push(pattern);
        }
    }

    #[allow(clippy::needless_pass_by_value)]
    fn collect_patterns(
        &self,
        pattern_prefix: PathBuf,
        group_prefix: PathBuf,
        patterns: &mut Vec<PathBuf>,
        groups: &mut Vec<(PathBuf, Vec<PathBuf>)>,
    ) {
        // collect all patterns beneath and including this node
        for pattern in &self.patterns {
            patterns.push(pattern_prefix.join(pattern));
        }
        for (part, child) in &self.children {
            if let Component::Normal(_) = part {
                // for normal components, collect all descendant patterns ('normal' edges only)
                child.collect_patterns(
                    pattern_prefix.join(part),
                    group_prefix.join(part),
                    patterns,
                    groups,
                );
            } else {
                // for non-normal component edges, kick off separate group collection at this node
                child.collect_groups(group_prefix.join(part), groups);
            }
        }
    }

    #[allow(clippy::needless_pass_by_value)]
    fn collect_groups(&self, prefix: PathBuf, groups: &mut Vec<(PathBuf, Vec<PathBuf>)>) {
        // LCP-style grouping of patterns
        if self.patterns.is_empty() {
            // no patterns in this node; child nodes can form independent groups
            for (part, child) in &self.children {
                child.collect_groups(prefix.join(part), groups);
            }
        } else {
            // pivot point, we've hit a pattern node; we have to stop here and form a group
            let mut group = Vec::new();
            self.collect_patterns(PathBuf::new(), prefix.clone(), &mut group, groups);
            groups.push((prefix, group));
        }
    }
}

/// Given a collection of globs, cluster them into (base, globs) groups so that:
/// - base doesn't contain any glob symbols
/// - each directory would only be walked at most once
/// - base of each group is the longest common prefix of globs in the group
pub(crate) fn cluster_globs(patterns: &[impl AsRef<str>]) -> Vec<(PathBuf, Vec<String>)> {
    // split all globs into base/pattern
    let globs: Vec<_> = patterns.iter().map(split_glob).collect();

    // construct a path trie out of all split globs
    let mut trie = Trie::default();
    for glob in &globs {
        trie.insert(glob.base.components(), &glob.pattern);
    }

    // run LCP-style aggregation of patterns in the trie into groups
    let mut groups = Vec::new();
    trie.collect_groups(PathBuf::new(), &mut groups);

    // finally, convert resulting patterns to strings
    groups
        .into_iter()
        .map(|(base, patterns)| {
            (
                base,
                patterns
                    .iter()
                    // NOTE: this unwrap is ok because input patterns are valid utf-8
                    .map(|p| p.to_str().unwrap().to_owned())
                    .collect(),
            )
        })
        .collect()
}
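Putting the pieces together: each glob is split, the bases are arranged in a trie, and `collect_groups` cuts the trie at the shallowest nodes that own patterns, so every returned base is walked exactly once. A hypothetical call (runnable alongside this module; inputs chosen for illustration):

    fn demo() {
        let groups = cluster_globs(&["a/b/*.rs", "a/b/c/**", "x/*.txt"]);
        // Expected grouping: ("a/b", ["*.rs", "c/**"]) and ("x", ["*.txt"]);
        // each base is glob-free and gets walked exactly once.
        for (base, patterns) in groups {
            println!("walk `{}` once for {patterns:?}", base.display());
        }
    }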

#[cfg(test)]
mod tests {
    use super::{GlobParts, cluster_globs, split_glob};

    fn windowsify(path: &str) -> String {
        if cfg!(windows) {
            path.replace('/', "\\")
        } else {
            path.to_owned()
        }
    }

    #[test]
    fn test_split_glob() {
        #[track_caller]
        fn check(input: &str, base: &str, pattern: &str) {
            let result = split_glob(input);
            let expected = GlobParts {
                base: base.into(),
                pattern: pattern.into(),
            };
            assert_eq!(result, expected, "{input:?} != {base:?} + {pattern:?}");
        }

        check("", "", "");
        check("a", "", "a");
        check("a/b", "a", "b");
        check("a/b/", "a", "b");
        check("a/.//b/", "a", "b");
        check("./a/b/c", "a/b", "c");
        check("c/d/*", "c/d", "*");
        check("c/d/*/../*", "c/d", "*/../*");
        check("a/?b/c", "a", "?b/c");
        check("/a/b/*", "/a/b", "*");
        check("../x/*", "../x", "*");
        check("a/{b,c}/d", "a", "{b,c}/d");
        check("a/[bc]/d", "a", "[bc]/d");
        check("*", "", "*");
        check("*/*", "", "*/*");
        check("..", "..", "");
        check("/", "/", "");
    }

    #[test]
    fn test_cluster_globs() {
        #[track_caller]
        fn check(input: &[&str], expected: &[(&str, &[&str])]) {
            let input = input.iter().map(|s| windowsify(s)).collect::<Vec<_>>();

            let mut result_sorted = cluster_globs(&input);
            for (_, patterns) in &mut result_sorted {
                patterns.sort_unstable();
            }
            result_sorted.sort_unstable();

            let mut expected_sorted = Vec::new();
            for (base, patterns) in expected {
                let mut patterns_sorted = Vec::new();
                for pattern in *patterns {
                    patterns_sorted.push(windowsify(pattern));
                }
                patterns_sorted.sort_unstable();
                expected_sorted.push((windowsify(base).into(), patterns_sorted));
            }
            expected_sorted.sort_unstable();

            assert_eq!(
                result_sorted, expected_sorted,
                "{input:?} != {expected_sorted:?} (got: {result_sorted:?})"
            );
        }

        check(&["a/b/*", "a/c/*"], &[("a/b", &["*"]), ("a/c", &["*"])]);
        check(&["./a/b/*", "a/c/*"], &[("a/b", &["*"]), ("a/c", &["*"])]);
        check(&["/a/b/*", "/a/c/*"], &[("/a/b", &["*"]), ("/a/c", &["*"])]);
        check(
            &["../a/b/*", "../a/c/*"],
            &[("../a/b", &["*"]), ("../a/c", &["*"])],
        );
        check(&["x/*", "y/*"], &[("x", &["*"]), ("y", &["*"])]);
        check(&[], &[]);
        check(
            &["./*", "a/*", "../foo/*.png"],
            &[("", &["*", "a/*"]), ("../foo", &["*.png"])],
        );
        check(
            &[
                "?",
                "/foo/?",
                "/foo/bar/*",
                "../bar/*.png",
                "../bar/../baz/*.jpg",
            ],
            &[
                ("", &["?"]),
                ("/foo", &["?", "bar/*"]),
                ("../bar", &["*.png"]),
                ("../bar/../baz", &["*.jpg"]),
            ],
        );
        check(&["/abs/path/*"], &[("/abs/path", &["*"])]);
        check(&["/abs/*", "rel/*"], &[("/abs", &["*"]), ("rel", &["*"])]);
        check(&["a/{b,c}/*", "a/d?/*"], &[("a", &["{b,c}/*", "d?/*"])]);
        check(
            &[
                "../shared/a/[abc].png",
                "../shared/a/b/*",
                "../shared/b/c/?x/d",
                "docs/important/*.{doc,xls}",
                "docs/important/very/*",
            ],
            &[
                ("../shared/a", &["[abc].png", "b/*"]),
                ("../shared/b/c", &["?x/d"]),
                ("docs/important", &["*.{doc,xls}", "very/*"]),
            ],
        );
        check(&["file.txt"], &[("", &["file.txt"])]);
        check(&["/"], &[("/", &[""])]);
        check(&[".."], &[("..", &[""])]);
        check(
            &["file1.txt", "file2.txt"],
            &[("", &["file1.txt", "file2.txt"])],
        );
        check(
            &["a/file1.txt", "a/file2.txt"],
            &[("a", &["file1.txt", "file2.txt"])],
        );
        check(
            &["*", "a/b/*", "a/../c/*.jpg", "a/../c/*.png", "/a/*", "/b/*"],
            &[
                ("", &["*", "a/b/*"]),
                ("a/../c", &["*.jpg", "*.png"]),
                ("/a", &["*"]),
                ("/b", &["*"]),
            ],
        );

        if cfg!(windows) {
            check(
                &[
                    r"\\foo\bar\shared/a/[abc].png",
                    r"\\foo\bar\shared/a/b/*",
                    r"\\foo\bar/shared/b/c/?x/d",
                    r"D:\docs\important/*.{doc,xls}",
                    r"D:\docs/important/very/*",
                ],
                &[
                    (r"\\foo\bar\shared\a", &["[abc].png", r"b\*"]),
                    (r"\\foo\bar\shared\b\c", &[r"?x\d"]),
                    (r"D:\docs\important", &["*.{doc,xls}", r"very\*"]),
                ],
            );
        }
    }
}

@ -3,4 +3,5 @@ pub use crate::timestamp::*;

mod cache_info;
mod git_info;
mod glob;
mod timestamp;

@ -42,7 +42,7 @@ serde = { workspace = true }
url = { workspace = true }

[dev-dependencies]
insta = { version = "1.40.0", features = ["filters", "json"] }
insta = { workspace = true }

[features]
default = []

@ -13,7 +13,6 @@ pub trait CompatArgs {
/// For example, users often pass `--allow-unsafe`, which is unnecessary with uv. But it's a
/// nice user experience to warn, rather than fail, when users pass `--allow-unsafe`.
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PipCompileCompatArgs {
    #[clap(long, hide = true)]
    allow_unsafe: bool,

@ -159,7 +158,6 @@ impl CompatArgs for PipCompileCompatArgs {
///
/// These represent a subset of the `pip list` interface that uv supports by default.
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PipListCompatArgs {
    #[clap(long, hide = true)]
    disable_pip_version_check: bool,

@ -184,7 +182,6 @@ impl CompatArgs for PipListCompatArgs {
///
/// These represent a subset of the `pip-sync` interface that uv supports by default.
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PipSyncCompatArgs {
    #[clap(short, long, hide = true)]
    ask: bool,

@ -268,11 +265,7 @@ enum Resolver {
///
/// These represent a subset of the `virtualenv` interface that uv supports by default.
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct VenvCompatArgs {
    #[clap(long, hide = true)]
    clear: bool,

    #[clap(long, hide = true)]
    no_seed: bool,

@ -293,12 +286,6 @@ impl CompatArgs for VenvCompatArgs {
    /// behavior. If an argument is passed that does _not_ match uv's behavior, this method will
    /// return an error.
    fn validate(&self) -> Result<()> {
        if self.clear {
            warn_user!(
                "virtualenv's `--clear` has no effect (uv always clears the virtual environment)"
            );
        }

        if self.no_seed {
            warn_user!(
                "virtualenv's `--no-seed` has no effect (uv omits seed packages by default)"

@ -327,7 +314,6 @@ impl CompatArgs for VenvCompatArgs {
///
/// These represent a subset of the `pip install` interface that uv supports by default.
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PipInstallCompatArgs {
    #[clap(long, hide = true)]
    disable_pip_version_check: bool,

@ -361,7 +347,6 @@ impl CompatArgs for PipInstallCompatArgs {
///
/// These represent a subset of the `pip` interface that exists on all commands.
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PipGlobalCompatArgs {
    #[clap(long, hide = true)]
    disable_pip_version_check: bool,

@ -10,8 +10,9 @@ use clap::{Args, Parser, Subcommand};

use uv_cache::CacheArgs;
use uv_configuration::{
    ConfigSettingEntry, ExportFormat, IndexStrategy, KeyringProviderType, PackageNameSpecifier,
    ProjectBuildBackend, TargetTriple, TrustedHost, TrustedPublishing, VersionControlSystem,
    ConfigSettingEntry, ConfigSettingPackageEntry, ExportFormat, IndexStrategy,
    KeyringProviderType, PackageNameSpecifier, PreviewFeatures, ProjectBuildBackend, TargetTriple,
    TrustedHost, TrustedPublishing, VersionControlSystem,
};
use uv_distribution_types::{Index, IndexUrl, Origin, PipExtraIndex, PipFindLinks, PipIndex};
use uv_normalize::{ExtraName, GroupName, PackageName, PipGroupName};

@ -46,6 +47,15 @@ pub enum PythonListFormat {
    Json,
}

#[derive(Debug, Default, Clone, Copy, clap::ValueEnum)]
pub enum SyncFormat {
    /// Display the result in a human-readable format.
    #[default]
    Text,
    /// Display the result in JSON format.
    Json,
}
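With `clap::ValueEnum` derived, the variant names become the accepted CLI values, so `--output-format text|json` parses directly into this enum. A minimal sketch of how such a flag wires up (standalone program, assuming the `clap` crate with the `derive` feature; names are illustrative):

    use clap::{Parser, ValueEnum};

    #[derive(Debug, Default, Clone, Copy, ValueEnum)]
    enum SyncFormat {
        #[default]
        Text,
        Json,
    }

    #[derive(Parser)]
    struct Cli {
        /// Select the output format.
        #[arg(long, value_enum, default_value_t = SyncFormat::default())]
        output_format: SyncFormat,
    }

    fn main() {
        let cli = Cli::parse_from(["uv", "--output-format", "json"]);
        assert!(matches!(cli.output_format, SyncFormat::Json));
    }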

#[derive(Debug, Default, Clone, clap::ValueEnum)]
pub enum ListFormat {
    /// Display the list of packages in a human-readable table.

@ -85,7 +95,6 @@ const STYLES: Styles = Styles::styled()
    disable_version_flag = true
)]
#[command(styles=STYLES)]
#[allow(clippy::struct_excessive_bools)]
pub struct Cli {
    #[command(subcommand)]
    pub command: Box<Commands>,

@ -133,7 +142,6 @@ pub struct TopLevelArgs {

#[derive(Parser, Debug, Clone)]
#[command(next_help_heading = "Global options", next_display_order = 1000)]
#[allow(clippy::struct_excessive_bools)]
pub struct GlobalArgs {
    #[arg(
        global = true,

@ -265,7 +273,7 @@ pub struct GlobalArgs {
    )]
    pub allow_insecure_host: Option<Vec<Maybe<TrustedHost>>>,

    /// Whether to enable experimental, preview features.
    /// Whether to enable all experimental preview features.
    ///
    /// Preview features may change without warning.
    #[arg(global = true, long, hide = true, env = EnvVars::UV_PREVIEW, value_parser = clap::builder::BoolishValueParser::new(), overrides_with("no_preview"))]

@ -274,6 +282,25 @@ pub struct GlobalArgs {
    #[arg(global = true, long, overrides_with("preview"), hide = true)]
    pub no_preview: bool,

    /// Enable experimental preview features.
    ///
    /// Preview features may change without warning.
    ///
    /// Use comma-separated values or pass multiple times to enable multiple features.
    ///
    /// The following features are available: `python-install-default`, `python-upgrade`,
    /// `json-output`, `pylock`, `add-bounds`.
    #[arg(
        global = true,
        long = "preview-features",
        env = EnvVars::UV_PREVIEW_FEATURES,
        value_delimiter = ',',
        hide = true,
        alias = "preview-feature",
        value_enum,
    )]
    pub preview_features: Vec<PreviewFeatures>,

    /// Avoid discovering a `pyproject.toml` or `uv.toml` file.
    ///
    /// Normally, configuration files are discovered in the current directory,

@ -526,7 +553,6 @@ pub struct HelpArgs {

#[derive(Args)]
#[command(group = clap::ArgGroup::new("operation"))]
#[allow(clippy::struct_excessive_bools)]
pub struct VersionArgs {
    /// Set the project version to this value
    ///

@ -535,8 +561,10 @@ pub struct VersionArgs {
    pub value: Option<String>,

    /// Update the project version using the given semantics
    ///
    /// This flag can be passed multiple times.
    #[arg(group = "operation", long)]
    pub bump: Option<VersionBump>,
    pub bump: Vec<VersionBump>,

    /// Don't write a new version to the `pyproject.toml`
    ///

@ -611,14 +639,56 @@ pub struct VersionArgs {
    pub python: Option<Maybe<String>>,
}

#[derive(Debug, Copy, Clone, PartialEq, clap::ValueEnum)]
// Note that the ordering of the variants is significant, as when given a list of operations
// to perform, we sort them and apply them in order, so users don't have to think too hard about it.
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, clap::ValueEnum)]
pub enum VersionBump {
    /// Increase the major version (1.2.3 => 2.0.0)
    /// Increase the major version (e.g., 1.2.3 => 2.0.0)
    Major,
    /// Increase the minor version (1.2.3 => 1.3.0)
    /// Increase the minor version (e.g., 1.2.3 => 1.3.0)
    Minor,
    /// Increase the patch version (1.2.3 => 1.2.4)
    /// Increase the patch version (e.g., 1.2.3 => 1.2.4)
    Patch,
    /// Move from a pre-release to stable version (e.g., 1.2.3b4.post5.dev6 => 1.2.3)
    ///
    /// Removes all pre-release components, but will not remove "local" components.
    Stable,
    /// Increase the alpha version (e.g., 1.2.3a4 => 1.2.3a5)
    ///
    /// To move from a stable to a pre-release version, combine this with a stable component, e.g.,
    /// for 1.2.3 => 2.0.0a1, you'd also include [`VersionBump::Major`].
    Alpha,
    /// Increase the beta version (e.g., 1.2.3b4 => 1.2.3b5)
    ///
    /// To move from a stable to a pre-release version, combine this with a stable component, e.g.,
    /// for 1.2.3 => 2.0.0b1, you'd also include [`VersionBump::Major`].
    Beta,
    /// Increase the rc version (e.g., 1.2.3rc4 => 1.2.3rc5)
    ///
    /// To move from a stable to a pre-release version, combine this with a stable component, e.g.,
    /// for 1.2.3 => 2.0.0rc1, you'd also include [`VersionBump::Major`].
    Rc,
    /// Increase the post version (e.g., 1.2.3.post5 => 1.2.3.post6)
    Post,
    /// Increase the dev version (e.g., 1.2.3a4.dev6 => 1.2.3.dev7)
    Dev,
}

impl std::fmt::Display for VersionBump {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let string = match self {
            VersionBump::Major => "major",
            VersionBump::Minor => "minor",
            VersionBump::Patch => "patch",
            VersionBump::Stable => "stable",
            VersionBump::Alpha => "alpha",
            VersionBump::Beta => "beta",
            VersionBump::Rc => "rc",
            VersionBump::Post => "post",
            VersionBump::Dev => "dev",
        };
        string.fmt(f)
    }
}
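Because `PartialOrd`/`Ord` are derived and the variants are declared from coarsest to finest, a multi-bump invocation can be normalized by a plain sort before the bumps are applied. A sketch of the idea (with the enum trimmed to two variants for brevity):

    fn main() {
        #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
        enum VersionBump {
            Major,
            Alpha,
        }
        // The user may pass bumps in any order, e.g. `--bump alpha --bump major`.
        let mut bumps = vec![VersionBump::Alpha, VersionBump::Major];
        // Derived `Ord` follows declaration order, so sorting yields [Major, Alpha]:
        // 1.2.3 (major) => 2.0.0, then (alpha) => 2.0.0a1.
        bumps.sort();
        assert_eq!(bumps, vec![VersionBump::Major, VersionBump::Alpha]);
    }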

#[derive(Args)]

@ -657,7 +727,6 @@ pub struct SelfUpdateArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct CacheNamespace {
    #[command(subcommand)]
    pub command: CacheCommand,

@ -687,14 +756,12 @@ pub enum CacheCommand {
}

#[derive(Args, Debug)]
#[allow(clippy::struct_excessive_bools)]
pub struct CleanArgs {
    /// The packages to remove from the cache.
    pub package: Vec<PackageName>,
}

#[derive(Args, Debug)]
#[allow(clippy::struct_excessive_bools)]
pub struct PruneArgs {
    /// Optimize the cache for persistence in a continuous integration environment, like GitHub
    /// Actions.

@ -714,7 +781,6 @@ pub struct PruneArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PipNamespace {
    #[command(subcommand)]
    pub command: PipCommand,

@ -1095,7 +1161,6 @@ fn parse_maybe_string(input: &str) -> Result<Maybe<String>, String> {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
#[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))]
pub struct PipCompileArgs {
    /// Include all packages listed in the given `requirements.in` files.

@ -1156,6 +1221,14 @@ pub struct PipCompileArgs {
    #[arg(long, overrides_with("all_extras"), hide = true)]
    pub no_all_extras: bool,

    /// Install the specified dependency group from a `pyproject.toml`.
    ///
    /// If no path is provided, the `pyproject.toml` in the working directory is used.
    ///
    /// May be provided multiple times.
    #[arg(long, group = "sources")]
    pub group: Vec<PipGroupName>,

    #[command(flatten)]
    pub resolver: ResolverArgs,

@ -1170,14 +1243,6 @@ pub struct PipCompileArgs {
    #[arg(long, overrides_with("no_deps"), hide = true)]
    pub deps: bool,

    /// Install the specified dependency group from a `pyproject.toml`.
    ///
    /// If no path is provided, the `pyproject.toml` in the working directory is used.
    ///
    /// May be provided multiple times.
    #[arg(long, group = "sources")]
    pub group: Vec<PipGroupName>,

    /// Write the compiled requirements to the given `requirements.txt` or `pylock.toml` file.
    ///
    /// If the file already exists, the existing versions will be preferred when resolving

@ -1443,7 +1508,6 @@ pub struct PipCompileArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PipSyncArgs {
    /// Include all packages listed in the given `requirements.txt` files.
    ///

@ -1473,6 +1537,30 @@ pub struct PipSyncArgs {
    #[arg(long, short, alias = "build-constraint", env = EnvVars::UV_BUILD_CONSTRAINT, value_delimiter = ' ', value_parser = parse_maybe_file_path)]
    pub build_constraints: Vec<Maybe<PathBuf>>,

    /// Include optional dependencies from the specified extra name; may be provided more than once.
    ///
    /// Only applies to `pylock.toml`, `pyproject.toml`, `setup.py`, and `setup.cfg` sources.
    #[arg(long, conflicts_with = "all_extras", value_parser = extra_name_with_clap_error)]
    pub extra: Option<Vec<ExtraName>>,

    /// Include all optional dependencies.
    ///
    /// Only applies to `pylock.toml`, `pyproject.toml`, `setup.py`, and `setup.cfg` sources.
    #[arg(long, conflicts_with = "extra", overrides_with = "no_all_extras")]
    pub all_extras: bool,

    #[arg(long, overrides_with("all_extras"), hide = true)]
    pub no_all_extras: bool,

    /// Install the specified dependency group from a `pylock.toml` or `pyproject.toml`.
    ///
    /// If no path is provided, the `pylock.toml` or `pyproject.toml` in the working directory is
    /// used.
    ///
    /// May be provided multiple times.
    #[arg(long, group = "sources")]
    pub group: Vec<PipGroupName>,

    #[command(flatten)]
    pub installer: InstallerArgs,

@ -1700,7 +1788,6 @@ pub struct PipSyncArgs {

#[derive(Args)]
#[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))]
#[allow(clippy::struct_excessive_bools)]
pub struct PipInstallArgs {
    /// Install all listed packages.
    ///

@ -1754,19 +1841,28 @@ pub struct PipInstallArgs {

    /// Include optional dependencies from the specified extra name; may be provided more than once.
    ///
    /// Only applies to `pyproject.toml`, `setup.py`, and `setup.cfg` sources.
    /// Only applies to `pylock.toml`, `pyproject.toml`, `setup.py`, and `setup.cfg` sources.
    #[arg(long, conflicts_with = "all_extras", value_parser = extra_name_with_clap_error)]
    pub extra: Option<Vec<ExtraName>>,

    /// Include all optional dependencies.
    ///
    /// Only applies to `pyproject.toml`, `setup.py`, and `setup.cfg` sources.
    /// Only applies to `pylock.toml`, `pyproject.toml`, `setup.py`, and `setup.cfg` sources.
    #[arg(long, conflicts_with = "extra", overrides_with = "no_all_extras")]
    pub all_extras: bool,

    #[arg(long, overrides_with("all_extras"), hide = true)]
    pub no_all_extras: bool,

    /// Install the specified dependency group from a `pylock.toml` or `pyproject.toml`.
    ///
    /// If no path is provided, the `pylock.toml` or `pyproject.toml` in the working directory is
    /// used.
    ///
    /// May be provided multiple times.
    #[arg(long, group = "sources")]
    pub group: Vec<PipGroupName>,

    #[command(flatten)]
    pub installer: ResolverInstallerArgs,

@ -1781,14 +1877,6 @@ pub struct PipInstallArgs {
    #[arg(long, overrides_with("no_deps"), hide = true)]
    pub deps: bool,

    /// Install the specified dependency group from a `pyproject.toml`.
    ///
    /// If no path is provided, the `pyproject.toml` in the working directory is used.
    ///
    /// May be provided multiple times.
    #[arg(long, group = "sources")]
    pub group: Vec<PipGroupName>,

    /// Require a matching hash for each requirement.
    ///
    /// By default, uv will verify any available hashes in the requirements file, but will not

@ -2015,7 +2103,6 @@ pub struct PipInstallArgs {

#[derive(Args)]
#[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))]
#[allow(clippy::struct_excessive_bools)]
pub struct PipUninstallArgs {
    /// Uninstall all listed packages.
    #[arg(group = "sources")]

@ -2104,7 +2191,6 @@ pub struct PipUninstallArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PipFreezeArgs {
    /// Exclude any editable packages from output.
    #[arg(long)]

@ -2159,7 +2245,6 @@ pub struct PipFreezeArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PipListArgs {
    /// Only include editable projects.
    #[arg(short, long)]

@ -2235,7 +2320,6 @@ pub struct PipListArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PipCheckArgs {
    /// The Python interpreter for which packages should be checked.
    ///

@ -2271,7 +2355,6 @@ pub struct PipCheckArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PipShowArgs {
    /// The package(s) to display.
    pub package: Vec<PackageName>,

@ -2325,7 +2408,6 @@ pub struct PipShowArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PipTreeArgs {
    /// Show the version constraint(s) imposed on each package.
    #[arg(long)]

@ -2382,7 +2464,6 @@ pub struct PipTreeArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct BuildArgs {
    /// The directory from which distributions should be built, or a source
    /// distribution archive to build into a wheel.

@ -2529,7 +2610,6 @@ pub struct BuildArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct VenvArgs {
    /// The Python interpreter to use for the virtual environment.
    ///

@ -2580,16 +2660,23 @@ pub struct VenvArgs {
    #[arg(long, value_parser = clap::builder::BoolishValueParser::new(), env = EnvVars::UV_VENV_SEED)]
    pub seed: bool,

    /// Remove any existing files or directories at the target path.
    ///
    /// By default, `uv venv` will exit with an error if the given path is non-empty. The
    /// `--clear` option will instead clear a non-empty path before creating a new virtual
    /// environment.
    #[clap(long, short, overrides_with = "allow_existing", value_parser = clap::builder::BoolishValueParser::new(), env = EnvVars::UV_VENV_CLEAR)]
    pub clear: bool,

    /// Preserve any existing files or directories at the target path.
    ///
    /// By default, `uv venv` will remove an existing virtual environment at the given path, and
    /// exit with an error if the path is non-empty but _not_ a virtual environment. The
    /// By default, `uv venv` will exit with an error if the given path is non-empty. The
    /// `--allow-existing` option will instead write to the given path, regardless of its contents,
    /// and without clearing it beforehand.
    ///
    /// WARNING: This option can lead to unexpected behavior if the existing virtual environment and
    /// the newly-created virtual environment are linked to different Python interpreters.
    #[clap(long)]
    #[clap(long, overrides_with = "clear")]
    pub allow_existing: bool,

    /// The path to the virtual environment to create.

@ -2725,7 +2812,6 @@ pub enum AuthorFrom {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct InitArgs {
    /// The path to use for the project/script.
    ///

@ -2824,7 +2910,7 @@ pub struct InitArgs {
    /// Initialize a build-backend of choice for the project.
    ///
    /// Implicitly sets `--package`.
    #[arg(long, value_enum, conflicts_with_all=["script", "no_package"])]
    #[arg(long, value_enum, conflicts_with_all=["script", "no_package"], env = EnvVars::UV_INIT_BUILD_BACKEND)]
    pub build_backend: Option<ProjectBuildBackend>,

    /// Invalid option name for build backend.

@ -2883,7 +2969,6 @@ pub struct InitArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct RunArgs {
    /// Include optional dependencies from the specified extra name.
    ///

@ -3021,7 +3106,7 @@ pub struct RunArgs {
    /// When used in a project, these dependencies will be layered on top of the project environment
    /// in a separate, ephemeral environment. These dependencies are allowed to conflict with those
    /// specified by the project.
    #[arg(long)]
    #[arg(short = 'w', long)]
    pub with: Vec<comma::CommaSeparatedRequirements>,

    /// Run with the given packages installed in editable mode

@ -3170,7 +3255,6 @@ pub struct RunArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct SyncArgs {
    /// Include optional dependencies from the specified extra name.
    ///

@ -3184,6 +3268,10 @@ pub struct SyncArgs {
    #[arg(long, conflicts_with = "all_extras", value_parser = extra_name_with_clap_error)]
    pub extra: Option<Vec<ExtraName>>,

    /// Select the output format.
    #[arg(long, value_enum, default_value_t = SyncFormat::default())]
    pub output_format: SyncFormat,

    /// Include all optional dependencies.
    ///
    /// When two or more extras are declared as conflicting in `tool.uv.conflicts`, using this flag

@ -3416,6 +3504,23 @@ pub struct SyncArgs {
    )]
    pub python: Option<Maybe<String>>,

    /// The platform for which requirements should be installed.
    ///
    /// Represented as a "target triple", a string that describes the target platform in terms of
    /// its CPU, vendor, and operating system name, like `x86_64-unknown-linux-gnu` or
    /// `aarch64-apple-darwin`.
    ///
    /// When targeting macOS (Darwin), the default minimum version is `12.0`. Use
    /// `MACOSX_DEPLOYMENT_TARGET` to specify a different minimum version, e.g., `13.0`.
    ///
    /// WARNING: When specified, uv will select wheels that are compatible with the _target_
    /// platform; as a result, the installed distributions may not be compatible with the _current_
    /// platform. Conversely, any distributions that are built from source may be incompatible with
    /// the _target_ platform, as they will be built for the _current_ platform. The
    /// `--python-platform` option is intended for advanced use cases.
    #[arg(long)]
    pub python_platform: Option<TargetTriple>,

    /// Check if the Python environment is synchronized with the project.
    ///
    /// If the environment is not up to date, uv will exit with an error.

@ -3427,7 +3532,6 @@ pub struct SyncArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct LockArgs {
    /// Check if the lockfile is up-to-date.
    ///

@ -3489,7 +3593,6 @@ pub struct LockArgs {

#[derive(Args)]
#[command(group = clap::ArgGroup::new("sources").required(true).multiple(true))]
#[allow(clippy::struct_excessive_bools)]
pub struct AddArgs {
    /// The packages to add, as PEP 508 requirements (e.g., `ruff==0.5.0`).
    #[arg(group = "sources")]

@ -3655,7 +3758,8 @@ pub struct AddArgs {
        long,
        conflicts_with = "dev",
        conflicts_with = "optional",
        conflicts_with = "package"
        conflicts_with = "package",
        conflicts_with = "workspace"
    )]
    pub script: Option<PathBuf>,

@ -3671,10 +3775,25 @@ pub struct AddArgs {
        value_parser = parse_maybe_string,
    )]
    pub python: Option<Maybe<String>>,

    /// Add the dependency as a workspace member.
    ///
    /// By default, uv will add path dependencies that are within the workspace directory
    /// as workspace members. When used with a path dependency, the package will be added
    /// to the workspace's `members` list in the root `pyproject.toml` file.
    #[arg(long, overrides_with = "no_workspace")]
    pub workspace: bool,

    /// Don't add the dependency as a workspace member.
    ///
    /// By default, when adding a dependency that's a local path and is within the workspace
    /// directory, uv will add it as a workspace member; pass `--no-workspace` to add the package
    /// as a direct path dependency instead.
    #[arg(long, overrides_with = "workspace")]
    pub no_workspace: bool,
}
||||
|
||||
#[derive(Args)]
|
||||
#[allow(clippy::struct_excessive_bools)]
|
||||
pub struct RemoveArgs {
|
||||
/// The names of the dependencies to remove (e.g., `ruff`).
|
||||
#[arg(required = true)]
|
||||
|
@@ -3769,7 +3888,6 @@ pub struct RemoveArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct TreeArgs {
/// Show a platform-independent dependency tree.
///

@@ -3909,7 +4027,6 @@ pub struct TreeArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct ExportArgs {
/// The format to which `uv.lock` should be exported.
///

@@ -4124,7 +4241,6 @@ pub struct ExportArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct ToolNamespace {
#[command(subcommand)]
pub command: ToolCommand,

@@ -4217,7 +4333,6 @@ pub enum ToolCommand {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct ToolRunArgs {
/// The command to run.
///

@@ -4232,7 +4347,7 @@ pub struct ToolRunArgs {
pub from: Option<String>,

/// Run with the given packages installed.
- #[arg(long)]
+ #[arg(short = 'w', long)]
pub with: Vec<comma::CommaSeparatedRequirements>,

/// Run with the given packages installed in editable mode

@@ -4336,7 +4451,6 @@ pub struct UvxArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct ToolInstallArgs {
/// The package to install commands from.
pub package: String,

@@ -4348,7 +4462,7 @@ pub struct ToolInstallArgs {
pub from: Option<String>,

/// Include the following additional requirements.
- #[arg(long)]
+ #[arg(short = 'w', long)]
pub with: Vec<comma::CommaSeparatedRequirements>,

/// Include all requirements listed in the given `requirements.txt` files.

@@ -4425,7 +4539,6 @@ pub struct ToolInstallArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct ToolListArgs {
/// Whether to display the path to each tool environment and installed executable.
#[arg(long)]

@@ -4452,7 +4565,6 @@ pub struct ToolListArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct ToolDirArgs {
/// Show the directory into which `uv tool` will install executables.
///

@@ -4471,7 +4583,6 @@ pub struct ToolDirArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct ToolUninstallArgs {
/// The name of the tool to uninstall.
#[arg(required = true)]

@@ -4483,7 +4594,6 @@ pub struct ToolUninstallArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct ToolUpgradeArgs {
/// The name of the tool to upgrade, along with an optional version specifier.
#[arg(required = true)]
@@ -4628,6 +4738,14 @@ pub struct ToolUpgradeArgs {
)]
pub config_setting: Option<Vec<ConfigSettingEntry>>,

+ /// Settings to pass to the PEP 517 build backend for a specific package, specified as `PACKAGE:KEY=VALUE` pairs.
+ #[arg(
+     long,
+     alias = "config-settings-package",
+     help_heading = "Build options"
+ )]
+ pub config_setting_package: Option<Vec<ConfigSettingPackageEntry>>,
+
/// Disable isolation when building source distributions.
///
/// Assumes that build dependencies specified by PEP 518 are already installed.

@@ -4713,7 +4831,6 @@ pub struct ToolUpgradeArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PythonNamespace {
#[command(subcommand)]
pub command: PythonCommand,

@@ -4746,16 +4863,35 @@ pub enum PythonCommand {
/// Python versions are installed into the uv Python directory, which can be retrieved with `uv
/// python dir`.
///
- /// A `python` executable is not made globally available, managed Python versions are only used
- /// in uv commands or in active virtual environments. There is experimental support for adding
- /// Python executables to a directory on the path — use the `--preview` flag to enable this
- /// behavior and `uv python dir --bin` to retrieve the target directory.
+ /// By default, Python executables are added to a directory on the path with a minor version
+ /// suffix, e.g., `python3.13`. To install `python3` and `python`, use the `--default` flag. Use
+ /// `uv python dir --bin` to see the target directory.
///
/// Multiple Python versions may be requested.
///
/// See `uv help python` to view supported request formats.
Install(PythonInstallArgs),

+ /// Upgrade installed Python versions.
+ ///
+ /// Upgrades versions to the latest supported patch release. Requires the `python-upgrade`
+ /// preview feature.
+ ///
+ /// A target Python minor version to upgrade may be provided, e.g., `3.13`. Multiple versions
+ /// may be provided to perform more than one upgrade.
+ ///
+ /// If no target version is provided, then uv will upgrade all managed CPython versions.
+ ///
+ /// During an upgrade, uv will not uninstall outdated patch versions.
+ ///
+ /// When an upgrade is performed, virtual environments created by uv will automatically
+ /// use the new version. However, if the virtual environment was created before the
+ /// upgrade functionality was added, it will continue to use the old Python version; to enable
+ /// upgrades, the environment must be recreated.
+ ///
+ /// Upgrades are not yet supported for alternative implementations, like PyPy.
+ Upgrade(PythonUpgradeArgs),
+
/// Search for a Python installation.
///
/// Displays the path to the Python executable.

@@ -4790,10 +4926,22 @@ pub enum PythonCommand {

/// Uninstall Python versions.
Uninstall(PythonUninstallArgs),

+ /// Ensure that the Python executable directory is on the `PATH`.
+ ///
+ /// If the Python executable directory is not present on the `PATH`, uv will attempt to add it to
+ /// the relevant shell configuration files.
+ ///
+ /// If the shell configuration files already include a blurb to add the executable directory to
+ /// the path, but the directory is not present on the `PATH`, uv will exit with an error.
+ ///
+ /// The Python executable directory is determined according to the XDG standard and can be
+ /// retrieved with `uv python dir --bin`.
+ #[command(alias = "ensurepath")]
+ UpdateShell,
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PythonListArgs {
/// A Python request to filter by.
///
@@ -4848,7 +4996,6 @@ pub struct PythonListArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PythonDirArgs {
/// Show the directory into which `uv python` will install Python executables.
///

@@ -4866,7 +5013,6 @@ pub struct PythonDirArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PythonInstallArgs {
/// The directory to store the Python installation in.
///

@@ -4878,6 +5024,38 @@ pub struct PythonInstallArgs {
#[arg(long, short, env = EnvVars::UV_PYTHON_INSTALL_DIR)]
pub install_dir: Option<PathBuf>,

+ /// Install a Python executable into the `bin` directory.
+ ///
+ /// This is the default behavior. If this flag is provided explicitly, uv will error if the
+ /// executable cannot be installed.
+ ///
+ /// This can also be set with `UV_PYTHON_INSTALL_BIN=1`.
+ ///
+ /// See `UV_PYTHON_BIN_DIR` to customize the target directory.
+ #[arg(long, overrides_with("no_bin"), hide = true)]
+ pub bin: bool,
+
+ /// Do not install a Python executable into the `bin` directory.
+ ///
+ /// This can also be set with `UV_PYTHON_INSTALL_BIN=0`.
+ #[arg(long, overrides_with("bin"), conflicts_with("default"))]
+ pub no_bin: bool,
+
+ /// Register the Python installation in the Windows registry.
+ ///
+ /// This is the default behavior on Windows. If this flag is provided explicitly, uv will error if the
+ /// registry entry cannot be created.
+ ///
+ /// This can also be set with `UV_PYTHON_INSTALL_REGISTRY=1`.
+ #[arg(long, overrides_with("no_registry"), hide = true)]
+ pub registry: bool,
+
+ /// Do not register the Python installation in the Windows registry.
+ ///
+ /// This can also be set with `UV_PYTHON_INSTALL_REGISTRY=0`.
+ #[arg(long, overrides_with("registry"))]
+ pub no_registry: bool,
+
/// The Python version(s) to install.
///
/// If not provided, the requested Python version(s) will be read from the `UV_PYTHON`
@@ -4940,12 +5118,55 @@ pub struct PythonInstallArgs {
/// and `python`.
///
/// If multiple Python versions are requested, uv will exit with an error.
- #[arg(long)]
+ #[arg(long, conflicts_with("no_bin"))]
pub default: bool,
}

+ #[derive(Args)]
+ #[allow(clippy::struct_excessive_bools)]
+ pub struct PythonUpgradeArgs {
+     /// The directory Python installations are stored in.
+     ///
+     /// If provided, `UV_PYTHON_INSTALL_DIR` will need to be set for subsequent operations for uv to
+     /// discover the Python installation.
+     ///
+     /// See `uv python dir` to view the current Python installation directory. Defaults to
+     /// `~/.local/share/uv/python`.
+     #[arg(long, short, env = EnvVars::UV_PYTHON_INSTALL_DIR)]
+     pub install_dir: Option<PathBuf>,
+
+     /// The Python minor version(s) to upgrade.
+     ///
+     /// If no target version is provided, then uv will upgrade all managed CPython versions.
+     #[arg(env = EnvVars::UV_PYTHON)]
+     pub targets: Vec<String>,
+
+     /// Set the URL to use as the source for downloading Python installations.
+     ///
+     /// The provided URL will replace
+     /// `https://github.com/astral-sh/python-build-standalone/releases/download` in, e.g.,
+     /// `https://github.com/astral-sh/python-build-standalone/releases/download/20240713/cpython-3.12.4%2B20240713-aarch64-apple-darwin-install_only.tar.gz`.
+     ///
+     /// Distributions can be read from a local directory by using the `file://` URL scheme.
+     #[arg(long, env = EnvVars::UV_PYTHON_INSTALL_MIRROR)]
+     pub mirror: Option<String>,
+
+     /// Set the URL to use as the source for downloading PyPy installations.
+     ///
+     /// The provided URL will replace `https://downloads.python.org/pypy` in, e.g.,
+     /// `https://downloads.python.org/pypy/pypy3.8-v7.3.7-osx64.tar.bz2`.
+     ///
+     /// Distributions can be read from a local directory by using the `file://` URL scheme.
+     #[arg(long, env = EnvVars::UV_PYPY_INSTALL_MIRROR)]
+     pub pypy_mirror: Option<String>,
+
+     /// URL pointing to JSON of custom Python installations.
+     ///
+     /// Note that currently, only local paths are supported.
+     #[arg(long, env = EnvVars::UV_PYTHON_DOWNLOADS_JSON_URL)]
+     pub python_downloads_json_url: Option<String>,
+ }
+
#[derive(Args)]
pub struct PythonUninstallArgs {
/// The directory where the Python was installed.
#[arg(long, short, env = EnvVars::UV_PYTHON_INSTALL_DIR)]
@@ -4963,7 +5184,6 @@ pub struct PythonUninstallArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PythonFindArgs {
/// The Python request.
///

@@ -5012,7 +5232,6 @@ pub struct PythonFindArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct PythonPinArgs {
/// The Python version request.
///

@@ -5061,7 +5280,6 @@ pub struct PythonPinArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct GenerateShellCompletionArgs {
/// The shell to generate the completion script for
pub shell: clap_complete_command::Shell,

@@ -5100,7 +5318,6 @@ pub struct GenerateShellCompletionArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct IndexArgs {
/// The URLs to use when resolving dependencies, in addition to the default index.
///

@@ -5110,6 +5327,9 @@ pub struct IndexArgs {
/// All indexes provided via this flag take priority over the index specified by
/// `--default-index` (which defaults to PyPI). When multiple `--index` flags are provided,
/// earlier values take priority.
+ ///
+ /// Index names are not supported as values. Relative paths must be disambiguated from index
+ /// names with `./` or `../` on Unix or `.\\`, `..\\`, `./` or `../` on Windows.
//
// The nested Vec structure (`Vec<Vec<Maybe<Index>>>`) is required for clap's
// value parsing mechanism, which processes one value at a time, in order to handle

@@ -5175,7 +5395,6 @@ pub struct IndexArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct RefreshArgs {
/// Refresh all cached data.
#[arg(

@@ -5201,7 +5420,6 @@ pub struct RefreshArgs {
}

#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct BuildOptionsArgs {
/// Don't build source distributions.
///
@@ -5257,7 +5475,6 @@ pub struct BuildOptionsArgs {

/// Arguments that are used by commands that need to install (but not resolve) packages.
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct InstallerArgs {
#[command(flatten)]
pub index_args: IndexArgs,

@@ -5322,6 +5539,14 @@ pub struct InstallerArgs {
)]
pub config_setting: Option<Vec<ConfigSettingEntry>>,

+ /// Settings to pass to the PEP 517 build backend for a specific package, specified as `PACKAGE:KEY=VALUE` pairs.
+ #[arg(
+     long,
+     alias = "config-settings-package",
+     help_heading = "Build options"
+ )]
+ pub config_settings_package: Option<Vec<ConfigSettingPackageEntry>>,
+
/// Disable isolation when building source distributions.
///
/// Assumes that build dependencies specified by PEP 518 are already installed.

@@ -5399,7 +5624,6 @@ pub struct InstallerArgs {

/// Arguments that are used by commands that need to resolve (but not install) packages.
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct ResolverArgs {
#[command(flatten)]
pub index_args: IndexArgs,

@@ -5510,6 +5734,14 @@ pub struct ResolverArgs {
)]
pub config_setting: Option<Vec<ConfigSettingEntry>>,

+ /// Settings to pass to the PEP 517 build backend for a specific package, specified as `PACKAGE:KEY=VALUE` pairs.
+ #[arg(
+     long,
+     alias = "config-settings-package",
+     help_heading = "Build options"
+ )]
+ pub config_settings_package: Option<Vec<ConfigSettingPackageEntry>>,
+
/// Disable isolation when building source distributions.
///
/// Assumes that build dependencies specified by PEP 518 are already installed.

@@ -5566,7 +5798,6 @@ pub struct ResolverArgs {

/// Arguments that are used by commands that need to resolve and install packages.
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct ResolverInstallerArgs {
#[command(flatten)]
pub index_args: IndexArgs,

@@ -5700,6 +5931,14 @@ pub struct ResolverInstallerArgs {
)]
pub config_setting: Option<Vec<ConfigSettingEntry>>,

+ /// Settings to pass to the PEP 517 build backend for a specific package, specified as `PACKAGE:KEY=VALUE` pairs.
+ #[arg(
+     long,
+     alias = "config-settings-package",
+     help_heading = "Build options"
+ )]
+ pub config_settings_package: Option<Vec<ConfigSettingPackageEntry>>,
+
/// Disable isolation when building source distributions.
///
/// Assumes that build dependencies specified by PEP 518 are already installed.

@@ -5783,7 +6022,6 @@ pub struct ResolverInstallerArgs {

/// Arguments that are used by commands that need to fetch from the Simple API.
#[derive(Args)]
#[allow(clippy::struct_excessive_bools)]
pub struct FetchArgs {
#[command(flatten)]
pub index_args: IndexArgs,
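The new `--config-settings-package` flag accepts `PACKAGE:KEY=VALUE` pairs. The exact parser behind `ConfigSettingPackageEntry` is not shown in this diff; a minimal sketch of how such a pair might be split (the function name and error strings are illustrative):

```rust
// Minimal sketch (not uv's actual parser): split a `PACKAGE:KEY=VALUE`
// argument into its three parts, as accepted by `--config-settings-package`.
fn parse_package_setting(s: &str) -> Result<(String, String, String), String> {
    // Split off the package name at the first `:`.
    let (package, rest) = s
        .split_once(':')
        .ok_or_else(|| format!("expected `PACKAGE:KEY=VALUE`, got `{s}`"))?;
    // Split the remainder into key and value at the first `=`.
    let (key, value) = rest
        .split_once('=')
        .ok_or_else(|| format!("expected `KEY=VALUE` after `{package}:`"))?;
    Ok((package.to_string(), key.to_string(), value.to_string()))
}

fn main() {
    assert_eq!(
        parse_package_setting("numpy:editable_mode=compat").unwrap(),
        ("numpy".into(), "editable_mode".into(), "compat".into())
    );
}
```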
@@ -1,7 +1,10 @@
+ use anstream::eprintln;
+
use uv_cache::Refresh;
- use uv_configuration::ConfigSettings;
+ use uv_configuration::{ConfigSettings, PackageConfigSettings};
use uv_resolver::PrereleaseMode;
use uv_settings::{Combine, PipOptions, ResolverInstallerOptions, ResolverOptions};
+ use uv_warnings::owo_colors::OwoColorize;

use crate::{
BuildOptionsArgs, FetchArgs, IndexArgs, InstallerArgs, Maybe, RefreshArgs, ResolverArgs,
@@ -9,12 +12,27 @@ use crate::{
};

/// Given a boolean flag pair (like `--upgrade` and `--no-upgrade`), resolve the value of the flag.
- pub fn flag(yes: bool, no: bool) -> Option<bool> {
+ pub fn flag(yes: bool, no: bool, name: &str) -> Option<bool> {
match (yes, no) {
(true, false) => Some(true),
(false, true) => Some(false),
(false, false) => None,
- (..) => unreachable!("Clap should make this impossible"),
+ (..) => {
+     eprintln!(
+         "{}{} `{}` and `{}` cannot be used together. \
+         Boolean flags on different levels are currently not supported \
+         (https://github.com/clap-rs/clap/issues/6049)",
+         "error".bold().red(),
+         ":".bold(),
+         format!("--{name}").green(),
+         format!("--no-{name}").green(),
+     );
+     // No error forwarding, since this should eventually be solved on the clap side.
+     #[allow(clippy::exit)]
+     {
+         std::process::exit(2);
+     }
+ }
}
}
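For context, a standalone sketch of the resolution table this helper implements. The `flag` name and the three valid outcomes match the diff; the demo `main` is illustrative:

```rust
// Standalone sketch of the three valid outcomes of a `--foo` / `--no-foo` pair.
// With clap's `overrides_with` on the same level, at most one of the two
// booleans is set; the (true, true) arm is only reachable when the flags come
// from different levels, which is the case the diff now reports as an error.
fn flag(yes: bool, no: bool) -> Option<bool> {
    match (yes, no) {
        (true, false) => Some(true),  // `--foo` was passed
        (false, true) => Some(false), // `--no-foo` was passed
        (false, false) => None,       // neither: fall back to config defaults
        (true, true) => unreachable!("clap's overrides_with prevents this"),
    }
}

fn main() {
    assert_eq!(flag(true, false), Some(true));
    assert_eq!(flag(false, true), Some(false));
    assert_eq!(flag(false, false), None);
}
```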
@@ -26,7 +44,7 @@ impl From<RefreshArgs> for Refresh {
refresh_package,
} = value;

- Self::from_args(flag(refresh, no_refresh), refresh_package)
+ Self::from_args(flag(refresh, no_refresh, "no-refresh"), refresh_package)
}
}

@@ -44,6 +62,7 @@ impl From<ResolverArgs> for PipOptions {
pre,
fork_strategy,
config_setting,
+ config_settings_package,
no_build_isolation,
no_build_isolation_package,
build_isolation,

@@ -53,7 +72,7 @@ impl From<ResolverArgs> for PipOptions {
} = args;

Self {
- upgrade: flag(upgrade, no_upgrade),
+ upgrade: flag(upgrade, no_upgrade, "no-upgrade"),
upgrade_package: Some(upgrade_package),
index_strategy,
keyring_provider,

@@ -66,7 +85,12 @@ impl From<ResolverArgs> for PipOptions {
},
config_settings: config_setting
    .map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
- no_build_isolation: flag(no_build_isolation, build_isolation),
+ config_settings_package: config_settings_package.map(|config_settings| {
+     config_settings
+         .into_iter()
+         .collect::<PackageConfigSettings>()
+ }),
+ no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
no_build_isolation_package: Some(no_build_isolation_package),
exclude_newer,
link_mode,

@@ -86,6 +110,7 @@ impl From<InstallerArgs> for PipOptions {
index_strategy,
keyring_provider,
config_setting,
+ config_settings_package,
no_build_isolation,
build_isolation,
exclude_newer,

@@ -96,16 +121,21 @@ impl From<InstallerArgs> for PipOptions {
} = args;

Self {
- reinstall: flag(reinstall, no_reinstall),
+ reinstall: flag(reinstall, no_reinstall, "reinstall"),
reinstall_package: Some(reinstall_package),
index_strategy,
keyring_provider,
config_settings: config_setting
    .map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
- no_build_isolation: flag(no_build_isolation, build_isolation),
+ config_settings_package: config_settings_package.map(|config_settings| {
+     config_settings
+         .into_iter()
+         .collect::<PackageConfigSettings>()
+ }),
+ no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
exclude_newer,
link_mode,
- compile_bytecode: flag(compile_bytecode, no_compile_bytecode),
+ compile_bytecode: flag(compile_bytecode, no_compile_bytecode, "compile-bytecode"),
no_sources: if no_sources { Some(true) } else { None },
..PipOptions::from(index_args)
}

@@ -129,6 +159,7 @@ impl From<ResolverInstallerArgs> for PipOptions {
pre,
fork_strategy,
config_setting,
+ config_settings_package,
no_build_isolation,
no_build_isolation_package,
build_isolation,

@@ -140,9 +171,9 @@ impl From<ResolverInstallerArgs> for PipOptions {
} = args;

Self {
- upgrade: flag(upgrade, no_upgrade),
+ upgrade: flag(upgrade, no_upgrade, "upgrade"),
upgrade_package: Some(upgrade_package),
- reinstall: flag(reinstall, no_reinstall),
+ reinstall: flag(reinstall, no_reinstall, "reinstall"),
reinstall_package: Some(reinstall_package),
index_strategy,
keyring_provider,

@@ -155,11 +186,16 @@ impl From<ResolverInstallerArgs> for PipOptions {
fork_strategy,
config_settings: config_setting
    .map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
- no_build_isolation: flag(no_build_isolation, build_isolation),
+ config_settings_package: config_settings_package.map(|config_settings| {
+     config_settings
+         .into_iter()
+         .collect::<PackageConfigSettings>()
+ }),
+ no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
no_build_isolation_package: Some(no_build_isolation_package),
exclude_newer,
link_mode,
- compile_bytecode: flag(compile_bytecode, no_compile_bytecode),
+ compile_bytecode: flag(compile_bytecode, no_compile_bytecode, "compile-bytecode"),
no_sources: if no_sources { Some(true) } else { None },
..PipOptions::from(index_args)
}
@@ -242,6 +278,7 @@ pub fn resolver_options(
pre,
fork_strategy,
config_setting,
+ config_settings_package,
no_build_isolation,
no_build_isolation_package,
build_isolation,

@@ -289,7 +326,7 @@ pub fn resolver_options(
    .filter_map(Maybe::into_option)
    .collect()
}),
- upgrade: flag(upgrade, no_upgrade),
+ upgrade: flag(upgrade, no_upgrade, "no-upgrade"),
upgrade_package: Some(upgrade_package),
index_strategy,
keyring_provider,

@@ -303,13 +340,18 @@ pub fn resolver_options(
dependency_metadata: None,
config_settings: config_setting
    .map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
- no_build_isolation: flag(no_build_isolation, build_isolation),
+ config_settings_package: config_settings_package.map(|config_settings| {
+     config_settings
+         .into_iter()
+         .collect::<PackageConfigSettings>()
+ }),
+ no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
no_build_isolation_package: Some(no_build_isolation_package),
exclude_newer,
link_mode,
- no_build: flag(no_build, build),
+ no_build: flag(no_build, build, "build"),
no_build_package: Some(no_build_package),
- no_binary: flag(no_binary, binary),
+ no_binary: flag(no_binary, binary, "binary"),
no_binary_package: Some(no_binary_package),
no_sources: if no_sources { Some(true) } else { None },
}

@@ -335,6 +377,7 @@ pub fn resolver_installer_options(
pre,
fork_strategy,
config_setting,
+ config_settings_package,
no_build_isolation,
no_build_isolation_package,
build_isolation,

@@ -386,13 +429,13 @@ pub fn resolver_installer_options(
    .filter_map(Maybe::into_option)
    .collect()
}),
- upgrade: flag(upgrade, no_upgrade),
+ upgrade: flag(upgrade, no_upgrade, "upgrade"),
upgrade_package: if upgrade_package.is_empty() {
    None
} else {
    Some(upgrade_package)
},
- reinstall: flag(reinstall, no_reinstall),
+ reinstall: flag(reinstall, no_reinstall, "reinstall"),
reinstall_package: if reinstall_package.is_empty() {
    None
} else {

@@ -410,7 +453,12 @@ pub fn resolver_installer_options(
dependency_metadata: None,
config_settings: config_setting
    .map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
- no_build_isolation: flag(no_build_isolation, build_isolation),
+ config_settings_package: config_settings_package.map(|config_settings| {
+     config_settings
+         .into_iter()
+         .collect::<PackageConfigSettings>()
+ }),
+ no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
no_build_isolation_package: if no_build_isolation_package.is_empty() {
    None
} else {

@@ -418,14 +466,14 @@ pub fn resolver_installer_options(
},
exclude_newer,
link_mode,
- compile_bytecode: flag(compile_bytecode, no_compile_bytecode),
- no_build: flag(no_build, build),
+ compile_bytecode: flag(compile_bytecode, no_compile_bytecode, "compile-bytecode"),
+ no_build: flag(no_build, build, "build"),
no_build_package: if no_build_package.is_empty() {
    None
} else {
    Some(no_build_package)
},
- no_binary: flag(no_binary, binary),
+ no_binary: flag(no_binary, binary, "binary"),
no_binary_package: if no_binary_package.is_empty() {
    None
} else {
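These conversions all lean on one Rust idiom: exhaustively destructuring the args struct, so that adding a field (like `config_settings_package`) is a compile error until every conversion handles it. A minimal sketch of the pattern with illustrative types, not uv's:

```rust
// Illustrative types, not uv's actual definitions.
struct Args {
    upgrade: bool,
    no_upgrade: bool,
}

#[derive(Debug, PartialEq)]
struct Options {
    upgrade: Option<bool>,
}

impl From<Args> for Options {
    fn from(args: Args) -> Self {
        // Exhaustive destructuring: if a new field is added to `Args`,
        // this `let` stops compiling until the field is handled here.
        let Args { upgrade, no_upgrade } = args;
        Options {
            upgrade: match (upgrade, no_upgrade) {
                (true, false) => Some(true),
                (false, true) => Some(false),
                _ => None,
            },
        }
    }
}

fn main() {
    let opts = Options::from(Args { upgrade: true, no_upgrade: false });
    assert_eq!(opts, Options { upgrade: Some(true) });
}
```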
@@ -60,8 +60,9 @@ url = { workspace = true }

[dev-dependencies]
anyhow = { workspace = true }
- http-body-util = { version = "0.1.2" }
- hyper = { version = "1.4.1", features = ["server", "http1"] }
- hyper-util = { version = "0.1.8", features = ["tokio"] }
- insta = { version = "1.40.0", features = ["filters", "json", "redactions"] }
+ http-body-util = { workspace = true }
+ hyper = { workspace = true }
+ hyper-util = { workspace = true }
+ insta = { workspace = true }
+ tokio = { workspace = true }
wiremock = { workspace = true }
@@ -6,16 +6,27 @@ use std::sync::Arc;
use std::time::Duration;
use std::{env, io, iter};

use anyhow::Context;
+ use anyhow::anyhow;
+ use http::{
+     HeaderMap, HeaderName, HeaderValue, Method, StatusCode,
+     header::{
+         AUTHORIZATION, CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TYPE, COOKIE, LOCATION,
+         PROXY_AUTHORIZATION, REFERER, TRANSFER_ENCODING, WWW_AUTHENTICATE,
+     },
+ };
use itertools::Itertools;
- use reqwest::{Client, ClientBuilder, Proxy, Response};
+ use reqwest::{Client, ClientBuilder, IntoUrl, Proxy, Request, Response, multipart};
use reqwest_middleware::{ClientWithMiddleware, Middleware};
use reqwest_retry::policies::ExponentialBackoff;
use reqwest_retry::{
    DefaultRetryableStrategy, RetryTransientMiddleware, Retryable, RetryableStrategy,
};
use tracing::{debug, trace};
+ use url::ParseError;
use url::Url;

+ use uv_auth::Credentials;
use uv_auth::{AuthMiddleware, Indexes};
use uv_configuration::{KeyringProviderType, TrustedHost};
use uv_fs::Simplified;
@@ -32,6 +43,10 @@ use crate::middleware::OfflineMiddleware;
use crate::tls::read_identity;

pub const DEFAULT_RETRIES: u32 = 3;
+ /// Maximum number of redirects to follow before giving up.
+ ///
+ /// This is the default used by [`reqwest`].
+ const DEFAULT_MAX_REDIRECTS: u32 = 10;

/// Selectively skip parts or the entire auth middleware.
#[derive(Debug, Clone, Copy, Default)]

@@ -52,6 +67,7 @@ pub struct BaseClientBuilder<'a> {
keyring: KeyringProviderType,
allow_insecure_host: Vec<TrustedHost>,
native_tls: bool,
+ built_in_root_certs: bool,
retries: u32,
pub connectivity: Connectivity,
markers: Option<&'a MarkerEnvironment>,
@@ -61,6 +77,31 @@ pub struct BaseClientBuilder<'a> {
default_timeout: Duration,
extra_middleware: Option<ExtraMiddleware>,
proxies: Vec<Proxy>,
+ redirect_policy: RedirectPolicy,
+ /// Whether credentials should be propagated during cross-origin redirects.
+ ///
+ /// A policy allowing propagation is insecure and should only be available for test code.
+ cross_origin_credential_policy: CrossOriginCredentialsPolicy,
}

+ /// The policy for handling HTTP redirects.
+ #[derive(Debug, Default, Clone, Copy)]
+ pub enum RedirectPolicy {
+     /// Use reqwest's built-in redirect handling. This bypasses our custom middleware
+     /// on redirect.
+     #[default]
+     BypassMiddleware,
+     /// Handle redirects manually, re-triggering our custom middleware for each request.
+     RetriggerMiddleware,
+ }
+
+ impl RedirectPolicy {
+     pub fn reqwest_policy(self) -> reqwest::redirect::Policy {
+         match self {
+             RedirectPolicy::BypassMiddleware => reqwest::redirect::Policy::default(),
+             RedirectPolicy::RetriggerMiddleware => reqwest::redirect::Policy::none(),
+         }
+     }
+ }
+
/// A list of user-defined middlewares to be applied to the client.
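The key trick here: to re-run middleware on every hop, reqwest's own redirect following must be disabled, so 3xx responses surface to the caller. A standalone sketch of that mapping (the enum mirrors the diff; the client wiring at the end is illustrative):

```rust
use reqwest::redirect::Policy;

// Mirrors the enum in the diff: either let reqwest follow redirects itself,
// or disable them so the caller can re-run middleware on every hop.
#[derive(Clone, Copy, Default)]
enum RedirectPolicy {
    #[default]
    BypassMiddleware,
    RetriggerMiddleware,
}

impl RedirectPolicy {
    fn reqwest_policy(self) -> Policy {
        match self {
            // reqwest's default policy follows up to 10 redirects internally.
            RedirectPolicy::BypassMiddleware => Policy::default(),
            // `none()` returns 3xx responses to the caller unchanged.
            RedirectPolicy::RetriggerMiddleware => Policy::none(),
        }
    }
}

fn main() -> Result<(), reqwest::Error> {
    // Illustrative wiring: the policy is applied when building the client.
    let _client = reqwest::Client::builder()
        .redirect(RedirectPolicy::RetriggerMiddleware.reqwest_policy())
        .build()?;
    Ok(())
}
```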
@@ -87,6 +128,7 @@ impl BaseClientBuilder<'_> {
keyring: KeyringProviderType::default(),
allow_insecure_host: vec![],
native_tls: false,
+ built_in_root_certs: false,
connectivity: Connectivity::Online,
retries: DEFAULT_RETRIES,
markers: None,

@@ -96,6 +138,8 @@ impl BaseClientBuilder<'_> {
default_timeout: Duration::from_secs(30),
extra_middleware: None,
proxies: vec![],
+ redirect_policy: RedirectPolicy::default(),
+ cross_origin_credential_policy: CrossOriginCredentialsPolicy::Secure,
}
}
}
@@ -125,12 +169,37 @@ impl<'a> BaseClientBuilder<'a> {
self
}

+ /// Read the retry count from [`EnvVars::UV_HTTP_RETRIES`] if set; otherwise, make no change.
+ ///
+ /// Errors when [`EnvVars::UV_HTTP_RETRIES`] is not a valid u32.
+ pub fn retries_from_env(self) -> anyhow::Result<Self> {
+     // TODO(zanieb): We should probably parse this in another layer, but there's not a natural
+     // fit for it right now.
+     if let Some(value) = env::var_os(EnvVars::UV_HTTP_RETRIES) {
+         Ok(self.retries(
+             value
+                 .to_string_lossy()
+                 .as_ref()
+                 .parse::<u32>()
+                 .context("Failed to parse `UV_HTTP_RETRIES`")?,
+         ))
+     } else {
+         Ok(self)
+     }
+ }
+
#[must_use]
pub fn native_tls(mut self, native_tls: bool) -> Self {
    self.native_tls = native_tls;
    self
}

+ #[must_use]
+ pub fn built_in_root_certs(mut self, built_in_root_certs: bool) -> Self {
+     self.built_in_root_certs = built_in_root_certs;
+     self
+ }
+
#[must_use]
pub fn markers(mut self, markers: &'a MarkerEnvironment) -> Self {
    self.markers = Some(markers);
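A standalone sketch of the `retries_from_env` pattern: read an environment variable, parse it as a `u32`, and surface a clear error on bad input. The variable name matches the diff; the free function and error type are illustrative, standing in for the builder method:

```rust
use std::env;

// Returns Ok(None) when unset (keep the builder's default), Ok(Some(n)) on a
// valid value, and a descriptive error when the value is not a valid u32.
fn retries_from_env() -> Result<Option<u32>, String> {
    match env::var_os("UV_HTTP_RETRIES") {
        Some(value) => value
            .to_string_lossy()
            .as_ref()
            .parse::<u32>()
            .map(Some)
            .map_err(|err| format!("Failed to parse `UV_HTTP_RETRIES`: {err}")),
        None => Ok(None), // unset: keep the default retry count
    }
}

fn main() {
    match retries_from_env() {
        Ok(Some(n)) => println!("retries from env: {n}"),
        Ok(None) => println!("UV_HTTP_RETRIES unset; using default"),
        Err(err) => eprintln!("{err}"),
    }
}
```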
@@ -173,13 +242,35 @@ impl<'a> BaseClientBuilder<'a> {
self
}

+ #[must_use]
+ pub fn redirect(mut self, policy: RedirectPolicy) -> Self {
+     self.redirect_policy = policy;
+     self
+ }
+
+ /// Allows credentials to be propagated on cross-origin redirects.
+ ///
+ /// WARNING: This should only be available for tests. In production code, propagating credentials
+ /// during cross-origin redirects can lead to security vulnerabilities including credential
+ /// leakage to untrusted domains.
+ #[cfg(test)]
+ #[must_use]
+ pub fn allow_cross_origin_credentials(mut self) -> Self {
+     self.cross_origin_credential_policy = CrossOriginCredentialsPolicy::Insecure;
+     self
+ }
+
pub fn is_offline(&self) -> bool {
    matches!(self.connectivity, Connectivity::Offline)
}

/// Create a [`RetryPolicy`] for the client.
fn retry_policy(&self) -> ExponentialBackoff {
- ExponentialBackoff::builder().build_with_max_retries(self.retries)
+ let mut builder = ExponentialBackoff::builder();
+ if env::var_os(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY).is_some() {
+     builder = builder.retry_bounds(Duration::from_millis(0), Duration::from_millis(0));
+ }
+ builder.build_with_max_retries(self.retries)
}

pub fn build(&self) -> BaseClient {
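The test-only escape hatch above clamps the backoff window rather than disabling retries, so retry logic stays under test while the suite avoids sleeping. A standalone sketch using the same `reqwest_retry` calls as the diff:

```rust
use std::time::Duration;

use reqwest_retry::policies::ExponentialBackoff;

fn main() {
    // Normal operation: exponential backoff between retries.
    let _policy = ExponentialBackoff::builder().build_with_max_retries(3);

    // Test mode, as in the diff: bound the backoff interval to zero so the
    // retries still happen but there is no delay between attempts.
    let _test_policy = ExponentialBackoff::builder()
        .retry_bounds(Duration::from_millis(0), Duration::from_millis(0))
        .build_with_max_retries(3);
}
```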
@@ -229,6 +320,7 @@ impl<'a> BaseClientBuilder<'a> {
timeout,
ssl_cert_file_exists,
Security::Secure,
+ self.redirect_policy,
);

// Create an insecure client that accepts invalid certificates.

@@ -237,11 +329,20 @@ impl<'a> BaseClientBuilder<'a> {
timeout,
ssl_cert_file_exists,
Security::Insecure,
+ self.redirect_policy,
);

// Wrap in any relevant middleware and handle connectivity.
- let client = self.apply_middleware(raw_client.clone());
- let dangerous_client = self.apply_middleware(raw_dangerous_client.clone());
+ let client = RedirectClientWithMiddleware {
+     client: self.apply_middleware(raw_client.clone()),
+     redirect_policy: self.redirect_policy,
+     cross_origin_credentials_policy: self.cross_origin_credential_policy,
+ };
+ let dangerous_client = RedirectClientWithMiddleware {
+     client: self.apply_middleware(raw_dangerous_client.clone()),
+     redirect_policy: self.redirect_policy,
+     cross_origin_credentials_policy: self.cross_origin_credential_policy,
+ };

BaseClient {
    connectivity: self.connectivity,
@@ -258,8 +359,16 @@ impl<'a> BaseClientBuilder<'a> {
/// Share the underlying client between two different middleware configurations.
pub fn wrap_existing(&self, existing: &BaseClient) -> BaseClient {
    // Wrap in any relevant middleware and handle connectivity.
- let client = self.apply_middleware(existing.raw_client.clone());
- let dangerous_client = self.apply_middleware(existing.raw_dangerous_client.clone());
+ let client = RedirectClientWithMiddleware {
+     client: self.apply_middleware(existing.raw_client.clone()),
+     redirect_policy: self.redirect_policy,
+     cross_origin_credentials_policy: self.cross_origin_credential_policy,
+ };
+ let dangerous_client = RedirectClientWithMiddleware {
+     client: self.apply_middleware(existing.raw_dangerous_client.clone()),
+     redirect_policy: self.redirect_policy,
+     cross_origin_credentials_policy: self.cross_origin_credential_policy,
+ };

BaseClient {
    connectivity: self.connectivity,

@@ -279,6 +388,7 @@ impl<'a> BaseClientBuilder<'a> {
timeout: Duration,
ssl_cert_file_exists: bool,
security: Security,
+ redirect_policy: RedirectPolicy,
) -> Client {
    // Configure the builder.
    let client_builder = ClientBuilder::new()

@@ -286,7 +396,8 @@ impl<'a> BaseClientBuilder<'a> {
.user_agent(user_agent)
.pool_max_idle_per_host(20)
.read_timeout(timeout)
- .tls_built_in_root_certs(false);
+ .tls_built_in_root_certs(self.built_in_root_certs)
+ .redirect(redirect_policy.reqwest_policy());

// If necessary, accept invalid certificates.
let client_builder = match security {
@@ -381,9 +492,9 @@ impl<'a> BaseClientBuilder<'a> {
#[derive(Debug, Clone)]
pub struct BaseClient {
/// The underlying HTTP client that enforces valid certificates.
- client: ClientWithMiddleware,
+ client: RedirectClientWithMiddleware,
/// The underlying HTTP client that accepts invalid certificates.
- dangerous_client: ClientWithMiddleware,
+ dangerous_client: RedirectClientWithMiddleware,
/// The HTTP client without middleware.
raw_client: Client,
/// The HTTP client that accepts invalid certificates without middleware.

@@ -408,7 +519,7 @@ enum Security {

impl BaseClient {
/// Selects the appropriate client based on the host's trustworthiness.
- pub fn for_host(&self, url: &DisplaySafeUrl) -> &ClientWithMiddleware {
+ pub fn for_host(&self, url: &DisplaySafeUrl) -> &RedirectClientWithMiddleware {
if self.disable_ssl(url) {
    &self.dangerous_client
} else {

@@ -416,6 +527,12 @@ impl BaseClient {
}
}

+ /// Executes a request, applying the redirect policy.
+ pub async fn execute(&self, req: Request) -> reqwest_middleware::Result<Response> {
+     let client = self.for_host(&DisplaySafeUrl::from(req.url().clone()));
+     client.execute(req).await
+ }
+
/// Returns `true` if the host is trusted to use the insecure client.
pub fn disable_ssl(&self, url: &DisplaySafeUrl) -> bool {
    self.allow_insecure_host
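An illustrative use of the new `BaseClient::execute` entry point: build a plain `reqwest::Request`, then let the client pick the secure or insecure variant by host and apply the redirect policy. The `uv_client` module path and the demo URL are assumptions, not taken from this diff:

```rust
// `base_client` stands in for a `BaseClient` constructed via
// `BaseClientBuilder::build()`; the crate path is assumed.
async fn fetch_index(base_client: &uv_client::BaseClient) -> anyhow::Result<()> {
    let req = reqwest::Client::new()
        .get("https://pypi.org/simple/")
        .build()?;
    // `execute` selects the per-host client and applies the redirect policy.
    let response = base_client.execute(req).await?;
    println!("status: {}", response.status());
    Ok(())
}
```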
@@ -439,6 +556,326 @@ impl BaseClient {
}
}

+ /// Wrapper around [`ClientWithMiddleware`] that manages redirects.
+ #[derive(Debug, Clone)]
+ pub struct RedirectClientWithMiddleware {
+     client: ClientWithMiddleware,
+     redirect_policy: RedirectPolicy,
+     /// Whether credentials should be preserved during cross-origin redirects.
+     ///
+     /// WARNING: This should only be available for tests. In production code, preserving credentials
+     /// during cross-origin redirects can lead to security vulnerabilities including credential
+     /// leakage to untrusted domains.
+     cross_origin_credentials_policy: CrossOriginCredentialsPolicy,
+ }
+
+ impl RedirectClientWithMiddleware {
+     /// Convenience method to make a `GET` request to a URL.
+     pub fn get<U: IntoUrl>(&self, url: U) -> RequestBuilder {
+         RequestBuilder::new(self.client.get(url), self)
+     }
+
+     /// Convenience method to make a `POST` request to a URL.
+     pub fn post<U: IntoUrl>(&self, url: U) -> RequestBuilder {
+         RequestBuilder::new(self.client.post(url), self)
+     }
+
+     /// Convenience method to make a `HEAD` request to a URL.
+     pub fn head<U: IntoUrl>(&self, url: U) -> RequestBuilder {
+         RequestBuilder::new(self.client.head(url), self)
+     }
+
+     /// Executes a request, applying the redirect policy.
+     pub async fn execute(&self, req: Request) -> reqwest_middleware::Result<Response> {
+         match self.redirect_policy {
+             RedirectPolicy::BypassMiddleware => self.client.execute(req).await,
+             RedirectPolicy::RetriggerMiddleware => self.execute_with_redirect_handling(req).await,
+         }
+     }
+     /// Executes a request. If the response is a redirect (one of HTTP 301, 302, 303, 307, or 308), the
+     /// request is executed again with the redirect location URL (up to a maximum number of
+     /// redirects).
+     ///
+     /// Unlike the built-in reqwest redirect policies, this sends the redirect request through the
+     /// entire middleware pipeline again.
+     ///
+     /// See RFC 7231 7.1.2 <https://www.rfc-editor.org/rfc/rfc7231#section-7.1.2> for details on
+     /// redirect semantics.
+     async fn execute_with_redirect_handling(
+         &self,
+         req: Request,
+     ) -> reqwest_middleware::Result<Response> {
+         let mut request = req;
+         let mut redirects = 0;
+         let max_redirects = DEFAULT_MAX_REDIRECTS;
+
+         loop {
+             let result = self
+                 .client
+                 .execute(request.try_clone().expect("HTTP request must be cloneable"))
+                 .await;
+             let Ok(response) = result else {
+                 return result;
+             };
+
+             if redirects >= max_redirects {
+                 return Ok(response);
+             }
+
+             let Some(redirect_request) =
+                 request_into_redirect(request, &response, self.cross_origin_credentials_policy)?
+             else {
+                 return Ok(response);
+             };
+
+             redirects += 1;
+             request = redirect_request;
+         }
+     }
+
+     pub fn raw_client(&self) -> &ClientWithMiddleware {
+         &self.client
+     }
+ }
+
+ impl From<RedirectClientWithMiddleware> for ClientWithMiddleware {
+     fn from(item: RedirectClientWithMiddleware) -> ClientWithMiddleware {
+         item.client
+     }
+ }
+ /// Check if this should be a redirect and, if so, return a new redirect request.
+ ///
+ /// This implementation is based on the [`reqwest`] crate redirect implementation.
+ /// It takes ownership of the original [`Request`] and mutates it to create the new
+ /// redirect [`Request`].
+ fn request_into_redirect(
+     mut req: Request,
+     res: &Response,
+     cross_origin_credentials_policy: CrossOriginCredentialsPolicy,
+ ) -> reqwest_middleware::Result<Option<Request>> {
+     let original_req_url = DisplaySafeUrl::from(req.url().clone());
+     let status = res.status();
+     let should_redirect = match status {
+         StatusCode::MOVED_PERMANENTLY
+         | StatusCode::FOUND
+         | StatusCode::TEMPORARY_REDIRECT
+         | StatusCode::PERMANENT_REDIRECT => true,
+         StatusCode::SEE_OTHER => {
+             // Per RFC 7231, HTTP 303 is intended for the user agent
+             // to perform a GET or HEAD request to the redirect target.
+             // Historically, some browsers also changed the method from POST
+             // to GET on 301 or 302, but this is not required by RFC 7231
+             // and was not intended by the HTTP spec.
+             *req.body_mut() = None;
+             for header in &[
+                 TRANSFER_ENCODING,
+                 CONTENT_ENCODING,
+                 CONTENT_TYPE,
+                 CONTENT_LENGTH,
+             ] {
+                 req.headers_mut().remove(header);
+             }
+
+             match *req.method() {
+                 Method::GET | Method::HEAD => {}
+                 _ => {
+                     *req.method_mut() = Method::GET;
+                 }
+             }
+             true
+         }
+         _ => false,
+     };
+     if !should_redirect {
+         return Ok(None);
+     }
+
+     let location = res
+         .headers()
+         .get(LOCATION)
+         .ok_or(reqwest_middleware::Error::Middleware(anyhow!(
+             "Server returned redirect (HTTP {status}) without destination URL. This may indicate a server configuration issue"
+         )))?
+         .to_str()
+         .map_err(|_| {
+             reqwest_middleware::Error::Middleware(anyhow!(
+                 "Invalid HTTP {status} 'Location' value: must only contain visible ascii characters"
+             ))
+         })?;
+
+     let mut redirect_url = match DisplaySafeUrl::parse(location) {
+         Ok(url) => url,
+         // Per RFC 7231, URLs should be resolved against the request URL.
+         Err(ParseError::RelativeUrlWithoutBase) => original_req_url.join(location).map_err(|err| {
+             reqwest_middleware::Error::Middleware(anyhow!(
+                 "Invalid HTTP {status} 'Location' value `{location}` relative to `{original_req_url}`: {err}"
+             ))
+         })?,
+         Err(err) => {
+             return Err(reqwest_middleware::Error::Middleware(anyhow!(
+                 "Invalid HTTP {status} 'Location' value `{location}`: {err}"
+             )));
+         }
+     };
+     // Per RFC 7231, fragments must be propagated.
+     if let Some(fragment) = original_req_url.fragment() {
+         redirect_url.set_fragment(Some(fragment));
+     }
+
+     // Ensure the URL is a valid HTTP URI.
+     if let Err(err) = redirect_url.as_str().parse::<http::Uri>() {
+         return Err(reqwest_middleware::Error::Middleware(anyhow!(
+             "HTTP {status} 'Location' value `{redirect_url}` is not a valid HTTP URI: {err}"
+         )));
+     }
+
+     if redirect_url.scheme() != "http" && redirect_url.scheme() != "https" {
+         return Err(reqwest_middleware::Error::Middleware(anyhow!(
+             "Invalid HTTP {status} 'Location' value `{redirect_url}`: scheme needs to be https or http"
+         )));
+     }
+
+     let mut headers = HeaderMap::new();
+     std::mem::swap(req.headers_mut(), &mut headers);
+
+     let cross_host = redirect_url.host_str() != original_req_url.host_str()
+         || redirect_url.port_or_known_default() != original_req_url.port_or_known_default();
+     if cross_host {
+         if cross_origin_credentials_policy == CrossOriginCredentialsPolicy::Secure {
+             debug!("Received a cross-origin redirect. Removing sensitive headers.");
+             headers.remove(AUTHORIZATION);
+             headers.remove(COOKIE);
+             headers.remove(PROXY_AUTHORIZATION);
+             headers.remove(WWW_AUTHENTICATE);
+         }
+     // If the redirect request is not a cross-origin request and the original request already
+     // had a Referer header, attempt to set the Referer header for the redirect request.
+     } else if headers.contains_key(REFERER) {
+         if let Some(referer) = make_referer(&redirect_url, &original_req_url) {
+             headers.insert(REFERER, referer);
+         }
+     }
+
+     // Check if there are credentials on the redirect location itself.
+     // If so, move them to the Authorization header.
+     if !redirect_url.username().is_empty() {
+         if let Some(credentials) = Credentials::from_url(&redirect_url) {
+             let _ = redirect_url.set_username("");
+             let _ = redirect_url.set_password(None);
+             headers.insert(AUTHORIZATION, credentials.to_header_value());
+         }
+     }
+
+     std::mem::swap(req.headers_mut(), &mut headers);
+     *req.url_mut() = Url::from(redirect_url);
+     debug!(
+         "Received HTTP {status}. Redirecting to {}",
+         DisplaySafeUrl::ref_cast(req.url())
+     );
+     Ok(Some(req))
+ }
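The cross-origin test above compares host and effective port rather than raw URL strings. A standalone check with the `url` crate (the same crate the diff uses; the example values are illustrative):

```rust
use url::Url;

// Two URLs count as same-origin here iff host and *effective* port match; the
// scheme's default port (443 for https, 80 for http) fills in when none is
// written, which is why `port_or_known_default` is used instead of `port`.
fn same_origin(a: &Url, b: &Url) -> bool {
    a.host_str() == b.host_str() && a.port_or_known_default() == b.port_or_known_default()
}

fn main() {
    let a = Url::parse("https://example.com/simple/").unwrap();
    let b = Url::parse("https://example.com:443/other/").unwrap();
    let c = Url::parse("https://example.com:8443/other/").unwrap();

    assert!(same_origin(&a, &b)); // explicit :443 equals the https default
    assert!(!same_origin(&a, &c)); // different effective port: credentials dropped
}
```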
+ /// Return a Referer [`HeaderValue`] according to RFC 7231.
+ ///
+ /// Return [`None`] if https has been downgraded in the redirect location.
+ fn make_referer(
+     redirect_url: &DisplaySafeUrl,
+     original_url: &DisplaySafeUrl,
+ ) -> Option<HeaderValue> {
+     if redirect_url.scheme() == "http" && original_url.scheme() == "https" {
+         return None;
+     }
+
+     let mut referer = original_url.clone();
+     referer.remove_credentials();
+     referer.set_fragment(None);
+     referer.as_str().parse().ok()
+ }
+
+ #[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Hash)]
+ pub(crate) enum CrossOriginCredentialsPolicy {
+     /// Do not propagate credentials on cross-origin requests.
+     #[default]
+     Secure,
+
+     /// Propagate credentials on cross-origin requests.
+     ///
+     /// WARNING: This should only be available for tests. In production code, preserving credentials
+     /// during cross-origin redirects can lead to security vulnerabilities including credential
+     /// leakage to untrusted domains.
+     #[cfg(test)]
+     Insecure,
+ }
+
+ /// A builder to construct the properties of a `Request`.
+ ///
+ /// This wraps [`reqwest_middleware::RequestBuilder`] to ensure that the [`BaseClient`]
+ /// redirect policy is respected if `send()` is called.
+ #[derive(Debug)]
+ #[must_use]
+ pub struct RequestBuilder<'a> {
+     builder: reqwest_middleware::RequestBuilder,
+     client: &'a RedirectClientWithMiddleware,
+ }
+
+ impl<'a> RequestBuilder<'a> {
+     pub fn new(
+         builder: reqwest_middleware::RequestBuilder,
+         client: &'a RedirectClientWithMiddleware,
+     ) -> Self {
+         Self { builder, client }
+     }
+
+     /// Add a `Header` to this Request.
+     pub fn header<K, V>(mut self, key: K, value: V) -> Self
+     where
+         HeaderName: TryFrom<K>,
+         <HeaderName as TryFrom<K>>::Error: Into<http::Error>,
+         HeaderValue: TryFrom<V>,
+         <HeaderValue as TryFrom<V>>::Error: Into<http::Error>,
+     {
+         self.builder = self.builder.header(key, value);
+         self
+     }
+
+     /// Add a set of Headers to the existing ones on this Request.
+     ///
+     /// The headers will be merged in to any already set.
+     pub fn headers(mut self, headers: HeaderMap) -> Self {
+         self.builder = self.builder.headers(headers);
+         self
+     }
+
+     #[cfg(not(target_arch = "wasm32"))]
+     pub fn version(mut self, version: reqwest::Version) -> Self {
+         self.builder = self.builder.version(version);
+         self
+     }
+
+     #[cfg_attr(docsrs, doc(cfg(feature = "multipart")))]
+     pub fn multipart(mut self, multipart: multipart::Form) -> Self {
+         self.builder = self.builder.multipart(multipart);
+         self
+     }
+
+     /// Build a `Request`.
+     pub fn build(self) -> reqwest::Result<Request> {
+         self.builder.build()
+     }
+
+     /// Constructs the Request and sends it to the target URL, returning a
+     /// future Response.
+     pub async fn send(self) -> reqwest_middleware::Result<Response> {
+         self.client.execute(self.build()?).await
+     }
+
+     pub fn raw_builder(&self) -> &reqwest_middleware::RequestBuilder {
+         &self.builder
+     }
+ }

/// Extends [`DefaultRetryableStrategy`], to log transient request failures and additional retry cases.
pub struct UvRetryableStrategy;
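An illustrative flow through the new wrapper: `get()` returns the wrapping `RequestBuilder`, and `send()` routes through `execute()`, so the redirect policy is applied even for convenience calls. The `uv_client` module path, header value, and URL below are assumptions:

```rust
// `client` stands in for a `RedirectClientWithMiddleware` obtained from
// `BaseClient::for_host`; the crate path is assumed.
async fn probe(client: &uv_client::RedirectClientWithMiddleware) -> anyhow::Result<()> {
    let response = client
        .get("https://pypi.org/simple/uv/")
        .header("Accept", "application/vnd.pypi.simple.v1+json")
        .send() // routes through `execute`, so redirects re-run middleware
        .await?;
    println!("final status after redirects: {}", response.status());
    Ok(())
}
```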
@@ -491,18 +928,34 @@ pub fn is_extended_transient_error(err: &dyn Error) -> bool {
}

// IO Errors may be nested through custom IO errors.
+ let mut has_io_error = false;
for io_err in find_sources::<io::Error>(&err) {
-     if io_err.kind() == io::ErrorKind::ConnectionReset
-         || io_err.kind() == io::ErrorKind::UnexpectedEof
-         || io_err.kind() == io::ErrorKind::BrokenPipe
-     {
-         trace!("Retrying error: `ConnectionReset` or `UnexpectedEof`");
+     has_io_error = true;
+     let retryable_io_err_kinds = [
+         // https://github.com/astral-sh/uv/issues/12054
+         io::ErrorKind::BrokenPipe,
+         // From reqwest-middleware
+         io::ErrorKind::ConnectionAborted,
+         // https://github.com/astral-sh/uv/issues/3514
+         io::ErrorKind::ConnectionReset,
+         // https://github.com/astral-sh/uv/issues/14699
+         io::ErrorKind::InvalidData,
+         // https://github.com/astral-sh/uv/issues/9246
+         io::ErrorKind::UnexpectedEof,
+     ];
+     if retryable_io_err_kinds.contains(&io_err.kind()) {
+         trace!("Retrying error: `{}`", io_err.kind());
        return true;
    }
-     trace!("Cannot retry IO error: not one of `ConnectionReset` or `UnexpectedEof`");
+     trace!(
+         "Cannot retry IO error `{}`, not a retryable IO error kind",
+         io_err.kind()
+     );
}

- trace!("Cannot retry error: not an IO error");
+ if !has_io_error {
+     trace!("Cannot retry error: not an extended IO error");
+ }
false
}
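A standalone sketch of why the retry check walks error sources: transient kinds like `ConnectionReset` are often wrapped inside other IO errors, so inspecting only the outermost error would miss them. The kind list matches the diff; the helper and demo wrapping are illustrative:

```rust
use std::io;

fn is_retryable_kind(kind: io::ErrorKind) -> bool {
    matches!(
        kind,
        io::ErrorKind::BrokenPipe
            | io::ErrorKind::ConnectionAborted
            | io::ErrorKind::ConnectionReset
            | io::ErrorKind::InvalidData
            | io::ErrorKind::UnexpectedEof
    )
}

fn main() {
    // An inner reset wrapped in an outer "other" error, as middleware often does.
    let inner = io::Error::new(io::ErrorKind::ConnectionReset, "connection reset by peer");
    let outer = io::Error::new(io::ErrorKind::Other, inner);

    // The outer kind alone would not be retried...
    assert!(!is_retryable_kind(outer.kind()));
    // ...but walking one level down the source chain finds the retryable kind.
    let source = outer
        .get_ref()
        .and_then(|e| e.downcast_ref::<io::Error>())
        .expect("inner error is an io::Error");
    assert!(is_retryable_kind(source.kind()));
}
```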
@@ -528,3 +981,204 @@ fn find_source<E: Error + 'static>(orig: &dyn Error) -> Option<&E> {
fn find_sources<E: Error + 'static>(orig: &dyn Error) -> impl Iterator<Item = &E> {
    iter::successors(find_source::<E>(orig), |&err| find_source(err))
}

+ #[cfg(test)]
+ mod tests {
+     use super::*;
+     use anyhow::Result;
+
+     use reqwest::{Client, Method};
+     use wiremock::matchers::method;
+     use wiremock::{Mock, MockServer, ResponseTemplate};
+
+     use crate::base_client::request_into_redirect;
+
+     #[tokio::test]
+     async fn test_redirect_preserves_authorization_header_on_same_origin() -> Result<()> {
+         for status in &[301, 302, 303, 307, 308] {
+             let server = MockServer::start().await;
+             Mock::given(method("GET"))
+                 .respond_with(
+                     ResponseTemplate::new(*status)
+                         .insert_header("location", format!("{}/redirect", server.uri())),
+                 )
+                 .mount(&server)
+                 .await;
+
+             let request = Client::new()
+                 .get(server.uri())
+                 .basic_auth("username", Some("password"))
+                 .build()
+                 .unwrap();
+
+             assert!(request.headers().contains_key(AUTHORIZATION));
+
+             let response = Client::builder()
+                 .redirect(reqwest::redirect::Policy::none())
+                 .build()
+                 .unwrap()
+                 .execute(request.try_clone().unwrap())
+                 .await
+                 .unwrap();
+
+             let redirect_request =
+                 request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)?
+                     .unwrap();
+             assert!(redirect_request.headers().contains_key(AUTHORIZATION));
+         }
+
+         Ok(())
+     }
+
+     #[tokio::test]
+     async fn test_redirect_preserves_fragment() -> Result<()> {
+         for status in &[301, 302, 303, 307, 308] {
+             let server = MockServer::start().await;
+             Mock::given(method("GET"))
+                 .respond_with(
+                     ResponseTemplate::new(*status)
+                         .insert_header("location", format!("{}/redirect", server.uri())),
+                 )
+                 .mount(&server)
+                 .await;
+
+             let request = Client::new()
+                 .get(format!("{}#fragment", server.uri()))
+                 .build()
+                 .unwrap();
+
+             let response = Client::builder()
+                 .redirect(reqwest::redirect::Policy::none())
+                 .build()
+                 .unwrap()
+                 .execute(request.try_clone().unwrap())
+                 .await
+                 .unwrap();
+
+             let redirect_request =
+                 request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)?
+                     .unwrap();
+             assert!(
+                 redirect_request
+                     .url()
+                     .fragment()
+                     .is_some_and(|fragment| fragment == "fragment")
+             );
+         }
+
+         Ok(())
+     }
+
+     #[tokio::test]
+     async fn test_redirect_removes_authorization_header_on_cross_origin() -> Result<()> {
+         for status in &[301, 302, 303, 307, 308] {
+             let server = MockServer::start().await;
+             Mock::given(method("GET"))
+                 .respond_with(
+                     ResponseTemplate::new(*status)
+                         .insert_header("location", "https://cross-origin.com/simple"),
+                 )
+                 .mount(&server)
+                 .await;
+
+             let request = Client::new()
+                 .get(server.uri())
+                 .basic_auth("username", Some("password"))
+                 .build()
+                 .unwrap();
+
+             assert!(request.headers().contains_key(AUTHORIZATION));
+
+             let response = Client::builder()
+                 .redirect(reqwest::redirect::Policy::none())
+                 .build()
+                 .unwrap()
+                 .execute(request.try_clone().unwrap())
+                 .await
+                 .unwrap();
+
+             let redirect_request =
+                 request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)?
+                     .unwrap();
+             assert!(!redirect_request.headers().contains_key(AUTHORIZATION));
+         }
+
+         Ok(())
+     }
+
+     #[tokio::test]
+     async fn test_redirect_303_changes_post_to_get() -> Result<()> {
+         let server = MockServer::start().await;
+         Mock::given(method("POST"))
+             .respond_with(
+                 ResponseTemplate::new(303)
+                     .insert_header("location", format!("{}/redirect", server.uri())),
+             )
+             .mount(&server)
+             .await;
+
+         let request = Client::new()
+             .post(server.uri())
+             .basic_auth("username", Some("password"))
+             .build()
+             .unwrap();
+
+         assert_eq!(request.method(), Method::POST);
+
+         let response = Client::builder()
+             .redirect(reqwest::redirect::Policy::none())
+             .build()
+             .unwrap()
+             .execute(request.try_clone().unwrap())
+             .await
+             .unwrap();
+
+         let redirect_request =
+             request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)?
+                 .unwrap();
+         assert_eq!(redirect_request.method(), Method::GET);
+
+         Ok(())
+     }
+
+     #[tokio::test]
+     async fn test_redirect_no_referer_if_disabled() -> Result<()> {
+         for status in &[301, 302, 303, 307, 308] {
+             let server = MockServer::start().await;
+             Mock::given(method("GET"))
+                 .respond_with(
+                     ResponseTemplate::new(*status)
+                         .insert_header("location", format!("{}/redirect", server.uri())),
+                 )
+                 .mount(&server)
+                 .await;
+
+             let request = Client::builder()
+                 .referer(false)
+                 .build()
+                 .unwrap()
+                 .get(server.uri())
+                 .basic_auth("username", Some("password"))
+                 .build()
+                 .unwrap();
+
+             assert!(!request.headers().contains_key(REFERER));
+
+             let response = Client::builder()
+                 .redirect(reqwest::redirect::Policy::none())
+                 .build()
+                 .unwrap()
+                 .execute(request.try_clone().unwrap())
+                 .await
+                 .unwrap();
+
+             let redirect_request =
+                 request_into_redirect(request, &response, CrossOriginCredentialsPolicy::Secure)?
+                     .unwrap();
+
+             assert!(!redirect_request.headers().contains_key(REFERER));
+         }
+
+         Ok(())
+     }
+ }
@@ -196,16 +196,18 @@ impl<E: Into<Self> + std::error::Error + 'static> From<CachedClientError<E>> for
}

#[derive(Debug, Clone, Copy)]
pub enum CacheControl {
pub enum CacheControl<'a> {
    /// Respect the `cache-control` header from the response.
    None,
    /// Apply `max-age=0, must-revalidate` to the request.
    MustRevalidate,
    /// Allow the client to return stale responses.
    AllowStale,
    /// Override the cache control header with a custom value.
    Override(&'a str),
}

impl From<Freshness> for CacheControl {
impl From<Freshness> for CacheControl<'_> {
    fn from(value: Freshness) -> Self {
        match value {
            Freshness::Fresh => Self::None,
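// Editor's note: a minimal usage sketch of the borrowed `CacheControl<'a>` API above;
// the helper name and the `custom` parameter are illustrative, not part of the diff.
fn cache_control_with_override<'a>(
    custom: Option<&'a str>,
    freshness: Freshness,
) -> CacheControl<'a> {
    match custom {
        // A caller-supplied header value (e.g. "max-age=600") takes precedence.
        Some(header) => CacheControl::Override(header),
        // Otherwise, derive the policy from the cache entry's freshness.
        None => CacheControl::from(freshness),
    }
}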
@@ -259,7 +261,7 @@ impl CachedClient {
        &self,
        req: Request,
        cache_entry: &CacheEntry,
        cache_control: CacheControl,
        cache_control: CacheControl<'_>,
        response_callback: Callback,
    ) -> Result<Payload, CachedClientError<CallBackError>> {
        let payload = self

@@ -292,7 +294,7 @@ impl CachedClient {
        &self,
        req: Request,
        cache_entry: &CacheEntry,
        cache_control: CacheControl,
        cache_control: CacheControl<'_>,
        response_callback: Callback,
    ) -> Result<Payload::Target, CachedClientError<CallBackError>> {
        let fresh_req = req.try_clone().expect("HTTP request must be cloneable");

@@ -302,7 +304,7 @@ impl CachedClient {
            .await?
        } else {
            debug!("No cache entry for: {}", req.url());
            let (response, cache_policy) = self.fresh_request(req).await?;
            let (response, cache_policy) = self.fresh_request(req, cache_control).await?;
            CachedResponse::ModifiedOrNew {
                response,
                cache_policy,

@@ -316,7 +318,12 @@ impl CachedClient {
                        "Broken fresh cache entry (for payload) at {}, removing: {err}",
                        cache_entry.path().display()
                    );
                    self.resend_and_heal_cache(fresh_req, cache_entry, response_callback)
                    self.resend_and_heal_cache(
                        fresh_req,
                        cache_entry,
                        cache_control,
                        response_callback,
                    )
                    .await
                }
            },

@@ -337,7 +344,12 @@ impl CachedClient {
                        (for payload) at {}, removing: {err}",
                        cache_entry.path().display()
                    );
                    self.resend_and_heal_cache(fresh_req, cache_entry, response_callback)
                    self.resend_and_heal_cache(
                        fresh_req,
                        cache_entry,
                        cache_control,
                        response_callback,
                    )
                    .await
                }
            }

@@ -353,7 +365,12 @@ impl CachedClient {
            // ETag didn't match). We need to make a fresh request.
            if response.status() == http::StatusCode::NOT_MODIFIED {
                warn!("Server returned unusable 304 for: {}", fresh_req.url());
                self.resend_and_heal_cache(fresh_req, cache_entry, response_callback)
                self.resend_and_heal_cache(
                    fresh_req,
                    cache_entry,
                    cache_control,
                    response_callback,
                )
                .await
            } else {
                self.run_response_callback(

@@ -377,9 +394,10 @@ impl CachedClient {
        &self,
        req: Request,
        cache_entry: &CacheEntry,
        cache_control: CacheControl<'_>,
        response_callback: Callback,
    ) -> Result<Payload, CachedClientError<CallBackError>> {
        let (response, cache_policy) = self.fresh_request(req).await?;
        let (response, cache_policy) = self.fresh_request(req, cache_control).await?;

        let payload = self
            .run_response_callback(cache_entry, cache_policy, response, async |resp| {

@@ -399,10 +417,11 @@ impl CachedClient {
        &self,
        req: Request,
        cache_entry: &CacheEntry,
        cache_control: CacheControl<'_>,
        response_callback: Callback,
    ) -> Result<Payload::Target, CachedClientError<CallBackError>> {
        let _ = fs_err::tokio::remove_file(&cache_entry.path()).await;
        let (response, cache_policy) = self.fresh_request(req).await?;
        let (response, cache_policy) = self.fresh_request(req, cache_control).await?;
        self.run_response_callback(cache_entry, cache_policy, response, response_callback)
            .await
    }

@@ -469,12 +488,12 @@ impl CachedClient {
    async fn send_cached(
        &self,
        mut req: Request,
        cache_control: CacheControl,
        cache_control: CacheControl<'_>,
        cached: DataWithCachePolicy,
    ) -> Result<CachedResponse, Error> {
        // Apply the cache control header, if necessary.
        match cache_control {
            CacheControl::None | CacheControl::AllowStale => {}
            CacheControl::None | CacheControl::AllowStale | CacheControl::Override(..) => {}
            CacheControl::MustRevalidate => {
                req.headers_mut().insert(
                    http::header::CACHE_CONTROL,

@@ -488,9 +507,14 @@ impl CachedClient {
                CachedResponse::FreshCache(cached)
            }
            BeforeRequest::Stale(new_cache_policy_builder) => match cache_control {
                CacheControl::None | CacheControl::MustRevalidate => {
                CacheControl::None | CacheControl::MustRevalidate | CacheControl::Override(_) => {
                    debug!("Found stale response for: {}", req.url());
                    self.send_cached_handle_stale(req, cached, new_cache_policy_builder)
                    self.send_cached_handle_stale(
                        req,
                        cache_control,
                        cached,
                        new_cache_policy_builder,
                    )
                    .await?
                }
                CacheControl::AllowStale => {

@@ -504,7 +528,7 @@ impl CachedClient {
                        "Cached request doesn't match current request for: {}",
                        req.url()
                    );
                    let (response, cache_policy) = self.fresh_request(req).await?;
                    let (response, cache_policy) = self.fresh_request(req, cache_control).await?;
                    CachedResponse::ModifiedOrNew {
                        response,
                        cache_policy,

@@ -516,20 +540,30 @@ impl CachedClient {
    async fn send_cached_handle_stale(
        &self,
        req: Request,
        cache_control: CacheControl<'_>,
        cached: DataWithCachePolicy,
        new_cache_policy_builder: CachePolicyBuilder,
    ) -> Result<CachedResponse, Error> {
        let url = DisplaySafeUrl::from(req.url().clone());
        debug!("Sending revalidation request for: {url}");
        let response = self
        let mut response = self
            .0
            .for_host(&url)
            .execute(req)
            .instrument(info_span!("revalidation_request", url = url.as_str()))
            .await
            .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?
            .error_for_status()
            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;

        // If the user set a custom `Cache-Control` header, override it.
        if let CacheControl::Override(header) = cache_control {
            response.headers_mut().insert(
                http::header::CACHE_CONTROL,
                http::HeaderValue::from_str(header)
                    .expect("Cache-Control header must be valid UTF-8"),
            );
        }

        match cached
            .cache_policy
            .after_response(new_cache_policy_builder, &response)

@@ -558,17 +592,26 @@ impl CachedClient {
    async fn fresh_request(
        &self,
        req: Request,
        cache_control: CacheControl<'_>,
    ) -> Result<(Response, Option<Box<CachePolicy>>), Error> {
        let url = DisplaySafeUrl::from(req.url().clone());
        trace!("Sending fresh {} request for {}", req.method(), url);
        let cache_policy_builder = CachePolicyBuilder::new(&req);
        let response = self
        let mut response = self
            .0
            .for_host(&url)
            .execute(req)
            .await
            .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?;

        // If the user set a custom `Cache-Control` header, override it.
        if let CacheControl::Override(header) = cache_control {
            response.headers_mut().insert(
                http::header::CACHE_CONTROL,
                http::HeaderValue::from_str(header)
                    .expect("Cache-Control header must be valid UTF-8"),
            );
        }

        let retry_count = response
            .extensions()
            .get::<reqwest_retry::RetryCount>()
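// Editor's note: a hedged sketch, not part of the diff. Both override sites above use
// `http::HeaderValue::from_str(header).expect(...)`; validating the user-supplied value
// once, up front, against the `InvalidCacheControl` variant added to `ErrorKind` below
// would keep that `expect` from ever firing. The helper name is hypothetical.
fn validate_cache_control(header: &str) -> Result<http::HeaderValue, ErrorKind> {
    http::HeaderValue::from_str(header)
        .map_err(|_| ErrorKind::InvalidCacheControl(header.to_string()))
}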
@@ -601,7 +644,7 @@ impl CachedClient {
        &self,
        req: Request,
        cache_entry: &CacheEntry,
        cache_control: CacheControl,
        cache_control: CacheControl<'_>,
        response_callback: Callback,
    ) -> Result<Payload, CachedClientError<CallBackError>> {
        let payload = self

@@ -625,7 +668,7 @@ impl CachedClient {
        &self,
        req: Request,
        cache_entry: &CacheEntry,
        cache_control: CacheControl,
        cache_control: CacheControl<'_>,
        response_callback: Callback,
    ) -> Result<Payload::Target, CachedClientError<CallBackError>> {
        let mut past_retries = 0;

@@ -683,6 +726,7 @@ impl CachedClient {
        &self,
        req: Request,
        cache_entry: &CacheEntry,
        cache_control: CacheControl<'_>,
        response_callback: Callback,
    ) -> Result<Payload, CachedClientError<CallBackError>> {
        let mut past_retries = 0;

@@ -691,7 +735,7 @@ impl CachedClient {
        loop {
            let fresh_req = req.try_clone().expect("HTTP request must be cloneable");
            let result = self
                .skip_cache(fresh_req, cache_entry, &response_callback)
                .skip_cache(fresh_req, cache_entry, cache_control, &response_callback)
                .await;

            // Check if the middleware already performed retries

@@ -152,9 +152,6 @@ pub enum ErrorKind {
    #[error(transparent)]
    InvalidUrl(#[from] uv_distribution_types::ToUrlError),

    #[error(transparent)]
    JoinRelativeUrl(#[from] uv_pypi_types::JoinRelativeError),

    #[error(transparent)]
    Flat(#[from] FlatIndexError),

@@ -262,6 +259,9 @@ pub enum ErrorKind {
        "Network connectivity is disabled, but the requested data wasn't found in the cache for: `{0}`"
    )]
    Offline(String),

    #[error("Invalid cache control header: `{0}`")]
    InvalidCacheControl(String),
}

impl ErrorKind {

@@ -21,7 +21,6 @@ use crate::rkyvutil::OwnedArchive;
    rkyv::Serialize,
)]
#[rkyv(derive(Debug))]
#[allow(clippy::struct_excessive_bools)]
pub struct CacheControl {
    // directives for requests and responses
    /// * <https://www.rfc-editor.org/rfc/rfc9111.html#name-max-age>

@@ -1,6 +1,6 @@
pub use base_client::{
    AuthIntegration, BaseClient, BaseClientBuilder, DEFAULT_RETRIES, ExtraMiddleware,
    UvRetryableStrategy, is_extended_transient_error,
    RedirectClientWithMiddleware, RequestBuilder, UvRetryableStrategy, is_extended_transient_error,
};
pub use cached_client::{CacheControl, CachedClient, CachedClientError, DataWithCachePolicy};
pub use error::{Error, ErrorKind, WrappedReqwestError};
@@ -10,7 +10,6 @@ use futures::{FutureExt, StreamExt, TryStreamExt};
use http::{HeaderMap, StatusCode};
use itertools::Either;
use reqwest::{Proxy, Response};
use reqwest_middleware::ClientWithMiddleware;
use rustc_hash::FxHashMap;
use tokio::sync::{Mutex, Semaphore};
use tracing::{Instrument, debug, info_span, instrument, trace, warn};

@@ -22,8 +21,8 @@ use uv_configuration::KeyringProviderType;
use uv_configuration::{IndexStrategy, TrustedHost};
use uv_distribution_filename::{DistFilename, SourceDistFilename, WheelFilename};
use uv_distribution_types::{
    BuiltDist, File, FileLocation, IndexCapabilities, IndexFormat, IndexLocations,
    IndexMetadataRef, IndexStatusCodeDecision, IndexStatusCodeStrategy, IndexUrl, IndexUrls, Name,
    BuiltDist, File, IndexCapabilities, IndexFormat, IndexLocations, IndexMetadataRef,
    IndexStatusCodeDecision, IndexStatusCodeStrategy, IndexUrl, IndexUrls, Name,
};
use uv_metadata::{read_metadata_async_seek, read_metadata_async_stream};
use uv_normalize::PackageName;

@@ -35,13 +34,16 @@ use uv_redacted::DisplaySafeUrl;
use uv_small_str::SmallString;
use uv_torch::TorchStrategy;

use crate::base_client::{BaseClientBuilder, ExtraMiddleware};
use crate::base_client::{BaseClientBuilder, ExtraMiddleware, RedirectPolicy};
use crate::cached_client::CacheControl;
use crate::flat_index::FlatIndexEntry;
use crate::html::SimpleHtml;
use crate::remote_metadata::wheel_metadata_from_remote_zip;
use crate::rkyvutil::OwnedArchive;
use crate::{BaseClient, CachedClient, Error, ErrorKind, FlatIndexClient, FlatIndexEntries};
use crate::{
    BaseClient, CachedClient, Error, ErrorKind, FlatIndexClient, FlatIndexEntries,
    RedirectClientWithMiddleware,
};

/// A builder for an [`RegistryClient`].
#[derive(Debug, Clone)]

@@ -113,12 +115,25 @@ impl<'a> RegistryClientBuilder<'a> {
        self
    }

    pub fn retries_from_env(mut self) -> anyhow::Result<Self> {
        self.base_client_builder = self.base_client_builder.retries_from_env()?;
        Ok(self)
    }

    #[must_use]
    pub fn native_tls(mut self, native_tls: bool) -> Self {
        self.base_client_builder = self.base_client_builder.native_tls(native_tls);
        self
    }

    #[must_use]
    pub fn built_in_root_certs(mut self, built_in_root_certs: bool) -> Self {
        self.base_client_builder = self
            .base_client_builder
            .built_in_root_certs(built_in_root_certs);
        self
    }

    #[must_use]
    pub fn cache(mut self, cache: Cache) -> Self {
        self.cache = cache;

@@ -149,9 +164,23 @@ impl<'a> RegistryClientBuilder<'a> {
        self
    }

    /// Allows credentials to be propagated on cross-origin redirects.
    ///
    /// WARNING: This should only be available for tests. In production code, propagating credentials
    /// during cross-origin redirects can lead to security vulnerabilities including credential
    /// leakage to untrusted domains.
    #[cfg(test)]
    #[must_use]
    pub fn allow_cross_origin_credentials(mut self) -> Self {
        self.base_client_builder = self.base_client_builder.allow_cross_origin_credentials();
        self
    }

    pub fn build(self) -> RegistryClient {
        // Build a base client
        let builder = self.base_client_builder;
        let builder = self
            .base_client_builder
            .redirect(RedirectPolicy::RetriggerMiddleware);

        let client = builder.build();

@@ -248,7 +277,7 @@ impl RegistryClient {
    }

    /// Return the [`BaseClient`] used by this client.
    pub fn uncached_client(&self, url: &DisplaySafeUrl) -> &ClientWithMiddleware {
    pub fn uncached_client(&self, url: &DisplaySafeUrl) -> &RedirectClientWithMiddleware {
        self.client.uncached().for_host(url)
    }

@@ -490,11 +519,17 @@ impl RegistryClient {
            format!("{package_name}.rkyv"),
        );
        let cache_control = match self.connectivity {
            Connectivity::Online => CacheControl::from(
            Connectivity::Online => {
                if let Some(header) = self.index_urls.simple_api_cache_control_for(index) {
                    CacheControl::Override(header)
                } else {
                    CacheControl::from(
                        self.cache
                            .freshness(&cache_entry, Some(package_name), None)
                            .map_err(ErrorKind::Io)?,
                    ),
                    )
                }
            }
            Connectivity::Offline => CacheControl::AllowStale,
        };
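// Editor's note: a distilled sketch of the selection rule this hunk (and the artifact
// variants below) implement; the standalone function is illustrative, not in the diff.
fn select_cache_control<'a>(
    connectivity: Connectivity,
    override_header: Option<&'a str>,
    freshness: Freshness,
) -> CacheControl<'a> {
    match connectivity {
        // Online: a per-index `cache-control` override beats the freshness-derived policy.
        Connectivity::Online => match override_header {
            Some(header) => CacheControl::Override(header),
            None => CacheControl::from(freshness),
        },
        // Offline: always allow stale cache entries rather than hitting the network.
        Connectivity::Offline => CacheControl::AllowStale,
    }
}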
@@ -550,7 +585,7 @@ impl RegistryClient {
        package_name: &PackageName,
        url: &DisplaySafeUrl,
        cache_entry: &CacheEntry,
        cache_control: CacheControl,
        cache_control: CacheControl<'_>,
    ) -> Result<OwnedArchive<SimpleMetadata>, Error> {
        let simple_request = self
            .uncached_client(url)

@@ -666,30 +701,14 @@ impl RegistryClient {

        let wheel = wheels.best_wheel();

        let location = match &wheel.file.url {
            FileLocation::RelativeUrl(base, url) => {
                let url = uv_pypi_types::base_url_join_relative(base, url)
                    .map_err(ErrorKind::JoinRelativeUrl)?;
                if url.scheme() == "file" {
        let url = wheel.file.url.to_url().map_err(ErrorKind::InvalidUrl)?;
        let location = if url.scheme() == "file" {
            let path = url
                .to_file_path()
                .map_err(|()| ErrorKind::NonFileUrl(url.clone()))?;
            WheelLocation::Path(path)
        } else {
            WheelLocation::Url(url)
        }
            }
            FileLocation::AbsoluteUrl(url) => {
                let url = url.to_url().map_err(ErrorKind::InvalidUrl)?;
                if url.scheme() == "file" {
                    let path = url
                        .to_file_path()
                        .map_err(|()| ErrorKind::NonFileUrl(url.clone()))?;
                    WheelLocation::Path(path)
                } else {
                    WheelLocation::Url(url)
                }
            }
        };

        match location {

@@ -778,11 +797,17 @@ impl RegistryClient {
            format!("{}.msgpack", filename.cache_key()),
        );
        let cache_control = match self.connectivity {
            Connectivity::Online => CacheControl::from(
            Connectivity::Online => {
                if let Some(header) = self.index_urls.artifact_cache_control_for(index) {
                    CacheControl::Override(header)
                } else {
                    CacheControl::from(
                        self.cache
                            .freshness(&cache_entry, Some(&filename.name), None)
                            .map_err(ErrorKind::Io)?,
                    ),
                    )
                }
            }
            Connectivity::Offline => CacheControl::AllowStale,
        };

@@ -848,11 +873,25 @@ impl RegistryClient {
            format!("{}.msgpack", filename.cache_key()),
        );
        let cache_control = match self.connectivity {
            Connectivity::Online => CacheControl::from(
            Connectivity::Online => {
                if let Some(index) = index {
                    if let Some(header) = self.index_urls.artifact_cache_control_for(index) {
                        CacheControl::Override(header)
                    } else {
                        CacheControl::from(
                            self.cache
                                .freshness(&cache_entry, Some(&filename.name), None)
                                .map_err(ErrorKind::Io)?,
                        ),
                        )
                    }
                } else {
                    CacheControl::from(
                        self.cache
                            .freshness(&cache_entry, Some(&filename.name), None)
                            .map_err(ErrorKind::Io)?,
                    )
                }
            }
            Connectivity::Offline => CacheControl::AllowStale,
        };
@@ -1215,12 +1254,192 @@ impl Connectivity {
mod tests {
    use std::str::FromStr;

    use url::Url;
    use uv_normalize::PackageName;
    use uv_pypi_types::{JoinRelativeError, SimpleJson};
    use uv_pypi_types::SimpleJson;
    use uv_redacted::DisplaySafeUrl;

    use crate::{SimpleMetadata, SimpleMetadatum, html::SimpleHtml};

    use crate::RegistryClientBuilder;
    use uv_cache::Cache;
    use uv_distribution_types::{FileLocation, ToUrlError};
    use uv_small_str::SmallString;
    use wiremock::matchers::{basic_auth, method, path_regex};
    use wiremock::{Mock, MockServer, ResponseTemplate};

    type Error = Box<dyn std::error::Error>;

    async fn start_test_server(username: &'static str, password: &'static str) -> MockServer {
        let server = MockServer::start().await;

        Mock::given(method("GET"))
            .and(basic_auth(username, password))
            .respond_with(ResponseTemplate::new(200))
            .mount(&server)
            .await;

        Mock::given(method("GET"))
            .respond_with(ResponseTemplate::new(401))
            .mount(&server)
            .await;

        server
    }

    #[tokio::test]
    async fn test_redirect_to_server_with_credentials() -> Result<(), Error> {
        let username = "user";
        let password = "password";

        let auth_server = start_test_server(username, password).await;
        let auth_base_url = DisplaySafeUrl::parse(&auth_server.uri())?;

        let redirect_server = MockServer::start().await;

        // Configure the redirect server to respond with a 302 to the auth server
        Mock::given(method("GET"))
            .respond_with(
                ResponseTemplate::new(302).insert_header("Location", format!("{auth_base_url}")),
            )
            .mount(&redirect_server)
            .await;

        let redirect_server_url = DisplaySafeUrl::parse(&redirect_server.uri())?;

        let cache = Cache::temp()?;
        let registry_client = RegistryClientBuilder::new(cache)
            .allow_cross_origin_credentials()
            .build();
        let client = registry_client.cached_client().uncached();

        assert_eq!(
            client
                .for_host(&redirect_server_url)
                .get(redirect_server.uri())
                .send()
                .await?
                .status(),
            401,
            "Requests should fail if credentials are missing"
        );

        let mut url = redirect_server_url.clone();
        let _ = url.set_username(username);
        let _ = url.set_password(Some(password));

        assert_eq!(
            client
                .for_host(&redirect_server_url)
                .get(Url::from(url))
                .send()
                .await?
                .status(),
            200,
            "Requests should succeed if credentials are present"
        );

        Ok(())
    }

    #[tokio::test]
    async fn test_redirect_root_relative_url() -> Result<(), Error> {
        let username = "user";
        let password = "password";

        let redirect_server = MockServer::start().await;

        // Configure the redirect server to respond with a 307 with a relative URL.
        Mock::given(method("GET"))
            .and(path_regex("/foo/"))
            .respond_with(
                ResponseTemplate::new(307).insert_header("Location", "/bar/baz/".to_string()),
            )
            .mount(&redirect_server)
            .await;

        Mock::given(method("GET"))
            .and(path_regex("/bar/baz/"))
            .and(basic_auth(username, password))
            .respond_with(ResponseTemplate::new(200))
            .mount(&redirect_server)
            .await;

        let redirect_server_url = DisplaySafeUrl::parse(&redirect_server.uri())?.join("foo/")?;

        let cache = Cache::temp()?;
        let registry_client = RegistryClientBuilder::new(cache)
            .allow_cross_origin_credentials()
            .build();
        let client = registry_client.cached_client().uncached();

        let mut url = redirect_server_url.clone();
        let _ = url.set_username(username);
        let _ = url.set_password(Some(password));

        assert_eq!(
            client
                .for_host(&url)
                .get(Url::from(url))
                .send()
                .await?
                .status(),
            200,
            "Requests should succeed for relative URL"
        );

        Ok(())
    }

    #[tokio::test]
    async fn test_redirect_relative_url() -> Result<(), Error> {
        let username = "user";
        let password = "password";

        let redirect_server = MockServer::start().await;

        // Configure the redirect server to respond with a 307 with a relative URL.
        Mock::given(method("GET"))
            .and(path_regex("/foo/bar/baz/"))
            .and(basic_auth(username, password))
            .respond_with(ResponseTemplate::new(200))
            .mount(&redirect_server)
            .await;

        Mock::given(method("GET"))
            .and(path_regex("/foo/"))
            .and(basic_auth(username, password))
            .respond_with(
                ResponseTemplate::new(307).insert_header("Location", "bar/baz/".to_string()),
            )
            .mount(&redirect_server)
            .await;

        let cache = Cache::temp()?;
        let registry_client = RegistryClientBuilder::new(cache)
            .allow_cross_origin_credentials()
            .build();
        let client = registry_client.cached_client().uncached();

        let redirect_server_url = DisplaySafeUrl::parse(&redirect_server.uri())?.join("foo/")?;
        let mut url = redirect_server_url.clone();
        let _ = url.set_username(username);
        let _ = url.set_password(Some(password));

        assert_eq!(
            client
                .for_host(&url)
                .get(Url::from(url))
                .send()
                .await?
                .status(),
            200,
            "Requests should succeed for relative URL"
        );

        Ok(())
    }

    #[test]
    fn ignore_failing_files() {
        // 1.7.7 has an invalid requires-python field (double comma), 1.7.8 is valid
@@ -1274,7 +1493,7 @@ mod tests {
    ///
    /// See: <https://github.com/astral-sh/uv/issues/1388>
    #[test]
    fn relative_urls_code_artifact() -> Result<(), JoinRelativeError> {
    fn relative_urls_code_artifact() -> Result<(), ToUrlError> {
        let text = r#"
<!DOCTYPE html>
<html>

@@ -1297,12 +1516,13 @@ mod tests {
        let base = DisplaySafeUrl::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask")
            .unwrap();
        let SimpleHtml { base, files } = SimpleHtml::parse(text, &base).unwrap();
        let base = SmallString::from(base.as_str());

        // Test parsing of the file urls
        let urls = files
            .iter()
            .map(|file| uv_pypi_types::base_url_join_relative(base.as_url().as_str(), &file.url))
            .collect::<Result<Vec<_>, JoinRelativeError>>()?;
            .into_iter()
            .map(|file| FileLocation::new(file.url, &base).to_url())
            .collect::<Result<Vec<_>, _>>()?;
        let urls = urls
            .iter()
            .map(DisplaySafeUrl::to_string)
@@ -27,7 +27,9 @@ uv-pep440 = { workspace = true }
uv-pep508 = { workspace = true, features = ["schemars"] }
uv-platform-tags = { workspace = true }
uv-static = { workspace = true }
uv-warnings = { workspace = true }

bitflags = { workspace = true }
clap = { workspace = true, features = ["derive"], optional = true }
either = { workspace = true }
fs-err = { workspace = true }
@@ -4,7 +4,7 @@ use uv_pep508::PackageName;

use crate::{PackageNameSpecifier, PackageNameSpecifiers};

#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, Hash)]
pub enum BuildKind {
    /// A PEP 517 wheel build.
    #[default]
@@ -3,6 +3,7 @@ use std::{
    str::FromStr,
};
use uv_cache_key::CacheKeyHasher;
use uv_normalize::PackageName;

#[derive(Debug, Clone)]
pub struct ConfigSettingEntry {

@@ -28,6 +29,32 @@ impl FromStr for ConfigSettingEntry {
    }
}

#[derive(Debug, Clone)]
pub struct ConfigSettingPackageEntry {
    /// The package name to apply the setting to.
    package: PackageName,
    /// The config setting entry.
    setting: ConfigSettingEntry,
}

impl FromStr for ConfigSettingPackageEntry {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let Some((package_str, config_str)) = s.split_once(':') else {
            return Err(format!(
                "Invalid config setting: {s} (expected `PACKAGE:KEY=VALUE`)"
            ));
        };

        let package = PackageName::from_str(package_str.trim())
            .map_err(|e| format!("Invalid package name: {e}"))?;
        let setting = ConfigSettingEntry::from_str(config_str)?;

        Ok(Self { package, setting })
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema), schemars(untagged))]
enum ConfigSettingValue {

@@ -212,6 +239,111 @@ impl<'de> serde::Deserialize<'de> for ConfigSettings {
    }
}

/// Settings to pass to PEP 517 build backends on a per-package basis.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct PackageConfigSettings(BTreeMap<PackageName, ConfigSettings>);

impl FromIterator<ConfigSettingPackageEntry> for PackageConfigSettings {
    fn from_iter<T: IntoIterator<Item = ConfigSettingPackageEntry>>(iter: T) -> Self {
        let mut package_configs: BTreeMap<PackageName, Vec<ConfigSettingEntry>> = BTreeMap::new();

        for entry in iter {
            package_configs
                .entry(entry.package)
                .or_default()
                .push(entry.setting);
        }

        let configs = package_configs
            .into_iter()
            .map(|(package, entries)| (package, entries.into_iter().collect()))
            .collect();

        Self(configs)
    }
}

impl PackageConfigSettings {
    /// Returns the config settings for a specific package, if any.
    pub fn get(&self, package: &PackageName) -> Option<&ConfigSettings> {
        self.0.get(package)
    }

    /// Returns `true` if there are no package-specific settings.
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }

    /// Merge two sets of package config settings, with the values in `self` taking precedence.
    #[must_use]
    pub fn merge(mut self, other: PackageConfigSettings) -> PackageConfigSettings {
        for (package, settings) in other.0 {
            match self.0.entry(package) {
                Entry::Vacant(vacant) => {
                    vacant.insert(settings);
                }
                Entry::Occupied(mut occupied) => {
                    let merged = occupied.get().clone().merge(settings);
                    occupied.insert(merged);
                }
            }
        }
        self
    }
}

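// Editor's note: a small illustrative sketch of the precedence documented on `merge`
// above: entries already in `self` win, and per-package maps are combined key-by-key via
// `ConfigSettings::merge`. The variable names are hypothetical.
fn combine_settings(
    cli: PackageConfigSettings,
    config_file: PackageConfigSettings,
) -> PackageConfigSettings {
    // Settings given on the CLI (`cli`) take precedence over configuration-file values.
    cli.merge(config_file)
}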
impl uv_cache_key::CacheKey for PackageConfigSettings {
    fn cache_key(&self, state: &mut CacheKeyHasher) {
        for (package, settings) in &self.0 {
            package.to_string().cache_key(state);
            settings.cache_key(state);
        }
    }
}

impl serde::Serialize for PackageConfigSettings {
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        use serde::ser::SerializeMap;

        let mut map = serializer.serialize_map(Some(self.0.len()))?;
        for (key, value) in &self.0 {
            map.serialize_entry(&key.to_string(), value)?;
        }
        map.end()
    }
}

impl<'de> serde::Deserialize<'de> for PackageConfigSettings {
    fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        struct Visitor;

        impl<'de> serde::de::Visitor<'de> for Visitor {
            type Value = PackageConfigSettings;

            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                formatter.write_str("a map from package name to config settings")
            }

            fn visit_map<A: serde::de::MapAccess<'de>>(
                self,
                mut map: A,
            ) -> Result<Self::Value, A::Error> {
                let mut config = BTreeMap::default();
                while let Some((key, value)) = map.next_entry::<String, ConfigSettings>()? {
                    let package = PackageName::from_str(&key).map_err(|e| {
                        serde::de::Error::custom(format!("Invalid package name: {e}"))
                    })?;
                    config.insert(package, value);
                }
                Ok(PackageConfigSettings(config))
            }
        }

        deserializer.deserialize_map(Visitor)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

@@ -291,4 +423,56 @@ mod tests {
        );
        assert_eq!(settings.escape_for_python(), r#"{"key":"val\\1 {}value"}"#);
    }

    #[test]
    fn parse_config_setting_package_entry() {
        // Test valid parsing
        let entry = ConfigSettingPackageEntry::from_str("numpy:editable_mode=compat").unwrap();
        assert_eq!(entry.package.as_ref(), "numpy");
        assert_eq!(entry.setting.key, "editable_mode");
        assert_eq!(entry.setting.value, "compat");

        // Test with package name containing hyphens
        let entry = ConfigSettingPackageEntry::from_str("my-package:some_key=value").unwrap();
        assert_eq!(entry.package.as_ref(), "my-package");
        assert_eq!(entry.setting.key, "some_key");
        assert_eq!(entry.setting.value, "value");

        // Test with spaces around values
        let entry = ConfigSettingPackageEntry::from_str(" numpy : key = value ").unwrap();
        assert_eq!(entry.package.as_ref(), "numpy");
        assert_eq!(entry.setting.key, "key");
        assert_eq!(entry.setting.value, "value");
    }

    #[test]
    fn collect_config_settings_package() {
        let settings: PackageConfigSettings = vec![
            ConfigSettingPackageEntry::from_str("numpy:editable_mode=compat").unwrap(),
            ConfigSettingPackageEntry::from_str("numpy:another_key=value").unwrap(),
            ConfigSettingPackageEntry::from_str("scipy:build_option=fast").unwrap(),
        ]
        .into_iter()
        .collect();

        let numpy_settings = settings
            .get(&PackageName::from_str("numpy").unwrap())
            .unwrap();
        assert_eq!(
            numpy_settings.0.get("editable_mode"),
            Some(&ConfigSettingValue::String("compat".to_string()))
        );
        assert_eq!(
            numpy_settings.0.get("another_key"),
            Some(&ConfigSettingValue::String("value".to_string()))
        );

        let scipy_settings = settings
            .get(&PackageName::from_str("scipy").unwrap())
            .unwrap();
        assert_eq!(
            scipy_settings.0.get("build_option"),
            Some(&ConfigSettingValue::String("fast".to_string()))
        );
    }
}
@@ -186,6 +186,18 @@ impl DependencyGroupsInner {
        self.include.names().chain(&self.exclude)
    }

    /// Returns an iterator over all groups that are included in the specification,
    /// assuming `all_names` is an iterator over all groups.
    pub fn group_names<'a, Names>(
        &'a self,
        all_names: Names,
    ) -> impl Iterator<Item = &'a GroupName> + 'a
    where
        Names: Iterator<Item = &'a GroupName> + 'a,
    {
        all_names.filter(move |name| self.contains(name))
    }

    /// Iterate over all groups the user explicitly asked for on the CLI
    pub fn explicit_names(&self) -> impl Iterator<Item = &GroupName> {
        let DependencyGroupsHistory {

@@ -155,7 +155,8 @@ impl ExtrasSpecificationInner {
        self.include.names().chain(&self.exclude)
    }

    /// Returns `true` if the specification includes the given extra.
    /// Returns an iterator over all extras that are included in the specification,
    /// assuming `all_names` is an iterator over all extras.
    pub fn extra_names<'a, Names>(
        &'a self,
        all_names: Names,
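// Editor's note: an illustrative (hypothetical) caller for the two new helpers above;
// whether `DependencyGroupsInner` is reachable from outside its crate is not shown here.
fn selected_groups<'a>(
    spec: &'a DependencyGroupsInner,
    all: &'a [GroupName],
) -> Vec<&'a GroupName> {
    // Keep only the groups that the user's specification actually includes.
    spec.group_names(all.iter()).collect()
}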
@@ -1,3 +1,5 @@
#[cfg(feature = "schemars")]
use std::borrow::Cow;
use std::str::FromStr;

use uv_pep508::PackageName;

@@ -63,28 +65,16 @@ impl<'de> serde::Deserialize<'de> for PackageNameSpecifier {

#[cfg(feature = "schemars")]
impl schemars::JsonSchema for PackageNameSpecifier {
    fn schema_name() -> String {
        "PackageNameSpecifier".to_string()
    fn schema_name() -> Cow<'static, str> {
        Cow::Borrowed("PackageNameSpecifier")
    }

    fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
        schemars::schema::SchemaObject {
            instance_type: Some(schemars::schema::InstanceType::String.into()),
            string: Some(Box::new(schemars::schema::StringValidation {
                // See: https://packaging.python.org/en/latest/specifications/name-normalization/#name-format
                pattern: Some(
                    r"^(:none:|:all:|([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9]))$"
                        .to_string(),
                ),
                ..schemars::schema::StringValidation::default()
            })),
            metadata: Some(Box::new(schemars::schema::Metadata {
                description: Some("The name of a package, or `:all:` or `:none:` to select or omit all packages, respectively.".to_string()),
                ..schemars::schema::Metadata::default()
            })),
            ..schemars::schema::SchemaObject::default()
        }
        .into()
    fn json_schema(_gen: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
        schemars::json_schema!({
            "type": "string",
            "pattern": r"^(:none:|:all:|([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9]))$",
            "description": "The name of a package, or `:all:` or `:none:` to select or omit all packages, respectively.",
        })
    }
}

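// Editor's note: this hunk, and the `RequiredVersion`/`TrustedHost` hunks below, migrate
// the `JsonSchema` impls from the schemars 0.8-era builder types (`SchemaObject`,
// `StringValidation`, `Metadata`) to the newer `schemars::json_schema!` macro and
// `Cow<'static, str>` schema names. The version numbers are inferred from the API shapes,
// not stated in the diff.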
@@ -1,37 +1,243 @@
use std::fmt::{Display, Formatter};
use std::{
    fmt::{Display, Formatter},
    str::FromStr,
};

use thiserror::Error;
use uv_warnings::warn_user_once;

bitflags::bitflags! {
    #[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
    pub struct PreviewFeatures: u32 {
        const PYTHON_INSTALL_DEFAULT = 1 << 0;
        const PYTHON_UPGRADE = 1 << 1;
        const JSON_OUTPUT = 1 << 2;
        const PYLOCK = 1 << 3;
        const ADD_BOUNDS = 1 << 4;
    }
}

impl PreviewFeatures {
    /// Returns the string representation of a single preview feature flag.
    ///
    /// Panics if given a combination of flags.
    fn flag_as_str(self) -> &'static str {
        match self {
            Self::PYTHON_INSTALL_DEFAULT => "python-install-default",
            Self::PYTHON_UPGRADE => "python-upgrade",
            Self::JSON_OUTPUT => "json-output",
            Self::PYLOCK => "pylock",
            Self::ADD_BOUNDS => "add-bounds",
            _ => panic!("`flag_as_str` can only be used for exactly one feature flag"),
        }
    }
}

impl Display for PreviewFeatures {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        if self.is_empty() {
            write!(f, "none")
        } else {
            let features: Vec<&str> = self.iter().map(PreviewFeatures::flag_as_str).collect();
            write!(f, "{}", features.join(","))
        }
    }
}

#[derive(Debug, Error, Clone)]
pub enum PreviewFeaturesParseError {
    #[error("Empty string in preview features: {0}")]
    Empty(String),
}

impl FromStr for PreviewFeatures {
    type Err = PreviewFeaturesParseError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut flags = PreviewFeatures::empty();

        for part in s.split(',') {
            let part = part.trim();
            if part.is_empty() {
                return Err(PreviewFeaturesParseError::Empty(
                    "Empty string in preview features".to_string(),
                ));
            }

            let flag = match part {
                "python-install-default" => Self::PYTHON_INSTALL_DEFAULT,
                "python-upgrade" => Self::PYTHON_UPGRADE,
                "json-output" => Self::JSON_OUTPUT,
                "pylock" => Self::PYLOCK,
                "add-bounds" => Self::ADD_BOUNDS,
                _ => {
                    warn_user_once!("Unknown preview feature: `{part}`");
                    continue;
                }
            };

            flags |= flag;
        }

        Ok(flags)
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum PreviewMode {
    #[default]
    Disabled,
    Enabled,
pub struct Preview {
    flags: PreviewFeatures,
}

impl PreviewMode {
    pub fn is_enabled(&self) -> bool {
        matches!(self, Self::Enabled)
impl Preview {
    pub fn new(flags: PreviewFeatures) -> Self {
        Self { flags }
    }

    pub fn is_disabled(&self) -> bool {
        matches!(self, Self::Disabled)
    pub fn all() -> Self {
        Self::new(PreviewFeatures::all())
    }

    pub fn from_args(
        preview: bool,
        no_preview: bool,
        preview_features: &[PreviewFeatures],
    ) -> Self {
        if no_preview {
            return Self::default();
        }

        if preview {
            return Self::all();
        }

        let mut flags = PreviewFeatures::empty();

        for features in preview_features {
            flags |= *features;
        }

        Self { flags }
    }

    pub fn is_enabled(&self, flag: PreviewFeatures) -> bool {
        self.flags.contains(flag)
    }
}

impl From<bool> for PreviewMode {
    fn from(version: bool) -> Self {
        if version {
            PreviewMode::Enabled
        } else {
            PreviewMode::Disabled
        }
    }
}

impl Display for PreviewMode {
impl Display for Preview {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Disabled => write!(f, "disabled"),
            Self::Enabled => write!(f, "enabled"),
        if self.flags.is_empty() {
            write!(f, "disabled")
        } else if self.flags == PreviewFeatures::all() {
            write!(f, "enabled")
        } else {
            write!(f, "{}", self.flags)
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_preview_features_from_str() {
        // Test single feature
        let features = PreviewFeatures::from_str("python-install-default").unwrap();
        assert_eq!(features, PreviewFeatures::PYTHON_INSTALL_DEFAULT);

        // Test multiple features
        let features = PreviewFeatures::from_str("python-upgrade,json-output").unwrap();
        assert!(features.contains(PreviewFeatures::PYTHON_UPGRADE));
        assert!(features.contains(PreviewFeatures::JSON_OUTPUT));
        assert!(!features.contains(PreviewFeatures::PYLOCK));

        // Test with whitespace
        let features = PreviewFeatures::from_str("pylock , add-bounds").unwrap();
        assert!(features.contains(PreviewFeatures::PYLOCK));
        assert!(features.contains(PreviewFeatures::ADD_BOUNDS));

        // Test empty string error
        assert!(PreviewFeatures::from_str("").is_err());
        assert!(PreviewFeatures::from_str("pylock,").is_err());
        assert!(PreviewFeatures::from_str(",pylock").is_err());

        // Test unknown feature (should be ignored with warning)
        let features = PreviewFeatures::from_str("unknown-feature,pylock").unwrap();
        assert!(features.contains(PreviewFeatures::PYLOCK));
        assert_eq!(features.bits().count_ones(), 1);
    }

    #[test]
    fn test_preview_features_display() {
        // Test empty
        let features = PreviewFeatures::empty();
        assert_eq!(features.to_string(), "none");

        // Test single feature
        let features = PreviewFeatures::PYTHON_INSTALL_DEFAULT;
        assert_eq!(features.to_string(), "python-install-default");

        // Test multiple features
        let features = PreviewFeatures::PYTHON_UPGRADE | PreviewFeatures::JSON_OUTPUT;
        assert_eq!(features.to_string(), "python-upgrade,json-output");
    }

    #[test]
    fn test_preview_display() {
        // Test disabled
        let preview = Preview::default();
        assert_eq!(preview.to_string(), "disabled");

        // Test enabled (all features)
        let preview = Preview::all();
        assert_eq!(preview.to_string(), "enabled");

        // Test specific features
        let preview = Preview::new(PreviewFeatures::PYTHON_UPGRADE | PreviewFeatures::PYLOCK);
        assert_eq!(preview.to_string(), "python-upgrade,pylock");
    }

    #[test]
    fn test_preview_from_args() {
        // Test no_preview
        let preview = Preview::from_args(true, true, &[]);
        assert_eq!(preview.to_string(), "disabled");

        // Test preview (all features)
        let preview = Preview::from_args(true, false, &[]);
        assert_eq!(preview.to_string(), "enabled");

        // Test specific features
        let features = vec![
            PreviewFeatures::PYTHON_UPGRADE,
            PreviewFeatures::JSON_OUTPUT,
        ];
        let preview = Preview::from_args(false, false, &features);
        assert!(preview.is_enabled(PreviewFeatures::PYTHON_UPGRADE));
        assert!(preview.is_enabled(PreviewFeatures::JSON_OUTPUT));
        assert!(!preview.is_enabled(PreviewFeatures::PYLOCK));
    }

    #[test]
    fn test_as_str_single_flags() {
        assert_eq!(
            PreviewFeatures::PYTHON_INSTALL_DEFAULT.flag_as_str(),
            "python-install-default"
        );
        assert_eq!(
            PreviewFeatures::PYTHON_UPGRADE.flag_as_str(),
            "python-upgrade"
        );
        assert_eq!(PreviewFeatures::JSON_OUTPUT.flag_as_str(), "json-output");
        assert_eq!(PreviewFeatures::PYLOCK.flag_as_str(), "pylock");
        assert_eq!(PreviewFeatures::ADD_BOUNDS.flag_as_str(), "add-bounds");
    }

    #[test]
    #[should_panic(expected = "`flag_as_str` can only be used for exactly one feature flag")]
    fn test_as_str_multiple_flags_panics() {
        let features = PreviewFeatures::PYTHON_UPGRADE | PreviewFeatures::JSON_OUTPUT;
        let _ = features.flag_as_str();
    }
}
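// Editor's note: a hedged sketch (not in the diff) of how a CLI layer might combine the
// pieces above: parse each repeated `--preview-features` value via `FromStr`, then fold
// the results with `Preview::from_args`. The flag names are assumptions.
fn preview_from_cli(
    preview: bool,
    no_preview: bool,
    values: &[&str],
) -> Result<Preview, PreviewFeaturesParseError> {
    let parsed = values
        .iter()
        .map(|value| value.parse::<PreviewFeatures>())
        .collect::<Result<Vec<_>, _>>()?;
    Ok(Preview::from_args(preview, no_preview, &parsed))
}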
@@ -1,5 +1,6 @@
use std::fmt::Formatter;
use std::str::FromStr;
#[cfg(feature = "schemars")]
use std::borrow::Cow;
use std::{fmt::Formatter, str::FromStr};

use uv_pep440::{Version, VersionSpecifier, VersionSpecifiers, VersionSpecifiersParseError};

@@ -36,20 +37,15 @@ impl FromStr for RequiredVersion {

#[cfg(feature = "schemars")]
impl schemars::JsonSchema for RequiredVersion {
    fn schema_name() -> String {
        String::from("RequiredVersion")
    fn schema_name() -> Cow<'static, str> {
        Cow::Borrowed("RequiredVersion")
    }

    fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
        schemars::schema::SchemaObject {
            instance_type: Some(schemars::schema::InstanceType::String.into()),
            metadata: Some(Box::new(schemars::schema::Metadata {
                description: Some("A version specifier, e.g. `>=0.5.0` or `==0.5.0`.".to_string()),
                ..schemars::schema::Metadata::default()
            })),
            ..schemars::schema::SchemaObject::default()
        }
        .into()
    fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
        schemars::json_schema!({
            "type": "string",
            "description": "A version specifier, e.g. `>=0.5.0` or `==0.5.0`."
        })
    }
}

@@ -1,4 +1,6 @@
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
#[derive(
    Debug, Default, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize,
)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
pub enum SourceStrategy {
    /// Use `tool.uv.sources` when resolving dependencies.

@@ -33,7 +33,7 @@ pub enum TargetTriple {
    #[serde(rename = "i686-pc-windows-msvc")]
    I686PcWindowsMsvc,

    /// An x86 Linux target. Equivalent to `x86_64-manylinux_2_17`.
    /// An x86 Linux target. Equivalent to `x86_64-manylinux_2_28`.
    #[cfg_attr(feature = "clap", value(name = "x86_64-unknown-linux-gnu"))]
    #[serde(rename = "x86_64-unknown-linux-gnu")]
    #[serde(alias = "x8664-unknown-linux-gnu")]

@@ -56,7 +56,7 @@ pub enum TargetTriple {
    #[serde(alias = "x8664-apple-darwin")]
    X8664AppleDarwin,

    /// An ARM64 Linux target. Equivalent to `aarch64-manylinux_2_17`.
    /// An ARM64 Linux target. Equivalent to `aarch64-manylinux_2_28`.
    #[cfg_attr(feature = "clap", value(name = "aarch64-unknown-linux-gnu"))]
    #[serde(rename = "aarch64-unknown-linux-gnu")]
    Aarch64UnknownLinuxGnu,

@@ -227,7 +227,7 @@ pub enum TargetTriple {
    #[serde(alias = "aarch64-manylinux240")]
    Aarch64Manylinux240,

    /// A wasm32 target using the the Pyodide 2024 platform. Meant for use with Python 3.12.
    /// A wasm32 target using the Pyodide 2024 platform. Meant for use with Python 3.12.
    #[cfg_attr(feature = "clap", value(name = "wasm32-pyodide2024"))]
    Wasm32Pyodide2024,
}

@@ -240,7 +240,7 @@ impl TargetTriple {
            Self::Linux | Self::X8664UnknownLinuxGnu => Platform::new(
                Os::Manylinux {
                    major: 2,
                    minor: 17,
                    minor: 28,
                },
                Arch::X86_64,
            ),

@@ -262,7 +262,7 @@ impl TargetTriple {
            Self::Aarch64UnknownLinuxGnu => Platform::new(
                Os::Manylinux {
                    major: 2,
                    minor: 17,
                    minor: 28,
                },
                Arch::Aarch64,
            ),

@@ -62,7 +62,7 @@ pub static RAYON_PARALLELISM: AtomicUsize = AtomicUsize::new(0);
/// `LazyLock::force(&RAYON_INITIALIZE)`.
pub static RAYON_INITIALIZE: LazyLock<()> = LazyLock::new(|| {
    rayon::ThreadPoolBuilder::new()
        .num_threads(RAYON_PARALLELISM.load(Ordering::SeqCst))
        .num_threads(RAYON_PARALLELISM.load(Ordering::Relaxed))
        .stack_size(min_stack_size())
        .build_global()
        .expect("failed to initialize global rayon pool");
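// Editor's note (editorial rationale, not stated in the diff): `Relaxed` appears
// sufficient here because the counter is meant to be written before
// `LazyLock::force(&RAYON_INITIALIZE)` runs and is only read inside that one-shot
// initializer, so the `LazyLock` itself provides the required synchronization and
// `SeqCst` bought nothing extra.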
@@ -1,4 +1,6 @@
use serde::{Deserialize, Deserializer};
#[cfg(feature = "schemars")]
use std::borrow::Cow;
use std::str::FromStr;
use url::Url;

@@ -143,20 +145,15 @@ impl std::fmt::Display for TrustedHost {

#[cfg(feature = "schemars")]
impl schemars::JsonSchema for TrustedHost {
    fn schema_name() -> String {
        "TrustedHost".to_string()
    fn schema_name() -> Cow<'static, str> {
        Cow::Borrowed("TrustedHost")
    }

    fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
        schemars::schema::SchemaObject {
            instance_type: Some(schemars::schema::InstanceType::String.into()),
            metadata: Some(Box::new(schemars::schema::Metadata {
                description: Some("A host or host-port pair.".to_string()),
                ..schemars::schema::Metadata::default()
            })),
            ..schemars::schema::SchemaObject::default()
        }
        .into()
    fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
        schemars::json_schema!({
            "type": "string",
            "description": "A host or host-port pair."
        })
    }
}

@@ -6,6 +6,25 @@ use std::{cmp::Ordering, iter};
/// This is a slimmed-down version of `dialoguer::Confirm`, with the post-confirmation report
/// enabled.
pub fn confirm(message: &str, term: &Term, default: bool) -> std::io::Result<bool> {
    confirm_inner(message, None, term, default)
}

/// Prompt the user for confirmation in the given [`Term`], with a hint.
pub fn confirm_with_hint(
    message: &str,
    hint: &str,
    term: &Term,
    default: bool,
) -> std::io::Result<bool> {
    confirm_inner(message, Some(hint), term, default)
}

fn confirm_inner(
    message: &str,
    hint: Option<&str>,
    term: &Term,
    default: bool,
) -> std::io::Result<bool> {
    let prompt = format!(
        "{} {} {} {} {}",
        style("?".to_string()).for_stderr().yellow(),

@@ -18,6 +37,13 @@ pub fn confirm(message: &str, term: &Term, default: bool) -> std::io::Result<boo
    );

    term.write_str(&prompt)?;
    if let Some(hint) = hint {
        term.write_str(&format!(
            "\n\n{}{} {hint}",
            style("hint").for_stderr().bold().cyan(),
            style(":").for_stderr().bold()
        ))?;
    }
    term.hide_cursor()?;
    term.flush()?;

@@ -56,7 +82,14 @@ pub fn confirm(message: &str, term: &Term, default: bool) -> std::io::Result<boo
            .cyan(),
    );

    if hint.is_some() {
        term.clear_last_lines(2)?;
        // It's not clear why we need to clear to the end of the screen here, but it fixes lingering
        // display of the hint on `bash` (the issue did not reproduce on `zsh`).
        term.clear_to_end_of_screen()?;
    } else {
        term.clear_line()?;
    }
    term.write_line(&report)?;
    term.show_cursor()?;
    term.flush()?;
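// Editor's note: a hypothetical call site for the new hint-aware prompt above; the
// message, hint text, and `--yes` flag are illustrative only.
use console::Term;

fn ask_before_delete() -> std::io::Result<bool> {
    let term = Term::stderr();
    confirm_with_hint(
        "Remove the existing virtual environment?",
        "pass `--yes` to skip this prompt",
        &term,
        false,
    )
}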
@@ -4,7 +4,7 @@ use clap::Parser;
use tracing::info;

use uv_cache::{Cache, CacheArgs};
use uv_configuration::Concurrency;
use uv_configuration::{Concurrency, Preview};
use uv_python::{EnvironmentPreference, PythonEnvironment, PythonRequest};

#[derive(Parser)]

@@ -26,6 +26,7 @@ pub(crate) async fn compile(args: CompileArgs) -> anyhow::Result<()> {
            &PythonRequest::default(),
            EnvironmentPreference::OnlyVirtual,
            &cache,
            Preview::default(),
        )?
        .into_interpreter();
        interpreter.sys_executable().to_path_buf()

@@ -3,7 +3,7 @@ use std::path::PathBuf;
use anstream::println;
use anyhow::{Result, bail};
use pretty_assertions::StrComparison;
use schemars::{JsonSchema, schema_for};
use schemars::JsonSchema;
use serde::Deserialize;

use uv_settings::Options as SettingsOptions;

@@ -91,7 +91,10 @@ const REPLACEMENTS: &[(&str, &str)] = &[

/// Generate the JSON schema for the combined options as a string.
fn generate() -> String {
    let schema = schema_for!(CombinedOptions);
    let settings = schemars::generate::SchemaSettings::draft07();
    let generator = schemars::SchemaGenerator::new(settings);
    let schema = generator.into_root_schema_for::<CombinedOptions>();

    let mut output = serde_json::to_string_pretty(&schema).unwrap();

    for (value, replacement) in REPLACEMENTS {

@@ -11,7 +11,7 @@ use crate::ROOT_DIR;
use crate::generate_all::Mode;

/// Contains current supported targets
const TARGETS_YML_URL: &str = "https://raw.githubusercontent.com/astral-sh/python-build-standalone/refs/tags/20250612/cpython-unix/targets.yml";
const TARGETS_YML_URL: &str = "https://raw.githubusercontent.com/astral-sh/python-build-standalone/refs/tags/20250723/cpython-unix/targets.yml";

#[derive(clap::Args)]
pub(crate) struct Args {

@@ -130,7 +130,7 @@ async fn generate() -> Result<String> {
    output.push_str("//! DO NOT EDIT\n");
    output.push_str("//!\n");
    output.push_str("//! Generated with `cargo run dev generate-sysconfig-metadata`\n");
    output.push_str("//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250612/cpython-unix/targets.yml>\n");
    output.push_str("//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250723/cpython-unix/targets.yml>\n");
    output.push_str("//!\n");

    // Disable clippy/fmt

@@ -24,5 +24,5 @@ fs-err = { workspace = true }
tracing = { workspace = true }

[dev-dependencies]
assert_fs = { version = "1.1.2" }
assert_fs = { workspace = true }
indoc = { workspace = true }
@@ -11,13 +11,14 @@ use itertools::Itertools;
use rustc_hash::FxHashMap;
use thiserror::Error;
use tracing::{debug, instrument, trace};

use uv_build_backend::check_direct_build;
use uv_build_frontend::{SourceBuild, SourceBuildContext};
use uv_cache::Cache;
use uv_client::RegistryClient;
use uv_configuration::{
    BuildKind, BuildOptions, ConfigSettings, Constraints, IndexStrategy, PreviewMode, Reinstall,
    SourceStrategy,
    BuildKind, BuildOptions, ConfigSettings, Constraints, IndexStrategy, PackageConfigSettings,
    Preview, Reinstall, SourceStrategy,
};
use uv_configuration::{BuildOutput, Concurrency};
use uv_distribution::DistributionDatabase;

@@ -35,8 +36,8 @@ use uv_resolver::{
    PythonRequirement, Resolver, ResolverEnvironment,
};
use uv_types::{
    AnyErrorBuild, BuildContext, BuildIsolation, BuildStack, EmptyInstalledPackages, HashStrategy,
    InFlight,
    AnyErrorBuild, BuildArena, BuildContext, BuildIsolation, BuildStack, EmptyInstalledPackages,
    HashStrategy, InFlight,
};
use uv_workspace::WorkspaceCache;

@@ -90,6 +91,7 @@ pub struct BuildDispatch<'a> {
    link_mode: uv_install_wheel::LinkMode,
    build_options: &'a BuildOptions,
    config_settings: &'a ConfigSettings,
    config_settings_package: &'a PackageConfigSettings,
    hasher: &'a HashStrategy,
    exclude_newer: Option<ExcludeNewer>,
    source_build_context: SourceBuildContext,

@@ -97,7 +99,7 @@ pub struct BuildDispatch<'a> {
    sources: SourceStrategy,
    workspace_cache: WorkspaceCache,
    concurrency: Concurrency,
    preview: PreviewMode,
    preview: Preview,
}

impl<'a> BuildDispatch<'a> {

@@ -112,6 +114,7 @@ impl<'a> BuildDispatch<'a> {
        shared_state: SharedState,
        index_strategy: IndexStrategy,
        config_settings: &'a ConfigSettings,
        config_settings_package: &'a PackageConfigSettings,
        build_isolation: BuildIsolation<'a>,
        link_mode: uv_install_wheel::LinkMode,
        build_options: &'a BuildOptions,

@@ -120,7 +123,7 @@ impl<'a> BuildDispatch<'a> {
        sources: SourceStrategy,
        workspace_cache: WorkspaceCache,
        concurrency: Concurrency,
        preview: PreviewMode,
        preview: Preview,
    ) -> Self {
        Self {
            client,

@@ -133,6 +136,7 @@ impl<'a> BuildDispatch<'a> {
            dependency_metadata,
            index_strategy,
            config_settings,
            config_settings_package,
            build_isolation,
            link_mode,
            build_options,

@@ -179,6 +183,10 @@ impl BuildContext for BuildDispatch<'_> {
        &self.shared_state.git
    }

    fn build_arena(&self) -> &BuildArena<SourceBuild> {
        &self.shared_state.build_arena
    }

    fn capabilities(&self) -> &IndexCapabilities {
        &self.shared_state.capabilities
    }

@@ -195,6 +203,10 @@ impl BuildContext for BuildDispatch<'_> {
        self.config_settings
    }

    fn config_settings_package(&self) -> &PackageConfigSettings {
        self.config_settings_package
    }

    fn sources(&self) -> SourceStrategy {
        self.sources
    }

@@ -290,6 +302,7 @@ impl BuildContext for BuildDispatch<'_> {
            self.hasher,
            self.index_locations,
            self.config_settings,
            self.config_settings_package,
            self.cache(),
            venv,
            tags,

@@ -413,6 +426,17 @@ impl BuildContext for BuildDispatch<'_> {
            build_stack.insert(dist.distribution_id());
        }

        // Get package-specific config settings if available; otherwise, use global settings.
        let config_settings = if let Some(name) = dist_name {
            if let Some(package_settings) = self.config_settings_package.get(name) {
                package_settings.clone().merge(self.config_settings.clone())
            } else {
                self.config_settings.clone()
            }
        } else {
            self.config_settings.clone()
        };

        let builder = SourceBuild::setup(
            source,
            subdirectory,
@ -426,13 +450,14 @@ impl BuildContext for BuildDispatch<'_> {
|
|||
self.index_locations,
|
||||
sources,
|
||||
self.workspace_cache(),
|
||||
self.config_settings.clone(),
|
||||
config_settings,
|
||||
self.build_isolation,
|
||||
&build_stack,
|
||||
build_kind,
|
||||
self.build_extra_env_vars.clone(),
|
||||
build_output,
|
||||
self.concurrency.builds,
|
||||
self.preview,
|
||||
)
|
||||
.boxed_local()
|
||||
.await?;
|
||||
|
|
@ -447,12 +472,6 @@ impl BuildContext for BuildDispatch<'_> {
|
|||
build_kind: BuildKind,
|
||||
version_id: Option<&'data str>,
|
||||
) -> Result<Option<DistFilename>, BuildDispatchError> {
|
||||
// Direct builds are a preview feature with the uv build backend.
|
||||
if self.preview.is_disabled() {
|
||||
trace!("Preview is disabled, not checking for direct build");
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let source_tree = if let Some(subdir) = subdirectory {
|
||||
source.join(subdir)
|
||||
} else {
|
||||
|
|
@ -520,6 +539,8 @@ pub struct SharedState {
|
|||
index: InMemoryIndex,
|
||||
/// The downloaded distributions.
|
||||
in_flight: InFlight,
|
||||
/// Build directories for any PEP 517 builds executed during resolution or installation.
|
||||
build_arena: BuildArena<SourceBuild>,
|
||||
}
|
||||
|
||||
impl SharedState {
|
||||
|
|
@ -532,6 +553,7 @@ impl SharedState {
|
|||
Self {
|
||||
git: self.git.clone(),
|
||||
capabilities: self.capabilities.clone(),
|
||||
build_arena: self.build_arena.clone(),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
|
@ -555,4 +577,9 @@ impl SharedState {
|
|||
pub fn capabilities(&self) -> &IndexCapabilities {
|
||||
&self.capabilities
|
||||
}
|
||||
|
||||
/// Return the [`BuildArena`] used by the [`SharedState`].
|
||||
pub fn build_arena(&self) -> &BuildArena<SourceBuild> {
|
||||
&self.build_arena
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
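The merge added in the build dispatcher above gives package-scoped `--config-settings` precedence over the global set. A rough std-only sketch of that precedence rule (types simplified; uv's real `ConfigSettings` and `PackageConfigSettings` are richer):

    use std::collections::BTreeMap;

    type ConfigSettings = BTreeMap<String, String>;

    /// Merge package-specific settings over global ones: package keys win,
    /// global keys fill in the gaps.
    fn merge(package: ConfigSettings, global: ConfigSettings) -> ConfigSettings {
        let mut merged = global;
        // Later inserts overwrite, so apply the package-level values last.
        merged.extend(package);
        merged
    }

    fn main() {
        let global = ConfigSettings::from([("editable_mode".into(), "compat".into())]);
        let package = ConfigSettings::from([("build-args".into(), "--arch=x86_64".into())]);
        let merged = merge(package, global);
        assert_eq!(merged.len(), 2);
    }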
@@ -27,7 +27,6 @@ rkyv = { workspace = true, features = ["smallvec-1"] }
 serde = { workspace = true }
 smallvec = { workspace = true }
 thiserror = { workspace = true }
 url = { workspace = true }
 
 [dev-dependencies]
-insta = { version = "1.40.0" }
+insta = { workspace = true }
@@ -5,7 +5,6 @@ use std::str::FromStr;
 use memchr::memchr;
 use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
 use thiserror::Error;
-use url::Url;
 
 use uv_cache_key::cache_digest;
 use uv_normalize::{InvalidNameError, PackageName};
@@ -300,29 +299,6 @@ impl WheelFilename {
     }
 }
 
-impl TryFrom<&Url> for WheelFilename {
-    type Error = WheelFilenameError;
-
-    fn try_from(url: &Url) -> Result<Self, Self::Error> {
-        let filename = url
-            .path_segments()
-            .ok_or_else(|| {
-                WheelFilenameError::InvalidWheelFileName(
-                    url.to_string(),
-                    "URL must have a path".to_string(),
-                )
-            })?
-            .next_back()
-            .ok_or_else(|| {
-                WheelFilenameError::InvalidWheelFileName(
-                    url.to_string(),
-                    "URL must contain a filename".to_string(),
-                )
-            })?;
-        Self::from_str(filename)
-    }
-}
-
 impl<'de> Deserialize<'de> for WheelFilename {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
     where
@@ -29,6 +29,7 @@ uv-platform-tags = { workspace = true }
 uv-pypi-types = { workspace = true }
 uv-redacted = { workspace = true }
 uv-small-str = { workspace = true }
+uv-warnings = { workspace = true }
 
 arcstr = { workspace = true }
 bitflags = { workspace = true }
@@ -124,7 +124,10 @@ impl SourceUrl<'_> {
     pub fn is_editable(&self) -> bool {
         matches!(
             self,
-            Self::Directory(DirectorySourceUrl { editable: true, .. })
+            Self::Directory(DirectorySourceUrl {
+                editable: Some(true),
+                ..
+            })
         )
     }
 
@@ -210,7 +213,7 @@ impl<'a> From<&'a PathSourceDist> for PathSourceUrl<'a> {
 pub struct DirectorySourceUrl<'a> {
     pub url: &'a DisplaySafeUrl,
     pub install_path: Cow<'a, Path>,
-    pub editable: bool,
+    pub editable: Option<bool>,
 }
 
 impl std::fmt::Display for DirectorySourceUrl<'_> {
@@ -30,21 +30,20 @@ impl DependencyMetadata {
 
         if let Some(version) = version {
             // If a specific version was requested, search for an exact match, then a global match.
-            let metadata = versions
+            let metadata = if let Some(metadata) = versions
                 .iter()
-                .find(|v| v.version.as_ref() == Some(version))
-                .inspect(|_| {
-                    debug!("Found dependency metadata entry for `{package}=={version}`");
-                })
-                .or_else(|| versions.iter().find(|v| v.version.is_none()))
-                .inspect(|_| {
-                    debug!("Found global metadata entry for `{package}`");
-                });
-            let Some(metadata) = metadata else {
+                .find(|entry| entry.version.as_ref() == Some(version))
+            {
+                metadata
+            } else if let Some(metadata) = versions.iter().find(|entry| entry.version.is_none()) {
+                debug!("Found global metadata entry for `{package}`");
+                metadata
+            } else {
                 warn!("No dependency metadata entry found for `{package}=={version}`");
                 return None;
             };
+            debug!("Found dependency metadata entry for `{package}=={version}`");
 
             Some(ResolutionMetadata {
                 name: metadata.name.clone(),
                 version: version.clone(),
@@ -65,6 +64,7 @@ impl DependencyMetadata {
                 return None;
             };
+            debug!("Found dependency metadata entry for `{package}` (assuming: `{version}`)");
 
             Some(ResolutionMetadata {
                 name: metadata.name.clone(),
                 version,
@@ -86,7 +86,7 @@ impl DependencyMetadata {
 /// <https://packaging.python.org/specifications/core-metadata/>.
 #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
 #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
-#[serde(rename_all = "kebab-case")]
+#[serde(rename_all = "kebab-case", deny_unknown_fields)]
 pub struct StaticMetadata {
     // Mandatory fields
     pub name: PackageName,
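The lookup order above (exact version entry first, then a version-less global entry, otherwise bail) can be seen in isolation in this simplified sketch; the entry type is a stand-in for uv's `StaticMetadata`:

    #[derive(Debug)]
    struct Entry {
        version: Option<u32>, // None marks a "global" entry that applies to any version
        value: &'static str,
    }

    /// Exact match first, then the global fallback, otherwise `None`.
    fn lookup(entries: &[Entry], version: u32) -> Option<&Entry> {
        entries
            .iter()
            .find(|entry| entry.version == Some(version))
            .or_else(|| entries.iter().find(|entry| entry.version.is_none()))
    }

    fn main() {
        let entries = [
            Entry { version: Some(1), value: "exact" },
            Entry { version: None, value: "global" },
        ];
        assert_eq!(lookup(&entries, 1).unwrap().value, "exact");
        assert_eq!(lookup(&entries, 2).unwrap().value, "global");
    }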
@@ -131,11 +131,11 @@ impl DerivationChain {
                 ));
                 let target = edge.source();
                 let extra = match edge.weight() {
-                    Edge::Optional(extra, ..) => Some(extra.clone()),
+                    Edge::Optional(extra) => Some(extra.clone()),
                     _ => None,
                 };
                 let group = match edge.weight() {
-                    Edge::Dev(group, ..) => Some(group.clone()),
+                    Edge::Dev(group) => Some(group.clone()),
                     _ => None,
                 };
                 queue.push_back((target, extra, group, path));
@@ -1,3 +1,4 @@
+use std::borrow::Cow;
 use std::fmt::{self, Display, Formatter};
 use std::str::FromStr;
 
@@ -56,10 +57,7 @@ impl File {
                 .map_err(|err| FileConversionError::RequiresPython(err.line().clone(), err))?,
             size: file.size,
             upload_time_utc_ms: file.upload_time.map(Timestamp::as_millisecond),
-            url: match split_scheme(&file.url) {
-                Some(..) => FileLocation::AbsoluteUrl(UrlString::new(file.url)),
-                None => FileLocation::RelativeUrl(base.clone(), file.url),
-            },
+            url: FileLocation::new(file.url, base),
             yanked: file.yanked,
         })
     }
@@ -76,6 +74,17 @@ pub enum FileLocation {
 }
 
 impl FileLocation {
+    /// Parse a relative or absolute URL on a page with a base URL.
+    ///
+    /// This follows the HTML semantics where a link on a page is resolved relative to the URL of
+    /// that page.
+    pub fn new(url: SmallString, base: &SmallString) -> Self {
+        match split_scheme(&url) {
+            Some(..) => FileLocation::AbsoluteUrl(UrlString::new(url)),
+            None => FileLocation::RelativeUrl(base.clone(), url),
+        }
+    }
+
     /// Convert this location to a URL.
     ///
     /// A relative URL has its base joined to the path. An absolute URL is
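The new `FileLocation::new` only classifies a link as absolute or relative; the actual join happens when the location is converted to a URL. A sketch of the same HTML-style resolution rule using the `url` crate (assumed available, as elsewhere in this codebase):

    use url::Url;

    /// Resolve a link found on an index page against that page's URL,
    /// following HTML semantics: absolute links pass through, relative
    /// links are joined onto the base.
    fn resolve(base: &str, link: &str) -> Result<Url, url::ParseError> {
        let base = Url::parse(base)?;
        // `Url::join` already treats absolute `link` values as replacing the base.
        base.join(link)
    }

    fn main() -> Result<(), url::ParseError> {
        let page = "https://example.org/simple/foo/";
        assert_eq!(
            resolve(page, "foo-1.0-py3-none-any.whl")?.as_str(),
            "https://example.org/simple/foo/foo-1.0-py3-none-any.whl"
        );
        assert_eq!(
            resolve(page, "https://cdn.example.org/foo-1.0-py3-none-any.whl")?.as_str(),
            "https://cdn.example.org/foo-1.0-py3-none-any.whl"
        );
        Ok(())
    }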
@@ -160,16 +169,13 @@ impl UrlString {
             .unwrap_or(self.as_ref())
     }
 
-    /// Return the [`UrlString`] with any fragments removed.
+    /// Return the [`UrlString`] (as a [`Cow`]) with any fragments removed.
     #[must_use]
-    pub fn without_fragment(&self) -> Self {
-        Self(
-            self.as_ref()
-                .split_once('#')
-                .map(|(path, _)| path)
-                .map(SmallString::from)
-                .unwrap_or_else(|| self.0.clone()),
-        )
+    pub fn without_fragment(&self) -> Cow<'_, Self> {
+        self.as_ref()
+            .split_once('#')
+            .map(|(path, _)| Cow::Owned(UrlString(SmallString::from(path))))
+            .unwrap_or(Cow::Borrowed(self))
     }
 }
 
@@ -252,16 +258,17 @@ mod tests {
 
     #[test]
    fn without_fragment() {
+        // Borrows a URL without a fragment
+        let url = UrlString("https://example.com/path".into());
+        assert_eq!(&*url.without_fragment(), &url);
+        assert!(matches!(url.without_fragment(), Cow::Borrowed(_)));
+
+        // Removes the fragment if present on the URL
         let url = UrlString("https://example.com/path?query#fragment".into());
         assert_eq!(
-            url.without_fragment(),
-            UrlString("https://example.com/path?query".into())
+            &*url.without_fragment(),
+            &UrlString("https://example.com/path?query".into())
         );
-
-        let url = UrlString("https://example.com/path#fragment".into());
-        assert_eq!(url.base_str(), "https://example.com/path");
-
-        let url = UrlString("https://example.com/path".into());
-        assert_eq!(url.base_str(), "https://example.com/path");
+        assert!(matches!(url.without_fragment(), Cow::Owned(_)));
     }
 }
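Returning `Cow` here avoids allocating when there is no fragment to strip, which the updated tests assert via `Cow::Borrowed`/`Cow::Owned`. The same pattern on a plain `&str`, as a minimal illustration:

    use std::borrow::Cow;

    /// Strip a `#fragment`, borrowing when the input is already clean.
    fn without_fragment(url: &str) -> Cow<'_, str> {
        match url.split_once('#') {
            Some((path, _)) => Cow::Owned(path.to_string()),
            None => Cow::Borrowed(url),
        }
    }

    fn main() {
        assert!(matches!(without_fragment("https://example.com/a"), Cow::Borrowed(_)));
        assert_eq!(without_fragment("https://example.com/a#frag"), "https://example.com/a");
    }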
@@ -6,11 +6,23 @@ use thiserror::Error;
 
 use uv_auth::{AuthPolicy, Credentials};
 use uv_redacted::DisplaySafeUrl;
+use uv_small_str::SmallString;
 
 use crate::index_name::{IndexName, IndexNameError};
 use crate::origin::Origin;
 use crate::{IndexStatusCodeStrategy, IndexUrl, IndexUrlError, SerializableStatusCode};
 
+/// Cache control configuration for an index.
+#[derive(Debug, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Default)]
+#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
+#[serde(rename_all = "kebab-case")]
+pub struct IndexCacheControl {
+    /// Cache control header for Simple API requests.
+    pub api: Option<SmallString>,
+    /// Cache control header for file downloads.
+    pub files: Option<SmallString>,
+}
+
 #[derive(Debug, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
 #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
 #[serde(rename_all = "kebab-case")]
@@ -104,6 +116,19 @@ pub struct Index {
     /// ```
     #[serde(default)]
     pub ignore_error_codes: Option<Vec<SerializableStatusCode>>,
+    /// Cache control configuration for this index.
+    ///
+    /// When set, these headers will override the server's cache control headers
+    /// for both package metadata requests and artifact downloads.
+    ///
+    /// ```toml
+    /// [[tool.uv.index]]
+    /// name = "my-index"
+    /// url = "https://<omitted>/simple"
+    /// cache-control = { api = "max-age=600", files = "max-age=3600" }
+    /// ```
+    #[serde(default)]
+    pub cache_control: Option<IndexCacheControl>,
 }
 
 #[derive(
@@ -142,6 +167,7 @@ impl Index {
             publish_url: None,
             authenticate: AuthPolicy::default(),
             ignore_error_codes: None,
+            cache_control: None,
         }
     }
 
@@ -157,6 +183,7 @@ impl Index {
             publish_url: None,
             authenticate: AuthPolicy::default(),
             ignore_error_codes: None,
+            cache_control: None,
         }
     }
 
@@ -172,6 +199,7 @@ impl Index {
             publish_url: None,
             authenticate: AuthPolicy::default(),
             ignore_error_codes: None,
+            cache_control: None,
         }
     }
 
@@ -250,6 +278,7 @@ impl From<IndexUrl> for Index {
             publish_url: None,
             authenticate: AuthPolicy::default(),
             ignore_error_codes: None,
+            cache_control: None,
         }
     }
 }
@@ -273,6 +302,7 @@ impl FromStr for Index {
             publish_url: None,
             authenticate: AuthPolicy::default(),
             ignore_error_codes: None,
+            cache_control: None,
         });
     }
 
@@ -289,6 +319,7 @@ impl FromStr for Index {
             publish_url: None,
             authenticate: AuthPolicy::default(),
             ignore_error_codes: None,
+            cache_control: None,
         })
     }
 }
@@ -384,3 +415,55 @@ pub enum IndexSourceError {
     #[error("Index included a name, but the name was empty")]
     EmptyName,
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_index_cache_control_headers() {
+        // Test that cache control headers are properly parsed from TOML
+        let toml_str = r#"
+        name = "test-index"
+        url = "https://test.example.com/simple"
+        cache-control = { api = "max-age=600", files = "max-age=3600" }
+        "#;
+
+        let index: Index = toml::from_str(toml_str).unwrap();
+        assert_eq!(index.name.as_ref().unwrap().as_ref(), "test-index");
+        assert!(index.cache_control.is_some());
+        let cache_control = index.cache_control.as_ref().unwrap();
+        assert_eq!(cache_control.api.as_deref(), Some("max-age=600"));
+        assert_eq!(cache_control.files.as_deref(), Some("max-age=3600"));
+    }
+
+    #[test]
+    fn test_index_without_cache_control() {
+        // Test that indexes work without cache control headers
+        let toml_str = r#"
+        name = "test-index"
+        url = "https://test.example.com/simple"
+        "#;
+
+        let index: Index = toml::from_str(toml_str).unwrap();
+        assert_eq!(index.name.as_ref().unwrap().as_ref(), "test-index");
+        assert_eq!(index.cache_control, None);
+    }
+
+    #[test]
+    fn test_index_partial_cache_control() {
+        // Test that cache control can have just one field
+        let toml_str = r#"
+        name = "test-index"
+        url = "https://test.example.com/simple"
+        cache-control = { api = "max-age=300" }
+        "#;
+
+        let index: Index = toml::from_str(toml_str).unwrap();
+        assert_eq!(index.name.as_ref().unwrap().as_ref(), "test-index");
+        assert!(index.cache_control.is_some());
+        let cache_control = index.cache_control.as_ref().unwrap();
+        assert_eq!(cache_control.api.as_deref(), Some("max-age=300"));
+        assert_eq!(cache_control.files, None);
+    }
+}
@@ -12,6 +12,7 @@ use url::{ParseError, Url};
 
 use uv_pep508::{Scheme, VerbatimUrl, VerbatimUrlError, split_scheme};
 use uv_redacted::DisplaySafeUrl;
+use uv_warnings::warn_user;
 
 use crate::{Index, IndexStatusCodeStrategy, Verbatim};
 
@@ -38,33 +39,8 @@ impl IndexUrl {
     /// If no root directory is provided, relative paths are resolved against the current working
     /// directory.
     pub fn parse(path: &str, root_dir: Option<&Path>) -> Result<Self, IndexUrlError> {
-        let url = match split_scheme(path) {
-            Some((scheme, ..)) => {
-                match Scheme::parse(scheme) {
-                    Some(_) => {
-                        // Ex) `https://pypi.org/simple`
-                        VerbatimUrl::parse_url(path)?
-                    }
-                    None => {
-                        // Ex) `C:\Users\user\index`
-                        if let Some(root_dir) = root_dir {
-                            VerbatimUrl::from_path(path, root_dir)?
-                        } else {
-                            VerbatimUrl::from_absolute_path(std::path::absolute(path)?)?
-                        }
-                    }
-                }
-            }
-            None => {
-                // Ex) `/Users/user/index`
-                if let Some(root_dir) = root_dir {
-                    VerbatimUrl::from_path(path, root_dir)?
-                } else {
-                    VerbatimUrl::from_absolute_path(std::path::absolute(path)?)?
-                }
-            }
-        };
-        Ok(Self::from(url.with_given(path)))
+        let url = VerbatimUrl::from_url_or_path(path, root_dir)?;
+        Ok(Self::from(url))
     }
 
     /// Return the root [`Url`] of the index, if applicable.
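The helper the new code delegates to bundles the scheme checks that were previously inlined. The decision rule, sketched standalone with std only (this illustrates the logic; it is not uv's actual `VerbatimUrl::from_url_or_path`):

    use std::path::Path;

    enum Parsed {
        Url(String),
        Path(std::path::PathBuf),
    }

    /// Treat the input as a URL only when it has a plausible scheme;
    /// a Windows drive prefix like `C:\` is a path, not a scheme.
    fn parse_url_or_path(input: &str, root_dir: &Path) -> Parsed {
        match input.split_once("://") {
            Some((scheme, _))
                if scheme.chars().all(|c| c.is_ascii_alphanumeric() || "+-.".contains(c)) =>
            {
                Parsed::Url(input.to_string())
            }
            // `join` keeps absolute paths as-is and anchors relative ones.
            _ => Parsed::Path(root_dir.join(input)),
        }
    }

    fn main() {
        assert!(matches!(parse_url_or_path("https://pypi.org/simple", Path::new(".")), Parsed::Url(_)));
        assert!(matches!(parse_url_or_path(r"C:\Users\user\index", Path::new(".")), Parsed::Path(_)));
        assert!(matches!(parse_url_or_path("/Users/user/index", Path::new(".")), Parsed::Path(_)));
    }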
@@ -92,20 +68,15 @@ impl IndexUrl {
 
 #[cfg(feature = "schemars")]
 impl schemars::JsonSchema for IndexUrl {
-    fn schema_name() -> String {
-        "IndexUrl".to_string()
+    fn schema_name() -> Cow<'static, str> {
+        Cow::Borrowed("IndexUrl")
     }
 
-    fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
-        schemars::schema::SchemaObject {
-            instance_type: Some(schemars::schema::InstanceType::String.into()),
-            metadata: Some(Box::new(schemars::schema::Metadata {
-                description: Some("The URL of an index to use for fetching packages (e.g., `https://pypi.org/simple`), or a local path.".to_string()),
-                ..schemars::schema::Metadata::default()
-            })),
-            ..schemars::schema::SchemaObject::default()
-        }
-        .into()
+    fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
+        schemars::json_schema!({
+            "type": "string",
+            "description": "The URL of an index to use for fetching packages (e.g., `https://pypi.org/simple`), or a local path."
+        })
     }
 }
 
@@ -140,6 +111,30 @@ impl IndexUrl {
             Cow::Owned(url)
         }
     }
+
+    /// Warn user if the given URL was provided as an ambiguous relative path.
+    ///
+    /// This is a temporary warning. Ambiguous values will not be
+    /// accepted in the future.
+    pub fn warn_on_disambiguated_relative_path(&self) {
+        let Self::Path(verbatim_url) = &self else {
+            return;
+        };
+
+        if let Some(path) = verbatim_url.given() {
+            if !is_disambiguated_path(path) {
+                if cfg!(windows) {
+                    warn_user!(
+                        "Relative paths passed to `--index` or `--default-index` should be disambiguated from index names (use `.\\{path}` or `./{path}`). Support for ambiguous values will be removed in the future"
+                    );
+                } else {
+                    warn_user!(
+                        "Relative paths passed to `--index` or `--default-index` should be disambiguated from index names (use `./{path}`). Support for ambiguous values will be removed in the future"
+                    );
+                }
+            }
+        }
+    }
 }
 
 impl Display for IndexUrl {
@@ -162,6 +157,28 @@ impl Verbatim for IndexUrl {
     }
 }
 
+/// Checks if a path is disambiguated.
+///
+/// Disambiguated paths are absolute paths, paths with valid schemes,
+/// and paths starting with "./" or "../" on Unix or ".\\", "..\\",
+/// "./", or "../" on Windows.
+fn is_disambiguated_path(path: &str) -> bool {
+    if cfg!(windows) {
+        if path.starts_with(".\\") || path.starts_with("..\\") || path.starts_with('/') {
+            return true;
+        }
+    }
+    if path.starts_with("./") || path.starts_with("../") || Path::new(path).is_absolute() {
+        return true;
+    }
+    // Check if the path has a scheme (like `file://`)
+    if let Some((scheme, _)) = split_scheme(path) {
+        return Scheme::parse(scheme).is_some();
+    }
+    // This is an ambiguous relative path
+    false
+}
+
 /// An error that can occur when parsing an [`IndexUrl`].
 #[derive(Error, Debug)]
 pub enum IndexUrlError {
@@ -186,7 +203,11 @@ impl serde::ser::Serialize for IndexUrl {
     where
         S: serde::ser::Serializer,
     {
-        self.to_string().serialize(serializer)
+        match self {
+            Self::Pypi(url) => url.without_credentials().serialize(serializer),
+            Self::Url(url) => url.without_credentials().serialize(serializer),
+            Self::Path(url) => url.without_credentials().serialize(serializer),
+        }
     }
 }
 
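Serializing through `without_credentials` keeps usernames and passwords out of lockfiles and other serialized settings. With the `url` crate, stripping looks roughly like this (a sketch of the idea; uv's `DisplaySafeUrl` and `VerbatimUrl` wrap this kind of handling):

    use url::Url;

    /// Return a copy of the URL with any userinfo removed.
    fn without_credentials(url: &Url) -> Url {
        let mut stripped = url.clone();
        // Both setters can fail for URLs that cannot carry userinfo; ignore in that case.
        let _ = stripped.set_username("");
        let _ = stripped.set_password(None);
        stripped
    }

    fn main() {
        let url = Url::parse("https://user:secret@example.com/simple").unwrap();
        assert_eq!(
            without_credentials(&url).as_str(),
            "https://example.com/simple"
        );
    }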
@@ -379,14 +400,17 @@ impl<'a> IndexLocations {
     ///
     /// This includes explicit indexes, implicit indexes, flat indexes, and the default index.
     ///
-    /// The indexes will be returned in the order in which they were defined, such that the
-    /// last-defined index is the last item in the vector.
+    /// The indexes will be returned in the reverse of the order in which they were defined, such
+    /// that the last-defined index is the first item in the vector.
     pub fn allowed_indexes(&'a self) -> Vec<&'a Index> {
         if self.no_index {
             self.flat_index.iter().rev().collect()
         } else {
             let mut indexes = vec![];
 
+            // TODO(charlie): By only yielding the first default URL, we'll drop credentials if,
+            // e.g., an authenticated default URL is provided in a configuration file, but an
+            // unauthenticated default URL is present in the receipt.
             let mut seen = FxHashSet::default();
             let mut default = false;
             for index in {
@@ -411,6 +435,59 @@ impl<'a> IndexLocations {
             indexes
         }
     }
+
+    /// Return a vector containing all known [`Index`] entries.
+    ///
+    /// This includes explicit indexes, implicit indexes, flat indexes, and default indexes;
+    /// in short, it includes all defined indexes, even if they're overridden by some other index
+    /// definition.
+    ///
+    /// The indexes will be returned in the reverse of the order in which they were defined, such
+    /// that the last-defined index is the first item in the vector.
+    pub fn known_indexes(&'a self) -> impl Iterator<Item = &'a Index> {
+        if self.no_index {
+            Either::Left(self.flat_index.iter().rev())
+        } else {
+            Either::Right(
+                std::iter::once(&*DEFAULT_INDEX)
+                    .chain(self.flat_index.iter().rev())
+                    .chain(self.indexes.iter().rev()),
+            )
+        }
+    }
+
+    /// Add all authenticated sources to the cache.
+    pub fn cache_index_credentials(&self) {
+        for index in self.known_indexes() {
+            if let Some(credentials) = index.credentials() {
+                let credentials = Arc::new(credentials);
+                uv_auth::store_credentials(index.raw_url(), credentials.clone());
+                if let Some(root_url) = index.root_url() {
+                    uv_auth::store_credentials(&root_url, credentials.clone());
+                }
+            }
+        }
+    }
+
+    /// Return the Simple API cache control header for an [`IndexUrl`], if configured.
+    pub fn simple_api_cache_control_for(&self, url: &IndexUrl) -> Option<&str> {
+        for index in &self.indexes {
+            if index.url() == url {
+                return index.cache_control.as_ref()?.api.as_deref();
+            }
+        }
+        None
+    }
+
+    /// Return the artifact cache control header for an [`IndexUrl`], if configured.
+    pub fn artifact_cache_control_for(&self, url: &IndexUrl) -> Option<&str> {
+        for index in &self.indexes {
+            if index.url() == url {
+                return index.cache_control.as_ref()?.files.as_deref();
+            }
+        }
+        None
+    }
 }
 
 impl From<&IndexLocations> for uv_auth::Indexes {
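`known_indexes` above returns different iterator types from its two branches by wrapping each in `Either`, which implements `Iterator` when both sides do. The shape of that trick in isolation, assuming the `either` crate (itertools re-exports the same type):

    use either::Either;

    /// One opaque return type, two different underlying iterators.
    fn evens_or_all(only_evens: bool, items: &[u32]) -> impl Iterator<Item = &u32> {
        if only_evens {
            Either::Left(items.iter().filter(|n| *n % 2 == 0))
        } else {
            Either::Right(items.iter())
        }
    }

    fn main() {
        let items = [1, 2, 3, 4];
        assert_eq!(evens_or_all(true, &items).count(), 2);
        assert_eq!(evens_or_all(false, &items).count(), 4);
    }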
@@ -511,30 +588,23 @@ impl<'a> IndexUrls {
     /// iterator.
     pub fn defined_indexes(&'a self) -> impl Iterator<Item = &'a Index> + 'a {
         if self.no_index {
-            Either::Left(std::iter::empty())
-        } else {
-            Either::Right(
-                {
-                    let mut seen = FxHashSet::default();
-                    self.indexes
-                        .iter()
-                        .filter(move |index| {
-                            index.name.as_ref().is_none_or(|name| seen.insert(name))
-                        })
-                        .filter(|index| !index.default)
-                }
-                .chain({
-                    let mut seen = FxHashSet::default();
-                    self.indexes
-                        .iter()
-                        .filter(move |index| {
-                            index.name.as_ref().is_none_or(|name| seen.insert(name))
-                        })
-                        .find(|index| index.default)
-                        .into_iter()
-                }),
-            )
+            return Either::Left(std::iter::empty());
         }
+
+        let mut seen = FxHashSet::default();
+        let (non_default, default) = self
+            .indexes
+            .iter()
+            .filter(move |index| {
+                if let Some(name) = &index.name {
+                    seen.insert(name)
+                } else {
+                    true
+                }
+            })
+            .partition::<Vec<_>, _>(|index| !index.default);
+
+        Either::Right(non_default.into_iter().chain(default))
     }
 
     /// Return the `--no-index` flag.
@@ -551,6 +621,26 @@ impl<'a> IndexUrls {
         }
         IndexStatusCodeStrategy::Default
     }
+
+    /// Return the Simple API cache control header for an [`IndexUrl`], if configured.
+    pub fn simple_api_cache_control_for(&self, url: &IndexUrl) -> Option<&str> {
+        for index in &self.indexes {
+            if index.url() == url {
+                return index.cache_control.as_ref()?.api.as_deref();
+            }
+        }
+        None
+    }
+
+    /// Return the artifact cache control header for an [`IndexUrl`], if configured.
+    pub fn artifact_cache_control_for(&self, url: &IndexUrl) -> Option<&str> {
+        for index in &self.indexes {
+            if index.url() == url {
+                return index.cache_control.as_ref()?.files.as_deref();
+            }
+        }
+        None
+    }
 }
 
 bitflags::bitflags! {
@@ -632,3 +722,101 @@ impl IndexCapabilities {
             .insert(Flags::FORBIDDEN);
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_index_url_parse_valid_paths() {
+        // Absolute path
+        assert!(is_disambiguated_path("/absolute/path"));
+        // Relative path
+        assert!(is_disambiguated_path("./relative/path"));
+        assert!(is_disambiguated_path("../../relative/path"));
+        if cfg!(windows) {
+            // Windows absolute path
+            assert!(is_disambiguated_path("C:/absolute/path"));
+            // Windows relative path
+            assert!(is_disambiguated_path(".\\relative\\path"));
+            assert!(is_disambiguated_path("..\\..\\relative\\path"));
+        }
+    }
+
+    #[test]
+    fn test_index_url_parse_ambiguous_paths() {
+        // Test single-segment ambiguous path
+        assert!(!is_disambiguated_path("index"));
+        // Test multi-segment ambiguous path
+        assert!(!is_disambiguated_path("relative/path"));
+    }
+
+    #[test]
+    fn test_index_url_parse_with_schemes() {
+        assert!(is_disambiguated_path("file:///absolute/path"));
+        assert!(is_disambiguated_path("https://registry.com/simple/"));
+        assert!(is_disambiguated_path(
+            "git+https://github.com/example/repo.git"
+        ));
+    }
+
+    #[test]
+    fn test_cache_control_lookup() {
+        use std::str::FromStr;
+
+        use uv_small_str::SmallString;
+
+        use crate::IndexFormat;
+        use crate::index_name::IndexName;
+
+        let indexes = vec![
+            Index {
+                name: Some(IndexName::from_str("index1").unwrap()),
+                url: IndexUrl::from_str("https://index1.example.com/simple").unwrap(),
+                cache_control: Some(crate::IndexCacheControl {
+                    api: Some(SmallString::from("max-age=300")),
+                    files: Some(SmallString::from("max-age=1800")),
+                }),
+                explicit: false,
+                default: false,
+                origin: None,
+                format: IndexFormat::Simple,
+                publish_url: None,
+                authenticate: uv_auth::AuthPolicy::default(),
+                ignore_error_codes: None,
+            },
+            Index {
+                name: Some(IndexName::from_str("index2").unwrap()),
+                url: IndexUrl::from_str("https://index2.example.com/simple").unwrap(),
+                cache_control: None,
+                explicit: false,
+                default: false,
+                origin: None,
+                format: IndexFormat::Simple,
+                publish_url: None,
+                authenticate: uv_auth::AuthPolicy::default(),
+                ignore_error_codes: None,
+            },
+        ];
+
+        let index_urls = IndexUrls::from_indexes(indexes);
+
+        let url1 = IndexUrl::from_str("https://index1.example.com/simple").unwrap();
+        assert_eq!(
+            index_urls.simple_api_cache_control_for(&url1),
+            Some("max-age=300")
+        );
+        assert_eq!(
+            index_urls.artifact_cache_control_for(&url1),
+            Some("max-age=1800")
+        );
+
+        let url2 = IndexUrl::from_str("https://index2.example.com/simple").unwrap();
+        assert_eq!(index_urls.simple_api_cache_control_for(&url2), None);
+        assert_eq!(index_urls.artifact_cache_control_for(&url2), None);
+
+        let url3 = IndexUrl::from_str("https://index3.example.com/simple").unwrap();
+        assert_eq!(index_urls.simple_api_cache_control_for(&url3), None);
+        assert_eq!(index_urls.artifact_cache_control_for(&url3), None);
+    }
+}
@@ -365,7 +365,7 @@ impl InstalledDist {
     pub fn installer(&self) -> Result<Option<String>, InstalledDistError> {
         let path = self.install_path().join("INSTALLER");
         match fs::read_to_string(path) {
-            Ok(installer) => Ok(Some(installer)),
+            Ok(installer) => Ok(Some(installer.trim().to_owned())),
             Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
             Err(err) => Err(err.into()),
         }
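The added `.trim()` matters because installers conventionally write the `INSTALLER` file with a trailing newline, which would otherwise leak into comparisons such as `installer == "uv"`. In isolation:

    fn main() {
        // What pip or uv typically writes into dist-info/INSTALLER.
        let raw = "uv\n";
        assert_ne!(raw, "uv");        // comparison fails with the newline attached
        assert_eq!(raw.trim(), "uv"); // trimmed value matches as intended
    }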
@@ -343,9 +343,9 @@ pub struct DirectorySourceDist {
     /// The absolute path to the distribution which we use for installing.
     pub install_path: Box<Path>,
     /// Whether the package should be installed in editable mode.
-    pub editable: bool,
+    pub editable: Option<bool>,
     /// Whether the package should be built and installed.
-    pub r#virtual: bool,
+    pub r#virtual: Option<bool>,
     /// The URL as it was provided by the user.
     pub url: VerbatimUrl,
 }
@@ -452,8 +452,8 @@ impl Dist {
         name: PackageName,
         url: VerbatimUrl,
         install_path: &Path,
-        editable: bool,
-        r#virtual: bool,
+        editable: Option<bool>,
+        r#virtual: Option<bool>,
     ) -> Result<Dist, Error> {
         // Convert to an absolute path.
         let install_path = path::absolute(install_path)?;
@@ -655,7 +655,7 @@ impl SourceDist {
     /// Returns `true` if the distribution is editable.
     pub fn is_editable(&self) -> bool {
         match self {
-            Self::Directory(DirectorySourceDist { editable, .. }) => *editable,
+            Self::Directory(DirectorySourceDist { editable, .. }) => editable.unwrap_or(false),
             _ => false,
         }
     }
@@ -663,7 +663,7 @@ impl SourceDist {
     /// Returns `true` if the distribution is virtual.
     pub fn is_virtual(&self) -> bool {
         match self {
-            Self::Directory(DirectorySourceDist { r#virtual, .. }) => *r#virtual,
+            Self::Directory(DirectorySourceDist { r#virtual, .. }) => r#virtual.unwrap_or(false),
             _ => false,
         }
    }
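Switching `editable` and `r#virtual` from `bool` to `Option<bool>` gives these fields a tri-state: explicitly on, explicitly off, or unspecified (left to be decided later by defaults). The accessors above collapse the unspecified case with `unwrap_or(false)`. Sketched on its own:

    #[derive(Default)]
    struct Directory {
        editable: Option<bool>, // None: the user did not say either way
    }

    impl Directory {
        fn is_editable(&self) -> bool {
            // Only an explicit `Some(true)` counts as editable.
            self.editable.unwrap_or(false)
        }
    }

    fn main() {
        assert!(!Directory { editable: None }.is_editable());
        assert!(!Directory { editable: Some(false) }.is_editable());
        assert!(Directory { editable: Some(true) }.is_editable());
    }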
@@ -3,6 +3,8 @@
 //! flags set.
 
 use serde::{Deserialize, Deserializer, Serialize};
+#[cfg(feature = "schemars")]
+use std::borrow::Cow;
 use std::path::Path;
 
 use crate::{Index, IndexUrl};
@@ -50,14 +52,14 @@ macro_rules! impl_index {
 
         #[cfg(feature = "schemars")]
         impl schemars::JsonSchema for $name {
-            fn schema_name() -> String {
+            fn schema_name() -> Cow<'static, str> {
                 IndexUrl::schema_name()
             }
 
             fn json_schema(
-                r#gen: &mut schemars::r#gen::SchemaGenerator,
-            ) -> schemars::schema::Schema {
-                IndexUrl::json_schema(r#gen)
+                generator: &mut schemars::generate::SchemaGenerator,
+            ) -> schemars::Schema {
+                IndexUrl::json_schema(generator)
             }
         }
     };
@@ -91,13 +91,21 @@ impl CompatibleDist<'_> {
         }
     }
 
+    // For installable distributions, return the prioritized distribution it was derived from.
+    pub fn prioritized(&self) -> Option<&PrioritizedDist> {
+        match self {
+            CompatibleDist::InstalledDist(_) => None,
+            CompatibleDist::SourceDist { prioritized, .. }
+            | CompatibleDist::CompatibleWheel { prioritized, .. }
+            | CompatibleDist::IncompatibleWheel { prioritized, .. } => Some(prioritized),
+        }
+    }
+
     /// Return the set of supported platform the distribution, in terms of their markers.
     pub fn implied_markers(&self) -> MarkerTree {
-        match self {
-            CompatibleDist::InstalledDist(_) => MarkerTree::TRUE,
-            CompatibleDist::SourceDist { prioritized, .. } => prioritized.0.markers,
-            CompatibleDist::CompatibleWheel { prioritized, .. } => prioritized.0.markers,
-            CompatibleDist::IncompatibleWheel { prioritized, .. } => prioritized.0.markers,
+        match self.prioritized() {
+            Some(prioritized) => prioritized.0.markers,
+            None => MarkerTree::TRUE,
         }
     }
 }
@@ -429,9 +429,9 @@ pub enum RequirementSource {
         /// The absolute path to the distribution which we use for installing.
         install_path: Box<Path>,
         /// For a source tree (a directory), whether to install as an editable.
-        editable: bool,
+        editable: Option<bool>,
         /// For a source tree (a directory), whether the project should be built and installed.
-        r#virtual: bool,
+        r#virtual: Option<bool>,
         /// The PEP 508 style URL in the format
         /// `file:///<path>#subdirectory=<subdirectory>`.
         url: VerbatimUrl,
@@ -545,7 +545,13 @@ impl RequirementSource {
 
     /// Returns `true` if the source is editable.
     pub fn is_editable(&self) -> bool {
-        matches!(self, Self::Directory { editable: true, .. })
+        matches!(
+            self,
+            Self::Directory {
+                editable: Some(true),
+                ..
+            }
+        )
     }
 
     /// Returns `true` if the source is empty.
@@ -792,11 +798,11 @@ impl From<RequirementSource> for RequirementSourceWire {
                 r#virtual,
                 url: _,
             } => {
-                if editable {
+                if editable.unwrap_or(false) {
                     Self::Editable {
                         editable: PortablePathBuf::from(install_path),
                     }
-                } else if r#virtual {
+                } else if r#virtual.unwrap_or(false) {
                     Self::Virtual {
                         r#virtual: PortablePathBuf::from(install_path),
                     }
@@ -908,8 +914,8 @@ impl TryFrom<RequirementSourceWire> for RequirementSource {
                 ))?;
                 Ok(Self::Directory {
                     install_path: directory,
-                    editable: false,
-                    r#virtual: false,
+                    editable: Some(false),
+                    r#virtual: Some(false),
                     url,
                 })
             }
@@ -920,8 +926,8 @@ impl TryFrom<RequirementSourceWire> for RequirementSource {
                 ))?;
                 Ok(Self::Directory {
                     install_path: editable,
-                    editable: true,
-                    r#virtual: false,
+                    editable: Some(true),
+                    r#virtual: Some(false),
                     url,
                 })
             }
@@ -932,8 +938,8 @@ impl TryFrom<RequirementSourceWire> for RequirementSource {
                 ))?;
                 Ok(Self::Directory {
                     install_path: r#virtual,
-                    editable: false,
-                    r#virtual: true,
+                    editable: Some(false),
+                    r#virtual: Some(true),
                     url,
                 })
             }
@@ -980,8 +986,8 @@ mod tests {
             marker: MarkerTree::TRUE,
             source: RequirementSource::Directory {
                 install_path: PathBuf::from(path).into_boxed_path(),
-                editable: false,
-                r#virtual: false,
+                editable: Some(false),
+                r#virtual: Some(false),
                 url: VerbatimUrl::from_absolute_path(path).unwrap(),
             },
             origin: None,
@@ -66,15 +66,8 @@ impl RequiresPython {
     ) -> Option<Self> {
         // Convert to PubGrub range and perform an intersection.
         let range = specifiers
             .into_iter()
-            .map(|specifier| release_specifiers_to_ranges(specifier.clone()))
-            .fold(None, |range: Option<Ranges<Version>>, requires_python| {
-                if let Some(range) = range {
-                    Some(range.intersection(&requires_python))
-                } else {
-                    Some(requires_python)
-                }
-            })?;
+            .map(|specs| release_specifiers_to_ranges(specs.clone()))
+            .reduce(|acc, r| acc.intersection(&r))?;
 
         // If the intersection is empty, return `None`.
         if range.is_empty() {
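`Iterator::reduce` replaces the explicit `fold(None, …)` accumulator: it seeds with the first element and returns `None` for an empty iterator, which is exactly the behavior the old code hand-rolled. With simple closed intervals standing in for version ranges:

    /// Intersect two inclusive intervals; an inverted result means "empty".
    fn intersect(a: (u32, u32), b: (u32, u32)) -> (u32, u32) {
        (a.0.max(b.0), a.1.min(b.1))
    }

    fn main() {
        let specifiers = [(1, 10), (3, 12), (2, 8)];
        let range = specifiers.into_iter().reduce(intersect);
        assert_eq!(range, Some((3, 8)));

        // An empty input yields `None`, mirroring the `?` in the diff.
        let empty: [(u32, u32); 0] = [];
        assert_eq!(empty.into_iter().reduce(intersect), None);
    }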
@@ -1,6 +1,5 @@
 use uv_distribution_filename::DistExtension;
 use uv_normalize::{ExtraName, GroupName, PackageName};
-use uv_pep508::MarkerTree;
 use uv_pypi_types::{HashDigest, HashDigests};
 
 use crate::{
@@ -202,12 +201,12 @@ impl Node {
     }
 }
 
-/// An edge in the resolution graph, along with the marker that must be satisfied to traverse it.
+/// An edge in the resolution graph.
 #[derive(Debug, Clone)]
 pub enum Edge {
-    Prod(MarkerTree),
-    Optional(ExtraName, MarkerTree),
-    Dev(GroupName, MarkerTree),
+    Prod,
+    Optional(ExtraName),
+    Dev(GroupName),
 }
 
 impl From<&ResolvedDist> for RequirementSource {
@@ -1,3 +1,5 @@
+#[cfg(feature = "schemars")]
+use std::borrow::Cow;
 use std::ops::Deref;
 
 use http::StatusCode;
@@ -136,17 +138,17 @@ impl<'de> Deserialize<'de> for SerializableStatusCode {
 
 #[cfg(feature = "schemars")]
 impl schemars::JsonSchema for SerializableStatusCode {
-    fn schema_name() -> String {
-        "StatusCode".to_string()
+    fn schema_name() -> Cow<'static, str> {
+        Cow::Borrowed("StatusCode")
     }
 
-    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
-        let mut schema = r#gen.subschema_for::<u16>().into_object();
-        schema.metadata().description = Some("HTTP status code (100-599)".to_string());
-        schema.number().minimum = Some(100.0);
-        schema.number().maximum = Some(599.0);
-
-        schema.into()
+    fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
+        schemars::json_schema!({
+            "type": "number",
+            "minimum": 100,
+            "maximum": 599,
+            "description": "HTTP status code (100-599)"
+        })
    }
 }
 
@@ -60,7 +60,7 @@ zip = { workspace = true }
 
 [dev-dependencies]
 indoc = { workspace = true }
-insta = { version = "1.40.0", features = ["filters", "json", "redactions"] }
+insta = { workspace = true }
 
 [features]
 default = []
@@ -20,8 +20,7 @@ use uv_client::{
 };
 use uv_distribution_filename::WheelFilename;
 use uv_distribution_types::{
-    BuildableSource, BuiltDist, Dist, FileLocation, HashPolicy, Hashed, InstalledDist, Name,
-    SourceDist,
+    BuildableSource, BuiltDist, Dist, HashPolicy, Hashed, IndexUrl, InstalledDist, Name, SourceDist,
 };
 use uv_extract::hash::Hasher;
 use uv_fs::write_atomic;
@@ -179,12 +178,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
         match dist {
             BuiltDist::Registry(wheels) => {
                 let wheel = wheels.best_wheel();
-                let url = match &wheel.file.url {
-                    FileLocation::RelativeUrl(base, url) => {
-                        uv_pypi_types::base_url_join_relative(base, url)?
-                    }
-                    FileLocation::AbsoluteUrl(url) => url.to_url()?,
-                };
+                let url = wheel.file.url.to_url()?;
 
                 // Create a cache entry for the wheel.
                 let wheel_entry = self.build_context.cache().entry(
@@ -207,6 +201,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
                 match self
                     .stream_wheel(
                         url.clone(),
+                        dist.index(),
                         &wheel.filename,
                         wheel.file.size,
                         &wheel_entry,
@@ -242,6 +237,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
                 let archive = self
                     .download_wheel(
                         url,
+                        dist.index(),
                         &wheel.filename,
                         wheel.file.size,
                         &wheel_entry,
@@ -278,6 +274,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
                 match self
                     .stream_wheel(
                         wheel.url.raw().clone(),
+                        None,
                         &wheel.filename,
                         None,
                         &wheel_entry,
@@ -307,6 +304,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
                 let archive = self
                     .download_wheel(
                         wheel.url.raw().clone(),
+                        None,
                         &wheel.filename,
                         None,
                         &wheel_entry,
@@ -540,6 +538,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
     async fn stream_wheel(
         &self,
         url: DisplaySafeUrl,
+        index: Option<&IndexUrl>,
         filename: &WheelFilename,
         size: Option<u64>,
         wheel_entry: &CacheEntry,
@@ -622,13 +621,24 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
         // Fetch the archive from the cache, or download it if necessary.
         let req = self.request(url.clone())?;
 
+        // Determine the cache control policy for the URL.
         let cache_control = match self.client.unmanaged.connectivity() {
-            Connectivity::Online => CacheControl::from(
-                self.build_context
-                    .cache()
-                    .freshness(&http_entry, Some(&filename.name), None)
-                    .map_err(Error::CacheRead)?,
-            ),
+            Connectivity::Online => {
+                if let Some(header) = index.and_then(|index| {
+                    self.build_context
+                        .locations()
+                        .artifact_cache_control_for(index)
+                }) {
+                    CacheControl::Override(header)
+                } else {
+                    CacheControl::from(
+                        self.build_context
+                            .cache()
+                            .freshness(&http_entry, Some(&filename.name), None)
+                            .map_err(Error::CacheRead)?,
+                    )
+                }
+            }
             Connectivity::Offline => CacheControl::AllowStale,
         };
 
@@ -660,7 +670,12 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
             .managed(async |client| {
                 client
                     .cached_client()
-                    .skip_cache_with_retry(self.request(url)?, &http_entry, download)
+                    .skip_cache_with_retry(
+                        self.request(url)?,
+                        &http_entry,
+                        cache_control,
+                        download,
+                    )
                     .await
                    .map_err(|err| match err {
                        CachedClientError::Callback { err, .. } => err,
@@ -677,6 +692,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
     async fn download_wheel(
         &self,
         url: DisplaySafeUrl,
+        index: Option<&IndexUrl>,
         filename: &WheelFilename,
         size: Option<u64>,
         wheel_entry: &CacheEntry,
@@ -789,13 +805,24 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
         // Fetch the archive from the cache, or download it if necessary.
         let req = self.request(url.clone())?;
 
+        // Determine the cache control policy for the URL.
         let cache_control = match self.client.unmanaged.connectivity() {
-            Connectivity::Online => CacheControl::from(
-                self.build_context
-                    .cache()
-                    .freshness(&http_entry, Some(&filename.name), None)
-                    .map_err(Error::CacheRead)?,
-            ),
+            Connectivity::Online => {
+                if let Some(header) = index.and_then(|index| {
+                    self.build_context
+                        .locations()
+                        .artifact_cache_control_for(index)
+                }) {
+                    CacheControl::Override(header)
+                } else {
+                    CacheControl::from(
+                        self.build_context
+                            .cache()
+                            .freshness(&http_entry, Some(&filename.name), None)
+                            .map_err(Error::CacheRead)?,
+                    )
+                }
+            }
             Connectivity::Offline => CacheControl::AllowStale,
         };
 
@@ -827,7 +854,12 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
             .managed(async |client| {
                 client
                     .cached_client()
-                    .skip_cache_with_retry(self.request(url)?, &http_entry, download)
+                    .skip_cache_with_retry(
+                        self.request(url)?,
+                        &http_entry,
+                        cache_control,
+                        download,
+                    )
                    .await
                    .map_err(|err| match err {
                        CachedClientError::Callback { err, .. } => err,
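Both wheel paths now pick their cache policy the same way: a per-index `cache-control` override wins, otherwise freshness is derived from the cache's own bookkeeping, and offline mode serves whatever is on disk. The decision tree, reduced to plain types (names here are illustrative, not uv's API):

    enum CachePolicy<'a> {
        Override(&'a str), // header configured on the index
        Freshness,         // derive from cached response headers
        AllowStale,        // offline: serve whatever we have
    }

    fn choose<'a>(online: bool, index_header: Option<&'a str>) -> CachePolicy<'a> {
        if !online {
            return CachePolicy::AllowStale;
        }
        match index_header {
            Some(header) => CachePolicy::Override(header),
            None => CachePolicy::Freshness,
        }
    }

    fn main() {
        assert!(matches!(choose(false, Some("max-age=3600")), CachePolicy::AllowStale));
        assert!(matches!(choose(true, Some("max-age=3600")), CachePolicy::Override(_)));
        assert!(matches!(choose(true, None), CachePolicy::Freshness));
    }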
@@ -25,8 +25,6 @@ pub enum Error {
     RelativePath(PathBuf),
     #[error(transparent)]
     InvalidUrl(#[from] uv_distribution_types::ToUrlError),
-    #[error(transparent)]
-    JoinRelativeUrl(#[from] uv_pypi_types::JoinRelativeError),
     #[error("Expected a file URL, but received: {0}")]
     NonFileUrl(DisplaySafeUrl),
     #[error(transparent)]
@@ -108,6 +106,8 @@ pub enum Error {
     CacheHeal(String, HashAlgorithm),
     #[error("The source distribution requires Python {0}, but {1} is installed")]
     RequiresPython(VersionSpecifiers, Version),
+    #[error("Failed to identify base Python interpreter")]
+    BaseInterpreter(#[source] std::io::Error),
 
     /// A generic request middleware error happened while making a request.
     /// Refer to the error message for more details.
@@ -1,10 +1,12 @@
+use std::borrow::Cow;
+
 use uv_cache::{Cache, CacheBucket, CacheShard, WheelCache};
 use uv_cache_info::CacheInfo;
 use uv_cache_key::cache_digest;
-use uv_configuration::ConfigSettings;
+use uv_configuration::{ConfigSettings, PackageConfigSettings};
 use uv_distribution_types::{
     DirectUrlSourceDist, DirectorySourceDist, GitSourceDist, Hashed, PathSourceDist,
 };
 use uv_normalize::PackageName;
 use uv_platform_tags::Tags;
 use uv_types::HashStrategy;
@@ -18,7 +20,8 @@ pub struct BuiltWheelIndex<'a> {
     cache: &'a Cache,
     tags: &'a Tags,
     hasher: &'a HashStrategy,
-    build_configuration: &'a ConfigSettings,
+    config_settings: &'a ConfigSettings,
+    config_settings_package: &'a PackageConfigSettings,
 }
 
 impl<'a> BuiltWheelIndex<'a> {
@@ -27,13 +30,15 @@ impl<'a> BuiltWheelIndex<'a> {
         cache: &'a Cache,
         tags: &'a Tags,
         hasher: &'a HashStrategy,
-        build_configuration: &'a ConfigSettings,
+        config_settings: &'a ConfigSettings,
+        config_settings_package: &'a PackageConfigSettings,
     ) -> Self {
         Self {
            cache,
            tags,
            hasher,
-            build_configuration,
+            config_settings,
+            config_settings_package,
        }
    }
@@ -63,10 +68,11 @@ impl<'a> BuiltWheelIndex<'a> {
         let cache_shard = cache_shard.shard(revision.id());
 
         // If there are build settings, we need to scope to a cache shard.
-        let cache_shard = if self.build_configuration.is_empty() {
+        let config_settings = self.config_settings_for(&source_dist.name);
+        let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(self.build_configuration))
+            cache_shard.shard(cache_digest(&config_settings))
         };
 
         Ok(self.find(&cache_shard))
@@ -100,10 +106,11 @@ impl<'a> BuiltWheelIndex<'a> {
         let cache_shard = cache_shard.shard(revision.id());
 
         // If there are build settings, we need to scope to a cache shard.
-        let cache_shard = if self.build_configuration.is_empty() {
+        let config_settings = self.config_settings_for(&source_dist.name);
+        let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(self.build_configuration))
+            cache_shard.shard(cache_digest(&config_settings))
         };
 
         Ok(self
@@ -119,7 +126,7 @@ impl<'a> BuiltWheelIndex<'a> {
     ) -> Result<Option<CachedWheel>, Error> {
         let cache_shard = self.cache.shard(
             CacheBucket::SourceDistributions,
-            if source_dist.editable {
+            if source_dist.editable.unwrap_or(false) {
                 WheelCache::Editable(&source_dist.url).root()
             } else {
                 WheelCache::Path(&source_dist.url).root()
@@ -148,10 +155,11 @@ impl<'a> BuiltWheelIndex<'a> {
         let cache_shard = cache_shard.shard(revision.id());
 
         // If there are build settings, we need to scope to a cache shard.
-        let cache_shard = if self.build_configuration.is_empty() {
+        let config_settings = self.config_settings_for(&source_dist.name);
+        let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(self.build_configuration))
+            cache_shard.shard(cache_digest(&config_settings))
        };
 
         Ok(self
@@ -174,10 +182,11 @@ impl<'a> BuiltWheelIndex<'a> {
         );
 
         // If there are build settings, we need to scope to a cache shard.
-        let cache_shard = if self.build_configuration.is_empty() {
+        let config_settings = self.config_settings_for(&source_dist.name);
+        let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(self.build_configuration))
+            cache_shard.shard(cache_digest(&config_settings))
         };
 
         self.find(&cache_shard)
@@ -239,4 +248,13 @@ impl<'a> BuiltWheelIndex<'a> {
 
         candidate
     }
+
+    /// Determine the [`ConfigSettings`] for the given package name.
+    fn config_settings_for(&self, name: &PackageName) -> Cow<'_, ConfigSettings> {
+        if let Some(package_settings) = self.config_settings_package.get(name) {
+            Cow::Owned(package_settings.clone().merge(self.config_settings.clone()))
+        } else {
+            Cow::Borrowed(self.config_settings)
+        }
+    }
 }
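Scoping the cache shard by a digest of the effective settings means two builds of the same package with different `--config-settings` never collide in the wheel cache. A minimal digest-to-subdirectory sketch using std's hasher (uv's `cache_digest` is a stable hash; `DefaultHasher` here is illustrative and not stable across Rust releases):

    use std::collections::BTreeMap;
    use std::hash::{DefaultHasher, Hash, Hasher};
    use std::path::PathBuf;

    fn shard_for(root: &str, settings: &BTreeMap<String, String>) -> PathBuf {
        let mut path = PathBuf::from(root);
        if !settings.is_empty() {
            // BTreeMap iterates in key order, so equal settings hash equally.
            let mut hasher = DefaultHasher::new();
            settings.hash(&mut hasher);
            path.push(format!("{:x}", hasher.finish()));
        }
        path
    }

    fn main() {
        let empty = BTreeMap::new();
        let mut settings = BTreeMap::new();
        settings.insert("build-args".to_string(), "--arch=x86_64".to_string());
        assert_eq!(shard_for("cache", &empty), PathBuf::from("cache"));
        assert_ne!(shard_for("cache", &settings), PathBuf::from("cache"));
    }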
@@ -13,7 +13,7 @@ use uv_git_types::{GitReference, GitUrl, GitUrlParseError};
 use uv_normalize::{ExtraName, GroupName, PackageName};
 use uv_pep440::VersionSpecifiers;
 use uv_pep508::{MarkerTree, VerbatimUrl, VersionOrUrl, looks_like_git_repository};
-use uv_pypi_types::{ConflictItem, ParsedUrlError, VerbatimParsedUrl};
+use uv_pypi_types::{ConflictItem, ParsedGitUrl, ParsedUrlError, VerbatimParsedUrl};
 use uv_redacted::DisplaySafeUrl;
 use uv_workspace::Workspace;
 use uv_workspace::pyproject::{PyProjectToml, Source, Sources};
@@ -306,19 +306,22 @@ impl LoweredRequirement {
                         },
                         url,
                     }
-                } else if member.pyproject_toml().is_package() {
+                } else if member
+                    .pyproject_toml()
+                    .is_package(!workspace.is_required_member(&requirement.name))
+                {
                     RequirementSource::Directory {
                         install_path: install_path.into_boxed_path(),
                         url,
-                        editable: true,
-                        r#virtual: false,
+                        editable: Some(true),
+                        r#virtual: Some(false),
                     }
                 } else {
                     RequirementSource::Directory {
                         install_path: install_path.into_boxed_path(),
                         url,
-                        editable: false,
-                        r#virtual: true,
+                        editable: Some(false),
+                        r#virtual: Some(true),
                     }
                 };
                 (source, marker)
@@ -700,17 +703,23 @@ fn path_source(
     };
     if is_dir {
         if let Some(git_member) = git_member {
+            let git = git_member.git_source.git.clone();
             let subdirectory = uv_fs::relative_to(install_path, git_member.fetch_root)
                 .expect("Workspace member must be relative");
             let subdirectory = uv_fs::normalize_path_buf(subdirectory);
-            return Ok(RequirementSource::Git {
-                git: git_member.git_source.git.clone(),
-                subdirectory: if subdirectory == PathBuf::new() {
+            let subdirectory = if subdirectory == PathBuf::new() {
                 None
             } else {
                 Some(subdirectory.into_boxed_path())
-                },
-                url,
-            };
+            };
+            let url = DisplaySafeUrl::from(ParsedGitUrl {
+                url: git.clone(),
+                subdirectory: subdirectory.clone(),
+            });
+            return Ok(RequirementSource::Git {
+                git,
+                subdirectory,
+                url: VerbatimUrl::from_url(url),
+            });
         }
 
@@ -718,26 +727,31 @@ fn path_source(
         Ok(RequirementSource::Directory {
             install_path: install_path.into_boxed_path(),
             url,
-            editable: true,
-            r#virtual: false,
+            editable,
+            r#virtual: Some(false),
         })
     } else {
         // Determine whether the project is a package or virtual.
+        // If the `package` option is unset, check if `tool.uv.package` is set
+        // on the path source (otherwise, default to `true`).
         let is_package = package.unwrap_or_else(|| {
             let pyproject_path = install_path.join("pyproject.toml");
             fs_err::read_to_string(&pyproject_path)
                 .ok()
                 .and_then(|contents| PyProjectToml::from_string(contents).ok())
-                .map(|pyproject_toml| pyproject_toml.is_package())
+                // We don't require a build system for path dependencies
+                .map(|pyproject_toml| pyproject_toml.is_package(false))
                 .unwrap_or(true)
         });
 
+        // If the project is not a package, treat it as a virtual dependency.
+        let r#virtual = !is_package;
+
         Ok(RequirementSource::Directory {
             install_path: install_path.into_boxed_path(),
             url,
-            editable: false,
-            // If a project is not a package, treat it as a virtual dependency.
-            r#virtual: !is_package,
+            editable: Some(false),
+            r#virtual: Some(r#virtual),
         })
     }
 } else {
@@ -618,14 +618,13 @@ mod test {
             tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
         "#};
 
-        assert_snapshot!(format_err(input).await, @r###"
-        error: TOML parse error at line 8, column 16
+        assert_snapshot!(format_err(input).await, @r#"
+        error: TOML parse error at line 8, column 28
           |
         8 | tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
-          |                ^
-        invalid string
-        expected `"`, `'`
-        "###);
+          |                            ^
+        missing comma between key-value pairs, expected `,`
+        "#);
     }
 
     #[tokio::test]
@@ -29,11 +29,11 @@ use uv_cache_key::cache_digest;
 use uv_client::{
     CacheControl, CachedClientError, Connectivity, DataWithCachePolicy, RegistryClient,
 };
-use uv_configuration::{BuildKind, BuildOutput, SourceStrategy};
+use uv_configuration::{BuildKind, BuildOutput, ConfigSettings, SourceStrategy};
 use uv_distribution_filename::{SourceDistExtension, WheelFilename};
 use uv_distribution_types::{
-    BuildableSource, DirectorySourceUrl, FileLocation, GitSourceUrl, HashPolicy, Hashed,
-    PathSourceUrl, SourceDist, SourceUrl,
+    BuildableSource, DirectorySourceUrl, GitSourceUrl, HashPolicy, Hashed, IndexUrl, PathSourceUrl,
+    SourceDist, SourceUrl,
 };
 use uv_extract::hash::Hasher;
 use uv_fs::{rename_with_retry, write_atomic};

@@ -43,7 +43,7 @@ use uv_normalize::PackageName;
 use uv_pep440::{Version, release_specifiers_to_ranges};
 use uv_platform_tags::Tags;
 use uv_pypi_types::{HashAlgorithm, HashDigest, HashDigests, PyProjectToml, ResolutionMetadata};
-use uv_types::{BuildContext, BuildStack, SourceBuildTrait};
+use uv_types::{BuildContext, BuildKey, BuildStack, SourceBuildTrait};
 use uv_workspace::pyproject::ToolUvSources;

 use crate::distribution_database::ManagedClient;
@@ -122,12 +122,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                 .join(dist.version.to_string()),
         );

-        let url = match &dist.file.url {
-            FileLocation::RelativeUrl(base, url) => {
-                uv_pypi_types::base_url_join_relative(base, url)?
-            }
-            FileLocation::AbsoluteUrl(url) => url.to_url()?,
-        };
+        let url = dist.file.url.to_url()?;

         // If the URL is a file URL, use the local path directly.
         if url.scheme() == "file" {
@@ -153,6 +148,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
             self.url(
                 source,
                 &url,
+                Some(&dist.index),
                 &cache_shard,
                 None,
                 dist.ext,

@@ -173,6 +169,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
             self.url(
                 source,
                 &dist.url,
+                None,
                 &cache_shard,
                 dist.subdirectory.as_deref(),
                 dist.ext,

@@ -218,6 +215,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                 self.url(
                     source,
                     resource.url,
+                    None,
                     &cache_shard,
                     resource.subdirectory,
                     resource.ext,
@@ -271,12 +269,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                 .join(dist.version.to_string()),
         );

-        let url = match &dist.file.url {
-            FileLocation::RelativeUrl(base, url) => {
-                uv_pypi_types::base_url_join_relative(base, url)?
-            }
-            FileLocation::AbsoluteUrl(url) => url.to_url()?,
-        };
+        let url = dist.file.url.to_url()?;

         // If the URL is a file URL, use the local path directly.
         if url.scheme() == "file" {

@@ -298,7 +291,16 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                 .await;
         }

-        self.url_metadata(source, &url, &cache_shard, None, dist.ext, hashes, client)
+        self.url_metadata(
+            source,
+            &url,
+            Some(&dist.index),
+            &cache_shard,
+            None,
+            dist.ext,
+            hashes,
+            client,
+        )
             .boxed_local()
             .await?
     }

@@ -312,6 +314,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         self.url_metadata(
             source,
             &dist.url,
+            None,
             &cache_shard,
             dist.subdirectory.as_deref(),
             dist.ext,

@@ -350,6 +353,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
             self.url_metadata(
                 source,
                 resource.url,
+                None,
                 &cache_shard,
                 resource.subdirectory,
                 resource.ext,
@@ -383,11 +387,29 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         Ok(metadata)
     }

+    /// Determine the [`ConfigSettings`] for the given package name.
+    fn config_settings_for(&self, name: Option<&PackageName>) -> Cow<'_, ConfigSettings> {
+        if let Some(name) = name {
+            if let Some(package_settings) = self.build_context.config_settings_package().get(name) {
+                Cow::Owned(
+                    package_settings
+                        .clone()
+                        .merge(self.build_context.config_settings().clone()),
+                )
+            } else {
+                Cow::Borrowed(self.build_context.config_settings())
+            }
+        } else {
+            Cow::Borrowed(self.build_context.config_settings())
+        }
+    }
+
     /// Build a source distribution from a remote URL.
     async fn url<'data>(
         &self,
         source: &BuildableSource<'data>,
         url: &'data DisplaySafeUrl,
+        index: Option<&'data IndexUrl>,
         cache_shard: &CacheShard,
         subdirectory: Option<&'data Path>,
         ext: SourceDistExtension,
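The `config_settings_for` helper added above merges package-scoped settings over the global ones, so a per-package `--config-settings` entry wins on key conflicts while global entries still apply. A sketch of that merge order, using a plain `HashMap` as a stand-in for `ConfigSettings` (the `merge` semantics shown here, where the receiver's entries win, are an assumption based on the call shape above):

```rust
use std::collections::HashMap;

/// Stand-in for `ConfigSettings`: keys win from the map `merge` is called on.
#[derive(Clone, Debug, PartialEq)]
struct Settings(HashMap<String, String>);

impl Settings {
    fn merge(self, other: Settings) -> Settings {
        let mut merged = other.0;
        // Package-scoped entries overwrite global ones with the same key.
        merged.extend(self.0);
        Settings(merged)
    }
}

fn main() {
    let global = Settings(HashMap::from([
        ("editable_mode".into(), "compat".into()),
        ("build_threads".into(), "4".into()),
    ]));
    let per_package = Settings(HashMap::from([("build_threads".into(), "1".into())]));

    let effective = per_package.merge(global);
    assert_eq!(effective.0["build_threads"], "1"); // package-scoped wins
    assert_eq!(effective.0["editable_mode"], "compat"); // global still applies
}
```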
@@ -399,7 +421,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {

         // Fetch the revision for the source distribution.
         let revision = self
-            .url_revision(source, ext, url, cache_shard, hashes, client)
+            .url_revision(source, ext, url, index, cache_shard, hashes, client)
             .await?;

         // Before running the build, check that the hashes match.

@@ -417,11 +439,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         let source_dist_entry = cache_shard.entry(SOURCE);

         // If there are build settings, we need to scope to a cache shard.
-        let config_settings = self.build_context.config_settings();
+        let config_settings = self.config_settings_for(source.name());
         let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(config_settings))
+            cache_shard.shard(cache_digest(&config_settings))
         };

         // If the cache contains a compatible wheel, return it.
@@ -441,6 +463,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                     source,
                     ext,
                     url,
+                    index,
                     &source_dist_entry,
                     revision,
                     hashes,

@@ -504,6 +527,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         &self,
         source: &BuildableSource<'data>,
         url: &'data Url,
+        index: Option<&'data IndexUrl>,
         cache_shard: &CacheShard,
         subdirectory: Option<&'data Path>,
         ext: SourceDistExtension,

@@ -514,7 +538,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {

         // Fetch the revision for the source distribution.
         let revision = self
-            .url_revision(source, ext, url, cache_shard, hashes, client)
+            .url_revision(source, ext, url, index, cache_shard, hashes, client)
             .await?;

         // Before running the build, check that the hashes match.

@@ -571,6 +595,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                 source,
                 ext,
                 url,
+                index,
                 &source_dist_entry,
                 revision,
                 hashes,

@@ -590,11 +615,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         }

         // If there are build settings, we need to scope to a cache shard.
-        let config_settings = self.build_context.config_settings();
+        let config_settings = self.config_settings_for(source.name());
         let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(config_settings))
+            cache_shard.shard(cache_digest(&config_settings))
         };

         // Otherwise, we either need to build the metadata.
@@ -682,18 +707,31 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         source: &BuildableSource<'_>,
         ext: SourceDistExtension,
         url: &Url,
+        index: Option<&IndexUrl>,
         cache_shard: &CacheShard,
         hashes: HashPolicy<'_>,
         client: &ManagedClient<'_>,
     ) -> Result<Revision, Error> {
         let cache_entry = cache_shard.entry(HTTP_REVISION);

         // Determine the cache control policy for the request.
         let cache_control = match client.unmanaged.connectivity() {
-            Connectivity::Online => CacheControl::from(
-                self.build_context
-                    .cache()
-                    .freshness(&cache_entry, source.name(), source.source_tree())
-                    .map_err(Error::CacheRead)?,
-            ),
+            Connectivity::Online => {
+                if let Some(header) = index.and_then(|index| {
+                    self.build_context
+                        .locations()
+                        .artifact_cache_control_for(index)
+                }) {
+                    CacheControl::Override(header)
+                } else {
+                    CacheControl::from(
+                        self.build_context
+                            .cache()
+                            .freshness(&cache_entry, source.name(), source.source_tree())
+                            .map_err(Error::CacheRead)?,
+                    )
+                }
+            }
             Connectivity::Offline => CacheControl::AllowStale,
         };
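The pattern above (repeated later in the diff for the metadata and healing paths): when the distribution comes from a known index, an operator-configured `Cache-Control` override for that index takes precedence; otherwise the cache's own freshness policy applies, and offline mode always allows stale entries. A compact sketch of that selection with simplified stand-ins for uv's types:

```rust
#[derive(Debug, PartialEq)]
enum CacheControl {
    /// Honor the cache's computed freshness.
    Freshness,
    /// Use an operator-supplied header for artifacts from this index.
    Override(String),
    /// Offline: serve whatever is cached, however stale.
    AllowStale,
}

fn cache_control_for(online: bool, index_override: Option<&str>) -> CacheControl {
    if !online {
        return CacheControl::AllowStale;
    }
    match index_override {
        // A per-index setting wins over the default freshness check.
        Some(header) => CacheControl::Override(header.to_string()),
        None => CacheControl::Freshness,
    }
}

fn main() {
    assert_eq!(
        cache_control_for(true, Some("max-age=600")),
        CacheControl::Override("max-age=600".into())
    );
    assert_eq!(cache_control_for(true, None), CacheControl::Freshness);
    assert_eq!(cache_control_for(false, Some("max-age=600")), CacheControl::AllowStale);
}
```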
@@ -743,6 +781,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
             .skip_cache_with_retry(
                 Self::request(DisplaySafeUrl::from(url.clone()), client)?,
                 &cache_entry,
+                cache_control,
                 download,
             )
             .await
@@ -789,11 +828,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         let source_entry = cache_shard.entry(SOURCE);

         // If there are build settings, we need to scope to a cache shard.
-        let config_settings = self.build_context.config_settings();
+        let config_settings = self.config_settings_for(source.name());
         let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(config_settings))
+            cache_shard.shard(cache_digest(&config_settings))
         };

         // If the cache contains a compatible wheel, return it.

@@ -951,11 +990,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         }

         // If there are build settings, we need to scope to a cache shard.
-        let config_settings = self.build_context.config_settings();
+        let config_settings = self.config_settings_for(source.name());
         let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(config_settings))
+            cache_shard.shard(cache_digest(&config_settings))
         };

         // Otherwise, we need to build a wheel.
@@ -1070,7 +1109,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {

         let cache_shard = self.build_context.cache().shard(
             CacheBucket::SourceDistributions,
-            if resource.editable {
+            if resource.editable.unwrap_or(false) {
                 WheelCache::Editable(resource.url).root()
             } else {
                 WheelCache::Path(resource.url).root()

@@ -1093,11 +1132,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         let cache_shard = cache_shard.shard(revision.id());

         // If there are build settings, we need to scope to a cache shard.
-        let config_settings = self.build_context.config_settings();
+        let config_settings = self.config_settings_for(source.name());
         let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(config_settings))
+            cache_shard.shard(cache_digest(&config_settings))
         };

         // If the cache contains a compatible wheel, return it.
@@ -1183,7 +1222,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {

         let cache_shard = self.build_context.cache().shard(
             CacheBucket::SourceDistributions,
-            if resource.editable {
+            if resource.editable.unwrap_or(false) {
                 WheelCache::Editable(resource.url).root()
             } else {
                 WheelCache::Path(resource.url).root()

@@ -1281,11 +1320,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         }

         // If there are build settings, we need to scope to a cache shard.
-        let config_settings = self.build_context.config_settings();
+        let config_settings = self.config_settings_for(source.name());
         let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(config_settings))
+            cache_shard.shard(cache_digest(&config_settings))
         };

         // Otherwise, we need to build a wheel.

@@ -1486,11 +1525,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         let _lock = cache_shard.lock().await.map_err(Error::CacheWrite)?;

         // If there are build settings, we need to scope to a cache shard.
-        let config_settings = self.build_context.config_settings();
+        let config_settings = self.config_settings_for(source.name());
         let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(config_settings))
+            cache_shard.shard(cache_digest(&config_settings))
         };

         // If the cache contains a compatible wheel, return it.
@@ -1583,7 +1622,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                 client
                     .unmanaged
                     .uncached_client(resource.git.repository())
-                    .clone(),
+                    .raw_client(),
             )
             .await
         {
@@ -1789,11 +1828,11 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         }

         // If there are build settings, we need to scope to a cache shard.
-        let config_settings = self.build_context.config_settings();
+        let config_settings = self.config_settings_for(source.name());
         let cache_shard = if config_settings.is_empty() {
             cache_shard
         } else {
-            cache_shard.shard(cache_digest(config_settings))
+            cache_shard.shard(cache_digest(&config_settings))
         };

         // Otherwise, we need to build a wheel.

@@ -1860,13 +1899,22 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
             }
         };

         // If the URL is already precise, return it.
         if self.build_context.git().get_precise(git).is_some() {
             debug!("Precise commit already known: {source}");
             return Ok(());
         }

         // If this is a GitHub URL, attempt to resolve to a precise commit using the GitHub API.
         if self
             .build_context
             .git()
             .github_fast_path(
                 git,
-                client.unmanaged.uncached_client(git.repository()).clone(),
+                client
+                    .unmanaged
+                    .uncached_client(git.repository())
+                    .raw_client(),
             )
             .await?
             .is_some()
|
@ -2040,6 +2088,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
source: &BuildableSource<'_>,
|
||||
ext: SourceDistExtension,
|
||||
url: &Url,
|
||||
index: Option<&IndexUrl>,
|
||||
entry: &CacheEntry,
|
||||
revision: Revision,
|
||||
hashes: HashPolicy<'_>,
|
||||
|
|
@@ -2047,6 +2096,28 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
     ) -> Result<Revision, Error> {
         warn!("Re-downloading missing source distribution: {source}");
         let cache_entry = entry.shard().entry(HTTP_REVISION);

+        // Determine the cache control policy for the request.
+        let cache_control = match client.unmanaged.connectivity() {
+            Connectivity::Online => {
+                if let Some(header) = index.and_then(|index| {
+                    self.build_context
+                        .locations()
+                        .artifact_cache_control_for(index)
+                }) {
+                    CacheControl::Override(header)
+                } else {
+                    CacheControl::from(
+                        self.build_context
+                            .cache()
+                            .freshness(&cache_entry, source.name(), source.source_tree())
+                            .map_err(Error::CacheRead)?,
+                    )
+                }
+            }
+            Connectivity::Offline => CacheControl::AllowStale,
+        };
+
         let download = |response| {
             async {
                 // Take the union of the requested and existing hash algorithms.

@@ -2080,6 +2151,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
             .skip_cache_with_retry(
                 Self::request(DisplaySafeUrl::from(url.clone()), client)?,
                 &cache_entry,
+                cache_control,
                 download,
             )
             .await
@@ -2267,6 +2339,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
         fs::create_dir_all(&cache_shard)
             .await
             .map_err(Error::CacheWrite)?;

+        // Try a direct build if that isn't disabled and the uv build backend is used.
         let disk_filename = if let Some(name) = self
             .build_context
@@ -2287,7 +2360,47 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
             // In the uv build backend, the normalized filename and the disk filename are the same.
             name.to_string()
         } else {
+            // Identify the base Python interpreter to use in the cache key.
+            let base_python = if cfg!(unix) {
+                self.build_context
+                    .interpreter()
+                    .find_base_python()
+                    .map_err(Error::BaseInterpreter)?
+            } else {
+                self.build_context
+                    .interpreter()
+                    .to_base_python()
+                    .map_err(Error::BaseInterpreter)?
+            };
+
+            let build_kind = if source.is_editable() {
+                BuildKind::Editable
+            } else {
+                BuildKind::Wheel
+            };
+
+            let build_key = BuildKey {
+                base_python: base_python.into_boxed_path(),
+                source_root: source_root.to_path_buf().into_boxed_path(),
+                subdirectory: subdirectory
+                    .map(|subdirectory| subdirectory.to_path_buf().into_boxed_path()),
+                source_strategy,
+                build_kind,
+            };
+
+            if let Some(builder) = self.build_context.build_arena().remove(&build_key) {
+                debug!("Reusing existing build environment for: {source}");
+                let wheel = builder.wheel(temp_dir.path()).await.map_err(Error::Build)?;
+
+                // Store the build context.
+                self.build_context.build_arena().insert(build_key, builder);
+
+                wheel
+            } else {
+                debug!("Creating build environment for: {source}");
+
+                let builder = self
+                    .build_context
                     .setup_build(
                         source_root,
                         subdirectory,

@@ -2304,10 +2417,16 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                         self.build_stack.cloned().unwrap_or_default(),
                     )
                     .await
-                    .map_err(|err| Error::Build(err.into()))?
-                    .wheel(temp_dir.path())
-                    .await
-                    .map_err(Error::Build)?
+                    .map_err(|err| Error::Build(err.into()))?;
+
+                // Build the wheel.
+                let wheel = builder.wheel(temp_dir.path()).await.map_err(Error::Build)?;
+
+                // Store the build context.
+                self.build_context.build_arena().insert(build_key, builder);
+
+                wheel
+            }
         };

         // Read the metadata from the wheel.
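The `BuildKey`/build-arena hunks above let a prepared build environment be parked and reused across steps (for example, a metadata build followed by a wheel build of the same source). A minimal sketch of the arena pattern with simplified key and builder types (not uv's exact signatures):

```rust
use std::collections::HashMap;
use std::path::PathBuf;

#[derive(Clone, Hash, PartialEq, Eq)]
struct BuildKey {
    base_python: PathBuf,
    source_root: PathBuf,
    editable: bool,
}

struct Builder; // stand-in for a prepared build environment

#[derive(Default)]
struct BuildArena(HashMap<BuildKey, Builder>);

impl BuildArena {
    fn build(&mut self, key: BuildKey, setup: impl FnOnce() -> Builder) {
        // Take an existing environment if one is parked, else create one.
        let builder = self.0.remove(&key).unwrap_or_else(setup);
        // ... run the build with `builder` ...
        // Park the environment again so a later step can reuse it.
        self.0.insert(key, builder);
    }
}

fn main() {
    let mut arena = BuildArena::default();
    let key = BuildKey {
        base_python: PathBuf::from("/usr/bin/python3"),
        source_root: PathBuf::from("/work/pkg"),
        editable: false,
    };
    arena.build(key.clone(), || Builder); // first build: environment created
    arena.build(key, || unreachable!()); // second build: parked environment reused
}
```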
@@ -2362,6 +2481,26 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
             }
         }

+        // Identify the base Python interpreter to use in the cache key.
+        let base_python = if cfg!(unix) {
+            self.build_context
+                .interpreter()
+                .find_base_python()
+                .map_err(Error::BaseInterpreter)?
+        } else {
+            self.build_context
+                .interpreter()
+                .to_base_python()
+                .map_err(Error::BaseInterpreter)?
+        };
+
+        // Determine whether this is an editable or non-editable build.
+        let build_kind = if source.is_editable() {
+            BuildKind::Editable
+        } else {
+            BuildKind::Wheel
+        };
+
         // Set up the builder.
         let mut builder = self
             .build_context
@@ -2372,11 +2511,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
                 Some(&source.to_string()),
                 source.as_dist(),
                 source_strategy,
-                if source.is_editable() {
-                    BuildKind::Editable
-                } else {
-                    BuildKind::Wheel
-                },
+                build_kind,
                 BuildOutput::Debug,
                 self.build_stack.cloned().unwrap_or_default(),
             )

@@ -2385,6 +2520,21 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {

         // Build the metadata.
         let dist_info = builder.metadata().await.map_err(Error::Build)?;

+        // Store the build context.
+        self.build_context.build_arena().insert(
+            BuildKey {
+                base_python: base_python.into_boxed_path(),
+                source_root: source_root.to_path_buf().into_boxed_path(),
+                subdirectory: subdirectory
+                    .map(|subdirectory| subdirectory.to_path_buf().into_boxed_path()),
+                source_strategy,
+                build_kind,
+            },
+            builder,
+        );
+
         // Return the `.dist-info` directory, if it exists.
         let Some(dist_info) = dist_info else {
             return Ok(None);
         };
@@ -2,11 +2,11 @@ use std::{ffi::OsString, path::PathBuf};

 #[derive(Debug, thiserror::Error)]
 pub enum Error {
-    #[error(transparent)]
+    #[error("Failed to read from zip file")]
     Zip(#[from] zip::result::ZipError),
-    #[error(transparent)]
+    #[error("Failed to read from zip file")]
     AsyncZip(#[from] async_zip::error::ZipError),
-    #[error(transparent)]
+    #[error("I/O operation failed during extraction")]
     Io(#[from] std::io::Error),
     #[error(
         "The top-level of the archive must only contain a list directory, but it contains: {0:?}"
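A side note on the error-enum hunk above: replacing `#[error(transparent)]` with a fixed message adds context at this layer, while `#[from]` keeps the underlying error reachable through `source()`, so the full chain stays visible to callers that walk it. A small self-contained illustration:

```rust
use std::error::Error as _;

#[derive(Debug, thiserror::Error)]
enum Error {
    // With a fixed message, Display shows this text, and the wrapped
    // io::Error is still reachable via `source()` thanks to `#[from]`.
    #[error("I/O operation failed during extraction")]
    Io(#[from] std::io::Error),
}

fn main() {
    let err = Error::from(std::io::Error::other("disk full"));
    assert_eq!(err.to_string(), "I/O operation failed during extraction");
    assert_eq!(err.source().unwrap().to_string(), "disk full");
}
```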
@@ -236,6 +236,7 @@ pub async fn untar_gz<R: tokio::io::AsyncRead + Unpin>(
     )
     .set_preserve_mtime(false)
     .set_preserve_permissions(false)
+    .set_allow_external_symlinks(false)
     .build();
     Ok(untar_in(archive, target.as_ref()).await?)
 }

@@ -255,6 +256,7 @@ pub async fn untar_bz2<R: tokio::io::AsyncRead + Unpin>(
     )
     .set_preserve_mtime(false)
     .set_preserve_permissions(false)
+    .set_allow_external_symlinks(false)
     .build();
     Ok(untar_in(archive, target.as_ref()).await?)
 }

@@ -274,6 +276,7 @@ pub async fn untar_zst<R: tokio::io::AsyncRead + Unpin>(
     )
     .set_preserve_mtime(false)
     .set_preserve_permissions(false)
+    .set_allow_external_symlinks(false)
     .build();
     Ok(untar_in(archive, target.as_ref()).await?)
 }

@@ -293,6 +296,7 @@ pub async fn untar_xz<R: tokio::io::AsyncRead + Unpin>(
     )
     .set_preserve_mtime(false)
     .set_preserve_permissions(false)
+    .set_allow_external_symlinks(false)
     .build();
     untar_in(archive, target.as_ref()).await?;
     Ok(())

@@ -311,6 +315,7 @@ pub async fn untar<R: tokio::io::AsyncRead + Unpin>(
     tokio_tar::ArchiveBuilder::new(&mut reader as &mut (dyn tokio::io::AsyncRead + Unpin))
         .set_preserve_mtime(false)
         .set_preserve_permissions(false)
+        .set_allow_external_symlinks(false)
         .build();
     untar_in(archive, target.as_ref()).await?;
     Ok(())
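All five `untar_*` builders gain the same hardening flag: a tar entry that is a symlink escaping the extraction root could otherwise be abused for path traversal when a later entry writes through it. A library-agnostic sketch of the lexical check such a flag implies (the function here is illustrative, not the tar library's implementation):

```rust
use std::path::{Component, Path, PathBuf};

/// Returns true if `link_target`, resolved relative to `entry_dir`,
/// stays inside `root`. Purely lexical: `..` components are walked,
/// which is the cheap per-entry check an extractor can apply to symlinks.
fn symlink_stays_inside(root: &Path, entry_dir: &Path, link_target: &Path) -> bool {
    if link_target.is_absolute() {
        return link_target.starts_with(root);
    }
    let mut resolved: PathBuf = entry_dir.to_path_buf();
    for component in link_target.components() {
        match component {
            Component::ParentDir => {
                if !resolved.pop() || !resolved.starts_with(root) {
                    return false;
                }
            }
            Component::Normal(part) => resolved.push(part),
            _ => {}
        }
    }
    resolved.starts_with(root)
}

fn main() {
    let root = Path::new("/tmp/extract");
    assert!(symlink_stays_inside(root, &root.join("pkg"), Path::new("sibling.txt")));
    assert!(!symlink_stays_inside(root, &root.join("pkg"), Path::new("../../etc/passwd")));
}
```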
@@ -16,7 +16,6 @@ doctest = false
 workspace = true

 [dependencies]
-
 dunce = { workspace = true }
 either = { workspace = true }
 encoding_rs_io = { workspace = true }
@@ -84,6 +84,8 @@ pub async fn read_to_string_transcode(path: impl AsRef<Path>) -> std::io::Result
 /// junction at the same path.
 ///
 /// Note that because junctions are used, the source must be a directory.
+///
+/// Changes to this function should be reflected in [`create_symlink`].
 #[cfg(windows)]
 pub fn replace_symlink(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> std::io::Result<()> {
     // If the source is a file, we can't create a junction
|
|||
}
|
||||
}
|
||||
|
||||
/// Create a symlink at `dst` pointing to `src`.
|
||||
///
|
||||
/// On Windows, this uses the `junction` crate to create a junction point.
|
||||
///
|
||||
/// Note that because junctions are used, the source must be a directory.
|
||||
///
|
||||
/// Changes to this function should be reflected in [`replace_symlink`].
|
||||
#[cfg(windows)]
|
||||
pub fn create_symlink(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> std::io::Result<()> {
|
||||
// If the source is a file, we can't create a junction
|
||||
if src.as_ref().is_file() {
|
||||
return Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
format!(
|
||||
"Cannot create a junction for {}: is not a directory",
|
||||
src.as_ref().display()
|
||||
),
|
||||
));
|
||||
}
|
||||
|
||||
junction::create(
|
||||
dunce::simplified(src.as_ref()),
|
||||
dunce::simplified(dst.as_ref()),
|
||||
)
|
||||
}
|
||||
|
||||
/// Create a symlink at `dst` pointing to `src`.
|
||||
#[cfg(unix)]
|
||||
pub fn create_symlink(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> std::io::Result<()> {
|
||||
fs_err::os::unix::fs::symlink(src.as_ref(), dst.as_ref())
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
pub fn remove_symlink(path: impl AsRef<Path>) -> std::io::Result<()> {
|
||||
fs_err::remove_file(path.as_ref())
|
||||
|
|
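On the Windows `create_symlink` added above: junctions only apply to directories, which is why file sources are rejected up front with `InvalidInput` rather than letting `junction::create` fail more opaquely. A usage sketch, assuming the `create_symlink` from the hunk above is in scope and the paths are purely illustrative:

```rust
#[cfg(windows)]
fn example() -> std::io::Result<()> {
    use std::path::Path;
    // Works: the source is a directory, so a junction can be created.
    create_symlink(Path::new(r"C:\data\tools"), Path::new(r"C:\links\tools"))?;

    // Fails with InvalidInput: junctions cannot point at files.
    let err = create_symlink(Path::new(r"C:\data\notes.txt"), Path::new(r"C:\links\notes"))
        .unwrap_err();
    assert_eq!(err.kind(), std::io::ErrorKind::InvalidInput);
    Ok(())
}
```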
@@ -575,8 +609,33 @@ pub fn is_temporary(path: impl AsRef<Path>) -> bool {
         .is_some_and(|name| name.starts_with(".tmp"))
 }

+/// Checks if the grandparent directory of the given executable is the base
+/// of a virtual environment.
+///
+/// The procedure described in PEP 405 includes checking both the parent and
+/// grandparent directory of an executable, but in practice we've found this to
+/// be unnecessary.
+pub fn is_virtualenv_executable(executable: impl AsRef<Path>) -> bool {
+    executable
+        .as_ref()
+        .parent()
+        .and_then(Path::parent)
+        .is_some_and(is_virtualenv_base)
+}
+
+/// Returns `true` if a path is the base path of a virtual environment,
+/// indicated by the presence of a `pyvenv.cfg` file.
+///
+/// The procedure described in PEP 405 includes scanning `pyvenv.cfg`
+/// for a `home` key, but in practice we've found this to be
+/// unnecessary.
+pub fn is_virtualenv_base(path: impl AsRef<Path>) -> bool {
+    path.as_ref().join("pyvenv.cfg").is_file()
+}
+
 /// A file lock that is automatically released when dropped.
 #[derive(Debug)]
 #[must_use]
 pub struct LockedFile(fs_err::File);

 impl LockedFile {
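The two venv helpers above encode the standard layout: the interpreter sits at `<venv>/bin/python` (or `<venv>\Scripts\python.exe` on Windows), so its grandparent is the environment base, which PEP 405 marks with a `pyvenv.cfg`. Walking through an example:

```rust
use std::path::Path;

fn main() {
    // For `/project/.venv/bin/python`, the helpers walk:
    //   parent() -> /project/.venv/bin
    //   parent() -> /project/.venv   (the candidate base)
    // and then test for /project/.venv/pyvenv.cfg on disk.
    let executable = Path::new("/project/.venv/bin/python");
    let base = executable.parent().and_then(Path::parent).unwrap();
    assert_eq!(base, Path::new("/project/.venv"));
    // `is_virtualenv_executable(executable)` is then equivalent to
    // `is_virtualenv_base(base)`, i.e. `base.join("pyvenv.cfg").is_file()`.
}
```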
@@ -277,21 +277,6 @@ fn normalized(path: &Path) -> PathBuf {
     normalized
 }

-/// Like `fs_err::canonicalize`, but avoids attempting to resolve symlinks on Windows.
-pub fn canonicalize_executable(path: impl AsRef<Path>) -> std::io::Result<PathBuf> {
-    let path = path.as_ref();
-    debug_assert!(
-        path.is_absolute(),
-        "path must be absolute: {}",
-        path.display()
-    );
-    if cfg!(windows) {
-        Ok(path.to_path_buf())
-    } else {
-        fs_err::canonicalize(path)
-    }
-}
-
 /// Compute a path describing `path` relative to `base`.
 ///
 /// `lib/python/site-packages/foo/__init__.py` and `lib/python/site-packages` -> `foo/__init__.py`
@@ -345,11 +330,11 @@ pub struct PortablePathBuf(Box<Path>);

 #[cfg(feature = "schemars")]
 impl schemars::JsonSchema for PortablePathBuf {
-    fn schema_name() -> String {
-        PathBuf::schema_name()
+    fn schema_name() -> Cow<'static, str> {
+        Cow::Borrowed("PortablePathBuf")
     }

-    fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
+    fn json_schema(_gen: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
         PathBuf::json_schema(_gen)
     }
 }
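The `JsonSchema` hunk above tracks the schemars 1.x API: `schema_name` returns `Cow<'static, str>` rather than `String`, the generator module is `generate` instead of `r#gen`, and the unified `schemars::Schema` type replaces `schemars::schema::Schema`. A minimal impl in the new style for a toy type (assumes schemars 1.x):

```rust
use std::borrow::Cow;

struct PortableId(String);

impl schemars::JsonSchema for PortableId {
    fn schema_name() -> Cow<'static, str> {
        Cow::Borrowed("PortableId")
    }

    fn json_schema(generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {
        // Delegate to the schema of the underlying representation.
        String::json_schema(generator)
    }
}
```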
@@ -413,6 +398,12 @@ impl From<Box<Path>> for PortablePathBuf {
     }
 }

+impl<'a> From<&'a Path> for PortablePathBuf {
+    fn from(path: &'a Path) -> Self {
+        Box::<Path>::from(path).into()
+    }
+}
+
 #[cfg(feature = "serde")]
 impl serde::Serialize for PortablePathBuf {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
@@ -17,7 +17,7 @@ fn get_binary_type(path: &Path) -> windows::core::Result<u32> {
         .chain(Some(0))
         .collect::<Vec<u16>>();
     // SAFETY: winapi call
-    unsafe { GetBinaryTypeW(PCWSTR(name.as_ptr()), &mut binary_type)? };
+    unsafe { GetBinaryTypeW(PCWSTR(name.as_ptr()), &raw mut binary_type)? };
     Ok(binary_type)
 }
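On the last hunk: `&raw mut` (stable since Rust 1.82) produces a `*mut u32` without first materializing a `&mut` reference, which is the preferred way to pass out-parameters to FFI calls such as `GetBinaryTypeW`. A stand-alone illustration with a plain function in place of the Windows API:

```rust
fn fill(out: *mut u32) {
    // A stand-in for an FFI out-parameter such as GetBinaryTypeW's.
    unsafe { *out = 42 };
}

fn main() {
    let mut binary_type: u32 = 0;
    // `&raw mut` yields `*mut u32` without creating an intermediate `&mut u32`,
    // sidestepping reference validity rules for the duration of the call.
    fill(&raw mut binary_type);
    assert_eq!(binary_type, 42);
}
```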