mirror of https://github.com/astral-sh/uv
Merge branch 'main' into zb/extra-build-dependencies
This commit is contained in:
commit
1d2d9aa193
|
|
@ -718,7 +718,7 @@ jobs:
|
|||
manylinux: auto
|
||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||
args: --release --locked --out dist --features self-update
|
||||
- uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2
|
||||
- uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
|
||||
name: "Test wheel"
|
||||
with:
|
||||
arch: ${{ matrix.platform.arch }}
|
||||
|
|
@ -767,7 +767,7 @@ jobs:
|
|||
manylinux: auto
|
||||
docker-options: ${{ matrix.platform.maturin_docker_options }}
|
||||
args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
|
||||
- uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2
|
||||
- uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
|
||||
name: "Test wheel uv-build"
|
||||
with:
|
||||
arch: ${{ matrix.platform.arch }}
|
||||
|
|
|
|||
|
|
@ -82,7 +82,7 @@ jobs:
|
|||
run: rustup component add rustfmt
|
||||
|
||||
- name: "Install uv"
|
||||
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
|
||||
uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
|
||||
|
||||
- name: "rustfmt"
|
||||
run: cargo fmt --all --check
|
||||
|
|
@ -188,7 +188,7 @@ jobs:
|
|||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- name: "Install cargo shear"
|
||||
uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
|
||||
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
|
||||
with:
|
||||
tool: cargo-shear
|
||||
- run: cargo shear
|
||||
|
|
@ -213,12 +213,12 @@ jobs:
|
|||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
|
||||
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
|
||||
- uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
|
||||
- name: "Install required Python versions"
|
||||
run: uv python install
|
||||
|
||||
- name: "Install cargo nextest"
|
||||
uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
|
||||
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
|
||||
with:
|
||||
tool: cargo-nextest
|
||||
|
||||
|
|
@ -249,12 +249,12 @@ jobs:
|
|||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
|
||||
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
|
||||
- uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
|
||||
- name: "Install required Python versions"
|
||||
run: uv python install
|
||||
|
||||
- name: "Install cargo nextest"
|
||||
uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
|
||||
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
|
||||
with:
|
||||
tool: cargo-nextest
|
||||
|
||||
|
|
@ -286,7 +286,7 @@ jobs:
|
|||
run: |
|
||||
Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse
|
||||
|
||||
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
|
||||
- uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
|
||||
- name: "Install required Python versions"
|
||||
run: uv python install
|
||||
|
||||
|
|
@ -299,7 +299,7 @@ jobs:
|
|||
run: rustup show
|
||||
|
||||
- name: "Install cargo nextest"
|
||||
uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
|
||||
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
|
||||
with:
|
||||
tool: cargo-nextest
|
||||
|
||||
|
|
@ -352,7 +352,7 @@ jobs:
|
|||
rustup component add rust-src --target ${{ matrix.target-arch }}-pc-windows-msvc
|
||||
|
||||
- name: "Install cargo-bloat"
|
||||
uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
|
||||
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
|
||||
with:
|
||||
tool: cargo-bloat
|
||||
|
||||
|
|
@ -439,7 +439,7 @@ jobs:
|
|||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
|
||||
- uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
|
||||
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
- name: "Add SSH key"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
|
|
@ -1594,7 +1594,7 @@ jobs:
|
|||
run: chmod +x ./uv
|
||||
|
||||
- name: "Configure AWS credentials"
|
||||
uses: aws-actions/configure-aws-credentials@f503a1870408dcf2c35d5c2b8a68e69211042c7d
|
||||
uses: aws-actions/configure-aws-credentials@a159d7bb5354cf786f855f2f5d1d8d768d9a08d1
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
|
|
@ -1613,12 +1613,12 @@ jobs:
|
|||
|
||||
- name: "Authenticate with GCP"
|
||||
id: "auth"
|
||||
uses: "google-github-actions/auth@0920706a19e9d22c3d0da43d1db5939c6ad837a8"
|
||||
uses: "google-github-actions/auth@140bb5113ffb6b65a7e9b937a81fa96cf5064462"
|
||||
with:
|
||||
credentials_json: "${{ secrets.GCP_SERVICE_ACCOUNT_KEY }}"
|
||||
|
||||
- name: "Set up GCP SDK"
|
||||
uses: "google-github-actions/setup-gcloud@a8b58010a5b2a061afd605f50e88629c9ec7536b"
|
||||
uses: "google-github-actions/setup-gcloud@6a7c903a70c8625ed6700fa299f5ddb4ca6022e9"
|
||||
|
||||
- name: "Get GCP Artifact Registry token"
|
||||
id: get_token
|
||||
|
|
@ -2516,7 +2516,7 @@ jobs:
|
|||
run: rustup show
|
||||
|
||||
- name: "Install codspeed"
|
||||
uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
|
||||
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
|
||||
with:
|
||||
tool: cargo-codspeed
|
||||
|
||||
|
|
@ -2553,7 +2553,7 @@ jobs:
|
|||
run: rustup show
|
||||
|
||||
- name: "Install codspeed"
|
||||
uses: taiki-e/install-action@7b20dfd705618832f20d29066e34aa2f2f6194c2 # v2.52.8
|
||||
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
|
||||
with:
|
||||
tool: cargo-codspeed
|
||||
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ jobs:
|
|||
id-token: write
|
||||
steps:
|
||||
- name: "Install uv"
|
||||
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
|
||||
uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
|
||||
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
|
||||
with:
|
||||
pattern: wheels_uv-*
|
||||
|
|
@ -43,7 +43,7 @@ jobs:
|
|||
id-token: write
|
||||
steps:
|
||||
- name: "Install uv"
|
||||
uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
|
||||
uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
|
||||
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
|
||||
with:
|
||||
pattern: wheels_uv_build-*
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
|
||||
- uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
|
||||
with:
|
||||
version: "latest"
|
||||
enable-cache: true
|
||||
|
|
|
|||
|
|
@ -42,7 +42,7 @@ repos:
|
|||
types_or: [yaml, json5]
|
||||
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.12.3
|
||||
rev: v0.12.4
|
||||
hooks:
|
||||
- id: ruff-format
|
||||
- id: ruff
|
||||
|
|
|
|||
|
|
@ -6,7 +6,6 @@
|
|||
3.8.20
|
||||
# The following are required for packse scenarios
|
||||
3.9.20
|
||||
3.9.18
|
||||
3.9.12
|
||||
# The following is needed for `==3.13` request tests
|
||||
3.13.0
|
||||
|
|
|
|||
921
CHANGELOG.md
921
CHANGELOG.md
File diff suppressed because it is too large
Load Diff
|
|
@ -500,22 +500,20 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "bzip2"
|
||||
version = "0.5.0"
|
||||
version = "0.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bafdbf26611df8c14810e268ddceda071c297570a5fb360ceddf617fe417ef58"
|
||||
checksum = "49ecfb22d906f800d4fe833b6282cf4dc1c298f5057ca0b5445e5c209735ca47"
|
||||
dependencies = [
|
||||
"bzip2-sys",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bzip2-sys"
|
||||
version = "0.1.11+1.0.8"
|
||||
version = "0.1.13+1.0.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc"
|
||||
checksum = "225bff33b2141874fe80d71e07d6eec4f85c5c216453dd96388240f96e1acc14"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"libc",
|
||||
"pkg-config",
|
||||
]
|
||||
|
||||
|
|
@ -690,9 +688,9 @@ checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
|
|||
|
||||
[[package]]
|
||||
name = "codspeed"
|
||||
version = "3.0.3"
|
||||
version = "3.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a7524e02ff6173bc143d9abc01b518711b77addb60de871bbe5686843f88fb48"
|
||||
checksum = "d29180405ab3b37bb020246ea66bf8ae233708766fd59581ae929feaef10ce91"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bincode",
|
||||
|
|
@ -708,9 +706,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "codspeed-criterion-compat"
|
||||
version = "3.0.3"
|
||||
version = "3.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2f71662331c4f854131a42b95055f3f8cbca53640348985f699635b1f96d8c26"
|
||||
checksum = "2454d874ca820ffd71273565530ad318f413195bbc99dce6c958ca07db362c63"
|
||||
dependencies = [
|
||||
"codspeed",
|
||||
"codspeed-criterion-compat-walltime",
|
||||
|
|
@ -719,9 +717,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "codspeed-criterion-compat-walltime"
|
||||
version = "3.0.3"
|
||||
version = "3.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e3c9bd9e895e0aa263d139a8b5f58a4ea4abb86d5982ec7f58d3c7b8465c1e01"
|
||||
checksum = "093a9383cdd1a5a0bd1a47cdafb49ae0c6dcd0793c8fb8f79768bab423128c9c"
|
||||
dependencies = [
|
||||
"anes",
|
||||
"cast",
|
||||
|
|
@ -761,7 +759,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"windows-sys 0.48.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -1150,7 +1148,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -1252,9 +1250,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
|
|||
|
||||
[[package]]
|
||||
name = "foldhash"
|
||||
version = "0.1.4"
|
||||
version = "0.1.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f"
|
||||
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
|
||||
|
||||
[[package]]
|
||||
name = "fontconfig-parser"
|
||||
|
|
@ -1593,11 +1591,11 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
|
|||
|
||||
[[package]]
|
||||
name = "home"
|
||||
version = "0.5.11"
|
||||
version = "0.5.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf"
|
||||
checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5"
|
||||
dependencies = [
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -1989,7 +1987,7 @@ checksum = "e19b23d53f35ce9f56aebc7d1bb4e6ac1e9c0db7ac85c8d1760c04379edced37"
|
|||
dependencies = [
|
||||
"hermit-abi 0.4.0",
|
||||
"libc",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -2049,7 +2047,7 @@ dependencies = [
|
|||
"portable-atomic",
|
||||
"portable-atomic-util",
|
||||
"serde",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -2902,7 +2900,7 @@ dependencies = [
|
|||
"once_cell",
|
||||
"socket2",
|
||||
"tracing",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -3033,7 +3031,7 @@ checksum = "78c81d000a2c524133cc00d2f92f019d399e57906c3b7119271a2495354fe895"
|
|||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"rustix 1.0.7",
|
||||
"rustix 1.0.8",
|
||||
"windows 0.61.1",
|
||||
]
|
||||
|
||||
|
|
@ -3334,20 +3332,20 @@ dependencies = [
|
|||
"errno",
|
||||
"libc",
|
||||
"linux-raw-sys 0.4.15",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustix"
|
||||
version = "1.0.7"
|
||||
version = "1.0.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266"
|
||||
checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8"
|
||||
dependencies = [
|
||||
"bitflags 2.9.1",
|
||||
"errno",
|
||||
"libc",
|
||||
"linux-raw-sys 0.9.2",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.60.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -3584,9 +3582,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.140"
|
||||
version = "1.0.141"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
|
||||
checksum = "30b9eff21ebe718216c6ec64e1d9ac57087aad11efc64e32002bce4a0d4c03d3"
|
||||
dependencies = [
|
||||
"itoa",
|
||||
"memchr",
|
||||
|
|
@ -3596,9 +3594,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "serde_spanned"
|
||||
version = "0.6.9"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3"
|
||||
checksum = "40734c41988f7306bb04f0ecf60ec0f3f1caa34290e4e8ea471dcd3346483b83"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
|
@ -3929,8 +3927,8 @@ dependencies = [
|
|||
"fastrand",
|
||||
"getrandom 0.3.1",
|
||||
"once_cell",
|
||||
"rustix 1.0.7",
|
||||
"windows-sys 0.52.0",
|
||||
"rustix 1.0.8",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -4218,44 +4216,58 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "toml"
|
||||
version = "0.8.23"
|
||||
version = "0.9.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362"
|
||||
checksum = "ed0aee96c12fa71097902e0bb061a5e1ebd766a6636bb605ba401c45c1650eac"
|
||||
dependencies = [
|
||||
"foldhash",
|
||||
"indexmap",
|
||||
"serde",
|
||||
"serde_spanned",
|
||||
"toml_datetime",
|
||||
"toml_edit",
|
||||
"toml_parser",
|
||||
"toml_writer",
|
||||
"winnow",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "toml_datetime"
|
||||
version = "0.6.11"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c"
|
||||
checksum = "bade1c3e902f58d73d3f294cd7f20391c1cb2fbcb643b73566bc773971df91e3"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "toml_edit"
|
||||
version = "0.22.27"
|
||||
version = "0.23.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a"
|
||||
checksum = "d1dee9dc43ac2aaf7d3b774e2fba5148212bf2bd9374f4e50152ebe9afd03d42"
|
||||
dependencies = [
|
||||
"indexmap",
|
||||
"serde",
|
||||
"serde_spanned",
|
||||
"toml_datetime",
|
||||
"toml_write",
|
||||
"toml_parser",
|
||||
"toml_writer",
|
||||
"winnow",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "toml_write"
|
||||
version = "0.1.2"
|
||||
name = "toml_parser"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801"
|
||||
checksum = "97200572db069e74c512a14117b296ba0a80a30123fbbb5aa1f4a348f639ca30"
|
||||
dependencies = [
|
||||
"winnow",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "toml_writer"
|
||||
version = "1.0.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fcc842091f2def52017664b53082ecbbeb5c7731092bad69d2c63050401dfd64"
|
||||
|
||||
[[package]]
|
||||
name = "tower"
|
||||
|
|
@ -4633,7 +4645,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "uv"
|
||||
version = "0.8.0"
|
||||
version = "0.8.2"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anyhow",
|
||||
|
|
@ -4799,7 +4811,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "uv-build"
|
||||
version = "0.8.0"
|
||||
version = "0.8.2"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"uv-build-backend",
|
||||
|
|
@ -5289,7 +5301,7 @@ dependencies = [
|
|||
"junction",
|
||||
"path-slash",
|
||||
"percent-encoding",
|
||||
"rustix 1.0.7",
|
||||
"rustix 1.0.8",
|
||||
"same-file",
|
||||
"schemars",
|
||||
"serde",
|
||||
|
|
@ -5994,7 +6006,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "uv-version"
|
||||
version = "0.8.0"
|
||||
version = "0.8.2"
|
||||
|
||||
[[package]]
|
||||
name = "uv-virtualenv"
|
||||
|
|
@ -6221,9 +6233,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "wasmtimer"
|
||||
version = "0.4.1"
|
||||
version = "0.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0048ad49a55b9deb3953841fa1fc5858f0efbcb7a18868c899a360269fac1b23"
|
||||
checksum = "d8d49b5d6c64e8558d9b1b065014426f35c18de636895d24893dbbd329743446"
|
||||
dependencies = [
|
||||
"futures",
|
||||
"js-sys",
|
||||
|
|
@ -6285,7 +6297,7 @@ checksum = "d3fabb953106c3c8eea8306e4393700d7657561cb43122571b172bbfb7c7ba1d"
|
|||
dependencies = [
|
||||
"env_home",
|
||||
"regex",
|
||||
"rustix 1.0.7",
|
||||
"rustix 1.0.8",
|
||||
"winsafe",
|
||||
]
|
||||
|
||||
|
|
@ -6328,7 +6340,7 @@ version = "0.1.9"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
|
||||
dependencies = [
|
||||
"windows-sys 0.48.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -6959,7 +6971,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "84e9a772a54b54236b9b744aaaf8d7be01b4d6e99725523cb82cb32d1c81b1d7"
|
||||
dependencies = [
|
||||
"arbitrary",
|
||||
"bzip2 0.5.0",
|
||||
"bzip2 0.5.2",
|
||||
"crc32fast",
|
||||
"crossbeam-utils",
|
||||
"displaydoc",
|
||||
|
|
|
|||
|
|
@ -76,7 +76,7 @@ anstream = { version = "0.6.15" }
|
|||
anyhow = { version = "1.0.89" }
|
||||
arcstr = { version = "1.2.0" }
|
||||
arrayvec = { version = "0.7.6" }
|
||||
astral-tokio-tar = { version = "0.5.1" }
|
||||
astral-tokio-tar = { version = "0.5.2" }
|
||||
async-channel = { version = "2.3.1" }
|
||||
async-compression = { version = "0.4.12", features = ["bzip2", "gzip", "xz", "zstd"] }
|
||||
async-trait = { version = "0.1.82" }
|
||||
|
|
@ -172,8 +172,8 @@ tl = { git = "https://github.com/astral-sh/tl.git", rev = "6e25b2ee2513d75385101
|
|||
tokio = { version = "1.40.0", features = ["fs", "io-util", "macros", "process", "rt", "signal", "sync"] }
|
||||
tokio-stream = { version = "0.1.16" }
|
||||
tokio-util = { version = "0.7.12", features = ["compat", "io"] }
|
||||
toml = { version = "0.8.19" }
|
||||
toml_edit = { version = "0.22.21", features = ["serde"] }
|
||||
toml = { version = "0.9.2", features = ["fast_hash"] }
|
||||
toml_edit = { version = "0.23.2", features = ["serde"] }
|
||||
tracing = { version = "0.1.40" }
|
||||
tracing-durations-export = { version = "0.3.0", features = ["plot"] }
|
||||
tracing-subscriber = { version = "0.3.18", features = ["env-filter", "json", "registry"] }
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
# Changelog 0.6.x
|
||||
|
||||
## 0.6.0
|
||||
|
||||
There have been 31 releases and 1135 pull requests since
|
||||
|
|
|
|||
|
|
@ -0,0 +1,995 @@
|
|||
# Changelog 0.7.x
|
||||
|
||||
## 0.7.0
|
||||
|
||||
This release contains various changes that improve correctness and user experience, but could break
|
||||
some workflows; many changes have been marked as breaking out of an abundance of caution. We expect
|
||||
most users to be able to upgrade without making changes.
|
||||
|
||||
### Breaking changes
|
||||
|
||||
- **Update `uv version` to display and update project versions
|
||||
([#12349](https://github.com/astral-sh/uv/pull/12349))**
|
||||
|
||||
Previously, `uv version` displayed uv's version. Now, `uv version` will display or update the
|
||||
project's version. This interface was
|
||||
[heavily requested](https://github.com/astral-sh/uv/issues/6298) and, after much consideration, we
|
||||
decided that transitioning the top-level command was the best option.
|
||||
|
||||
Here's a brief example:
|
||||
|
||||
```console
|
||||
$ uv init example
|
||||
Initialized project `example` at `./example`
|
||||
$ cd example
|
||||
$ uv version
|
||||
example 0.1.0
|
||||
$ uv version --bump major
|
||||
example 0.1.0 => 1.0.0
|
||||
$ uv version --short
|
||||
1.0.0
|
||||
```
|
||||
|
||||
If used outside of a project, uv will fallback to showing its own version still:
|
||||
|
||||
```console
|
||||
$ uv version
|
||||
warning: failed to read project: No `pyproject.toml` found in current directory or any parent directory
|
||||
running `uv self version` for compatibility with old `uv version` command.
|
||||
this fallback will be removed soon, pass `--preview` to make this an error.
|
||||
|
||||
uv 0.7.0 (4433f41c9 2025-04-29)
|
||||
```
|
||||
|
||||
As described in the warning, `--preview` can be used to error instead:
|
||||
|
||||
```console
|
||||
$ uv version --preview
|
||||
error: No `pyproject.toml` found in current directory or any parent directory
|
||||
```
|
||||
|
||||
The previous functionality of `uv version` was moved to `uv self version`.
|
||||
|
||||
- **Avoid fallback to subsequent indexes on authentication failure
|
||||
([#12805](https://github.com/astral-sh/uv/pull/12805))**
|
||||
|
||||
When using the `first-index` strategy (the default), uv will stop searching indexes for a package
|
||||
once it is found on a single index. Previously, uv considered a package as "missing" from an index
|
||||
during authentication failures, such as an HTTP 401 or HTTP 403 (normally, missing packages are
|
||||
represented by an HTTP 404). This behavior was motivated by unusual responses from some package
|
||||
indexes, but reduces the safety of uv's index strategy when authentication fails. Now, uv will
|
||||
consider an authentication failure as a stop-point when searching for a package across indexes.
|
||||
The `index.ignore-error-codes` option can be used to recover the existing behavior, e.g.:
|
||||
|
||||
```toml
|
||||
[[tool.uv.index]]
|
||||
name = "pytorch"
|
||||
url = "https://download.pytorch.org/whl/cpu"
|
||||
ignore-error-codes = [401, 403]
|
||||
```
|
||||
|
||||
Since PyTorch's indexes always return a HTTP 403 for missing packages, uv special-cases indexes on
|
||||
the `pytorch.org` domain to ignore that error code by default.
|
||||
|
||||
- **Require the command in `uvx <name>` to be available in the Python environment
|
||||
([#11603](https://github.com/astral-sh/uv/pull/11603))**
|
||||
|
||||
Previously, `uvx` would attempt to execute a command even if it was not provided by a Python
|
||||
package. For example, if we presume `foo` is an empty Python package which provides no command,
|
||||
`uvx foo` would invoke the `foo` command on the `PATH` (if present). Now, uv will error early if
|
||||
the `foo` executable is not provided by the requested Python package. This check is not enforced
|
||||
when `--from` is used, so patterns like `uvx --from foo bash -c "..."` are still valid. uv also
|
||||
still allows `uvx foo` where the `foo` executable is provided by a dependency of `foo` instead of
|
||||
`foo` itself, as this is fairly common for packages which depend on a dedicated package for their
|
||||
command-line interface.
|
||||
|
||||
- **Use index URL instead of package URL for keyring credential lookups
|
||||
([#12651](https://github.com/astral-sh/uv/pull/12651))**
|
||||
|
||||
When determining credentials for querying a package URL, uv previously sent the full URL to the
|
||||
`keyring` command. However, some keyring plugins expect to receive the _index URL_ (which is
|
||||
usually a parent of the package URL). Now, uv requests credentials for the index URL instead. This
|
||||
behavior matches `pip`.
|
||||
|
||||
- **Remove `--version` from subcommands ([#13108](https://github.com/astral-sh/uv/pull/13108))**
|
||||
|
||||
Previously, uv allowed the `--version` flag on arbitrary subcommands, e.g., `uv run --version`.
|
||||
However, the `--version` flag is useful for other operations since uv is a package manager.
|
||||
Consequently, we've removed the `--version` flag from subcommands — it is only available as
|
||||
`uv --version`.
|
||||
|
||||
- **Omit Python 3.7 downloads from managed versions
|
||||
([#13022](https://github.com/astral-sh/uv/pull/13022))**
|
||||
|
||||
Python 3.7 is EOL and not formally supported by uv; however, Python 3.7 was previously available
|
||||
for download on a subset of platforms.
|
||||
|
||||
- **Reject non-PEP 751 TOML files in install, compile, and export commands
|
||||
([#13120](https://github.com/astral-sh/uv/pull/13120),
|
||||
[#13119](https://github.com/astral-sh/uv/pull/13119))**
|
||||
|
||||
Previously, uv treated arbitrary `.toml` files passed to commands (e.g.,
|
||||
`uv pip install -r foo.toml` or `uv pip compile -o foo.toml`) as `requirements.txt`-formatted
|
||||
files. Now, uv will error instead. If using PEP 751 lockfiles, use the standardized format for
|
||||
custom names instead, e.g., `pylock.foo.toml`.
|
||||
|
||||
- **Ignore arbitrary Python requests in version files
|
||||
([#12909](https://github.com/astral-sh/uv/pull/12909))**
|
||||
|
||||
uv allows arbitrary strings to be used for Python version requests, in which they are treated as
|
||||
an executable name to search for in the `PATH`. However, using this form of request in
|
||||
`.python-version` files is non-standard and conflicts with `pyenv-virtualenv` which writes
|
||||
environment names to `.python-version` files. In this release, uv will now ignore requests that
|
||||
are arbitrary strings when found in `.python-version` files.
|
||||
|
||||
- **Error on unknown dependency object specifiers
|
||||
([12811](https://github.com/astral-sh/uv/pull/12811))**
|
||||
|
||||
The `[dependency-groups]` entries can include "object specifiers", e.g. `set-phasers-to = ...` in:
|
||||
|
||||
```toml
|
||||
[dependency-groups]
|
||||
foo = ["pyparsing"]
|
||||
bar = [{set-phasers-to = "stun"}]
|
||||
```
|
||||
|
||||
However, the only current spec-compliant object specifier is `include-group`. Previously, uv would
|
||||
ignore unknown object specifiers. Now, uv will error.
|
||||
|
||||
- **Make `--frozen` and `--no-sources` conflicting options
|
||||
([#12671](https://github.com/astral-sh/uv/pull/12671))**
|
||||
|
||||
Using `--no-sources` always requires a new resolution and `--frozen` will always fail when used
|
||||
with it. Now, this conflict is encoded in the CLI options for clarity.
|
||||
|
||||
- **Treat empty `UV_PYTHON_INSTALL_DIR` and `UV_TOOL_DIR` as unset
|
||||
([#12907](https://github.com/astral-sh/uv/pull/12907),
|
||||
[#12905](https://github.com/astral-sh/uv/pull/12905))**
|
||||
|
||||
Previously, these variables were treated as set to the current working directory when set to an
|
||||
empty string. Now, uv will ignore these variables when empty. This matches uv's behavior for other
|
||||
environment variables which configure directories.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Disallow mixing requirements across PyTorch indexes
|
||||
([#13179](https://github.com/astral-sh/uv/pull/13179))
|
||||
- Add optional managed Python archive download cache
|
||||
([#12175](https://github.com/astral-sh/uv/pull/12175))
|
||||
- Add `poetry-core` as a `uv init` build backend option
|
||||
([#12781](https://github.com/astral-sh/uv/pull/12781))
|
||||
- Show tag hints when failing to find a compatible wheel in `pylock.toml`
|
||||
([#13136](https://github.com/astral-sh/uv/pull/13136))
|
||||
- Report Python versions in `pyvenv.cfg` version mismatch
|
||||
([#13027](https://github.com/astral-sh/uv/pull/13027))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid erroring on omitted wheel-only packages in `pylock.toml`
|
||||
([#13132](https://github.com/astral-sh/uv/pull/13132))
|
||||
- Fix display name for `uvx --version` ([#13109](https://github.com/astral-sh/uv/pull/13109))
|
||||
- Restore handling of authentication when encountering redirects
|
||||
([#13050](https://github.com/astral-sh/uv/pull/13050))
|
||||
- Respect build options (`--no-binary` et al) in `pylock.toml`
|
||||
([#13134](https://github.com/astral-sh/uv/pull/13134))
|
||||
- Use `upload-time` rather than `upload_time` in `uv.lock`
|
||||
([#13176](https://github.com/astral-sh/uv/pull/13176))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Changed `fish` completions append `>>` to overwrite `>`
|
||||
([#13130](https://github.com/astral-sh/uv/pull/13130))
|
||||
- Add `pylock.toml` mentions where relevant ([#13115](https://github.com/astral-sh/uv/pull/13115))
|
||||
- Add ROCm example to the PyTorch guide ([#13200](https://github.com/astral-sh/uv/pull/13200))
|
||||
- Upgrade PyTorch guide to CUDA 12.8 and PyTorch 2.7
|
||||
([#13199](https://github.com/astral-sh/uv/pull/13199))
|
||||
|
||||
## 0.7.1
|
||||
|
||||
### Enhancement
|
||||
|
||||
- Add support for BLAKE2b-256 ([#13204](https://github.com/astral-sh/uv/pull/13204))
|
||||
|
||||
### Bugfix
|
||||
|
||||
- Revert fix handling of authentication when encountering redirects
|
||||
([#13215](https://github.com/astral-sh/uv/pull/13215))
|
||||
|
||||
## 0.7.2
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Improve trace log for retryable errors ([#13228](https://github.com/astral-sh/uv/pull/13228))
|
||||
- Use "error" instead of "warning" for self-update message
|
||||
([#13229](https://github.com/astral-sh/uv/pull/13229))
|
||||
- Error when `uv version` is used with project-specific flags but no project is found
|
||||
([#13203](https://github.com/astral-sh/uv/pull/13203))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix incorrect virtual environment invalidation for pre-release Python versions
|
||||
([#13234](https://github.com/astral-sh/uv/pull/13234))
|
||||
- Fix patching of `clang` in managed Python sysconfig
|
||||
([#13237](https://github.com/astral-sh/uv/pull/13237))
|
||||
- Respect `--project` in `uv version` ([#13230](https://github.com/astral-sh/uv/pull/13230))
|
||||
|
||||
## 0.7.3
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add `--dry-run` support to `uv self update` ([#9829](https://github.com/astral-sh/uv/pull/9829))
|
||||
- Add `--show-with` to `uv tool list` to list packages included by `--with`
|
||||
([#13264](https://github.com/astral-sh/uv/pull/13264))
|
||||
- De-duplicate fetched index URLs ([#13205](https://github.com/astral-sh/uv/pull/13205))
|
||||
- Support more zip compression formats: bzip2, lzma, xz, zstd
|
||||
([#13285](https://github.com/astral-sh/uv/pull/13285))
|
||||
- Add support for downloading GraalPy ([#13172](https://github.com/astral-sh/uv/pull/13172))
|
||||
- Improve error message when a virtual environment Python symlink is broken
|
||||
([#12168](https://github.com/astral-sh/uv/pull/12168))
|
||||
- Use `fs_err` for paths in symlinking errors ([#13303](https://github.com/astral-sh/uv/pull/13303))
|
||||
- Minify and embed managed Python JSON at compile time
|
||||
([#12967](https://github.com/astral-sh/uv/pull/12967))
|
||||
|
||||
### Preview features
|
||||
|
||||
- Build backend: Make preview default and add configuration docs
|
||||
([#12804](https://github.com/astral-sh/uv/pull/12804))
|
||||
- Build backend: Allow escaping in globs ([#13313](https://github.com/astral-sh/uv/pull/13313))
|
||||
- Build backend: Make builds reproducible across operating systems
|
||||
([#13171](https://github.com/astral-sh/uv/pull/13171))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Add `python-downloads-json-url` option for `uv.toml` to configure custom Python installations via
|
||||
JSON URL ([#12974](https://github.com/astral-sh/uv/pull/12974))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Check nested IO errors for retries ([#13260](https://github.com/astral-sh/uv/pull/13260))
|
||||
- Accept `musllinux_1_0` as a valid platform tag
|
||||
([#13289](https://github.com/astral-sh/uv/pull/13289))
|
||||
- Fix discovery of pre-release managed Python versions in range requests
|
||||
([#13330](https://github.com/astral-sh/uv/pull/13330))
|
||||
- Respect locked script preferences in `uv run --with`
|
||||
([#13283](https://github.com/astral-sh/uv/pull/13283))
|
||||
- Retry streaming downloads on broken pipe errors
|
||||
([#13281](https://github.com/astral-sh/uv/pull/13281))
|
||||
- Treat already-installed base environment packages as preferences in `uv run --with`
|
||||
([#13284](https://github.com/astral-sh/uv/pull/13284))
|
||||
- Avoid enumerating sources in errors for path Python requests
|
||||
([#13335](https://github.com/astral-sh/uv/pull/13335))
|
||||
- Avoid re-creating virtual environment with `--no-sync`
|
||||
([#13287](https://github.com/astral-sh/uv/pull/13287))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Remove outdated description of index strategy
|
||||
([#13326](https://github.com/astral-sh/uv/pull/13326))
|
||||
- Update "Viewing the version" docs ([#13241](https://github.com/astral-sh/uv/pull/13241))
|
||||
|
||||
## 0.7.4
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add more context to external errors ([#13351](https://github.com/astral-sh/uv/pull/13351))
|
||||
- Align indentation of long arguments ([#13394](https://github.com/astral-sh/uv/pull/13394))
|
||||
- Preserve order of dependencies which are sorted naively
|
||||
([#13334](https://github.com/astral-sh/uv/pull/13334))
|
||||
- Align progress bars by largest name length ([#13266](https://github.com/astral-sh/uv/pull/13266))
|
||||
- Reinstall local packages in `uv add` ([#13462](https://github.com/astral-sh/uv/pull/13462))
|
||||
- Rename `--raw-sources` to `--raw` ([#13348](https://github.com/astral-sh/uv/pull/13348))
|
||||
- Show 'Downgraded' when `self update` is used to install an older version
|
||||
([#13340](https://github.com/astral-sh/uv/pull/13340))
|
||||
- Suggest `uv self update` if required uv version is newer
|
||||
([#13305](https://github.com/astral-sh/uv/pull/13305))
|
||||
- Add 3.14 beta images to uv Docker images ([#13390](https://github.com/astral-sh/uv/pull/13390))
|
||||
- Add comma after "i.e." in Conda environment error
|
||||
([#13423](https://github.com/astral-sh/uv/pull/13423))
|
||||
- Be more precise in unpinned packages warning
|
||||
([#13426](https://github.com/astral-sh/uv/pull/13426))
|
||||
- Fix detection of sorted dependencies when include-group is used
|
||||
([#13354](https://github.com/astral-sh/uv/pull/13354))
|
||||
- Fix display of HTTP responses in trace logs for retry of errors
|
||||
([#13339](https://github.com/astral-sh/uv/pull/13339))
|
||||
- Log skip reasons during Python installation key interpreter match checks
|
||||
([#13472](https://github.com/astral-sh/uv/pull/13472))
|
||||
- Redact credentials when displaying URLs ([#13333](https://github.com/astral-sh/uv/pull/13333))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid erroring on `pylock.toml` dependency entries
|
||||
([#13384](https://github.com/astral-sh/uv/pull/13384))
|
||||
- Avoid panics for cannot-be-a-base URLs ([#13406](https://github.com/astral-sh/uv/pull/13406))
|
||||
- Ensure cached realm credentials are applied if no password is found for index URL
|
||||
([#13463](https://github.com/astral-sh/uv/pull/13463))
|
||||
- Fix `.tgz` parsing to respect true extension
|
||||
([#13382](https://github.com/astral-sh/uv/pull/13382))
|
||||
- Fix double self-dependency ([#13366](https://github.com/astral-sh/uv/pull/13366))
|
||||
- Reject `pylock.toml` in `uv add -r` ([#13421](https://github.com/astral-sh/uv/pull/13421))
|
||||
- Retain dot-separated wheel tags during cache prune
|
||||
([#13379](https://github.com/astral-sh/uv/pull/13379))
|
||||
- Retain trailing comments after PEP 723 metadata block
|
||||
([#13460](https://github.com/astral-sh/uv/pull/13460))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Use "export" instead of "install" in `uv export` arguments
|
||||
([#13430](https://github.com/astral-sh/uv/pull/13430))
|
||||
- Remove extra newline ([#13461](https://github.com/astral-sh/uv/pull/13461))
|
||||
|
||||
### Preview features
|
||||
|
||||
- Build backend: Normalize glob paths ([#13465](https://github.com/astral-sh/uv/pull/13465))
|
||||
|
||||
## 0.7.5
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Support case-sensitive module discovery in the build backend
|
||||
([#13468](https://github.com/astral-sh/uv/pull/13468))
|
||||
- Bump Simple cache bucket to v16 ([#13498](https://github.com/astral-sh/uv/pull/13498))
|
||||
- Don't error when the script is too short for the buffer
|
||||
([#13488](https://github.com/astral-sh/uv/pull/13488))
|
||||
- Add missing word in "script not supported" error
|
||||
([#13483](https://github.com/astral-sh/uv/pull/13483))
|
||||
|
||||
## 0.7.6
|
||||
|
||||
### Python
|
||||
|
||||
- Add Python 3.14 on musl
|
||||
- Add free-threaded Python on musl
|
||||
- Add Python 3.14.0a7
|
||||
- Statically link `libpython` into the interpreter on Linux for a significant performance boost
|
||||
|
||||
See the
|
||||
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250517)
|
||||
for more details.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Improve compatibility of `VIRTUAL_ENV_PROMPT` value
|
||||
([#13501](https://github.com/astral-sh/uv/pull/13501))
|
||||
- Bump MSRV to 1.85 and Edition 2024 ([#13516](https://github.com/astral-sh/uv/pull/13516))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Respect default extras in `uv remove` ([#13380](https://github.com/astral-sh/uv/pull/13380))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Fix PowerShell code blocks ([#13511](https://github.com/astral-sh/uv/pull/13511))
|
||||
|
||||
## 0.7.7
|
||||
|
||||
### Python
|
||||
|
||||
- Work around third-party packages that (incorrectly) assume the interpreter is dynamically linking
|
||||
libpython
|
||||
- Allow the experimental JIT to be enabled at runtime on Python 3.13 and 3.14 on macOS on aarch64
|
||||
aka Apple Silicon
|
||||
|
||||
See the
|
||||
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250521)
|
||||
for more details.
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Make `uv version` lock and sync ([#13317](https://github.com/astral-sh/uv/pull/13317))
|
||||
- Fix references to `ldd` in diagnostics to correctly refer to `ld.so`
|
||||
([#13552](https://github.com/astral-sh/uv/pull/13552))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Clarify adding SSH Git dependencies ([#13534](https://github.com/astral-sh/uv/pull/13534))
|
||||
|
||||
## 0.7.8
|
||||
|
||||
### Python
|
||||
|
||||
We are reverting most of our Python changes from `uv 0.7.6` and `uv 0.7.7` due to a miscompilation
|
||||
that makes the Python interpreter behave incorrectly, resulting in spurious type-errors involving
|
||||
`str`. This issue seems to be isolated to x86_64 Linux, and affected at least Python 3.12, 3.13, and
|
||||
3.14.
|
||||
|
||||
The following changes that were introduced in those versions of uv are temporarily being reverted
|
||||
while we test and deploy a proper fix for the miscompilation:
|
||||
|
||||
- Add Python 3.14 on musl
|
||||
- Add free-threaded Python on musl
|
||||
- Add Python 3.14.0a7
|
||||
- Statically link `libpython` into the interpreter on Linux for a significant performance boost
|
||||
|
||||
See [the issue for details](https://github.com/astral-sh/uv/issues/13610).
|
||||
|
||||
### Documentation
|
||||
|
||||
- Remove misleading line in pin documentation ([#13611](https://github.com/astral-sh/uv/pull/13611))
|
||||
|
||||
## 0.7.9
|
||||
|
||||
### Python
|
||||
|
||||
The changes reverted in [0.7.8](#078) have been restored.
|
||||
|
||||
See the
|
||||
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250529)
|
||||
for more details.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Improve obfuscation of credentials in URLs ([#13560](https://github.com/astral-sh/uv/pull/13560))
|
||||
- Allow running non-default Python implementations via `uvx`
|
||||
([#13583](https://github.com/astral-sh/uv/pull/13583))
|
||||
- Add `uvw` as alias for `uv` without console window on Windows
|
||||
([#11786](https://github.com/astral-sh/uv/pull/11786))
|
||||
- Allow discovery of x86-64 managed Python builds on macOS
|
||||
([#13722](https://github.com/astral-sh/uv/pull/13722))
|
||||
- Differentiate between implicit vs explicit architecture requests
|
||||
([#13723](https://github.com/astral-sh/uv/pull/13723))
|
||||
- Implement ordering for Python architectures to prefer native installations
|
||||
([#13709](https://github.com/astral-sh/uv/pull/13709))
|
||||
- Only show the first match per platform (and architecture) by default in `uv python list`
|
||||
([#13721](https://github.com/astral-sh/uv/pull/13721))
|
||||
- Write the path of the parent environment to an `extends-environment` key in the `pyvenv.cfg` file
|
||||
of an ephemeral environment ([#13598](https://github.com/astral-sh/uv/pull/13598))
|
||||
- Improve the error message when libc cannot be found, e.g., when using the distroless containers
|
||||
([#13549](https://github.com/astral-sh/uv/pull/13549))
|
||||
|
||||
### Performance
|
||||
|
||||
- Avoid rendering info log level ([#13642](https://github.com/astral-sh/uv/pull/13642))
|
||||
- Improve performance of `uv-python` crate's manylinux submodule
|
||||
([#11131](https://github.com/astral-sh/uv/pull/11131))
|
||||
- Optimize `Version` display ([#13643](https://github.com/astral-sh/uv/pull/13643))
|
||||
- Reduce number of reference-checks for `uv cache clean`
|
||||
([#13669](https://github.com/astral-sh/uv/pull/13669))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid reinstalling dependency group members with `--all-packages`
|
||||
([#13678](https://github.com/astral-sh/uv/pull/13678))
|
||||
- Don't fail direct URL hash checking with dependency metadata
|
||||
([#13736](https://github.com/astral-sh/uv/pull/13736))
|
||||
- Exit early on `self update` if global `--offline` is set
|
||||
([#13663](https://github.com/astral-sh/uv/pull/13663))
|
||||
- Fix cases where the uv lock is incorrectly marked as out of date
|
||||
([#13635](https://github.com/astral-sh/uv/pull/13635))
|
||||
- Include pre-release versions in `uv python install --reinstall`
|
||||
([#13645](https://github.com/astral-sh/uv/pull/13645))
|
||||
- Set `LC_ALL=C` for git when checking git worktree
|
||||
([#13637](https://github.com/astral-sh/uv/pull/13637))
|
||||
- Avoid rejecting Windows paths for remote Python download JSON targets
|
||||
([#13625](https://github.com/astral-sh/uv/pull/13625))
|
||||
|
||||
### Preview
|
||||
|
||||
- Add `uv add --bounds` to configure version constraints
|
||||
([#12946](https://github.com/astral-sh/uv/pull/12946))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add documentation about Python versions to Tools concept page
|
||||
([#7673](https://github.com/astral-sh/uv/pull/7673))
|
||||
- Add example of enabling Dependabot ([#13692](https://github.com/astral-sh/uv/pull/13692))
|
||||
- Fix `exclude-newer` date format for persistent configuration files
|
||||
([#13706](https://github.com/astral-sh/uv/pull/13706))
|
||||
- Quote versions variables in GitLab documentation
|
||||
([#13679](https://github.com/astral-sh/uv/pull/13679))
|
||||
- Update Dependabot support status ([#13690](https://github.com/astral-sh/uv/pull/13690))
|
||||
- Explicitly specify to add a new repo entry to the repos list item in the `.pre-commit-config.yaml`
|
||||
([#10243](https://github.com/astral-sh/uv/pull/10243))
|
||||
- Add integration with marimo guide ([#13691](https://github.com/astral-sh/uv/pull/13691))
|
||||
- Add pronunciation to README ([#5336](https://github.com/astral-sh/uv/pull/5336))
|
||||
|
||||
## 0.7.10
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add `--show-extras` to `uv tool list` ([#13783](https://github.com/astral-sh/uv/pull/13783))
|
||||
- Add dynamically generated sysconfig replacement mappings
|
||||
([#13441](https://github.com/astral-sh/uv/pull/13441))
|
||||
- Add data locations to install wheel logs ([#13797](https://github.com/astral-sh/uv/pull/13797))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid redaction of placeholder `git` username when using SSH authentication
|
||||
([#13799](https://github.com/astral-sh/uv/pull/13799))
|
||||
- Propagate credentials to files on devpi indexes ending in `/+simple`
|
||||
([#13743](https://github.com/astral-sh/uv/pull/13743))
|
||||
- Restore retention of credentials for direct URLs in `uv export`
|
||||
([#13809](https://github.com/astral-sh/uv/pull/13809))
|
||||
|
||||
## 0.7.11
|
||||
|
||||
### Python
|
||||
|
||||
- Add Python 3.14.0b1
|
||||
- Add Python 3.13.4
|
||||
- Add Python 3.12.11
|
||||
- Add Python 3.11.13
|
||||
- Add Python 3.10.18
|
||||
- Add Python 3.9.23
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add Pyodide support ([#12731](https://github.com/astral-sh/uv/pull/12731))
|
||||
- Better error message for version specifier with missing operator
|
||||
([#13803](https://github.com/astral-sh/uv/pull/13803))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Downgrade `reqwest` and `hyper-util` to resolve connection reset errors over IPv6
|
||||
([#13835](https://github.com/astral-sh/uv/pull/13835))
|
||||
- Prefer the `uv` binary's version when checking if it's up to date
|
||||
([#13840](https://github.com/astral-sh/uv/pull/13840))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Use "terminal driver" instead of "shell" in `SIGINT` docs
|
||||
([#13787](https://github.com/astral-sh/uv/pull/13787))
|
||||
|
||||
## 0.7.12
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add `uv python pin --rm` to remove `.python-version` pins
|
||||
([#13860](https://github.com/astral-sh/uv/pull/13860))
|
||||
- Don't hint at versions removed by `exclude-newer`
|
||||
([#13884](https://github.com/astral-sh/uv/pull/13884))
|
||||
- Add hint to use `tool.uv.environments` on resolution error
|
||||
([#13455](https://github.com/astral-sh/uv/pull/13455))
|
||||
- Add hint to use `tool.uv.required-environments` on resolution error
|
||||
([#13575](https://github.com/astral-sh/uv/pull/13575))
|
||||
- Improve `python pin` error messages ([#13862](https://github.com/astral-sh/uv/pull/13862))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Lock environments during `uv sync`, `uv add` and `uv remove` to prevent race conditions
|
||||
([#13869](https://github.com/astral-sh/uv/pull/13869))
|
||||
- Add `--no-editable` to `uv export` for `pylock.toml`
|
||||
([#13852](https://github.com/astral-sh/uv/pull/13852))
|
||||
|
||||
### Documentation
|
||||
|
||||
- List `.gitignore` in project init files ([#13855](https://github.com/astral-sh/uv/pull/13855))
|
||||
- Move the pip interface documentation into the concepts section
|
||||
([#13841](https://github.com/astral-sh/uv/pull/13841))
|
||||
- Remove the configuration section in favor of concepts / reference
|
||||
([#13842](https://github.com/astral-sh/uv/pull/13842))
|
||||
- Update Git and GitHub Actions docs to mention `gh auth login`
|
||||
([#13850](https://github.com/astral-sh/uv/pull/13850))
|
||||
|
||||
### Preview
|
||||
|
||||
- Fix directory glob traversal fallback preventing exclusion of all files
|
||||
([#13882](https://github.com/astral-sh/uv/pull/13882))
|
||||
|
||||
## 0.7.13
|
||||
|
||||
### Python
|
||||
|
||||
- Add Python 3.14.0b2
|
||||
- Add Python 3.13.5
|
||||
- Fix stability of `uuid.getnode` on 3.13
|
||||
|
||||
See the
|
||||
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250612)
|
||||
for more details.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Download versions in `uv python pin` if not found
|
||||
([#13946](https://github.com/astral-sh/uv/pull/13946))
|
||||
- Use TTY detection to determine if SIGINT forwarding is enabled
|
||||
([#13925](https://github.com/astral-sh/uv/pull/13925))
|
||||
- Avoid fetching an exact, cached Git commit, even if it isn't locked
|
||||
([#13748](https://github.com/astral-sh/uv/pull/13748))
|
||||
- Add `zstd` and `deflate` to `Accept-Encoding`
|
||||
([#13982](https://github.com/astral-sh/uv/pull/13982))
|
||||
- Build binaries for riscv64 ([#12688](https://github.com/astral-sh/uv/pull/12688))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Check if relative URL is valid directory before treating as index
|
||||
([#13917](https://github.com/astral-sh/uv/pull/13917))
|
||||
- Ignore Python discovery errors during `uv python pin`
|
||||
([#13944](https://github.com/astral-sh/uv/pull/13944))
|
||||
- Do not allow `uv add --group ... --script` ([#13997](https://github.com/astral-sh/uv/pull/13997))
|
||||
|
||||
### Preview changes
|
||||
|
||||
- Build backend: Support namespace packages ([#13833](https://github.com/astral-sh/uv/pull/13833))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add 3.14 to the supported platform reference
|
||||
([#13990](https://github.com/astral-sh/uv/pull/13990))
|
||||
- Add an `llms.txt` to uv ([#13929](https://github.com/astral-sh/uv/pull/13929))
|
||||
- Add supported macOS version to the platform reference
|
||||
([#13993](https://github.com/astral-sh/uv/pull/13993))
|
||||
- Update platform support reference to include Python implementation list
|
||||
([#13991](https://github.com/astral-sh/uv/pull/13991))
|
||||
- Update pytorch.md ([#13899](https://github.com/astral-sh/uv/pull/13899))
|
||||
- Update the CLI help and reference to include references to the Python bin directory
|
||||
([#13978](https://github.com/astral-sh/uv/pull/13978))
|
||||
|
||||
## 0.7.14
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add XPU to `--torch-backend` ([#14172](https://github.com/astral-sh/uv/pull/14172))
|
||||
- Add ROCm backends to `--torch-backend` ([#14120](https://github.com/astral-sh/uv/pull/14120))
|
||||
- Remove preview label from `--torch-backend` ([#14119](https://github.com/astral-sh/uv/pull/14119))
|
||||
- Add `[tool.uv.dependency-groups].mygroup.requires-python`
|
||||
([#13735](https://github.com/astral-sh/uv/pull/13735))
|
||||
- Add auto-detection for AMD GPUs ([#14176](https://github.com/astral-sh/uv/pull/14176))
|
||||
- Show retries for HTTP status code errors ([#13897](https://github.com/astral-sh/uv/pull/13897))
|
||||
- Support transparent Python patch version upgrades
|
||||
([#13954](https://github.com/astral-sh/uv/pull/13954))
|
||||
- Warn on empty index directory ([#13940](https://github.com/astral-sh/uv/pull/13940))
|
||||
- Publish to DockerHub ([#14088](https://github.com/astral-sh/uv/pull/14088))
|
||||
|
||||
### Performance
|
||||
|
||||
- Make cold resolves about 10% faster ([#14035](https://github.com/astral-sh/uv/pull/14035))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Don't use walrus operator in interpreter query script
|
||||
([#14108](https://github.com/astral-sh/uv/pull/14108))
|
||||
- Fix handling of changes to `requires-python`
|
||||
([#14076](https://github.com/astral-sh/uv/pull/14076))
|
||||
- Fix implied `platform_machine` marker for `win_amd64` platform tag
|
||||
([#14041](https://github.com/astral-sh/uv/pull/14041))
|
||||
- Only update existing symlink directories on preview uninstall
|
||||
([#14179](https://github.com/astral-sh/uv/pull/14179))
|
||||
- Serialize Python requests for tools as canonicalized strings
|
||||
([#14109](https://github.com/astral-sh/uv/pull/14109))
|
||||
- Support netrc and same-origin credential propagation on index redirects
|
||||
([#14126](https://github.com/astral-sh/uv/pull/14126))
|
||||
- Support reading `dependency-groups` from pyproject.tomls with no `[project]`
|
||||
([#13742](https://github.com/astral-sh/uv/pull/13742))
|
||||
- Handle an existing shebang in `uv init --script`
|
||||
([#14141](https://github.com/astral-sh/uv/pull/14141))
|
||||
- Prevent concurrent updates of the environment in `uv run`
|
||||
([#14153](https://github.com/astral-sh/uv/pull/14153))
|
||||
- Filter managed Python distributions by platform before querying when included in request
|
||||
([#13936](https://github.com/astral-sh/uv/pull/13936))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Replace cuda124 with cuda128 ([#14168](https://github.com/astral-sh/uv/pull/14168))
|
||||
- Document the way member sources shadow workspace sources
|
||||
([#14136](https://github.com/astral-sh/uv/pull/14136))
|
||||
- Sync documented PyTorch integration index for CUDA and ROCm versions from PyTorch website
|
||||
([#14100](https://github.com/astral-sh/uv/pull/14100))
|
||||
|
||||
## 0.7.15
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Consistently use `Ordering::Relaxed` for standalone atomic use cases
|
||||
([#14190](https://github.com/astral-sh/uv/pull/14190))
|
||||
- Warn on ambiguous relative paths for `--index`
|
||||
([#14152](https://github.com/astral-sh/uv/pull/14152))
|
||||
- Skip GitHub fast path when rate-limited ([#13033](https://github.com/astral-sh/uv/pull/13033))
|
||||
- Preserve newlines in `schema.json` descriptions
|
||||
([#13693](https://github.com/astral-sh/uv/pull/13693))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Add check for using minor version link when creating a venv on Windows
|
||||
([#14252](https://github.com/astral-sh/uv/pull/14252))
|
||||
- Strip query parameters when parsing source URL
|
||||
([#14224](https://github.com/astral-sh/uv/pull/14224))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add a link to PyPI FAQ to clarify what per-project token is
|
||||
([#14242](https://github.com/astral-sh/uv/pull/14242))
|
||||
|
||||
### Preview features
|
||||
|
||||
- Allow symlinks in the build backend ([#14212](https://github.com/astral-sh/uv/pull/14212))
|
||||
|
||||
## 0.7.16
|
||||
|
||||
### Python
|
||||
|
||||
- Add Python 3.14.0b3
|
||||
|
||||
See the
|
||||
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250626)
|
||||
for more details.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Include path or URL when failing to convert in lockfile
|
||||
([#14292](https://github.com/astral-sh/uv/pull/14292))
|
||||
- Warn when `~=` is used as a Python version specifier without a patch version
|
||||
([#14008](https://github.com/astral-sh/uv/pull/14008))
|
||||
|
||||
### Preview features
|
||||
|
||||
- Ensure preview default Python installs are upgradeable
|
||||
([#14261](https://github.com/astral-sh/uv/pull/14261))
|
||||
|
||||
### Performance
|
||||
|
||||
- Share workspace cache between lock and sync operations
|
||||
([#14321](https://github.com/astral-sh/uv/pull/14321))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Allow local indexes to reference remote files
|
||||
([#14294](https://github.com/astral-sh/uv/pull/14294))
|
||||
- Avoid rendering desugared prefix matches in error messages
|
||||
([#14195](https://github.com/astral-sh/uv/pull/14195))
|
||||
- Avoid using path URL for workspace Git dependencies in `requirements.txt`
|
||||
([#14288](https://github.com/astral-sh/uv/pull/14288))
|
||||
- Normalize index URLs to remove trailing slash
|
||||
([#14245](https://github.com/astral-sh/uv/pull/14245))
|
||||
- Respect URL-encoded credentials in redirect location
|
||||
([#14315](https://github.com/astral-sh/uv/pull/14315))
|
||||
- Lock the source tree when running setuptools, to protect concurrent builds
|
||||
([#14174](https://github.com/astral-sh/uv/pull/14174))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Note that GCP Artifact Registry download URLs must have `/simple` component
|
||||
([#14251](https://github.com/astral-sh/uv/pull/14251))
|
||||
|
||||
## 0.7.17
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Apply build constraints when resolving `--with` dependencies
|
||||
([#14340](https://github.com/astral-sh/uv/pull/14340))
|
||||
- Drop trailing slashes when converting index URL from URL
|
||||
([#14346](https://github.com/astral-sh/uv/pull/14346))
|
||||
- Ignore `UV_PYTHON_CACHE_DIR` when empty ([#14336](https://github.com/astral-sh/uv/pull/14336))
|
||||
- Fix error message ordering for `pyvenv.cfg` version conflict
|
||||
([#14329](https://github.com/astral-sh/uv/pull/14329))
|
||||
|
||||
## 0.7.18
|
||||
|
||||
### Python
|
||||
|
||||
- Added arm64 Windows Python 3.11, 3.12, 3.13, and 3.14. These are not downloaded by default, since
|
||||
x86-64 Python has broader ecosystem support on Windows. However, they can be requested with
|
||||
`cpython-<version>-windows-aarch64`.
|
||||
|
||||
See the
|
||||
[python-build-standalone release](https://github.com/astral-sh/python-build-standalone/releases/tag/20250630)
|
||||
for more details.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Keep track of retries in `ManagedPythonDownload::fetch_with_retry`
|
||||
([#14378](https://github.com/astral-sh/uv/pull/14378))
|
||||
- Reuse build (virtual) environments across resolution and installation
|
||||
([#14338](https://github.com/astral-sh/uv/pull/14338))
|
||||
- Improve trace message for cached Python interpreter query
|
||||
([#14328](https://github.com/astral-sh/uv/pull/14328))
|
||||
- Use parsed URLs for conflicting URL error message
|
||||
([#14380](https://github.com/astral-sh/uv/pull/14380))
|
||||
|
||||
### Preview features
|
||||
|
||||
- Ignore invalid build backend settings when not building
|
||||
([#14372](https://github.com/astral-sh/uv/pull/14372))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix equals-star and tilde-equals with `python_version` and `python_full_version`
|
||||
([#14271](https://github.com/astral-sh/uv/pull/14271))
|
||||
- Include the canonical path in the interpreter query cache key
|
||||
([#14331](https://github.com/astral-sh/uv/pull/14331))
|
||||
- Only drop build directories on program exit ([#14304](https://github.com/astral-sh/uv/pull/14304))
|
||||
- Error instead of panic on conflict between global and subcommand flags
|
||||
([#14368](https://github.com/astral-sh/uv/pull/14368))
|
||||
- Consistently normalize trailing slashes on URLs with no path segments
|
||||
([#14349](https://github.com/astral-sh/uv/pull/14349))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add instructions for publishing to JFrog's Artifactory
|
||||
([#14253](https://github.com/astral-sh/uv/pull/14253))
|
||||
- Edits to the build backend documentation ([#14376](https://github.com/astral-sh/uv/pull/14376))
|
||||
|
||||
## 0.7.19
|
||||
|
||||
The **[uv build backend](https://docs.astral.sh/uv/concepts/build-backend/) is now stable**, and
|
||||
considered ready for production use.
|
||||
|
||||
The uv build backend is a great choice for pure Python projects. It has reasonable defaults, with
|
||||
the goal of requiring zero configuration for most users, but provides flexible configuration to
|
||||
accommodate most Python project structures. It integrates tightly with uv, to improve messaging and
|
||||
user experience. It validates project metadata and structures, preventing common mistakes. And,
|
||||
finally, it's very fast — `uv sync` on a new project (from `uv init`) is 10-30x faster than with
|
||||
other build backends.
|
||||
|
||||
To use uv as a build backend in an existing project, add `uv_build` to the `[build-system]` section
|
||||
in your `pyproject.toml`:
|
||||
|
||||
```toml
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.7.19,<0.8.0"]
|
||||
build-backend = "uv_build"
|
||||
```
|
||||
|
||||
In a future release, it will replace `hatchling` as the default in `uv init`. As before, uv will
|
||||
remain compatible with all standards-compliant build backends.
|
||||
|
||||
### Python
|
||||
|
||||
- Add PGO distributions of Python for aarch64 Linux, which are more optimized for better performance
|
||||
|
||||
See the
|
||||
[python-build-standalone release](https://github.com/astral-sh/python-build-standalone/releases/tag/20250702)
|
||||
for more details.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Ignore Python patch version for `--universal` pip compile
|
||||
([#14405](https://github.com/astral-sh/uv/pull/14405))
|
||||
- Update the tilde version specifier warning to include more context
|
||||
([#14335](https://github.com/astral-sh/uv/pull/14335))
|
||||
- Clarify behavior and hint on tool install when no executables are available
|
||||
([#14423](https://github.com/astral-sh/uv/pull/14423))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Make project and interpreter lock acquisition non-fatal
|
||||
([#14404](https://github.com/astral-sh/uv/pull/14404))
|
||||
- Include `sys.prefix` in cached environment keys to avoid `--with` collisions across projects
|
||||
([#14403](https://github.com/astral-sh/uv/pull/14403))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add a migration guide from pip to uv projects
|
||||
([#12382](https://github.com/astral-sh/uv/pull/12382))
|
||||
|
||||
## 0.7.20
|
||||
|
||||
### Python
|
||||
|
||||
- Add Python 3.14.0b4
|
||||
- Add zstd support to Python 3.14 on Unix (it already was available on Windows)
|
||||
- Add PyPy 7.3.20 (for Python 3.11.13)
|
||||
|
||||
See the [PyPy](https://pypy.org/posts/2025/07/pypy-v7320-release.html) and
|
||||
[`python-build-standalone`](https://github.com/astral-sh/python-build-standalone/releases/tag/20250708)
|
||||
release notes for more details.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add `--workspace` flag to `uv add` ([#14496](https://github.com/astral-sh/uv/pull/14496))
|
||||
- Add auto-detection for Intel GPUs ([#14386](https://github.com/astral-sh/uv/pull/14386))
|
||||
- Drop trailing arguments when writing shebangs
|
||||
([#14519](https://github.com/astral-sh/uv/pull/14519))
|
||||
- Add debug message when skipping Python downloads
|
||||
([#14509](https://github.com/astral-sh/uv/pull/14509))
|
||||
- Add support for declaring multiple modules in namespace packages
|
||||
([#14460](https://github.com/astral-sh/uv/pull/14460))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Revert normalization of trailing slashes on index URLs
|
||||
([#14511](https://github.com/astral-sh/uv/pull/14511))
|
||||
- Fix forced resolution with all extras in `uv version`
|
||||
([#14434](https://github.com/astral-sh/uv/pull/14434))
|
||||
- Fix handling of pre-releases in preferences ([#14498](https://github.com/astral-sh/uv/pull/14498))
|
||||
- Remove transparent variants in `uv-extract` to enable retries
|
||||
([#14450](https://github.com/astral-sh/uv/pull/14450))
|
||||
|
||||
### Rust API
|
||||
|
||||
- Add method to get packages involved in a `NoSolutionError`
|
||||
([#14457](https://github.com/astral-sh/uv/pull/14457))
|
||||
- Make `ErrorTree` for `NoSolutionError` public
|
||||
([#14444](https://github.com/astral-sh/uv/pull/14444))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Finish incomplete sentence in pip migration guide
|
||||
([#14432](https://github.com/astral-sh/uv/pull/14432))
|
||||
- Remove `cache-dependency-glob` examples for `setup-uv`
|
||||
([#14493](https://github.com/astral-sh/uv/pull/14493))
|
||||
- Remove `uv pip sync` suggestion with `pyproject.toml`
|
||||
([#14510](https://github.com/astral-sh/uv/pull/14510))
|
||||
- Update documentation for GitHub to use `setup-uv@v6`
|
||||
([#14490](https://github.com/astral-sh/uv/pull/14490))
|
||||
|
||||
## 0.7.21
|
||||
|
||||
### Python
|
||||
|
||||
- Restore the SQLite `fts4`, `fts5`, `rtree`, and `geopoly` extensions on macOS and Linux
|
||||
|
||||
See the
|
||||
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250712)
|
||||
for more details.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add `--python-platform` to `uv sync` ([#14320](https://github.com/astral-sh/uv/pull/14320))
|
||||
- Support pre-releases in `uv version --bump` ([#13578](https://github.com/astral-sh/uv/pull/13578))
|
||||
- Add `-w` shorthand for `--with` ([#14530](https://github.com/astral-sh/uv/pull/14530))
|
||||
- Add an exception handler on Windows to display information on crash
|
||||
([#14582](https://github.com/astral-sh/uv/pull/14582))
|
||||
- Add hint when Python downloads are disabled ([#14522](https://github.com/astral-sh/uv/pull/14522))
|
||||
- Add `UV_HTTP_RETRIES` to customize retry counts
|
||||
([#14544](https://github.com/astral-sh/uv/pull/14544))
|
||||
- Follow leaf symlinks matched by globs in `cache-key`
|
||||
([#13438](https://github.com/astral-sh/uv/pull/13438))
|
||||
- Support parent path components (`..`) in globs in `cache-key`
|
||||
([#13469](https://github.com/astral-sh/uv/pull/13469))
|
||||
- Improve `cache-key` performance ([#13469](https://github.com/astral-sh/uv/pull/13469))
|
||||
|
||||
### Preview features
|
||||
|
||||
- Add `uv sync --output-format json` ([#13689](https://github.com/astral-sh/uv/pull/13689))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Do not re-resolve with a new Python version in `uv tool` if it is incompatible with `--python`
|
||||
([#14606](https://github.com/astral-sh/uv/pull/14606))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Document how to nest dependency groups with `include-group`
|
||||
([#14539](https://github.com/astral-sh/uv/pull/14539))
|
||||
- Fix repeated word in Pyodide doc ([#14554](https://github.com/astral-sh/uv/pull/14554))
|
||||
- Update CONTRIBUTING.md with instructions to format Markdown files via Docker
|
||||
([#14246](https://github.com/astral-sh/uv/pull/14246))
|
||||
- Fix version number for `setup-python` ([#14533](https://github.com/astral-sh/uv/pull/14533))
|
||||
|
||||
## 0.7.22
|
||||
|
||||
### Python
|
||||
|
||||
- Upgrade GraalPy to 24.2.2
|
||||
|
||||
See the [GraalPy release notes](https://github.com/oracle/graalpython/releases/tag/graal-24.2.2) for
|
||||
more details.
|
||||
|
||||
### Configuration
|
||||
|
||||
- Add `UV_COMPILE_BYTECODE_TIMEOUT` environment variable
|
||||
([#14369](https://github.com/astral-sh/uv/pull/14369))
|
||||
- Allow users to override index `cache-control` headers
|
||||
([#14620](https://github.com/astral-sh/uv/pull/14620))
|
||||
- Add `UV_LIBC` to override libc selection in multi-libc environment
|
||||
([#14646](https://github.com/astral-sh/uv/pull/14646))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix `--all-arches` when paired with `--only-downloads`
|
||||
([#14629](https://github.com/astral-sh/uv/pull/14629))
|
||||
- Skip Windows Python interpreters that return a broken MSIX package code
|
||||
([#14636](https://github.com/astral-sh/uv/pull/14636))
|
||||
- Warn on invalid `uv.toml` when provided via direct path
|
||||
([#14653](https://github.com/astral-sh/uv/pull/14653))
|
||||
- Improve async signal safety in Windows exception handler
|
||||
([#14619](https://github.com/astral-sh/uv/pull/14619))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Mention the `revision` in the lockfile versioning doc
|
||||
([#14634](https://github.com/astral-sh/uv/pull/14634))
|
||||
- Move "Conflicting dependencies" to the "Resolution" page
|
||||
([#14633](https://github.com/astral-sh/uv/pull/14633))
|
||||
- Rename "Dependency specifiers" section to exclude PEP 508 reference
|
||||
([#14631](https://github.com/astral-sh/uv/pull/14631))
|
||||
- Suggest `uv cache clean` prior to `--reinstall`
|
||||
([#14659](https://github.com/astral-sh/uv/pull/14659))
|
||||
|
||||
### Preview features
|
||||
|
||||
- Make preview Python registration on Windows non-fatal
|
||||
([#14614](https://github.com/astral-sh/uv/pull/14614))
|
||||
- Update preview installation of Python executables to be non-fatal
|
||||
([#14612](https://github.com/astral-sh/uv/pull/14612))
|
||||
- Add `uv python update-shell` ([#14627](https://github.com/astral-sh/uv/pull/14627))
|
||||
|
|
@ -15,6 +15,7 @@ mod credentials;
|
|||
mod index;
|
||||
mod keyring;
|
||||
mod middleware;
|
||||
mod providers;
|
||||
mod realm;
|
||||
|
||||
// TODO(zanieb): Consider passing a cache explicitly throughout
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ use reqwest::{Request, Response};
|
|||
use reqwest_middleware::{Error, Middleware, Next};
|
||||
use tracing::{debug, trace, warn};
|
||||
|
||||
use crate::providers::HuggingFaceProvider;
|
||||
use crate::{
|
||||
CREDENTIALS_CACHE, CredentialsCache, KeyringProvider,
|
||||
cache::FetchUrl,
|
||||
|
|
@ -457,9 +458,8 @@ impl AuthMiddleware {
|
|||
Some(credentials)
|
||||
};
|
||||
|
||||
return self
|
||||
.complete_request(credentials, request, extensions, next, auth_policy)
|
||||
.await;
|
||||
self.complete_request(credentials, request, extensions, next, auth_policy)
|
||||
.await
|
||||
}
|
||||
|
||||
/// Fetch credentials for a URL.
|
||||
|
|
@ -503,6 +503,13 @@ impl AuthMiddleware {
|
|||
return credentials;
|
||||
}
|
||||
|
||||
// Support for known providers, like Hugging Face.
|
||||
if let Some(credentials) = HuggingFaceProvider::credentials_for(url).map(Arc::new) {
|
||||
debug!("Found Hugging Face credentials for {url}");
|
||||
self.cache().fetches.done(key, Some(credentials.clone()));
|
||||
return Some(credentials);
|
||||
}
|
||||
|
||||
// Netrc support based on: <https://github.com/gribouille/netrc>.
|
||||
let credentials = if let Some(credentials) = self.netrc.get().and_then(|netrc| {
|
||||
debug!("Checking netrc for credentials for {url}");
|
||||
|
|
|
|||
|
|
@ -0,0 +1,49 @@
|
|||
use std::sync::LazyLock;
|
||||
use tracing::debug;
|
||||
use url::Url;
|
||||
|
||||
use uv_static::EnvVars;
|
||||
|
||||
use crate::Credentials;
|
||||
use crate::realm::{Realm, RealmRef};
|
||||
|
||||
/// The [`Realm`] for the Hugging Face platform.
|
||||
static HUGGING_FACE_REALM: LazyLock<Realm> = LazyLock::new(|| {
|
||||
let url = Url::parse("https://huggingface.co").expect("Failed to parse Hugging Face URL");
|
||||
Realm::from(&url)
|
||||
});
|
||||
|
||||
/// The authentication token for the Hugging Face platform, if set.
|
||||
static HUGGING_FACE_TOKEN: LazyLock<Option<Vec<u8>>> = LazyLock::new(|| {
|
||||
// Extract the Hugging Face token from the environment variable, if it exists.
|
||||
let hf_token = std::env::var(EnvVars::HF_TOKEN)
|
||||
.ok()
|
||||
.map(String::into_bytes)
|
||||
.filter(|token| !token.is_empty())?;
|
||||
|
||||
if std::env::var_os(EnvVars::UV_NO_HF_TOKEN).is_some() {
|
||||
debug!("Ignoring Hugging Face token from environment due to `UV_NO_HF_TOKEN`");
|
||||
return None;
|
||||
}
|
||||
|
||||
debug!("Found Hugging Face token in environment");
|
||||
Some(hf_token)
|
||||
});
|
||||
|
||||
/// A provider for authentication credentials for the Hugging Face platform.
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub(crate) struct HuggingFaceProvider;
|
||||
|
||||
impl HuggingFaceProvider {
|
||||
/// Returns the credentials for the Hugging Face platform, if available.
|
||||
pub(crate) fn credentials_for(url: &Url) -> Option<Credentials> {
|
||||
if RealmRef::from(url) == *HUGGING_FACE_REALM {
|
||||
if let Some(token) = HUGGING_FACE_TOKEN.as_ref() {
|
||||
return Some(Credentials::Bearer {
|
||||
token: token.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
use std::hash::{Hash, Hasher};
|
||||
use std::{fmt::Display, fmt::Formatter};
|
||||
|
||||
use url::Url;
|
||||
use uv_small_str::SmallString;
|
||||
|
||||
|
|
@ -22,7 +22,7 @@ use uv_small_str::SmallString;
|
|||
// The port is only allowed to differ if it matches the "default port" for the scheme.
|
||||
// However, `url` (and therefore `reqwest`) sets the `port` to `None` if it matches the default port
|
||||
// so we do not need any special handling here.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct Realm {
|
||||
scheme: SmallString,
|
||||
host: Option<SmallString>,
|
||||
|
|
@ -59,6 +59,76 @@ impl Display for Realm {
|
|||
}
|
||||
}
|
||||
|
||||
impl PartialEq for Realm {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
RealmRef::from(self) == RealmRef::from(other)
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for Realm {}
|
||||
|
||||
impl Hash for Realm {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
RealmRef::from(self).hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
/// A reference to a [`Realm`] that can be used for zero-allocation comparisons.
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub(crate) struct RealmRef<'a> {
|
||||
scheme: &'a str,
|
||||
host: Option<&'a str>,
|
||||
port: Option<u16>,
|
||||
}
|
||||
|
||||
impl<'a> From<&'a Url> for RealmRef<'a> {
|
||||
fn from(url: &'a Url) -> Self {
|
||||
Self {
|
||||
scheme: url.scheme(),
|
||||
host: url.host_str(),
|
||||
port: url.port(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for RealmRef<'_> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.scheme == other.scheme && self.host == other.host && self.port == other.port
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for RealmRef<'_> {}
|
||||
|
||||
impl Hash for RealmRef<'_> {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.scheme.hash(state);
|
||||
self.host.hash(state);
|
||||
self.port.hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> PartialEq<RealmRef<'a>> for Realm {
|
||||
fn eq(&self, rhs: &RealmRef<'a>) -> bool {
|
||||
RealmRef::from(self) == *rhs
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<Realm> for RealmRef<'_> {
|
||||
fn eq(&self, rhs: &Realm) -> bool {
|
||||
*self == RealmRef::from(rhs)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a Realm> for RealmRef<'a> {
|
||||
fn from(realm: &'a Realm) -> Self {
|
||||
Self {
|
||||
scheme: &realm.scheme,
|
||||
host: realm.host.as_deref(),
|
||||
port: realm.port,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use url::{ParseError, Url};
|
||||
|
|
|
|||
|
|
@ -680,7 +680,7 @@ mod tests {
|
|||
license = { file = "license.txt" }
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.5.15,<0.6"]
|
||||
requires = ["uv_build>=0.5.15,<0.6.0"]
|
||||
build-backend = "uv_build"
|
||||
"#
|
||||
},
|
||||
|
|
@ -748,7 +748,7 @@ mod tests {
|
|||
version = "1.0.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.5.15,<0.6"]
|
||||
requires = ["uv_build>=0.5.15,<0.6.0"]
|
||||
build-backend = "uv_build"
|
||||
"#
|
||||
},
|
||||
|
|
@ -812,7 +812,7 @@ mod tests {
|
|||
version = "1.0.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.5.15,<0.6"]
|
||||
requires = ["uv_build>=0.5.15,<0.6.0"]
|
||||
build-backend = "uv_build"
|
||||
|
||||
[tool.uv.build-backend]
|
||||
|
|
@ -854,7 +854,7 @@ mod tests {
|
|||
version = "1.0.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.5.15,<0.6"]
|
||||
requires = ["uv_build>=0.5.15,<0.6.0"]
|
||||
build-backend = "uv_build"
|
||||
|
||||
[tool.uv.build-backend]
|
||||
|
|
@ -879,7 +879,7 @@ mod tests {
|
|||
version = "1.0.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.5.15,<0.6"]
|
||||
requires = ["uv_build>=0.5.15,<0.6.0"]
|
||||
build-backend = "uv_build"
|
||||
|
||||
[tool.uv.build-backend]
|
||||
|
|
@ -928,7 +928,7 @@ mod tests {
|
|||
version = "1.0.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.5.15,<0.6"]
|
||||
requires = ["uv_build>=0.5.15,<0.6.0"]
|
||||
build-backend = "uv_build"
|
||||
|
||||
[tool.uv.build-backend]
|
||||
|
|
@ -959,7 +959,7 @@ mod tests {
|
|||
version = "1.0.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.5.15,<0.6"]
|
||||
requires = ["uv_build>=0.5.15,<0.6.0"]
|
||||
build-backend = "uv_build"
|
||||
"#
|
||||
};
|
||||
|
|
@ -1010,7 +1010,7 @@ mod tests {
|
|||
version = "1.0.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.5.15,<0.6"]
|
||||
requires = ["uv_build>=0.5.15,<0.6.0"]
|
||||
build-backend = "uv_build"
|
||||
|
||||
[tool.uv.build-backend]
|
||||
|
|
@ -1036,7 +1036,7 @@ mod tests {
|
|||
module-name = "simple_namespace.part"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.5.15,<0.6"]
|
||||
requires = ["uv_build>=0.5.15,<0.6.0"]
|
||||
build-backend = "uv_build"
|
||||
"#
|
||||
};
|
||||
|
|
@ -1104,7 +1104,7 @@ mod tests {
|
|||
namespace = true
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.5.15,<0.6"]
|
||||
requires = ["uv_build>=0.5.15,<0.6.0"]
|
||||
build-backend = "uv_build"
|
||||
"#
|
||||
};
|
||||
|
|
@ -1127,7 +1127,7 @@ mod tests {
|
|||
namespace = true
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.5.15,<0.6"]
|
||||
requires = ["uv_build>=0.5.15,<0.6.0"]
|
||||
build-backend = "uv_build"
|
||||
"#
|
||||
};
|
||||
|
|
@ -1188,7 +1188,7 @@ mod tests {
|
|||
namespace = true
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.5.15,<0.6"]
|
||||
requires = ["uv_build>=0.5.15,<0.6.0"]
|
||||
build-backend = "uv_build"
|
||||
"#
|
||||
};
|
||||
|
|
@ -1211,7 +1211,7 @@ mod tests {
|
|||
module-name = "cloud-stubs.db.schema"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.5.15,<0.6"]
|
||||
requires = ["uv_build>=0.5.15,<0.6.0"]
|
||||
build-backend = "uv_build"
|
||||
"#
|
||||
};
|
||||
|
|
@ -1261,7 +1261,7 @@ mod tests {
|
|||
module-name = ["foo", "simple_namespace.part_a", "simple_namespace.part_b"]
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.5.15,<0.6"]
|
||||
requires = ["uv_build>=0.5.15,<0.6.0"]
|
||||
build-backend = "uv_build"
|
||||
"#
|
||||
};
|
||||
|
|
|
|||
|
|
@ -171,7 +171,7 @@ impl PyProjectToml {
|
|||
///
|
||||
/// ```toml
|
||||
/// [build-system]
|
||||
/// requires = ["uv_build>=0.4.15,<0.5"]
|
||||
/// requires = ["uv_build>=0.4.15,<0.5.0"]
|
||||
/// build-backend = "uv_build"
|
||||
/// ```
|
||||
pub fn check_build_system(&self, uv_version: &str) -> Vec<String> {
|
||||
|
|
@ -703,7 +703,7 @@ struct Project {
|
|||
/// The optional `project.readme` key in a pyproject.toml as specified in
|
||||
/// <https://packaging.python.org/en/latest/specifications/pyproject-toml/#readme>.
|
||||
#[derive(Deserialize, Debug, Clone)]
|
||||
#[serde(untagged, rename_all = "kebab-case")]
|
||||
#[serde(untagged, rename_all_fields = "kebab-case")]
|
||||
pub(crate) enum Readme {
|
||||
/// Relative path to the README.
|
||||
String(PathBuf),
|
||||
|
|
@ -713,7 +713,7 @@ pub(crate) enum Readme {
|
|||
content_type: String,
|
||||
charset: Option<String>,
|
||||
},
|
||||
/// The full description of the project as inline value.
|
||||
/// The full description of the project as an inline value.
|
||||
Text {
|
||||
text: String,
|
||||
content_type: String,
|
||||
|
|
@ -826,7 +826,7 @@ mod tests {
|
|||
{payload}
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.4.15,<0.5"]
|
||||
requires = ["uv_build>=0.4.15,<0.5.0"]
|
||||
build-backend = "uv_build"
|
||||
"#
|
||||
}
|
||||
|
|
@ -909,7 +909,7 @@ mod tests {
|
|||
foo-bar = "foo:bar"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.4.15,<0.5"]
|
||||
requires = ["uv_build>=0.4.15,<0.5.0"]
|
||||
build-backend = "uv_build"
|
||||
"#
|
||||
};
|
||||
|
|
@ -965,6 +965,65 @@ mod tests {
|
|||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn readme() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
|
||||
fs_err::write(
|
||||
temp_dir.path().join("Readme.md"),
|
||||
indoc! {r"
|
||||
# Foo
|
||||
|
||||
This is the foo library.
|
||||
"},
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
fs_err::write(
|
||||
temp_dir.path().join("License.txt"),
|
||||
indoc! {r#"
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
|
||||
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
|
||||
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
|
||||
OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
"#},
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let contents = indoc! {r#"
|
||||
# See https://github.com/pypa/sampleproject/blob/main/pyproject.toml for another example
|
||||
|
||||
[project]
|
||||
name = "hello-world"
|
||||
version = "0.1.0"
|
||||
description = "A Python package"
|
||||
readme = { file = "Readme.md", content-type = "text/markdown" }
|
||||
requires_python = ">=3.12"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.4.15,<0.5"]
|
||||
build-backend = "uv_build"
|
||||
"#
|
||||
};
|
||||
|
||||
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
|
||||
let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();
|
||||
|
||||
assert_snapshot!(metadata.core_metadata_format(), @r"
|
||||
Metadata-Version: 2.3
|
||||
Name: hello-world
|
||||
Version: 0.1.0
|
||||
Summary: A Python package
|
||||
Description-Content-Type: text/markdown
|
||||
|
||||
# Foo
|
||||
|
||||
This is the foo library.
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn self_extras() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
|
|
@ -1036,7 +1095,7 @@ mod tests {
|
|||
foo-bar = "foo:bar"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.4.15,<0.5"]
|
||||
requires = ["uv_build>=0.4.15,<0.5.0"]
|
||||
build-backend = "uv_build"
|
||||
"#
|
||||
};
|
||||
|
|
@ -1135,7 +1194,7 @@ mod tests {
|
|||
version = "0.1.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.4.15,<0.5", "wheel"]
|
||||
requires = ["uv_build>=0.4.15,<0.5.0", "wheel"]
|
||||
build-backend = "uv_build"
|
||||
"#};
|
||||
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
|
||||
|
|
@ -1171,7 +1230,7 @@ mod tests {
|
|||
version = "0.1.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.4.15,<0.5"]
|
||||
requires = ["uv_build>=0.4.15,<0.5.0"]
|
||||
build-backend = "setuptools"
|
||||
"#};
|
||||
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
|
||||
|
|
|
|||
|
|
@ -20,8 +20,8 @@ use fs_err as fs;
|
|||
use indoc::formatdoc;
|
||||
use itertools::Itertools;
|
||||
use rustc_hash::FxHashMap;
|
||||
use serde::de::{IntoDeserializer, SeqAccess, Visitor, value};
|
||||
use serde::{Deserialize, Deserializer, de};
|
||||
use serde::de::{self, IntoDeserializer, SeqAccess, Visitor, value};
|
||||
use serde::{Deserialize, Deserializer};
|
||||
use tempfile::TempDir;
|
||||
use tokio::io::AsyncBufReadExt;
|
||||
use tokio::process::Command;
|
||||
|
|
@ -540,12 +540,10 @@ impl SourceBuild {
|
|||
) -> Result<(Pep517Backend, Option<Project>), Box<Error>> {
|
||||
match fs::read_to_string(source_tree.join("pyproject.toml")) {
|
||||
Ok(toml) => {
|
||||
let pyproject_toml: toml_edit::ImDocument<_> =
|
||||
toml_edit::ImDocument::from_str(&toml)
|
||||
.map_err(Error::InvalidPyprojectTomlSyntax)?;
|
||||
let pyproject_toml: PyProjectToml =
|
||||
PyProjectToml::deserialize(pyproject_toml.into_deserializer())
|
||||
.map_err(Error::InvalidPyprojectTomlSchema)?;
|
||||
let pyproject_toml = toml_edit::Document::from_str(&toml)
|
||||
.map_err(Error::InvalidPyprojectTomlSyntax)?;
|
||||
let pyproject_toml = PyProjectToml::deserialize(pyproject_toml.into_deserializer())
|
||||
.map_err(Error::InvalidPyprojectTomlSchema)?;
|
||||
|
||||
let backend = if let Some(build_system) = pyproject_toml.build_system {
|
||||
// If necessary, lower the requirements.
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "uv-build"
|
||||
version = "0.8.0"
|
||||
version = "0.8.2"
|
||||
edition.workspace = true
|
||||
rust-version.workspace = true
|
||||
homepage.workspace = true
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
[project]
|
||||
name = "uv-build"
|
||||
version = "0.8.0"
|
||||
version = "0.8.2"
|
||||
description = "The uv build backend"
|
||||
authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
|
||||
requires-python = ">=3.8"
|
||||
|
|
|
|||
|
|
@ -1202,6 +1202,14 @@ pub struct PipCompileArgs {
|
|||
#[arg(long, overrides_with("all_extras"), hide = true)]
|
||||
pub no_all_extras: bool,
|
||||
|
||||
/// Install the specified dependency group from a `pyproject.toml`.
|
||||
///
|
||||
/// If no path is provided, the `pyproject.toml` in the working directory is used.
|
||||
///
|
||||
/// May be provided multiple times.
|
||||
#[arg(long, group = "sources")]
|
||||
pub group: Vec<PipGroupName>,
|
||||
|
||||
#[command(flatten)]
|
||||
pub resolver: ResolverArgs,
|
||||
|
||||
|
|
@ -1216,14 +1224,6 @@ pub struct PipCompileArgs {
|
|||
#[arg(long, overrides_with("no_deps"), hide = true)]
|
||||
pub deps: bool,
|
||||
|
||||
/// Install the specified dependency group from a `pyproject.toml`.
|
||||
///
|
||||
/// If no path is provided, the `pyproject.toml` in the working directory is used.
|
||||
///
|
||||
/// May be provided multiple times.
|
||||
#[arg(long, group = "sources")]
|
||||
pub group: Vec<PipGroupName>,
|
||||
|
||||
/// Write the compiled requirements to the given `requirements.txt` or `pylock.toml` file.
|
||||
///
|
||||
/// If the file already exists, the existing versions will be preferred when resolving
|
||||
|
|
@ -1518,6 +1518,30 @@ pub struct PipSyncArgs {
|
|||
#[arg(long, short, alias = "build-constraint", env = EnvVars::UV_BUILD_CONSTRAINT, value_delimiter = ' ', value_parser = parse_maybe_file_path)]
|
||||
pub build_constraints: Vec<Maybe<PathBuf>>,
|
||||
|
||||
/// Include optional dependencies from the specified extra name; may be provided more than once.
|
||||
///
|
||||
/// Only applies to `pylock.toml`, `pyproject.toml`, `setup.py`, and `setup.cfg` sources.
|
||||
#[arg(long, conflicts_with = "all_extras", value_parser = extra_name_with_clap_error)]
|
||||
pub extra: Option<Vec<ExtraName>>,
|
||||
|
||||
/// Include all optional dependencies.
|
||||
///
|
||||
/// Only applies to `pylock.toml`, `pyproject.toml`, `setup.py`, and `setup.cfg` sources.
|
||||
#[arg(long, conflicts_with = "extra", overrides_with = "no_all_extras")]
|
||||
pub all_extras: bool,
|
||||
|
||||
#[arg(long, overrides_with("all_extras"), hide = true)]
|
||||
pub no_all_extras: bool,
|
||||
|
||||
/// Install the specified dependency group from a `pylock.toml` or `pyproject.toml`.
|
||||
///
|
||||
/// If no path is provided, the `pylock.toml` or `pyproject.toml` in the working directory is
|
||||
/// used.
|
||||
///
|
||||
/// May be provided multiple times.
|
||||
#[arg(long, group = "sources")]
|
||||
pub group: Vec<PipGroupName>,
|
||||
|
||||
#[command(flatten)]
|
||||
pub installer: InstallerArgs,
|
||||
|
||||
|
|
@ -1798,19 +1822,28 @@ pub struct PipInstallArgs {
|
|||
|
||||
/// Include optional dependencies from the specified extra name; may be provided more than once.
|
||||
///
|
||||
/// Only applies to `pyproject.toml`, `setup.py`, and `setup.cfg` sources.
|
||||
/// Only applies to `pylock.toml`, `pyproject.toml`, `setup.py`, and `setup.cfg` sources.
|
||||
#[arg(long, conflicts_with = "all_extras", value_parser = extra_name_with_clap_error)]
|
||||
pub extra: Option<Vec<ExtraName>>,
|
||||
|
||||
/// Include all optional dependencies.
|
||||
///
|
||||
/// Only applies to `pyproject.toml`, `setup.py`, and `setup.cfg` sources.
|
||||
/// Only applies to `pylock.toml`, `pyproject.toml`, `setup.py`, and `setup.cfg` sources.
|
||||
#[arg(long, conflicts_with = "extra", overrides_with = "no_all_extras")]
|
||||
pub all_extras: bool,
|
||||
|
||||
#[arg(long, overrides_with("all_extras"), hide = true)]
|
||||
pub no_all_extras: bool,
|
||||
|
||||
/// Install the specified dependency group from a `pylock.toml` or `pyproject.toml`.
|
||||
///
|
||||
/// If no path is provided, the `pylock.toml` or `pyproject.toml` in the working directory is
|
||||
/// used.
|
||||
///
|
||||
/// May be provided multiple times.
|
||||
#[arg(long, group = "sources")]
|
||||
pub group: Vec<PipGroupName>,
|
||||
|
||||
#[command(flatten)]
|
||||
pub installer: ResolverInstallerArgs,
|
||||
|
||||
|
|
@ -1825,14 +1858,6 @@ pub struct PipInstallArgs {
|
|||
#[arg(long, overrides_with("no_deps"), hide = true)]
|
||||
pub deps: bool,
|
||||
|
||||
/// Install the specified dependency group from a `pyproject.toml`.
|
||||
///
|
||||
/// If no path is provided, the `pyproject.toml` in the working directory is used.
|
||||
///
|
||||
/// May be provided multiple times.
|
||||
#[arg(long, group = "sources")]
|
||||
pub group: Vec<PipGroupName>,
|
||||
|
||||
/// Require a matching hash for each requirement.
|
||||
///
|
||||
/// By default, uv will verify any available hashes in the requirements file, but will not
|
||||
|
|
@ -2866,7 +2891,7 @@ pub struct InitArgs {
|
|||
/// Initialize a build-backend of choice for the project.
|
||||
///
|
||||
/// Implicitly sets `--package`.
|
||||
#[arg(long, value_enum, conflicts_with_all=["script", "no_package"])]
|
||||
#[arg(long, value_enum, conflicts_with_all=["script", "no_package"], env = EnvVars::UV_INIT_BUILD_BACKEND)]
|
||||
pub build_backend: Option<ProjectBuildBackend>,
|
||||
|
||||
/// Invalid option name for build backend.
|
||||
|
|
|
|||
|
|
@ -67,6 +67,7 @@ pub struct BaseClientBuilder<'a> {
|
|||
keyring: KeyringProviderType,
|
||||
allow_insecure_host: Vec<TrustedHost>,
|
||||
native_tls: bool,
|
||||
built_in_root_certs: bool,
|
||||
retries: u32,
|
||||
pub connectivity: Connectivity,
|
||||
markers: Option<&'a MarkerEnvironment>,
|
||||
|
|
@ -127,6 +128,7 @@ impl BaseClientBuilder<'_> {
|
|||
keyring: KeyringProviderType::default(),
|
||||
allow_insecure_host: vec![],
|
||||
native_tls: false,
|
||||
built_in_root_certs: false,
|
||||
connectivity: Connectivity::Online,
|
||||
retries: DEFAULT_RETRIES,
|
||||
markers: None,
|
||||
|
|
@ -192,6 +194,12 @@ impl<'a> BaseClientBuilder<'a> {
|
|||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn built_in_root_certs(mut self, built_in_root_certs: bool) -> Self {
|
||||
self.built_in_root_certs = built_in_root_certs;
|
||||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn markers(mut self, markers: &'a MarkerEnvironment) -> Self {
|
||||
self.markers = Some(markers);
|
||||
|
|
@ -388,7 +396,7 @@ impl<'a> BaseClientBuilder<'a> {
|
|||
.user_agent(user_agent)
|
||||
.pool_max_idle_per_host(20)
|
||||
.read_timeout(timeout)
|
||||
.tls_built_in_root_certs(false)
|
||||
.tls_built_in_root_certs(self.built_in_root_certs)
|
||||
.redirect(redirect_policy.reqwest_policy());
|
||||
|
||||
// If necessary, accept invalid certificates.
|
||||
|
|
@ -920,18 +928,34 @@ pub fn is_extended_transient_error(err: &dyn Error) -> bool {
|
|||
}
|
||||
|
||||
// IO Errors may be nested through custom IO errors.
|
||||
let mut has_io_error = false;
|
||||
for io_err in find_sources::<io::Error>(&err) {
|
||||
if io_err.kind() == io::ErrorKind::ConnectionReset
|
||||
|| io_err.kind() == io::ErrorKind::UnexpectedEof
|
||||
|| io_err.kind() == io::ErrorKind::BrokenPipe
|
||||
{
|
||||
trace!("Retrying error: `ConnectionReset` or `UnexpectedEof`");
|
||||
has_io_error = true;
|
||||
let retryable_io_err_kinds = [
|
||||
// https://github.com/astral-sh/uv/issues/12054
|
||||
io::ErrorKind::BrokenPipe,
|
||||
// From reqwest-middleware
|
||||
io::ErrorKind::ConnectionAborted,
|
||||
// https://github.com/astral-sh/uv/issues/3514
|
||||
io::ErrorKind::ConnectionReset,
|
||||
// https://github.com/astral-sh/uv/issues/14699
|
||||
io::ErrorKind::InvalidData,
|
||||
// https://github.com/astral-sh/uv/issues/9246
|
||||
io::ErrorKind::UnexpectedEof,
|
||||
];
|
||||
if retryable_io_err_kinds.contains(&io_err.kind()) {
|
||||
trace!("Retrying error: `{}`", io_err.kind());
|
||||
return true;
|
||||
}
|
||||
trace!("Cannot retry IO error: not one of `ConnectionReset` or `UnexpectedEof`");
|
||||
trace!(
|
||||
"Cannot retry IO error `{}`, not a retryable IO error kind",
|
||||
io_err.kind()
|
||||
);
|
||||
}
|
||||
|
||||
trace!("Cannot retry error: not an IO error");
|
||||
if !has_io_error {
|
||||
trace!("Cannot retry error: not an extended IO error");
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -304,7 +304,7 @@ impl CachedClient {
|
|||
.await?
|
||||
} else {
|
||||
debug!("No cache entry for: {}", req.url());
|
||||
let (response, cache_policy) = self.fresh_request(req).await?;
|
||||
let (response, cache_policy) = self.fresh_request(req, cache_control).await?;
|
||||
CachedResponse::ModifiedOrNew {
|
||||
response,
|
||||
cache_policy,
|
||||
|
|
@ -318,8 +318,13 @@ impl CachedClient {
|
|||
"Broken fresh cache entry (for payload) at {}, removing: {err}",
|
||||
cache_entry.path().display()
|
||||
);
|
||||
self.resend_and_heal_cache(fresh_req, cache_entry, response_callback)
|
||||
.await
|
||||
self.resend_and_heal_cache(
|
||||
fresh_req,
|
||||
cache_entry,
|
||||
cache_control,
|
||||
response_callback,
|
||||
)
|
||||
.await
|
||||
}
|
||||
},
|
||||
CachedResponse::NotModified { cached, new_policy } => {
|
||||
|
|
@ -339,8 +344,13 @@ impl CachedClient {
|
|||
(for payload) at {}, removing: {err}",
|
||||
cache_entry.path().display()
|
||||
);
|
||||
self.resend_and_heal_cache(fresh_req, cache_entry, response_callback)
|
||||
.await
|
||||
self.resend_and_heal_cache(
|
||||
fresh_req,
|
||||
cache_entry,
|
||||
cache_control,
|
||||
response_callback,
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -355,8 +365,13 @@ impl CachedClient {
|
|||
// ETag didn't match). We need to make a fresh request.
|
||||
if response.status() == http::StatusCode::NOT_MODIFIED {
|
||||
warn!("Server returned unusable 304 for: {}", fresh_req.url());
|
||||
self.resend_and_heal_cache(fresh_req, cache_entry, response_callback)
|
||||
.await
|
||||
self.resend_and_heal_cache(
|
||||
fresh_req,
|
||||
cache_entry,
|
||||
cache_control,
|
||||
response_callback,
|
||||
)
|
||||
.await
|
||||
} else {
|
||||
self.run_response_callback(
|
||||
cache_entry,
|
||||
|
|
@ -379,9 +394,10 @@ impl CachedClient {
|
|||
&self,
|
||||
req: Request,
|
||||
cache_entry: &CacheEntry,
|
||||
cache_control: CacheControl<'_>,
|
||||
response_callback: Callback,
|
||||
) -> Result<Payload, CachedClientError<CallBackError>> {
|
||||
let (response, cache_policy) = self.fresh_request(req).await?;
|
||||
let (response, cache_policy) = self.fresh_request(req, cache_control).await?;
|
||||
|
||||
let payload = self
|
||||
.run_response_callback(cache_entry, cache_policy, response, async |resp| {
|
||||
|
|
@ -401,10 +417,11 @@ impl CachedClient {
|
|||
&self,
|
||||
req: Request,
|
||||
cache_entry: &CacheEntry,
|
||||
cache_control: CacheControl<'_>,
|
||||
response_callback: Callback,
|
||||
) -> Result<Payload::Target, CachedClientError<CallBackError>> {
|
||||
let _ = fs_err::tokio::remove_file(&cache_entry.path()).await;
|
||||
let (response, cache_policy) = self.fresh_request(req).await?;
|
||||
let (response, cache_policy) = self.fresh_request(req, cache_control).await?;
|
||||
self.run_response_callback(cache_entry, cache_policy, response, response_callback)
|
||||
.await
|
||||
}
|
||||
|
|
@ -476,20 +493,13 @@ impl CachedClient {
|
|||
) -> Result<CachedResponse, Error> {
|
||||
// Apply the cache control header, if necessary.
|
||||
match cache_control {
|
||||
CacheControl::None | CacheControl::AllowStale => {}
|
||||
CacheControl::None | CacheControl::AllowStale | CacheControl::Override(..) => {}
|
||||
CacheControl::MustRevalidate => {
|
||||
req.headers_mut().insert(
|
||||
http::header::CACHE_CONTROL,
|
||||
http::HeaderValue::from_static("no-cache"),
|
||||
);
|
||||
}
|
||||
CacheControl::Override(value) => {
|
||||
req.headers_mut().insert(
|
||||
http::header::CACHE_CONTROL,
|
||||
http::HeaderValue::from_str(value)
|
||||
.map_err(|_| ErrorKind::InvalidCacheControl(value.to_string()))?,
|
||||
);
|
||||
}
|
||||
}
|
||||
Ok(match cached.cache_policy.before_request(&mut req) {
|
||||
BeforeRequest::Fresh => {
|
||||
|
|
@ -499,8 +509,13 @@ impl CachedClient {
|
|||
BeforeRequest::Stale(new_cache_policy_builder) => match cache_control {
|
||||
CacheControl::None | CacheControl::MustRevalidate | CacheControl::Override(_) => {
|
||||
debug!("Found stale response for: {}", req.url());
|
||||
self.send_cached_handle_stale(req, cached, new_cache_policy_builder)
|
||||
.await?
|
||||
self.send_cached_handle_stale(
|
||||
req,
|
||||
cache_control,
|
||||
cached,
|
||||
new_cache_policy_builder,
|
||||
)
|
||||
.await?
|
||||
}
|
||||
CacheControl::AllowStale => {
|
||||
debug!("Found stale (but allowed) response for: {}", req.url());
|
||||
|
|
@ -513,7 +528,7 @@ impl CachedClient {
|
|||
"Cached request doesn't match current request for: {}",
|
||||
req.url()
|
||||
);
|
||||
let (response, cache_policy) = self.fresh_request(req).await?;
|
||||
let (response, cache_policy) = self.fresh_request(req, cache_control).await?;
|
||||
CachedResponse::ModifiedOrNew {
|
||||
response,
|
||||
cache_policy,
|
||||
|
|
@ -525,12 +540,13 @@ impl CachedClient {
|
|||
async fn send_cached_handle_stale(
|
||||
&self,
|
||||
req: Request,
|
||||
cache_control: CacheControl<'_>,
|
||||
cached: DataWithCachePolicy,
|
||||
new_cache_policy_builder: CachePolicyBuilder,
|
||||
) -> Result<CachedResponse, Error> {
|
||||
let url = DisplaySafeUrl::from(req.url().clone());
|
||||
debug!("Sending revalidation request for: {url}");
|
||||
let response = self
|
||||
let mut response = self
|
||||
.0
|
||||
.execute(req)
|
||||
.instrument(info_span!("revalidation_request", url = url.as_str()))
|
||||
|
|
@ -538,6 +554,16 @@ impl CachedClient {
|
|||
.map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?
|
||||
.error_for_status()
|
||||
.map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
|
||||
|
||||
// If the user set a custom `Cache-Control` header, override it.
|
||||
if let CacheControl::Override(header) = cache_control {
|
||||
response.headers_mut().insert(
|
||||
http::header::CACHE_CONTROL,
|
||||
http::HeaderValue::from_str(header)
|
||||
.expect("Cache-Control header must be valid UTF-8"),
|
||||
);
|
||||
}
|
||||
|
||||
match cached
|
||||
.cache_policy
|
||||
.after_response(new_cache_policy_builder, &response)
|
||||
|
|
@ -566,16 +592,26 @@ impl CachedClient {
|
|||
async fn fresh_request(
|
||||
&self,
|
||||
req: Request,
|
||||
cache_control: CacheControl<'_>,
|
||||
) -> Result<(Response, Option<Box<CachePolicy>>), Error> {
|
||||
let url = DisplaySafeUrl::from(req.url().clone());
|
||||
trace!("Sending fresh {} request for {}", req.method(), url);
|
||||
let cache_policy_builder = CachePolicyBuilder::new(&req);
|
||||
let response = self
|
||||
let mut response = self
|
||||
.0
|
||||
.execute(req)
|
||||
.await
|
||||
.map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?;
|
||||
|
||||
// If the user set a custom `Cache-Control` header, override it.
|
||||
if let CacheControl::Override(header) = cache_control {
|
||||
response.headers_mut().insert(
|
||||
http::header::CACHE_CONTROL,
|
||||
http::HeaderValue::from_str(header)
|
||||
.expect("Cache-Control header must be valid UTF-8"),
|
||||
);
|
||||
}
|
||||
|
||||
let retry_count = response
|
||||
.extensions()
|
||||
.get::<reqwest_retry::RetryCount>()
|
||||
|
|
@ -690,6 +726,7 @@ impl CachedClient {
|
|||
&self,
|
||||
req: Request,
|
||||
cache_entry: &CacheEntry,
|
||||
cache_control: CacheControl<'_>,
|
||||
response_callback: Callback,
|
||||
) -> Result<Payload, CachedClientError<CallBackError>> {
|
||||
let mut past_retries = 0;
|
||||
|
|
@ -698,7 +735,7 @@ impl CachedClient {
|
|||
loop {
|
||||
let fresh_req = req.try_clone().expect("HTTP request must be cloneable");
|
||||
let result = self
|
||||
.skip_cache(fresh_req, cache_entry, &response_callback)
|
||||
.skip_cache(fresh_req, cache_entry, cache_control, &response_callback)
|
||||
.await;
|
||||
|
||||
// Check if the middleware already performed retries
|
||||
|
|
|
|||
|
|
@ -126,6 +126,14 @@ impl<'a> RegistryClientBuilder<'a> {
|
|||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn built_in_root_certs(mut self, built_in_root_certs: bool) -> Self {
|
||||
self.base_client_builder = self
|
||||
.base_client_builder
|
||||
.built_in_root_certs(built_in_root_certs);
|
||||
self
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn cache(mut self, cache: Cache) -> Self {
|
||||
self.cache = cache;
|
||||
|
|
|
|||
|
|
@ -186,6 +186,18 @@ impl DependencyGroupsInner {
|
|||
self.include.names().chain(&self.exclude)
|
||||
}
|
||||
|
||||
/// Returns an iterator over all groups that are included in the specification,
|
||||
/// assuming `all_names` is an iterator over all groups.
|
||||
pub fn group_names<'a, Names>(
|
||||
&'a self,
|
||||
all_names: Names,
|
||||
) -> impl Iterator<Item = &'a GroupName> + 'a
|
||||
where
|
||||
Names: Iterator<Item = &'a GroupName> + 'a,
|
||||
{
|
||||
all_names.filter(move |name| self.contains(name))
|
||||
}
|
||||
|
||||
/// Iterate over all groups the user explicitly asked for on the CLI
|
||||
pub fn explicit_names(&self) -> impl Iterator<Item = &GroupName> {
|
||||
let DependencyGroupsHistory {
|
||||
|
|
|
|||
|
|
@ -155,7 +155,8 @@ impl ExtrasSpecificationInner {
|
|||
self.include.names().chain(&self.exclude)
|
||||
}
|
||||
|
||||
/// Returns `true` if the specification includes the given extra.
|
||||
/// Returns an iterator over all extras that are included in the specification,
|
||||
/// assuming `all_names` is an iterator over all extras.
|
||||
pub fn extra_names<'a, Names>(
|
||||
&'a self,
|
||||
all_names: Names,
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ use crate::ROOT_DIR;
|
|||
use crate::generate_all::Mode;
|
||||
|
||||
/// Contains current supported targets
|
||||
const TARGETS_YML_URL: &str = "https://raw.githubusercontent.com/astral-sh/python-build-standalone/refs/tags/20250712/cpython-unix/targets.yml";
|
||||
const TARGETS_YML_URL: &str = "https://raw.githubusercontent.com/astral-sh/python-build-standalone/refs/tags/20250723/cpython-unix/targets.yml";
|
||||
|
||||
#[derive(clap::Args)]
|
||||
pub(crate) struct Args {
|
||||
|
|
@ -130,7 +130,7 @@ async fn generate() -> Result<String> {
|
|||
output.push_str("//! DO NOT EDIT\n");
|
||||
output.push_str("//!\n");
|
||||
output.push_str("//! Generated with `cargo run dev generate-sysconfig-metadata`\n");
|
||||
output.push_str("//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250712/cpython-unix/targets.yml>\n");
|
||||
output.push_str("//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250723/cpython-unix/targets.yml>\n");
|
||||
output.push_str("//!\n");
|
||||
|
||||
// Disable clippy/fmt
|
||||
|
|
|
|||
|
|
@ -30,21 +30,20 @@ impl DependencyMetadata {
|
|||
|
||||
if let Some(version) = version {
|
||||
// If a specific version was requested, search for an exact match, then a global match.
|
||||
let metadata = versions
|
||||
let metadata = if let Some(metadata) = versions
|
||||
.iter()
|
||||
.find(|v| v.version.as_ref() == Some(version))
|
||||
.inspect(|_| {
|
||||
debug!("Found dependency metadata entry for `{package}=={version}`");
|
||||
})
|
||||
.or_else(|| versions.iter().find(|v| v.version.is_none()))
|
||||
.inspect(|_| {
|
||||
debug!("Found global metadata entry for `{package}`");
|
||||
});
|
||||
let Some(metadata) = metadata else {
|
||||
.find(|entry| entry.version.as_ref() == Some(version))
|
||||
{
|
||||
debug!("Found dependency metadata entry for `{package}=={version}`");
|
||||
metadata
|
||||
} else if let Some(metadata) = versions.iter().find(|entry| entry.version.is_none()) {
|
||||
debug!("Found global metadata entry for `{package}`");
|
||||
metadata
|
||||
} else {
|
||||
warn!("No dependency metadata entry found for `{package}=={version}`");
|
||||
return None;
|
||||
};
|
||||
debug!("Found dependency metadata entry for `{package}=={version}`");
|
||||
|
||||
Some(ResolutionMetadata {
|
||||
name: metadata.name.clone(),
|
||||
version: version.clone(),
|
||||
|
|
@ -65,6 +64,7 @@ impl DependencyMetadata {
|
|||
return None;
|
||||
};
|
||||
debug!("Found dependency metadata entry for `{package}` (assuming: `{version}`)");
|
||||
|
||||
Some(ResolutionMetadata {
|
||||
name: metadata.name.clone(),
|
||||
version,
|
||||
|
|
@ -86,7 +86,7 @@ impl DependencyMetadata {
|
|||
/// <https://packaging.python.org/specifications/core-metadata/>.
|
||||
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
|
||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
|
||||
pub struct StaticMetadata {
|
||||
// Mandatory fields
|
||||
pub name: PackageName,
|
||||
|
|
|
|||
|
|
@ -441,6 +441,26 @@ impl<'a> IndexLocations {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the Simple API cache control header for an [`IndexUrl`], if configured.
|
||||
pub fn simple_api_cache_control_for(&self, url: &IndexUrl) -> Option<&str> {
|
||||
for index in &self.indexes {
|
||||
if index.url() == url {
|
||||
return index.cache_control.as_ref()?.api.as_deref();
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
/// Return the artifact cache control header for an [`IndexUrl`], if configured.
|
||||
pub fn artifact_cache_control_for(&self, url: &IndexUrl) -> Option<&str> {
|
||||
for index in &self.indexes {
|
||||
if index.url() == url {
|
||||
return index.cache_control.as_ref()?.files.as_deref();
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&IndexLocations> for uv_auth::Indexes {
|
||||
|
|
|
|||
|
|
@ -20,7 +20,7 @@ use uv_client::{
|
|||
};
|
||||
use uv_distribution_filename::WheelFilename;
|
||||
use uv_distribution_types::{
|
||||
BuildableSource, BuiltDist, Dist, HashPolicy, Hashed, InstalledDist, Name, SourceDist,
|
||||
BuildableSource, BuiltDist, Dist, HashPolicy, Hashed, IndexUrl, InstalledDist, Name, SourceDist,
|
||||
};
|
||||
use uv_extract::hash::Hasher;
|
||||
use uv_fs::write_atomic;
|
||||
|
|
@ -201,6 +201,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
|
|||
match self
|
||||
.stream_wheel(
|
||||
url.clone(),
|
||||
dist.index(),
|
||||
&wheel.filename,
|
||||
wheel.file.size,
|
||||
&wheel_entry,
|
||||
|
|
@ -236,6 +237,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
|
|||
let archive = self
|
||||
.download_wheel(
|
||||
url,
|
||||
dist.index(),
|
||||
&wheel.filename,
|
||||
wheel.file.size,
|
||||
&wheel_entry,
|
||||
|
|
@ -272,6 +274,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
|
|||
match self
|
||||
.stream_wheel(
|
||||
wheel.url.raw().clone(),
|
||||
None,
|
||||
&wheel.filename,
|
||||
None,
|
||||
&wheel_entry,
|
||||
|
|
@ -301,6 +304,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
|
|||
let archive = self
|
||||
.download_wheel(
|
||||
wheel.url.raw().clone(),
|
||||
None,
|
||||
&wheel.filename,
|
||||
None,
|
||||
&wheel_entry,
|
||||
|
|
@ -534,6 +538,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
|
|||
async fn stream_wheel(
|
||||
&self,
|
||||
url: DisplaySafeUrl,
|
||||
index: Option<&IndexUrl>,
|
||||
filename: &WheelFilename,
|
||||
size: Option<u64>,
|
||||
wheel_entry: &CacheEntry,
|
||||
|
|
@ -616,13 +621,24 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
|
|||
// Fetch the archive from the cache, or download it if necessary.
|
||||
let req = self.request(url.clone())?;
|
||||
|
||||
// Determine the cache control policy for the URL.
|
||||
let cache_control = match self.client.unmanaged.connectivity() {
|
||||
Connectivity::Online => CacheControl::from(
|
||||
self.build_context
|
||||
.cache()
|
||||
.freshness(&http_entry, Some(&filename.name), None)
|
||||
.map_err(Error::CacheRead)?,
|
||||
),
|
||||
Connectivity::Online => {
|
||||
if let Some(header) = index.and_then(|index| {
|
||||
self.build_context
|
||||
.locations()
|
||||
.artifact_cache_control_for(index)
|
||||
}) {
|
||||
CacheControl::Override(header)
|
||||
} else {
|
||||
CacheControl::from(
|
||||
self.build_context
|
||||
.cache()
|
||||
.freshness(&http_entry, Some(&filename.name), None)
|
||||
.map_err(Error::CacheRead)?,
|
||||
)
|
||||
}
|
||||
}
|
||||
Connectivity::Offline => CacheControl::AllowStale,
|
||||
};
|
||||
|
||||
|
|
@ -654,7 +670,12 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
|
|||
.managed(async |client| {
|
||||
client
|
||||
.cached_client()
|
||||
.skip_cache_with_retry(self.request(url)?, &http_entry, download)
|
||||
.skip_cache_with_retry(
|
||||
self.request(url)?,
|
||||
&http_entry,
|
||||
cache_control,
|
||||
download,
|
||||
)
|
||||
.await
|
||||
.map_err(|err| match err {
|
||||
CachedClientError::Callback { err, .. } => err,
|
||||
|
|
@ -671,6 +692,7 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
|
|||
async fn download_wheel(
|
||||
&self,
|
||||
url: DisplaySafeUrl,
|
||||
index: Option<&IndexUrl>,
|
||||
filename: &WheelFilename,
|
||||
size: Option<u64>,
|
||||
wheel_entry: &CacheEntry,
|
||||
|
|
@ -783,13 +805,24 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
|
|||
// Fetch the archive from the cache, or download it if necessary.
|
||||
let req = self.request(url.clone())?;
|
||||
|
||||
// Determine the cache control policy for the URL.
|
||||
let cache_control = match self.client.unmanaged.connectivity() {
|
||||
Connectivity::Online => CacheControl::from(
|
||||
self.build_context
|
||||
.cache()
|
||||
.freshness(&http_entry, Some(&filename.name), None)
|
||||
.map_err(Error::CacheRead)?,
|
||||
),
|
||||
Connectivity::Online => {
|
||||
if let Some(header) = index.and_then(|index| {
|
||||
self.build_context
|
||||
.locations()
|
||||
.artifact_cache_control_for(index)
|
||||
}) {
|
||||
CacheControl::Override(header)
|
||||
} else {
|
||||
CacheControl::from(
|
||||
self.build_context
|
||||
.cache()
|
||||
.freshness(&http_entry, Some(&filename.name), None)
|
||||
.map_err(Error::CacheRead)?,
|
||||
)
|
||||
}
|
||||
}
|
||||
Connectivity::Offline => CacheControl::AllowStale,
|
||||
};
|
||||
|
||||
|
|
@ -821,7 +854,12 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> {
|
|||
.managed(async |client| {
|
||||
client
|
||||
.cached_client()
|
||||
.skip_cache_with_retry(self.request(url)?, &http_entry, download)
|
||||
.skip_cache_with_retry(
|
||||
self.request(url)?,
|
||||
&http_entry,
|
||||
cache_control,
|
||||
download,
|
||||
)
|
||||
.await
|
||||
.map_err(|err| match err {
|
||||
CachedClientError::Callback { err, .. } => err,
|
||||
|
|
|
|||
|
|
@ -618,14 +618,13 @@ mod test {
|
|||
tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
|
||||
"#};
|
||||
|
||||
assert_snapshot!(format_err(input).await, @r###"
|
||||
error: TOML parse error at line 8, column 16
|
||||
assert_snapshot!(format_err(input).await, @r#"
|
||||
error: TOML parse error at line 8, column 28
|
||||
|
|
||||
8 | tqdm = { url = invalid url to tqdm-4.66.0-py3-none-any.whl" }
|
||||
| ^
|
||||
invalid string
|
||||
expected `"`, `'`
|
||||
"###);
|
||||
| ^
|
||||
missing comma between key-value pairs, expected `,`
|
||||
"#);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
|
|
|
|||
|
|
@ -32,7 +32,7 @@ use uv_client::{
|
|||
use uv_configuration::{BuildKind, BuildOutput, ConfigSettings, SourceStrategy};
|
||||
use uv_distribution_filename::{SourceDistExtension, WheelFilename};
|
||||
use uv_distribution_types::{
|
||||
BuildableSource, DirectorySourceUrl, GitSourceUrl, HashPolicy, Hashed, PathSourceUrl,
|
||||
BuildableSource, DirectorySourceUrl, GitSourceUrl, HashPolicy, Hashed, IndexUrl, PathSourceUrl,
|
||||
SourceDist, SourceUrl,
|
||||
};
|
||||
use uv_extract::hash::Hasher;
|
||||
|
|
@ -148,6 +148,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
self.url(
|
||||
source,
|
||||
&url,
|
||||
Some(&dist.index),
|
||||
&cache_shard,
|
||||
None,
|
||||
dist.ext,
|
||||
|
|
@ -168,6 +169,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
self.url(
|
||||
source,
|
||||
&dist.url,
|
||||
None,
|
||||
&cache_shard,
|
||||
dist.subdirectory.as_deref(),
|
||||
dist.ext,
|
||||
|
|
@ -213,6 +215,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
self.url(
|
||||
source,
|
||||
resource.url,
|
||||
None,
|
||||
&cache_shard,
|
||||
resource.subdirectory,
|
||||
resource.ext,
|
||||
|
|
@ -288,9 +291,18 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
.await;
|
||||
}
|
||||
|
||||
self.url_metadata(source, &url, &cache_shard, None, dist.ext, hashes, client)
|
||||
.boxed_local()
|
||||
.await?
|
||||
self.url_metadata(
|
||||
source,
|
||||
&url,
|
||||
Some(&dist.index),
|
||||
&cache_shard,
|
||||
None,
|
||||
dist.ext,
|
||||
hashes,
|
||||
client,
|
||||
)
|
||||
.boxed_local()
|
||||
.await?
|
||||
}
|
||||
BuildableSource::Dist(SourceDist::DirectUrl(dist)) => {
|
||||
// For direct URLs, cache directly under the hash of the URL itself.
|
||||
|
|
@ -302,6 +314,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
self.url_metadata(
|
||||
source,
|
||||
&dist.url,
|
||||
None,
|
||||
&cache_shard,
|
||||
dist.subdirectory.as_deref(),
|
||||
dist.ext,
|
||||
|
|
@ -340,6 +353,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
self.url_metadata(
|
||||
source,
|
||||
resource.url,
|
||||
None,
|
||||
&cache_shard,
|
||||
resource.subdirectory,
|
||||
resource.ext,
|
||||
|
|
@ -409,6 +423,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
&self,
|
||||
source: &BuildableSource<'data>,
|
||||
url: &'data DisplaySafeUrl,
|
||||
index: Option<&'data IndexUrl>,
|
||||
cache_shard: &CacheShard,
|
||||
subdirectory: Option<&'data Path>,
|
||||
ext: SourceDistExtension,
|
||||
|
|
@ -420,7 +435,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
|
||||
// Fetch the revision for the source distribution.
|
||||
let revision = self
|
||||
.url_revision(source, ext, url, cache_shard, hashes, client)
|
||||
.url_revision(source, ext, url, index, cache_shard, hashes, client)
|
||||
.await?;
|
||||
|
||||
// Before running the build, check that the hashes match.
|
||||
|
|
@ -463,6 +478,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
source,
|
||||
ext,
|
||||
url,
|
||||
index,
|
||||
&source_dist_entry,
|
||||
revision,
|
||||
hashes,
|
||||
|
|
@ -526,6 +542,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
&self,
|
||||
source: &BuildableSource<'data>,
|
||||
url: &'data Url,
|
||||
index: Option<&'data IndexUrl>,
|
||||
cache_shard: &CacheShard,
|
||||
subdirectory: Option<&'data Path>,
|
||||
ext: SourceDistExtension,
|
||||
|
|
@ -536,7 +553,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
|
||||
// Fetch the revision for the source distribution.
|
||||
let revision = self
|
||||
.url_revision(source, ext, url, cache_shard, hashes, client)
|
||||
.url_revision(source, ext, url, index, cache_shard, hashes, client)
|
||||
.await?;
|
||||
|
||||
// Before running the build, check that the hashes match.
|
||||
|
|
@ -593,6 +610,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
source,
|
||||
ext,
|
||||
url,
|
||||
index,
|
||||
&source_dist_entry,
|
||||
revision,
|
||||
hashes,
|
||||
|
|
@ -705,18 +723,31 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
source: &BuildableSource<'_>,
|
||||
ext: SourceDistExtension,
|
||||
url: &Url,
|
||||
index: Option<&IndexUrl>,
|
||||
cache_shard: &CacheShard,
|
||||
hashes: HashPolicy<'_>,
|
||||
client: &ManagedClient<'_>,
|
||||
) -> Result<Revision, Error> {
|
||||
let cache_entry = cache_shard.entry(HTTP_REVISION);
|
||||
|
||||
// Determine the cache control policy for the request.
|
||||
let cache_control = match client.unmanaged.connectivity() {
|
||||
Connectivity::Online => CacheControl::from(
|
||||
self.build_context
|
||||
.cache()
|
||||
.freshness(&cache_entry, source.name(), source.source_tree())
|
||||
.map_err(Error::CacheRead)?,
|
||||
),
|
||||
Connectivity::Online => {
|
||||
if let Some(header) = index.and_then(|index| {
|
||||
self.build_context
|
||||
.locations()
|
||||
.artifact_cache_control_for(index)
|
||||
}) {
|
||||
CacheControl::Override(header)
|
||||
} else {
|
||||
CacheControl::from(
|
||||
self.build_context
|
||||
.cache()
|
||||
.freshness(&cache_entry, source.name(), source.source_tree())
|
||||
.map_err(Error::CacheRead)?,
|
||||
)
|
||||
}
|
||||
}
|
||||
Connectivity::Offline => CacheControl::AllowStale,
|
||||
};
|
||||
|
||||
|
|
@ -766,6 +797,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
.skip_cache_with_retry(
|
||||
Self::request(DisplaySafeUrl::from(url.clone()), client)?,
|
||||
&cache_entry,
|
||||
cache_control,
|
||||
download,
|
||||
)
|
||||
.await
|
||||
|
|
@ -2078,6 +2110,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
source: &BuildableSource<'_>,
|
||||
ext: SourceDistExtension,
|
||||
url: &Url,
|
||||
index: Option<&IndexUrl>,
|
||||
entry: &CacheEntry,
|
||||
revision: Revision,
|
||||
hashes: HashPolicy<'_>,
|
||||
|
|
@ -2085,6 +2118,28 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
) -> Result<Revision, Error> {
|
||||
warn!("Re-downloading missing source distribution: {source}");
|
||||
let cache_entry = entry.shard().entry(HTTP_REVISION);
|
||||
|
||||
// Determine the cache control policy for the request.
|
||||
let cache_control = match client.unmanaged.connectivity() {
|
||||
Connectivity::Online => {
|
||||
if let Some(header) = index.and_then(|index| {
|
||||
self.build_context
|
||||
.locations()
|
||||
.artifact_cache_control_for(index)
|
||||
}) {
|
||||
CacheControl::Override(header)
|
||||
} else {
|
||||
CacheControl::from(
|
||||
self.build_context
|
||||
.cache()
|
||||
.freshness(&cache_entry, source.name(), source.source_tree())
|
||||
.map_err(Error::CacheRead)?,
|
||||
)
|
||||
}
|
||||
}
|
||||
Connectivity::Offline => CacheControl::AllowStale,
|
||||
};
|
||||
|
||||
let download = |response| {
|
||||
async {
|
||||
// Take the union of the requested and existing hash algorithms.
|
||||
|
|
@ -2118,6 +2173,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
|
|||
.skip_cache_with_retry(
|
||||
Self::request(DisplaySafeUrl::from(url.clone()), client)?,
|
||||
&cache_entry,
|
||||
cache_control,
|
||||
download,
|
||||
)
|
||||
.await
|
||||
|
|
|
|||
|
|
@ -236,6 +236,7 @@ pub async fn untar_gz<R: tokio::io::AsyncRead + Unpin>(
|
|||
)
|
||||
.set_preserve_mtime(false)
|
||||
.set_preserve_permissions(false)
|
||||
.set_allow_external_symlinks(false)
|
||||
.build();
|
||||
Ok(untar_in(archive, target.as_ref()).await?)
|
||||
}
|
||||
|
|
@ -255,6 +256,7 @@ pub async fn untar_bz2<R: tokio::io::AsyncRead + Unpin>(
|
|||
)
|
||||
.set_preserve_mtime(false)
|
||||
.set_preserve_permissions(false)
|
||||
.set_allow_external_symlinks(false)
|
||||
.build();
|
||||
Ok(untar_in(archive, target.as_ref()).await?)
|
||||
}
|
||||
|
|
@ -274,6 +276,7 @@ pub async fn untar_zst<R: tokio::io::AsyncRead + Unpin>(
|
|||
)
|
||||
.set_preserve_mtime(false)
|
||||
.set_preserve_permissions(false)
|
||||
.set_allow_external_symlinks(false)
|
||||
.build();
|
||||
Ok(untar_in(archive, target.as_ref()).await?)
|
||||
}
|
||||
|
|
@ -293,6 +296,7 @@ pub async fn untar_xz<R: tokio::io::AsyncRead + Unpin>(
|
|||
)
|
||||
.set_preserve_mtime(false)
|
||||
.set_preserve_permissions(false)
|
||||
.set_allow_external_symlinks(false)
|
||||
.build();
|
||||
untar_in(archive, target.as_ref()).await?;
|
||||
Ok(())
|
||||
|
|
@ -311,6 +315,7 @@ pub async fn untar<R: tokio::io::AsyncRead + Unpin>(
|
|||
tokio_tar::ArchiveBuilder::new(&mut reader as &mut (dyn tokio::io::AsyncRead + Unpin))
|
||||
.set_preserve_mtime(false)
|
||||
.set_preserve_permissions(false)
|
||||
.set_allow_external_symlinks(false)
|
||||
.build();
|
||||
untar_in(archive, target.as_ref()).await?;
|
||||
Ok(())
|
||||
|
|
|
|||
|
|
@ -84,6 +84,8 @@ pub async fn read_to_string_transcode(path: impl AsRef<Path>) -> std::io::Result
|
|||
/// junction at the same path.
|
||||
///
|
||||
/// Note that because junctions are used, the source must be a directory.
|
||||
///
|
||||
/// Changes to this function should be reflected in [`create_symlink`].
|
||||
#[cfg(windows)]
|
||||
pub fn replace_symlink(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> std::io::Result<()> {
|
||||
// If the source is a file, we can't create a junction
|
||||
|
|
@ -138,6 +140,38 @@ pub fn replace_symlink(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> std::io:
|
|||
}
|
||||
}
|
||||
|
||||
/// Create a symlink at `dst` pointing to `src`.
|
||||
///
|
||||
/// On Windows, this uses the `junction` crate to create a junction point.
|
||||
///
|
||||
/// Note that because junctions are used, the source must be a directory.
|
||||
///
|
||||
/// Changes to this function should be reflected in [`replace_symlink`].
|
||||
#[cfg(windows)]
|
||||
pub fn create_symlink(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> std::io::Result<()> {
|
||||
// If the source is a file, we can't create a junction
|
||||
if src.as_ref().is_file() {
|
||||
return Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
format!(
|
||||
"Cannot create a junction for {}: is not a directory",
|
||||
src.as_ref().display()
|
||||
),
|
||||
));
|
||||
}
|
||||
|
||||
junction::create(
|
||||
dunce::simplified(src.as_ref()),
|
||||
dunce::simplified(dst.as_ref()),
|
||||
)
|
||||
}
|
||||
|
||||
/// Create a symlink at `dst` pointing to `src`.
|
||||
#[cfg(unix)]
|
||||
pub fn create_symlink(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> std::io::Result<()> {
|
||||
fs_err::os::unix::fs::symlink(src.as_ref(), dst.as_ref())
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
pub fn remove_symlink(path: impl AsRef<Path>) -> std::io::Result<()> {
|
||||
fs_err::remove_file(path.as_ref())
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
use std::fmt::{Display, Formatter};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
use std::sync::LazyLock;
|
||||
|
||||
|
|
@ -98,17 +98,6 @@ pub struct PipGroupName {
|
|||
pub name: GroupName,
|
||||
}
|
||||
|
||||
impl PipGroupName {
|
||||
/// Gets the path to use, applying the default if it's missing
|
||||
pub fn path(&self) -> &Path {
|
||||
if let Some(path) = &self.path {
|
||||
path
|
||||
} else {
|
||||
Path::new("pyproject.toml")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for PipGroupName {
|
||||
type Err = InvalidPipGroupError;
|
||||
|
||||
|
|
|
|||
|
|
@ -69,12 +69,20 @@ impl Display for OptionEntry {
|
|||
///
|
||||
/// It extracts the options by calling the [`OptionsMetadata::record`] of a type implementing
|
||||
/// [`OptionsMetadata`].
|
||||
#[derive(Copy, Clone, Eq, PartialEq)]
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct OptionSet {
|
||||
record: fn(&mut dyn Visit),
|
||||
doc: fn() -> Option<&'static str>,
|
||||
}
|
||||
|
||||
impl PartialEq for OptionSet {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
std::ptr::fn_addr_eq(self.record, other.record) && std::ptr::fn_addr_eq(self.doc, other.doc)
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for OptionSet {}
|
||||
|
||||
impl OptionSet {
|
||||
pub fn of<T>() -> Self
|
||||
where
|
||||
|
|
|
|||
|
|
@ -33,8 +33,8 @@ pub use marker::{
|
|||
CanonicalMarkerValueExtra, CanonicalMarkerValueString, CanonicalMarkerValueVersion,
|
||||
ContainsMarkerTree, ExtraMarkerTree, ExtraOperator, InMarkerTree, MarkerEnvironment,
|
||||
MarkerEnvironmentBuilder, MarkerExpression, MarkerOperator, MarkerTree, MarkerTreeContents,
|
||||
MarkerTreeKind, MarkerValue, MarkerValueExtra, MarkerValueString, MarkerValueVersion,
|
||||
MarkerWarningKind, StringMarkerTree, StringVersion, VersionMarkerTree,
|
||||
MarkerTreeKind, MarkerValue, MarkerValueExtra, MarkerValueList, MarkerValueString,
|
||||
MarkerValueVersion, MarkerWarningKind, StringMarkerTree, StringVersion, VersionMarkerTree,
|
||||
};
|
||||
pub use origin::RequirementOrigin;
|
||||
#[cfg(feature = "non-pep508-extensions")]
|
||||
|
|
|
|||
|
|
@ -59,8 +59,10 @@ use uv_pep440::{Operator, Version, VersionSpecifier, release_specifier_to_range}
|
|||
|
||||
use crate::marker::MarkerValueExtra;
|
||||
use crate::marker::lowering::{
|
||||
CanonicalMarkerValueExtra, CanonicalMarkerValueString, CanonicalMarkerValueVersion,
|
||||
CanonicalMarkerListPair, CanonicalMarkerValueExtra, CanonicalMarkerValueString,
|
||||
CanonicalMarkerValueVersion,
|
||||
};
|
||||
use crate::marker::tree::ContainerOperator;
|
||||
use crate::{
|
||||
ExtraOperator, MarkerExpression, MarkerOperator, MarkerValueString, MarkerValueVersion,
|
||||
};
|
||||
|
|
@ -186,19 +188,19 @@ impl InternerGuard<'_> {
|
|||
MarkerExpression::VersionIn {
|
||||
key,
|
||||
versions,
|
||||
negated,
|
||||
operator,
|
||||
} => match key {
|
||||
MarkerValueVersion::ImplementationVersion => (
|
||||
Variable::Version(CanonicalMarkerValueVersion::ImplementationVersion),
|
||||
Edges::from_versions(&versions, negated),
|
||||
Edges::from_versions(&versions, operator),
|
||||
),
|
||||
MarkerValueVersion::PythonFullVersion => (
|
||||
Variable::Version(CanonicalMarkerValueVersion::PythonFullVersion),
|
||||
Edges::from_versions(&versions, negated),
|
||||
Edges::from_versions(&versions, operator),
|
||||
),
|
||||
// Normalize `python_version` markers to `python_full_version` nodes.
|
||||
MarkerValueVersion::PythonVersion => {
|
||||
match Edges::from_python_versions(versions, negated) {
|
||||
match Edges::from_python_versions(versions, operator) {
|
||||
Ok(edges) => (
|
||||
Variable::Version(CanonicalMarkerValueVersion::PythonFullVersion),
|
||||
edges,
|
||||
|
|
@ -313,6 +315,10 @@ impl InternerGuard<'_> {
|
|||
};
|
||||
(Variable::String(key), Edges::from_string(operator, value))
|
||||
}
|
||||
MarkerExpression::List { pair, operator } => (
|
||||
Variable::List(pair),
|
||||
Edges::from_bool(operator == ContainerOperator::In),
|
||||
),
|
||||
// A variable representing the existence or absence of a particular extra.
|
||||
MarkerExpression::Extra {
|
||||
name: MarkerValueExtra::Extra(extra),
|
||||
|
|
@ -328,7 +334,7 @@ impl InternerGuard<'_> {
|
|||
Variable::Extra(CanonicalMarkerValueExtra::Extra(extra)),
|
||||
Edges::from_bool(false),
|
||||
),
|
||||
// Invalid extras are always `false`.
|
||||
// Invalid `extra` names are always `false`.
|
||||
MarkerExpression::Extra {
|
||||
name: MarkerValueExtra::Arbitrary(_),
|
||||
..
|
||||
|
|
@ -1046,6 +1052,12 @@ pub(crate) enum Variable {
|
|||
/// We keep extras at the leaves of the tree, so when simplifying extras we can
|
||||
/// trivially remove the leaves without having to reconstruct the entire tree.
|
||||
Extra(CanonicalMarkerValueExtra),
|
||||
/// A variable representing whether a `<value> in <key>` or `<value> not in <key>`
|
||||
/// expression, where the key is a list.
|
||||
///
|
||||
/// We keep extras and groups at the leaves of the tree, so when simplifying extras we can
|
||||
/// trivially remove the leaves without having to reconstruct the entire tree.
|
||||
List(CanonicalMarkerListPair),
|
||||
}
|
||||
|
||||
impl Variable {
|
||||
|
|
@ -1223,7 +1235,10 @@ impl Edges {
|
|||
/// Returns an [`Edges`] where values in the given range are `true`.
|
||||
///
|
||||
/// Only for use when the `key` is a `PythonVersion`. Normalizes to `PythonFullVersion`.
|
||||
fn from_python_versions(versions: Vec<Version>, negated: bool) -> Result<Edges, NodeId> {
|
||||
fn from_python_versions(
|
||||
versions: Vec<Version>,
|
||||
operator: ContainerOperator,
|
||||
) -> Result<Edges, NodeId> {
|
||||
let mut range: Ranges<Version> = versions
|
||||
.into_iter()
|
||||
.map(|version| {
|
||||
|
|
@ -1234,7 +1249,7 @@ impl Edges {
|
|||
.flatten_ok()
|
||||
.collect::<Result<Ranges<_>, NodeId>>()?;
|
||||
|
||||
if negated {
|
||||
if operator == ContainerOperator::NotIn {
|
||||
range = range.complement();
|
||||
}
|
||||
|
||||
|
|
@ -1244,7 +1259,7 @@ impl Edges {
|
|||
}
|
||||
|
||||
/// Returns an [`Edges`] where values in the given range are `true`.
|
||||
fn from_versions(versions: &[Version], negated: bool) -> Edges {
|
||||
fn from_versions(versions: &[Version], operator: ContainerOperator) -> Edges {
|
||||
let mut range: Ranges<Version> = versions
|
||||
.iter()
|
||||
.map(|version| {
|
||||
|
|
@ -1255,7 +1270,7 @@ impl Edges {
|
|||
})
|
||||
.collect();
|
||||
|
||||
if negated {
|
||||
if operator == ContainerOperator::NotIn {
|
||||
range = range.complement();
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,7 +1,8 @@
|
|||
use std::fmt::{Display, Formatter};
|
||||
|
||||
use uv_normalize::ExtraName;
|
||||
use uv_normalize::{ExtraName, GroupName};
|
||||
|
||||
use crate::marker::tree::MarkerValueList;
|
||||
use crate::{MarkerValueExtra, MarkerValueString, MarkerValueVersion};
|
||||
|
||||
/// Those environment markers with a PEP 440 version as value such as `python_version`
|
||||
|
|
@ -128,7 +129,7 @@ impl Display for CanonicalMarkerValueString {
|
|||
}
|
||||
}
|
||||
|
||||
/// The [`ExtraName`] value used in `extra` markers.
|
||||
/// The [`ExtraName`] value used in `extra` and `extras` markers.
|
||||
#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
|
||||
pub enum CanonicalMarkerValueExtra {
|
||||
/// A valid [`ExtraName`].
|
||||
|
|
@ -159,3 +160,36 @@ impl Display for CanonicalMarkerValueExtra {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A key-value pair for `<value> in <key>` or `<value> not in <key>`, where the key is a list.
|
||||
///
|
||||
/// Used for PEP 751 markers.
|
||||
#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
|
||||
pub enum CanonicalMarkerListPair {
|
||||
/// A valid [`ExtraName`].
|
||||
Extras(ExtraName),
|
||||
/// A valid [`GroupName`].
|
||||
DependencyGroup(GroupName),
|
||||
/// For leniency, preserve invalid values.
|
||||
Arbitrary { key: MarkerValueList, value: String },
|
||||
}
|
||||
|
||||
impl CanonicalMarkerListPair {
|
||||
/// The key (RHS) of the marker expression.
|
||||
pub(crate) fn key(&self) -> MarkerValueList {
|
||||
match self {
|
||||
Self::Extras(_) => MarkerValueList::Extras,
|
||||
Self::DependencyGroup(_) => MarkerValueList::DependencyGroups,
|
||||
Self::Arbitrary { key, .. } => *key,
|
||||
}
|
||||
}
|
||||
|
||||
/// The value (LHS) of the marker expression.
|
||||
pub(crate) fn value(&self) -> String {
|
||||
match self {
|
||||
Self::Extras(extra) => extra.to_string(),
|
||||
Self::DependencyGroup(group) => group.to_string(),
|
||||
Self::Arbitrary { value, .. } => value.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -23,8 +23,8 @@ pub use lowering::{
|
|||
pub use tree::{
|
||||
ContainsMarkerTree, ExtraMarkerTree, ExtraOperator, InMarkerTree, MarkerExpression,
|
||||
MarkerOperator, MarkerTree, MarkerTreeContents, MarkerTreeDebugGraph, MarkerTreeKind,
|
||||
MarkerValue, MarkerValueExtra, MarkerValueString, MarkerValueVersion, MarkerWarningKind,
|
||||
StringMarkerTree, StringVersion, VersionMarkerTree,
|
||||
MarkerValue, MarkerValueExtra, MarkerValueList, MarkerValueString, MarkerValueVersion,
|
||||
MarkerWarningKind, StringMarkerTree, StringVersion, VersionMarkerTree,
|
||||
};
|
||||
|
||||
/// `serde` helpers for [`MarkerTree`].
|
||||
|
|
|
|||
|
|
@ -1,10 +1,12 @@
|
|||
use arcstr::ArcStr;
|
||||
use std::str::FromStr;
|
||||
use uv_normalize::ExtraName;
|
||||
use uv_normalize::{ExtraName, GroupName};
|
||||
use uv_pep440::{Version, VersionPattern, VersionSpecifier};
|
||||
|
||||
use crate::cursor::Cursor;
|
||||
use crate::marker::MarkerValueExtra;
|
||||
use crate::marker::lowering::CanonicalMarkerListPair;
|
||||
use crate::marker::tree::{ContainerOperator, MarkerValueList};
|
||||
use crate::{
|
||||
ExtraOperator, MarkerExpression, MarkerOperator, MarkerTree, MarkerValue, MarkerValueString,
|
||||
MarkerValueVersion, MarkerWarningKind, Pep508Error, Pep508ErrorSource, Pep508Url, Reporter,
|
||||
|
|
@ -168,6 +170,7 @@ pub(crate) fn parse_marker_key_op_value<T: Pep508Url>(
|
|||
reporter: &mut impl Reporter,
|
||||
) -> Result<Option<MarkerExpression>, Pep508Error<T>> {
|
||||
cursor.eat_whitespace();
|
||||
let start = cursor.pos();
|
||||
let l_value = parse_marker_value(cursor, reporter)?;
|
||||
cursor.eat_whitespace();
|
||||
// "not in" and "in" must be preceded by whitespace. We must already have matched a whitespace
|
||||
|
|
@ -176,6 +179,7 @@ pub(crate) fn parse_marker_key_op_value<T: Pep508Url>(
|
|||
let operator = parse_marker_operator(cursor)?;
|
||||
cursor.eat_whitespace();
|
||||
let r_value = parse_marker_value(cursor, reporter)?;
|
||||
let len = cursor.pos() - start;
|
||||
|
||||
// Convert a `<marker_value> <marker_op> <marker_value>` expression into its
|
||||
// typed equivalent.
|
||||
|
|
@ -209,7 +213,8 @@ pub(crate) fn parse_marker_key_op_value<T: Pep508Url>(
|
|||
let value = match r_value {
|
||||
MarkerValue::Extra
|
||||
| MarkerValue::MarkerEnvVersion(_)
|
||||
| MarkerValue::MarkerEnvString(_) => {
|
||||
| MarkerValue::MarkerEnvString(_)
|
||||
| MarkerValue::MarkerEnvList(_) => {
|
||||
reporter.report(
|
||||
MarkerWarningKind::MarkerMarkerComparison,
|
||||
"Comparing two markers with each other doesn't make any sense,
|
||||
|
|
@ -237,11 +242,23 @@ pub(crate) fn parse_marker_key_op_value<T: Pep508Url>(
|
|||
value,
|
||||
})
|
||||
}
|
||||
// `extras in "test"` or `dependency_groups not in "dev"` are invalid.
|
||||
MarkerValue::MarkerEnvList(key) => {
|
||||
return Err(Pep508Error {
|
||||
message: Pep508ErrorSource::String(format!(
|
||||
"The marker {key} must be on the right hand side of the expression"
|
||||
)),
|
||||
start,
|
||||
len,
|
||||
input: cursor.to_string(),
|
||||
});
|
||||
}
|
||||
// `extra == '...'`
|
||||
MarkerValue::Extra => {
|
||||
let value = match r_value {
|
||||
MarkerValue::MarkerEnvVersion(_)
|
||||
| MarkerValue::MarkerEnvString(_)
|
||||
| MarkerValue::MarkerEnvList(_)
|
||||
| MarkerValue::Extra => {
|
||||
reporter.report(
|
||||
MarkerWarningKind::ExtraInvalidComparison,
|
||||
|
|
@ -257,7 +274,7 @@ pub(crate) fn parse_marker_key_op_value<T: Pep508Url>(
|
|||
|
||||
parse_extra_expr(operator, &value, reporter)
|
||||
}
|
||||
// This is either MarkerEnvVersion, MarkerEnvString or Extra inverted
|
||||
// This is either MarkerEnvVersion, MarkerEnvString, Extra (inverted), or Extras
|
||||
MarkerValue::QuotedString(l_string) => {
|
||||
match r_value {
|
||||
// The only sound choice for this is `<quoted PEP 440 version> <version op>` <version key>
|
||||
|
|
@ -271,6 +288,54 @@ pub(crate) fn parse_marker_key_op_value<T: Pep508Url>(
|
|||
operator: operator.invert(),
|
||||
value: l_string,
|
||||
}),
|
||||
// `"test" in extras` or `"dev" in dependency_groups`
|
||||
MarkerValue::MarkerEnvList(key) => {
|
||||
let operator =
|
||||
ContainerOperator::from_marker_operator(operator).ok_or_else(|| {
|
||||
Pep508Error {
|
||||
message: Pep508ErrorSource::String(format!(
|
||||
"The operator {operator} is not supported with the marker {key}, only the `in` and `not in` operators are supported"
|
||||
)),
|
||||
start,
|
||||
len,
|
||||
input: cursor.to_string(),
|
||||
}
|
||||
})?;
|
||||
let pair = match key {
|
||||
// `'...' in extras`
|
||||
MarkerValueList::Extras => match ExtraName::from_str(&l_string) {
|
||||
Ok(name) => CanonicalMarkerListPair::Extras(name),
|
||||
Err(err) => {
|
||||
reporter.report(
|
||||
MarkerWarningKind::ExtrasInvalidComparison,
|
||||
format!("Expected extra name (found `{l_string}`): {err}"),
|
||||
);
|
||||
CanonicalMarkerListPair::Arbitrary {
|
||||
key,
|
||||
value: l_string.to_string(),
|
||||
}
|
||||
}
|
||||
},
|
||||
// `'...' in dependency_groups`
|
||||
MarkerValueList::DependencyGroups => {
|
||||
match GroupName::from_str(&l_string) {
|
||||
Ok(name) => CanonicalMarkerListPair::DependencyGroup(name),
|
||||
Err(err) => {
|
||||
reporter.report(
|
||||
MarkerWarningKind::ExtrasInvalidComparison,
|
||||
format!("Expected dependency group name (found `{l_string}`): {err}"),
|
||||
);
|
||||
CanonicalMarkerListPair::Arbitrary {
|
||||
key,
|
||||
value: l_string.to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Some(MarkerExpression::List { pair, operator })
|
||||
}
|
||||
// `'...' == extra`
|
||||
MarkerValue::Extra => parse_extra_expr(operator, &l_string, reporter),
|
||||
// `'...' == '...'`, doesn't make much sense
|
||||
|
|
@ -319,10 +384,7 @@ fn parse_version_in_expr(
|
|||
value: &str,
|
||||
reporter: &mut impl Reporter,
|
||||
) -> Option<MarkerExpression> {
|
||||
if !matches!(operator, MarkerOperator::In | MarkerOperator::NotIn) {
|
||||
return None;
|
||||
}
|
||||
let negated = matches!(operator, MarkerOperator::NotIn);
|
||||
let operator = ContainerOperator::from_marker_operator(operator)?;
|
||||
|
||||
let mut cursor = Cursor::new(value);
|
||||
let mut versions = Vec::new();
|
||||
|
|
@ -358,7 +420,7 @@ fn parse_version_in_expr(
|
|||
Some(MarkerExpression::VersionIn {
|
||||
key,
|
||||
versions,
|
||||
negated,
|
||||
operator,
|
||||
})
|
||||
}
|
||||
|
||||
|
|
@ -491,8 +553,7 @@ fn parse_extra_expr(
|
|||
|
||||
reporter.report(
|
||||
MarkerWarningKind::ExtraInvalidComparison,
|
||||
"Comparing extra with something other than a quoted string is wrong,
|
||||
will be ignored"
|
||||
"Comparing `extra` with any operator other than `==` or `!=` is wrong and will be ignored"
|
||||
.to_string(),
|
||||
);
|
||||
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ use version_ranges::Ranges;
|
|||
|
||||
use uv_pep440::{Version, VersionSpecifier};
|
||||
|
||||
use crate::marker::tree::ContainerOperator;
|
||||
use crate::{ExtraOperator, MarkerExpression, MarkerOperator, MarkerTree, MarkerTreeKind};
|
||||
|
||||
/// Returns a simplified DNF expression for a given marker tree.
|
||||
|
|
@ -161,6 +162,22 @@ fn collect_dnf(
|
|||
path.pop();
|
||||
}
|
||||
}
|
||||
MarkerTreeKind::List(marker) => {
|
||||
for (is_high, tree) in marker.children() {
|
||||
let expr = MarkerExpression::List {
|
||||
pair: marker.pair().clone(),
|
||||
operator: if is_high {
|
||||
ContainerOperator::In
|
||||
} else {
|
||||
ContainerOperator::NotIn
|
||||
},
|
||||
};
|
||||
|
||||
path.push(expr);
|
||||
collect_dnf(tree, dnf, path);
|
||||
path.pop();
|
||||
}
|
||||
}
|
||||
MarkerTreeKind::Extra(marker) => {
|
||||
for (value, tree) in marker.children() {
|
||||
let operator = if value {
|
||||
|
|
@ -396,18 +413,18 @@ fn is_negation(left: &MarkerExpression, right: &MarkerExpression) -> bool {
|
|||
MarkerExpression::VersionIn {
|
||||
key,
|
||||
versions,
|
||||
negated,
|
||||
operator,
|
||||
} => {
|
||||
let MarkerExpression::VersionIn {
|
||||
key: key2,
|
||||
versions: versions2,
|
||||
negated: negated2,
|
||||
operator: operator2,
|
||||
} = right
|
||||
else {
|
||||
return false;
|
||||
};
|
||||
|
||||
key == key2 && versions == versions2 && negated != negated2
|
||||
key == key2 && versions == versions2 && operator != operator2
|
||||
}
|
||||
MarkerExpression::String {
|
||||
key,
|
||||
|
|
@ -440,5 +457,16 @@ fn is_negation(left: &MarkerExpression, right: &MarkerExpression) -> bool {
|
|||
|
||||
name == name2 && operator.negate() == *operator2
|
||||
}
|
||||
MarkerExpression::List { pair, operator } => {
|
||||
let MarkerExpression::List {
|
||||
pair: pair2,
|
||||
operator: operator2,
|
||||
} = right
|
||||
else {
|
||||
return false;
|
||||
};
|
||||
|
||||
pair == pair2 && operator != operator2
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -9,18 +9,19 @@ use itertools::Itertools;
|
|||
use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
|
||||
use version_ranges::Ranges;
|
||||
|
||||
use uv_normalize::ExtraName;
|
||||
use uv_normalize::{ExtraName, GroupName};
|
||||
use uv_pep440::{Version, VersionParseError, VersionSpecifier};
|
||||
|
||||
use super::algebra::{Edges, INTERNER, NodeId, Variable};
|
||||
use super::simplify;
|
||||
use crate::cursor::Cursor;
|
||||
use crate::marker::lowering::{
|
||||
CanonicalMarkerValueExtra, CanonicalMarkerValueString, CanonicalMarkerValueVersion,
|
||||
CanonicalMarkerListPair, CanonicalMarkerValueString, CanonicalMarkerValueVersion,
|
||||
};
|
||||
use crate::marker::parse;
|
||||
use crate::{
|
||||
MarkerEnvironment, Pep508Error, Pep508ErrorSource, Pep508Url, Reporter, TracingReporter,
|
||||
CanonicalMarkerValueExtra, MarkerEnvironment, Pep508Error, Pep508ErrorSource, Pep508Url,
|
||||
Reporter, TracingReporter,
|
||||
};
|
||||
|
||||
/// Ways in which marker evaluation can fail
|
||||
|
|
@ -32,6 +33,12 @@ pub enum MarkerWarningKind {
|
|||
/// Doing an operation other than `==` and `!=` on a quoted string with `extra`, such as
|
||||
/// `extra > "perf"` or `extra == os_name`
|
||||
ExtraInvalidComparison,
|
||||
/// Doing an operation other than `in` and `not in` on a quoted string with `extra`, such as
|
||||
/// `extras > "perf"` or `extras == os_name`
|
||||
ExtrasInvalidComparison,
|
||||
/// Doing an operation other than `in` and `not in` on a quoted string with `dependency_groups`,
|
||||
/// such as `dependency_groups > "perf"` or `dependency_groups == os_name`
|
||||
DependencyGroupsInvalidComparison,
|
||||
/// Comparing a string valued marker and a string lexicographically, such as `"3.9" > "3.10"`
|
||||
LexicographicComparison,
|
||||
/// Comparing two markers, such as `os_name != sys_implementation`
|
||||
|
|
@ -119,6 +126,26 @@ impl Display for MarkerValueString {
|
|||
}
|
||||
}
|
||||
|
||||
/// Those markers with exclusively `in` and `not in` operators.
|
||||
///
|
||||
/// Contains PEP 751 lockfile markers.
|
||||
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
|
||||
pub enum MarkerValueList {
|
||||
/// `extras`. This one is special because it's a list, and user-provided
|
||||
Extras,
|
||||
/// `dependency_groups`. This one is special because it's a list, and user-provided
|
||||
DependencyGroups,
|
||||
}
|
||||
|
||||
impl Display for MarkerValueList {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::Extras => f.write_str("extras"),
|
||||
Self::DependencyGroups => f.write_str("dependency_groups"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// One of the predefined environment values
|
||||
///
|
||||
/// <https://packaging.python.org/en/latest/specifications/dependency-specifiers/#environment-markers>
|
||||
|
|
@ -128,7 +155,9 @@ pub enum MarkerValue {
|
|||
MarkerEnvVersion(MarkerValueVersion),
|
||||
/// Those environment markers with an arbitrary string as value such as `sys_platform`
|
||||
MarkerEnvString(MarkerValueString),
|
||||
/// `extra`. This one is special because it's a list and not env but user given
|
||||
/// Those markers with exclusively `in` and `not in` operators
|
||||
MarkerEnvList(MarkerValueList),
|
||||
/// `extra`. This one is special because it's a list, and user-provided
|
||||
Extra,
|
||||
/// Not a constant, but a user given quoted string with a value inside such as '3.8' or "windows"
|
||||
QuotedString(ArcStr),
|
||||
|
|
@ -169,6 +198,8 @@ impl FromStr for MarkerValue {
|
|||
"python_version" => Self::MarkerEnvVersion(MarkerValueVersion::PythonVersion),
|
||||
"sys_platform" => Self::MarkerEnvString(MarkerValueString::SysPlatform),
|
||||
"sys.platform" => Self::MarkerEnvString(MarkerValueString::SysPlatformDeprecated),
|
||||
"extras" => Self::MarkerEnvList(MarkerValueList::Extras),
|
||||
"dependency_groups" => Self::MarkerEnvList(MarkerValueList::DependencyGroups),
|
||||
"extra" => Self::Extra,
|
||||
_ => return Err(format!("Invalid key: {s}")),
|
||||
};
|
||||
|
|
@ -181,6 +212,7 @@ impl Display for MarkerValue {
|
|||
match self {
|
||||
Self::MarkerEnvVersion(marker_value_version) => marker_value_version.fmt(f),
|
||||
Self::MarkerEnvString(marker_value_string) => marker_value_string.fmt(f),
|
||||
Self::MarkerEnvList(marker_value_contains) => marker_value_contains.fmt(f),
|
||||
Self::Extra => f.write_str("extra"),
|
||||
Self::QuotedString(value) => write!(f, "'{value}'"),
|
||||
}
|
||||
|
|
@ -433,7 +465,7 @@ impl Deref for StringVersion {
|
|||
}
|
||||
}
|
||||
|
||||
/// The [`ExtraName`] value used in `extra` markers.
|
||||
/// The [`ExtraName`] value used in `extra` and `extras` markers.
|
||||
#[derive(Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
|
||||
pub enum MarkerValueExtra {
|
||||
/// A valid [`ExtraName`].
|
||||
|
|
@ -492,7 +524,7 @@ pub enum MarkerExpression {
|
|||
VersionIn {
|
||||
key: MarkerValueVersion,
|
||||
versions: Vec<Version>,
|
||||
negated: bool,
|
||||
operator: ContainerOperator,
|
||||
},
|
||||
/// An string marker comparison, e.g. `sys_platform == '...'`.
|
||||
///
|
||||
|
|
@ -502,10 +534,15 @@ pub enum MarkerExpression {
|
|||
operator: MarkerOperator,
|
||||
value: ArcStr,
|
||||
},
|
||||
/// `'...' in <key>`, a PEP 751 expression.
|
||||
List {
|
||||
pair: CanonicalMarkerListPair,
|
||||
operator: ContainerOperator,
|
||||
},
|
||||
/// `extra <extra op> '...'` or `'...' <extra op> extra`.
|
||||
Extra {
|
||||
operator: ExtraOperator,
|
||||
name: MarkerValueExtra,
|
||||
operator: ExtraOperator,
|
||||
},
|
||||
}
|
||||
|
||||
|
|
@ -514,10 +551,12 @@ pub enum MarkerExpression {
|
|||
pub(crate) enum MarkerExpressionKind {
|
||||
/// A version expression, e.g. `<version key> <version op> <quoted PEP 440 version>`.
|
||||
Version(MarkerValueVersion),
|
||||
/// A version "in" expression, e.g. `<version key> in <quoted list of PEP 440 versions>`.
|
||||
/// A version `in` expression, e.g. `<version key> in <quoted list of PEP 440 versions>`.
|
||||
VersionIn(MarkerValueVersion),
|
||||
/// A string marker comparison, e.g. `sys_platform == '...'`.
|
||||
String(MarkerValueString),
|
||||
/// A list `in` or `not in` expression, e.g. `'...' in dependency_groups`.
|
||||
List(MarkerValueList),
|
||||
/// An extra expression, e.g. `extra == '...'`.
|
||||
Extra,
|
||||
}
|
||||
|
|
@ -561,6 +600,37 @@ impl Display for ExtraOperator {
|
|||
}
|
||||
}
|
||||
|
||||
/// The operator for a container expression, either 'in' or 'not in'.
|
||||
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
|
||||
pub enum ContainerOperator {
|
||||
/// `in`
|
||||
In,
|
||||
/// `not in`
|
||||
NotIn,
|
||||
}
|
||||
|
||||
impl ContainerOperator {
|
||||
/// Creates a [`ContainerOperator`] from an equivalent [`MarkerOperator`].
|
||||
///
|
||||
/// Returns `None` if the operator is not supported for containers.
|
||||
pub(crate) fn from_marker_operator(operator: MarkerOperator) -> Option<ContainerOperator> {
|
||||
match operator {
|
||||
MarkerOperator::In => Some(ContainerOperator::In),
|
||||
MarkerOperator::NotIn => Some(ContainerOperator::NotIn),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for ContainerOperator {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(match self {
|
||||
Self::In => "in",
|
||||
Self::NotIn => "not in",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl MarkerExpression {
|
||||
/// Parse a [`MarkerExpression`] from a string with the given reporter.
|
||||
pub fn parse_reporter(
|
||||
|
|
@ -599,6 +669,7 @@ impl MarkerExpression {
|
|||
MarkerExpression::Version { key, .. } => MarkerExpressionKind::Version(*key),
|
||||
MarkerExpression::VersionIn { key, .. } => MarkerExpressionKind::VersionIn(*key),
|
||||
MarkerExpression::String { key, .. } => MarkerExpressionKind::String(*key),
|
||||
MarkerExpression::List { pair, .. } => MarkerExpressionKind::List(pair.key()),
|
||||
MarkerExpression::Extra { .. } => MarkerExpressionKind::Extra,
|
||||
}
|
||||
}
|
||||
|
|
@ -618,11 +689,10 @@ impl Display for MarkerExpression {
|
|||
MarkerExpression::VersionIn {
|
||||
key,
|
||||
versions,
|
||||
negated,
|
||||
operator,
|
||||
} => {
|
||||
let op = if *negated { "not in" } else { "in" };
|
||||
let versions = versions.iter().map(ToString::to_string).join(" ");
|
||||
write!(f, "{key} {op} '{versions}'")
|
||||
write!(f, "{key} {operator} '{versions}'")
|
||||
}
|
||||
MarkerExpression::String {
|
||||
key,
|
||||
|
|
@ -638,6 +708,9 @@ impl Display for MarkerExpression {
|
|||
|
||||
write!(f, "{key} {operator} '{value}'")
|
||||
}
|
||||
MarkerExpression::List { pair, operator } => {
|
||||
write!(f, "'{}' {} {}", pair.value(), operator, pair.key())
|
||||
}
|
||||
MarkerExpression::Extra { operator, name } => {
|
||||
write!(f, "extra {operator} '{name}'")
|
||||
}
|
||||
|
|
@ -645,6 +718,51 @@ impl Display for MarkerExpression {
|
|||
}
|
||||
}
|
||||
|
||||
/// The extra and dependency group names to use when evaluating a marker tree.
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
enum ExtrasEnvironment<'a> {
|
||||
/// E.g., `extra == '...'`
|
||||
Extras(&'a [ExtraName]),
|
||||
/// E.g., `'...' in extras` or `'...' in dependency_groups`
|
||||
Pep751(&'a [ExtraName], &'a [GroupName]),
|
||||
}
|
||||
|
||||
impl<'a> ExtrasEnvironment<'a> {
|
||||
/// Creates a new [`ExtrasEnvironment`] for the given `extra` names.
|
||||
fn from_extras(extras: &'a [ExtraName]) -> Self {
|
||||
Self::Extras(extras)
|
||||
}
|
||||
|
||||
/// Creates a new [`ExtrasEnvironment`] for the given PEP 751 `extras` and `dependency_groups`.
|
||||
fn from_pep751(extras: &'a [ExtraName], dependency_groups: &'a [GroupName]) -> Self {
|
||||
Self::Pep751(extras, dependency_groups)
|
||||
}
|
||||
|
||||
/// Returns the `extra` names in this environment.
|
||||
fn extra(&self) -> &[ExtraName] {
|
||||
match self {
|
||||
Self::Extras(extra) => extra,
|
||||
Self::Pep751(..) => &[],
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the `extras` names in this environment, as in a PEP 751 lockfile.
|
||||
fn extras(&self) -> &[ExtraName] {
|
||||
match self {
|
||||
Self::Extras(..) => &[],
|
||||
Self::Pep751(extras, ..) => extras,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the `dependency_group` group names in this environment, as in a PEP 751 lockfile.
|
||||
fn dependency_groups(&self) -> &[GroupName] {
|
||||
match self {
|
||||
Self::Extras(..) => &[],
|
||||
Self::Pep751(.., groups) => groups,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents one or more nested marker expressions with and/or/parentheses.
|
||||
///
|
||||
/// Marker trees are canonical, meaning any two functionally equivalent markers
|
||||
|
|
@ -852,6 +970,16 @@ impl MarkerTree {
|
|||
low: low.negate(self.0),
|
||||
})
|
||||
}
|
||||
Variable::List(key) => {
|
||||
let Edges::Boolean { low, high } = node.children else {
|
||||
unreachable!()
|
||||
};
|
||||
MarkerTreeKind::List(ListMarkerTree {
|
||||
pair: key,
|
||||
high: high.negate(self.0),
|
||||
low: low.negate(self.0),
|
||||
})
|
||||
}
|
||||
Variable::Extra(name) => {
|
||||
let Edges::Boolean { low, high } = node.children else {
|
||||
unreachable!()
|
||||
|
|
@ -872,7 +1000,27 @@ impl MarkerTree {
|
|||
|
||||
/// Does this marker apply in the given environment?
|
||||
pub fn evaluate(self, env: &MarkerEnvironment, extras: &[ExtraName]) -> bool {
|
||||
self.evaluate_reporter_impl(env, extras, &mut TracingReporter)
|
||||
self.evaluate_reporter_impl(
|
||||
env,
|
||||
ExtrasEnvironment::from_extras(extras),
|
||||
&mut TracingReporter,
|
||||
)
|
||||
}
|
||||
|
||||
/// Evaluate a marker in the context of a PEP 751 lockfile, which exposes several additional
|
||||
/// markers (`extras` and `dependency_groups`) that are not available in any other context,
|
||||
/// per the spec.
|
||||
pub fn evaluate_pep751(
|
||||
self,
|
||||
env: &MarkerEnvironment,
|
||||
extras: &[ExtraName],
|
||||
groups: &[GroupName],
|
||||
) -> bool {
|
||||
self.evaluate_reporter_impl(
|
||||
env,
|
||||
ExtrasEnvironment::from_pep751(extras, groups),
|
||||
&mut TracingReporter,
|
||||
)
|
||||
}
|
||||
|
||||
/// Evaluates this marker tree against an optional environment and a
|
||||
|
|
@ -889,7 +1037,11 @@ impl MarkerTree {
|
|||
) -> bool {
|
||||
match env {
|
||||
None => self.evaluate_extras(extras),
|
||||
Some(env) => self.evaluate_reporter_impl(env, extras, &mut TracingReporter),
|
||||
Some(env) => self.evaluate_reporter_impl(
|
||||
env,
|
||||
ExtrasEnvironment::from_extras(extras),
|
||||
&mut TracingReporter,
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -901,13 +1053,13 @@ impl MarkerTree {
|
|||
extras: &[ExtraName],
|
||||
reporter: &mut impl Reporter,
|
||||
) -> bool {
|
||||
self.evaluate_reporter_impl(env, extras, reporter)
|
||||
self.evaluate_reporter_impl(env, ExtrasEnvironment::from_extras(extras), reporter)
|
||||
}
|
||||
|
||||
fn evaluate_reporter_impl(
|
||||
self,
|
||||
env: &MarkerEnvironment,
|
||||
extras: &[ExtraName],
|
||||
extras: ExtrasEnvironment,
|
||||
reporter: &mut impl Reporter,
|
||||
) -> bool {
|
||||
match self.kind() {
|
||||
|
|
@ -959,7 +1111,21 @@ impl MarkerTree {
|
|||
}
|
||||
MarkerTreeKind::Extra(marker) => {
|
||||
return marker
|
||||
.edge(extras.contains(marker.name().extra()))
|
||||
.edge(extras.extra().contains(marker.name().extra()))
|
||||
.evaluate_reporter_impl(env, extras, reporter);
|
||||
}
|
||||
MarkerTreeKind::List(marker) => {
|
||||
let edge = match marker.pair() {
|
||||
CanonicalMarkerListPair::Extras(extra) => extras.extras().contains(extra),
|
||||
CanonicalMarkerListPair::DependencyGroup(dependency_group) => {
|
||||
extras.dependency_groups().contains(dependency_group)
|
||||
}
|
||||
// Invalid marker expression
|
||||
CanonicalMarkerListPair::Arbitrary { .. } => return false,
|
||||
};
|
||||
|
||||
return marker
|
||||
.edge(edge)
|
||||
.evaluate_reporter_impl(env, extras, reporter);
|
||||
}
|
||||
}
|
||||
|
|
@ -986,6 +1152,9 @@ impl MarkerTree {
|
|||
MarkerTreeKind::Contains(marker) => marker
|
||||
.children()
|
||||
.any(|(_, tree)| tree.evaluate_extras(extras)),
|
||||
MarkerTreeKind::List(marker) => marker
|
||||
.children()
|
||||
.any(|(_, tree)| tree.evaluate_extras(extras)),
|
||||
MarkerTreeKind::Extra(marker) => marker
|
||||
.edge(extras.contains(marker.name().extra()))
|
||||
.evaluate_extras(extras),
|
||||
|
|
@ -1216,6 +1385,11 @@ impl MarkerTree {
|
|||
imp(tree, f);
|
||||
}
|
||||
}
|
||||
MarkerTreeKind::List(kind) => {
|
||||
for (_, tree) in kind.children() {
|
||||
imp(tree, f);
|
||||
}
|
||||
}
|
||||
MarkerTreeKind::Extra(kind) => {
|
||||
if kind.low.is_false() {
|
||||
f(MarkerOperator::Equal, kind.name().extra());
|
||||
|
|
@ -1333,6 +1507,21 @@ impl MarkerTree {
|
|||
write!(f, "{} not in {} -> ", kind.value(), kind.key())?;
|
||||
kind.edge(false).fmt_graph(f, level + 1)?;
|
||||
}
|
||||
MarkerTreeKind::List(kind) => {
|
||||
writeln!(f)?;
|
||||
for _ in 0..level {
|
||||
write!(f, " ")?;
|
||||
}
|
||||
write!(f, "{} in {} -> ", kind.value(), kind.key())?;
|
||||
kind.edge(true).fmt_graph(f, level + 1)?;
|
||||
|
||||
writeln!(f)?;
|
||||
for _ in 0..level {
|
||||
write!(f, " ")?;
|
||||
}
|
||||
write!(f, "{} not in {} -> ", kind.value(), kind.key())?;
|
||||
kind.edge(false).fmt_graph(f, level + 1)?;
|
||||
}
|
||||
MarkerTreeKind::Extra(kind) => {
|
||||
writeln!(f)?;
|
||||
for _ in 0..level {
|
||||
|
|
@ -1417,7 +1606,9 @@ pub enum MarkerTreeKind<'a> {
|
|||
In(InMarkerTree<'a>),
|
||||
/// A string expression with the `contains` operator.
|
||||
Contains(ContainsMarkerTree<'a>),
|
||||
/// A string expression.
|
||||
/// A `in` or `not in` expression.
|
||||
List(ListMarkerTree<'a>),
|
||||
/// An extra expression (e.g., `extra == 'dev'`).
|
||||
Extra(ExtraMarkerTree<'a>),
|
||||
}
|
||||
|
||||
|
|
@ -1593,6 +1784,59 @@ impl Ord for ContainsMarkerTree<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Clone, Debug)]
|
||||
pub struct ListMarkerTree<'a> {
|
||||
// No separate canonical type, the type is already canonical.
|
||||
pair: &'a CanonicalMarkerListPair,
|
||||
high: NodeId,
|
||||
low: NodeId,
|
||||
}
|
||||
|
||||
impl ListMarkerTree<'_> {
|
||||
/// The key-value pair for this expression
|
||||
pub fn pair(&self) -> &CanonicalMarkerListPair {
|
||||
self.pair
|
||||
}
|
||||
|
||||
/// The key (RHS) for this expression.
|
||||
pub fn key(&self) -> MarkerValueList {
|
||||
self.pair.key()
|
||||
}
|
||||
|
||||
/// The value (LHS) for this expression.
|
||||
pub fn value(&self) -> String {
|
||||
self.pair.value()
|
||||
}
|
||||
|
||||
/// The edges of this node, corresponding to the boolean evaluation of the expression.
|
||||
pub fn children(&self) -> impl Iterator<Item = (bool, MarkerTree)> {
|
||||
[(true, MarkerTree(self.high)), (false, MarkerTree(self.low))].into_iter()
|
||||
}
|
||||
|
||||
/// Returns the subtree associated with the given edge value.
|
||||
pub fn edge(&self, value: bool) -> MarkerTree {
|
||||
if value {
|
||||
MarkerTree(self.high)
|
||||
} else {
|
||||
MarkerTree(self.low)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for ListMarkerTree<'_> {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for ListMarkerTree<'_> {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
self.pair()
|
||||
.cmp(other.pair())
|
||||
.then_with(|| self.children().cmp(other.children()))
|
||||
}
|
||||
}
|
||||
|
||||
/// A node representing the existence or absence of a given extra, such as `extra == 'bar'`.
|
||||
#[derive(PartialEq, Eq, Clone, Debug)]
|
||||
pub struct ExtraMarkerTree<'a> {
|
||||
|
|
@ -1745,7 +1989,7 @@ mod test {
|
|||
implementation_name: "",
|
||||
implementation_version: "3.7",
|
||||
os_name: "linux",
|
||||
platform_machine: "",
|
||||
platform_machine: "x86_64",
|
||||
platform_python_implementation: "",
|
||||
platform_release: "",
|
||||
platform_system: "",
|
||||
|
|
|
|||
|
|
@ -62,6 +62,7 @@ impl VerbatimUrl {
|
|||
///
|
||||
/// If no root directory is provided, relative paths are resolved against the current working
|
||||
/// directory.
|
||||
#[cfg(feature = "non-pep508-extensions")] // PEP 508 arguably only allows absolute file URLs.
|
||||
pub fn from_url_or_path(
|
||||
input: &str,
|
||||
root_dir: Option<&Path>,
|
||||
|
|
|
|||
|
|
@ -19,9 +19,9 @@ pub struct PyProjectToml {
|
|||
|
||||
impl PyProjectToml {
|
||||
pub fn from_toml(toml: &str) -> Result<Self, MetadataError> {
|
||||
let pyproject_toml: toml_edit::ImDocument<_> = toml_edit::ImDocument::from_str(toml)
|
||||
let pyproject_toml = toml_edit::Document::from_str(toml)
|
||||
.map_err(MetadataError::InvalidPyprojectTomlSyntax)?;
|
||||
let pyproject_toml: Self = PyProjectToml::deserialize(pyproject_toml.into_deserializer())
|
||||
let pyproject_toml = PyProjectToml::deserialize(pyproject_toml.into_deserializer())
|
||||
.map_err(MetadataError::InvalidPyprojectTomlSchema)?;
|
||||
Ok(pyproject_toml)
|
||||
}
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load Diff
|
|
@ -53,8 +53,7 @@ import re
|
|||
from dataclasses import asdict, dataclass, field
|
||||
from enum import StrEnum
|
||||
from pathlib import Path
|
||||
from typing import Generator, Iterable, NamedTuple, Self
|
||||
from urllib.parse import unquote
|
||||
from typing import Any, Generator, Iterable, NamedTuple, Self
|
||||
|
||||
import httpx
|
||||
|
||||
|
|
@ -255,13 +254,7 @@ class CPythonFinder(Finder):
|
|||
# Sort the assets to ensure deterministic results
|
||||
row["assets"].sort(key=lambda asset: asset["browser_download_url"])
|
||||
for asset in row["assets"]:
|
||||
# On older versions, GitHub didn't backfill the digest.
|
||||
if digest := asset["digest"]:
|
||||
sha256 = digest.removeprefix("sha256:")
|
||||
else:
|
||||
sha256 = None
|
||||
url = asset["browser_download_url"]
|
||||
download = self._parse_download_url(url, sha256)
|
||||
download = self._parse_download_asset(asset)
|
||||
if download is None:
|
||||
continue
|
||||
if (
|
||||
|
|
@ -355,16 +348,19 @@ class CPythonFinder(Finder):
|
|||
continue
|
||||
download.sha256 = checksums.get(download.filename)
|
||||
|
||||
def _parse_download_url(
|
||||
self, url: str, sha256: str | None
|
||||
) -> PythonDownload | None:
|
||||
"""Parse an indygreg download URL into a PythonDownload object."""
|
||||
def _parse_download_asset(self, asset: dict[str, Any]) -> PythonDownload | None:
|
||||
"""Parse a python-build-standalone download asset into a PythonDownload object."""
|
||||
url = asset["browser_download_url"]
|
||||
# Ex)
|
||||
# https://github.com/astral-sh/python-build-standalone/releases/download/20240107/cpython-3.12.1%2B20240107-aarch64-unknown-linux-gnu-lto-full.tar.zst
|
||||
if url.endswith(".sha256"):
|
||||
return None
|
||||
filename = unquote(url.rsplit("/", maxsplit=1)[-1])
|
||||
release = int(url.rsplit("/")[-2])
|
||||
filename = asset["name"]
|
||||
sha256 = None
|
||||
# On older versions, GitHub didn't backfill the digest.
|
||||
if digest := asset["digest"]:
|
||||
sha256 = digest.removeprefix("sha256:")
|
||||
|
||||
match = self._filename_re.match(filename) or self._legacy_filename_re.match(
|
||||
filename
|
||||
|
|
@ -611,6 +607,9 @@ class GraalPyFinder(Finder):
|
|||
platform = self._normalize_os(m.group(1))
|
||||
arch = self._normalize_arch(m.group(2))
|
||||
libc = "gnu" if platform == "linux" else "none"
|
||||
sha256 = None
|
||||
if digest := asset["digest"]:
|
||||
sha256 = digest.removeprefix("sha256:")
|
||||
download = PythonDownload(
|
||||
release=0,
|
||||
version=python_version,
|
||||
|
|
@ -623,6 +622,7 @@ class GraalPyFinder(Finder):
|
|||
implementation=self.implementation,
|
||||
filename=asset["name"],
|
||||
url=url,
|
||||
sha256=sha256,
|
||||
)
|
||||
# Only keep the latest GraalPy version of each arch/platform
|
||||
if (python_version, arch, platform) not in results:
|
||||
|
|
@ -637,6 +637,7 @@ class GraalPyFinder(Finder):
|
|||
return self.PLATFORM_MAPPING.get(os, os)
|
||||
|
||||
async def _fetch_checksums(self, downloads: list[PythonDownload], n: int) -> None:
|
||||
downloads = list(filter(lambda d: not d.sha256, downloads))
|
||||
for idx, batch in enumerate(batched(downloads, n)):
|
||||
logging.info("Fetching GraalPy checksums: %d/%d", idx * n, len(downloads))
|
||||
checksum_requests = []
|
||||
|
|
|
|||
|
|
@ -988,7 +988,11 @@ impl ManagedPythonDownload {
|
|||
archive_writer.flush().await?;
|
||||
}
|
||||
// Move the completed file into place, invalidating the `File` instance.
|
||||
fs_err::rename(&temp_file, target_cache_file)?;
|
||||
match rename_with_retry(&temp_file, target_cache_file).await {
|
||||
Ok(()) => {}
|
||||
Err(_) if target_cache_file.is_file() => {}
|
||||
Err(err) => return Err(err.into()),
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -847,7 +847,7 @@ fn executable_path_from_base(
|
|||
/// Create a link to a managed Python executable.
|
||||
///
|
||||
/// If the file already exists at the link path, an error will be returned.
|
||||
pub fn create_link_to_executable(link: &Path, executable: PathBuf) -> Result<(), Error> {
|
||||
pub fn create_link_to_executable(link: &Path, executable: &Path) -> Result<(), Error> {
|
||||
let link_parent = link.parent().ok_or(Error::NoExecutableDirectory)?;
|
||||
fs_err::create_dir_all(link_parent).map_err(|err| Error::ExecutableDirectory {
|
||||
to: link_parent.to_path_buf(),
|
||||
|
|
@ -856,20 +856,20 @@ pub fn create_link_to_executable(link: &Path, executable: PathBuf) -> Result<(),
|
|||
|
||||
if cfg!(unix) {
|
||||
// Note this will never copy on Unix — we use it here to allow compilation on Windows
|
||||
match symlink_or_copy_file(&executable, link) {
|
||||
match symlink_or_copy_file(executable, link) {
|
||||
Ok(()) => Ok(()),
|
||||
Err(err) if err.kind() == io::ErrorKind::NotFound => {
|
||||
Err(Error::MissingExecutable(executable.clone()))
|
||||
Err(Error::MissingExecutable(executable.to_path_buf()))
|
||||
}
|
||||
Err(err) => Err(Error::LinkExecutable {
|
||||
from: executable,
|
||||
from: executable.to_path_buf(),
|
||||
to: link.to_path_buf(),
|
||||
err,
|
||||
}),
|
||||
}
|
||||
} else if cfg!(windows) {
|
||||
// TODO(zanieb): Install GUI launchers as well
|
||||
let launcher = windows_python_launcher(&executable, false)?;
|
||||
let launcher = windows_python_launcher(executable, false)?;
|
||||
|
||||
// OK to use `std::fs` here, `fs_err` does not support `File::create_new` and we attach
|
||||
// error context anyway
|
||||
|
|
@ -878,7 +878,7 @@ pub fn create_link_to_executable(link: &Path, executable: PathBuf) -> Result<(),
|
|||
std::fs::File::create_new(link)
|
||||
.and_then(|mut file| file.write_all(launcher.as_ref()))
|
||||
.map_err(|err| Error::LinkExecutable {
|
||||
from: executable,
|
||||
from: executable.to_path_buf(),
|
||||
to: link.to_path_buf(),
|
||||
err,
|
||||
})
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
//! DO NOT EDIT
|
||||
//!
|
||||
//! Generated with `cargo run dev generate-sysconfig-metadata`
|
||||
//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250712/cpython-unix/targets.yml>
|
||||
//! Targets from <https://github.com/astral-sh/python-build-standalone/blob/20250723/cpython-unix/targets.yml>
|
||||
//!
|
||||
#![allow(clippy::all)]
|
||||
#![cfg_attr(any(), rustfmt::skip)]
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@
|
|||
use crate::managed::ManagedPythonInstallation;
|
||||
use crate::platform::Arch;
|
||||
use crate::{COMPANY_DISPLAY_NAME, COMPANY_KEY, PythonInstallationKey, PythonVersion};
|
||||
use anyhow::anyhow;
|
||||
use std::cmp::Ordering;
|
||||
use std::collections::HashSet;
|
||||
use std::path::PathBuf;
|
||||
|
|
@ -238,8 +239,7 @@ pub fn remove_registry_entry<'a>(
|
|||
} else {
|
||||
errors.push((
|
||||
installation.key().clone(),
|
||||
anyhow::Error::new(err)
|
||||
.context("Failed to clear registry entries under HKCU:\\{python_entry}"),
|
||||
anyhow!("Failed to clear registry entries under HKCU:\\{python_entry}: {err}"),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -273,13 +273,13 @@ impl RequirementsSource {
|
|||
pub fn allows_extras(&self) -> bool {
|
||||
matches!(
|
||||
self,
|
||||
Self::PyprojectToml(_) | Self::SetupPy(_) | Self::SetupCfg(_)
|
||||
Self::PylockToml(_) | Self::PyprojectToml(_) | Self::SetupPy(_) | Self::SetupCfg(_)
|
||||
)
|
||||
}
|
||||
|
||||
/// Returns `true` if the source allows groups to be specified.
|
||||
pub fn allows_groups(&self) -> bool {
|
||||
matches!(self, Self::PyprojectToml(_))
|
||||
matches!(self, Self::PylockToml(_) | Self::PyprojectToml(_))
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -43,7 +43,7 @@ use uv_distribution_types::{
|
|||
UnresolvedRequirementSpecification,
|
||||
};
|
||||
use uv_fs::{CWD, Simplified};
|
||||
use uv_normalize::{ExtraName, GroupName, PackageName};
|
||||
use uv_normalize::{ExtraName, PackageName, PipGroupName};
|
||||
use uv_requirements_txt::{RequirementsTxt, RequirementsTxtRequirement};
|
||||
use uv_warnings::warn_user;
|
||||
use uv_workspace::pyproject::PyProjectToml;
|
||||
|
|
@ -215,7 +215,7 @@ impl RequirementsSpecification {
|
|||
requirements: &[RequirementsSource],
|
||||
constraints: &[RequirementsSource],
|
||||
overrides: &[RequirementsSource],
|
||||
groups: BTreeMap<PathBuf, Vec<GroupName>>,
|
||||
groups: Option<&GroupsSpecification>,
|
||||
client_builder: &BaseClientBuilder<'_>,
|
||||
) -> Result<Self> {
|
||||
let mut spec = Self::default();
|
||||
|
|
@ -250,10 +250,13 @@ impl RequirementsSpecification {
|
|||
|
||||
// If we have a `pylock.toml`, don't allow additional requirements, constraints, or
|
||||
// overrides.
|
||||
if requirements
|
||||
.iter()
|
||||
.any(|source| matches!(source, RequirementsSource::PylockToml(..)))
|
||||
{
|
||||
if let Some(pylock_toml) = requirements.iter().find_map(|source| {
|
||||
if let RequirementsSource::PylockToml(path) = source {
|
||||
Some(path)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}) {
|
||||
if requirements
|
||||
.iter()
|
||||
.any(|source| !matches!(source, RequirementsSource::PylockToml(..)))
|
||||
|
|
@ -272,24 +275,55 @@ impl RequirementsSpecification {
|
|||
"Cannot specify constraints with a `pylock.toml` file"
|
||||
));
|
||||
}
|
||||
if !groups.is_empty() {
|
||||
return Err(anyhow::anyhow!(
|
||||
"Cannot specify groups with a `pylock.toml` file"
|
||||
));
|
||||
|
||||
// If we have a `pylock.toml`, disallow specifying paths for groups; instead, require
|
||||
// that all groups refer to the `pylock.toml` file.
|
||||
if let Some(groups) = groups {
|
||||
let mut names = Vec::new();
|
||||
for group in &groups.groups {
|
||||
if group.path.is_some() {
|
||||
return Err(anyhow::anyhow!(
|
||||
"Cannot specify paths for groups with a `pylock.toml` file; all groups must refer to the `pylock.toml` file"
|
||||
));
|
||||
}
|
||||
names.push(group.name.clone());
|
||||
}
|
||||
|
||||
if !names.is_empty() {
|
||||
spec.groups.insert(
|
||||
pylock_toml.clone(),
|
||||
DependencyGroups::from_args(
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
Vec::new(),
|
||||
Vec::new(),
|
||||
false,
|
||||
names,
|
||||
false,
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
} else if let Some(groups) = groups {
|
||||
// pip `--group` flags specify their own sources, which we need to process here.
|
||||
// First, we collect all groups by their path.
|
||||
let mut groups_by_path = BTreeMap::new();
|
||||
for group in &groups.groups {
|
||||
// If there's no path provided, expect a pyproject.toml in the project-dir
|
||||
// (Which is typically the current working directory, matching pip's behaviour)
|
||||
let pyproject_path = group
|
||||
.path
|
||||
.clone()
|
||||
.unwrap_or_else(|| groups.root.join("pyproject.toml"));
|
||||
groups_by_path
|
||||
.entry(pyproject_path)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(group.name.clone());
|
||||
}
|
||||
}
|
||||
|
||||
// Resolve sources into specifications so we know their `source_tree`.
|
||||
let mut requirement_sources = Vec::new();
|
||||
for source in requirements {
|
||||
let source = Self::from_source(source, client_builder).await?;
|
||||
requirement_sources.push(source);
|
||||
}
|
||||
|
||||
// pip `--group` flags specify their own sources, which we need to process here
|
||||
if !groups.is_empty() {
|
||||
let mut group_specs = BTreeMap::new();
|
||||
for (path, groups) in groups {
|
||||
for (path, groups) in groups_by_path {
|
||||
let group_spec = DependencyGroups::from_args(
|
||||
false,
|
||||
false,
|
||||
|
|
@ -305,6 +339,13 @@ impl RequirementsSpecification {
|
|||
spec.groups = group_specs;
|
||||
}
|
||||
|
||||
// Resolve sources into specifications so we know their `source_tree`.
|
||||
let mut requirement_sources = Vec::new();
|
||||
for source in requirements {
|
||||
let source = Self::from_source(source, client_builder).await?;
|
||||
requirement_sources.push(source);
|
||||
}
|
||||
|
||||
// Read all requirements, and keep track of all requirements _and_ constraints.
|
||||
// A `requirements.txt` can contain a `-c constraints.txt` directive within it, so reading
|
||||
// a requirements file can also add constraints.
|
||||
|
|
@ -426,7 +467,7 @@ impl RequirementsSpecification {
|
|||
requirements: &[RequirementsSource],
|
||||
client_builder: &BaseClientBuilder<'_>,
|
||||
) -> Result<Self> {
|
||||
Self::from_sources(requirements, &[], &[], BTreeMap::default(), client_builder).await
|
||||
Self::from_sources(requirements, &[], &[], None, client_builder).await
|
||||
}
|
||||
|
||||
/// Initialize a [`RequirementsSpecification`] from a list of [`Requirement`].
|
||||
|
|
@ -485,3 +526,12 @@ impl RequirementsSpecification {
|
|||
self.requirements.is_empty() && self.source_trees.is_empty() && self.overrides.is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct GroupsSpecification {
|
||||
/// The path to the project root, relative to which the default `pyproject.toml` file is
|
||||
/// located.
|
||||
pub root: PathBuf,
|
||||
/// The enabled groups.
|
||||
pub groups: Vec<PipGroupName>,
|
||||
}
|
||||
|
|
|
|||
|
|
@ -37,6 +37,14 @@ use crate::{InMemoryIndex, Options};
|
|||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum ResolveError {
|
||||
#[error("Failed to resolve dependencies for package `{1}=={2}`")]
|
||||
Dependencies(
|
||||
#[source] Box<ResolveError>,
|
||||
PackageName,
|
||||
Version,
|
||||
DerivationChain,
|
||||
),
|
||||
|
||||
#[error(transparent)]
|
||||
Client(#[from] uv_client::Error),
|
||||
|
||||
|
|
@ -92,9 +100,11 @@ pub enum ResolveError {
|
|||
ConflictingIndexes(PackageName, String, String),
|
||||
|
||||
#[error(
|
||||
"Package `{0}` attempted to resolve via URL: {1}. URL dependencies must be expressed as direct requirements or constraints. Consider adding `{0} @ {1}` to your dependencies or constraints file."
|
||||
"Package `{name}` was included as a URL dependency. URL dependencies must be expressed as direct requirements or constraints. Consider adding `{requirement}` to your dependencies or constraints file.",
|
||||
name = name.cyan(),
|
||||
requirement = format!("{name} @ {url}").cyan(),
|
||||
)]
|
||||
DisallowedUrl(PackageName, String),
|
||||
DisallowedUrl { name: PackageName, url: String },
|
||||
|
||||
#[error(transparent)]
|
||||
DistributionType(#[from] uv_distribution_types::Error),
|
||||
|
|
|
|||
|
|
@ -186,13 +186,13 @@ pub struct PylockToml {
|
|||
lock_version: Version,
|
||||
created_by: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
requires_python: Option<RequiresPython>,
|
||||
pub requires_python: Option<RequiresPython>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
extras: Vec<ExtraName>,
|
||||
pub extras: Vec<ExtraName>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
dependency_groups: Vec<GroupName>,
|
||||
pub dependency_groups: Vec<GroupName>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
default_groups: Vec<GroupName>,
|
||||
pub default_groups: Vec<GroupName>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
pub packages: Vec<PylockTomlPackage>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
|
|
@ -966,9 +966,12 @@ impl<'lock> PylockToml {
|
|||
self,
|
||||
install_path: &Path,
|
||||
markers: &MarkerEnvironment,
|
||||
extras: &[ExtraName],
|
||||
groups: &[GroupName],
|
||||
tags: &Tags,
|
||||
build_options: &BuildOptions,
|
||||
) -> Result<Resolution, PylockTomlError> {
|
||||
// Convert the extras and dependency groups specifications to a concrete environment.
|
||||
let mut graph =
|
||||
petgraph::graph::DiGraph::with_capacity(self.packages.len(), self.packages.len());
|
||||
|
||||
|
|
@ -977,7 +980,7 @@ impl<'lock> PylockToml {
|
|||
|
||||
for package in self.packages {
|
||||
// Omit packages that aren't relevant to the current environment.
|
||||
if !package.marker.evaluate(markers, &[]) {
|
||||
if !package.marker.evaluate_pep751(markers, extras, groups) {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -54,6 +54,11 @@ pub(crate) fn requires_python(tree: MarkerTree) -> Option<RequiresPythonRange> {
|
|||
collect_python_markers(tree, markers, range);
|
||||
}
|
||||
}
|
||||
MarkerTreeKind::List(marker) => {
|
||||
for (_, tree) in marker.children() {
|
||||
collect_python_markers(tree, markers, range);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -698,6 +698,11 @@ impl ResolverOutput {
|
|||
add_marker_params_from_tree(tree, set);
|
||||
}
|
||||
}
|
||||
MarkerTreeKind::List(marker) => {
|
||||
for (_, tree) in marker.children() {
|
||||
add_marker_params_from_tree(tree, set);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -635,19 +635,26 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
|
|||
}
|
||||
ForkedDependencies::Unforked(dependencies) => {
|
||||
// Enrich the state with any URLs, etc.
|
||||
state.visit_package_version_dependencies(
|
||||
next_id,
|
||||
&version,
|
||||
&self.urls,
|
||||
&self.indexes,
|
||||
&dependencies,
|
||||
&self.git,
|
||||
&self.workspace_members,
|
||||
self.selector.resolution_strategy(),
|
||||
)?;
|
||||
state
|
||||
.visit_package_version_dependencies(
|
||||
next_id,
|
||||
&version,
|
||||
&self.urls,
|
||||
&self.indexes,
|
||||
&dependencies,
|
||||
&self.git,
|
||||
&self.workspace_members,
|
||||
self.selector.resolution_strategy(),
|
||||
)
|
||||
.map_err(|err| {
|
||||
enrich_dependency_error(err, next_id, &version, &state.pubgrub)
|
||||
})?;
|
||||
|
||||
// Emit a request to fetch the metadata for each registry package.
|
||||
self.visit_dependencies(&dependencies, &state, &request_sink)?;
|
||||
self.visit_dependencies(&dependencies, &state, &request_sink)
|
||||
.map_err(|err| {
|
||||
enrich_dependency_error(err, next_id, &version, &state.pubgrub)
|
||||
})?;
|
||||
|
||||
// Add the dependencies to the state.
|
||||
state.add_package_version_dependencies(next_id, &version, dependencies);
|
||||
|
|
@ -870,19 +877,26 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
|
|||
})
|
||||
.map(move |(fork, mut forked_state)| {
|
||||
// Enrich the state with any URLs, etc.
|
||||
forked_state.visit_package_version_dependencies(
|
||||
package,
|
||||
version,
|
||||
&self.urls,
|
||||
&self.indexes,
|
||||
&fork.dependencies,
|
||||
&self.git,
|
||||
&self.workspace_members,
|
||||
self.selector.resolution_strategy(),
|
||||
)?;
|
||||
forked_state
|
||||
.visit_package_version_dependencies(
|
||||
package,
|
||||
version,
|
||||
&self.urls,
|
||||
&self.indexes,
|
||||
&fork.dependencies,
|
||||
&self.git,
|
||||
&self.workspace_members,
|
||||
self.selector.resolution_strategy(),
|
||||
)
|
||||
.map_err(|err| {
|
||||
enrich_dependency_error(err, package, version, &forked_state.pubgrub)
|
||||
})?;
|
||||
|
||||
// Emit a request to fetch the metadata for each registry package.
|
||||
self.visit_dependencies(&fork.dependencies, &forked_state, request_sink)?;
|
||||
self.visit_dependencies(&fork.dependencies, &forked_state, request_sink)
|
||||
.map_err(|err| {
|
||||
enrich_dependency_error(err, package, version, &forked_state.pubgrub)
|
||||
})?;
|
||||
|
||||
// Add the dependencies to the state.
|
||||
forked_state.add_package_version_dependencies(package, version, fork.dependencies);
|
||||
|
|
@ -3836,6 +3850,20 @@ pub(crate) struct VersionFork {
|
|||
version: Option<Version>,
|
||||
}
|
||||
|
||||
/// Enrich a [`ResolveError`] with additional information about why a given package was included.
|
||||
fn enrich_dependency_error(
|
||||
error: ResolveError,
|
||||
id: Id<PubGrubPackage>,
|
||||
version: &Version,
|
||||
pubgrub: &State<UvDependencyProvider>,
|
||||
) -> ResolveError {
|
||||
let Some(name) = pubgrub.package_store[id].name_no_root() else {
|
||||
return error;
|
||||
};
|
||||
let chain = DerivationChainBuilder::from_state(id, version, pubgrub).unwrap_or_default();
|
||||
ResolveError::Dependencies(Box::new(error), name.clone(), version.clone(), chain)
|
||||
}
|
||||
|
||||
/// Compute the set of markers for which a package is known to be relevant.
|
||||
fn find_environments(id: Id<PubGrubPackage>, state: &State<UvDependencyProvider>) -> MarkerTree {
|
||||
let package = &state.package_store[id];
|
||||
|
|
|
|||
|
|
@ -155,10 +155,10 @@ impl Urls {
|
|||
parsed_url: &'a ParsedUrl,
|
||||
) -> Result<&'a VerbatimParsedUrl, ResolveError> {
|
||||
let Some(expected) = self.get_regular(package_name) else {
|
||||
return Err(ResolveError::DisallowedUrl(
|
||||
package_name.clone(),
|
||||
verbatim_url.to_string(),
|
||||
));
|
||||
return Err(ResolveError::DisallowedUrl {
|
||||
name: package_name.clone(),
|
||||
url: verbatim_url.to_string(),
|
||||
});
|
||||
};
|
||||
|
||||
let matching_urls: Vec<_> = expected
|
||||
|
|
|
|||
|
|
@ -120,10 +120,9 @@ impl FilesystemOptions {
|
|||
.ok()
|
||||
.and_then(|content| toml::from_str::<PyProjectToml>(&content).ok())
|
||||
{
|
||||
if pyproject.tool.is_some_and(|tool| tool.uv.is_some()) {
|
||||
warn_user!(
|
||||
"Found both a `uv.toml` file and a `[tool.uv]` section in an adjacent `pyproject.toml`. The `[tool.uv]` section will be ignored in favor of the `uv.toml` file."
|
||||
);
|
||||
if let Some(options) = pyproject.tool.as_ref().and_then(|tool| tool.uv.as_ref())
|
||||
{
|
||||
warn_uv_toml_masked_fields(options);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -269,6 +268,261 @@ fn validate_uv_toml(path: &Path, options: &Options) -> Result<(), Error> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
/// Validate that an [`Options`] contains no fields that `uv.toml` would mask
|
||||
///
|
||||
/// This is essentially the inverse of [`validated_uv_toml`][].
|
||||
fn warn_uv_toml_masked_fields(options: &Options) {
|
||||
let Options {
|
||||
globals:
|
||||
GlobalOptions {
|
||||
required_version,
|
||||
native_tls,
|
||||
offline,
|
||||
no_cache,
|
||||
cache_dir,
|
||||
preview,
|
||||
python_preference,
|
||||
python_downloads,
|
||||
concurrent_downloads,
|
||||
concurrent_builds,
|
||||
concurrent_installs,
|
||||
allow_insecure_host,
|
||||
},
|
||||
top_level:
|
||||
ResolverInstallerOptions {
|
||||
index,
|
||||
index_url,
|
||||
extra_index_url,
|
||||
no_index,
|
||||
find_links,
|
||||
index_strategy,
|
||||
keyring_provider,
|
||||
resolution,
|
||||
prerelease,
|
||||
fork_strategy,
|
||||
dependency_metadata,
|
||||
config_settings,
|
||||
config_settings_package,
|
||||
no_build_isolation,
|
||||
no_build_isolation_package,
|
||||
extra_build_dependencies,
|
||||
exclude_newer,
|
||||
link_mode,
|
||||
compile_bytecode,
|
||||
no_sources,
|
||||
upgrade,
|
||||
upgrade_package,
|
||||
reinstall,
|
||||
reinstall_package,
|
||||
no_build,
|
||||
no_build_package,
|
||||
no_binary,
|
||||
no_binary_package,
|
||||
},
|
||||
install_mirrors:
|
||||
PythonInstallMirrors {
|
||||
python_install_mirror,
|
||||
pypy_install_mirror,
|
||||
python_downloads_json_url,
|
||||
},
|
||||
publish:
|
||||
PublishOptions {
|
||||
publish_url,
|
||||
trusted_publishing,
|
||||
check_url,
|
||||
},
|
||||
add: AddOptions { add_bounds },
|
||||
pip,
|
||||
cache_keys,
|
||||
override_dependencies,
|
||||
constraint_dependencies,
|
||||
build_constraint_dependencies,
|
||||
environments,
|
||||
required_environments,
|
||||
conflicts: _,
|
||||
workspace: _,
|
||||
sources: _,
|
||||
dev_dependencies: _,
|
||||
default_groups: _,
|
||||
dependency_groups: _,
|
||||
managed: _,
|
||||
package: _,
|
||||
build_backend: _,
|
||||
} = options;
|
||||
|
||||
let mut masked_fields = vec![];
|
||||
|
||||
if required_version.is_some() {
|
||||
masked_fields.push("required-version");
|
||||
}
|
||||
if native_tls.is_some() {
|
||||
masked_fields.push("native-tls");
|
||||
}
|
||||
if offline.is_some() {
|
||||
masked_fields.push("offline");
|
||||
}
|
||||
if no_cache.is_some() {
|
||||
masked_fields.push("no-cache");
|
||||
}
|
||||
if cache_dir.is_some() {
|
||||
masked_fields.push("cache-dir");
|
||||
}
|
||||
if preview.is_some() {
|
||||
masked_fields.push("preview");
|
||||
}
|
||||
if python_preference.is_some() {
|
||||
masked_fields.push("python-preference");
|
||||
}
|
||||
if python_downloads.is_some() {
|
||||
masked_fields.push("python-downloads");
|
||||
}
|
||||
if concurrent_downloads.is_some() {
|
||||
masked_fields.push("concurrent-downloads");
|
||||
}
|
||||
if concurrent_builds.is_some() {
|
||||
masked_fields.push("concurrent-builds");
|
||||
}
|
||||
if concurrent_installs.is_some() {
|
||||
masked_fields.push("concurrent-installs");
|
||||
}
|
||||
if allow_insecure_host.is_some() {
|
||||
masked_fields.push("allow-insecure-host");
|
||||
}
|
||||
if index.is_some() {
|
||||
masked_fields.push("index");
|
||||
}
|
||||
if index_url.is_some() {
|
||||
masked_fields.push("index-url");
|
||||
}
|
||||
if extra_index_url.is_some() {
|
||||
masked_fields.push("extra-index-url");
|
||||
}
|
||||
if no_index.is_some() {
|
||||
masked_fields.push("no-index");
|
||||
}
|
||||
if find_links.is_some() {
|
||||
masked_fields.push("find-links");
|
||||
}
|
||||
if index_strategy.is_some() {
|
||||
masked_fields.push("index-strategy");
|
||||
}
|
||||
if keyring_provider.is_some() {
|
||||
masked_fields.push("keyring-provider");
|
||||
}
|
||||
if resolution.is_some() {
|
||||
masked_fields.push("resolution");
|
||||
}
|
||||
if prerelease.is_some() {
|
||||
masked_fields.push("prerelease");
|
||||
}
|
||||
if fork_strategy.is_some() {
|
||||
masked_fields.push("fork-strategy");
|
||||
}
|
||||
if dependency_metadata.is_some() {
|
||||
masked_fields.push("dependency-metadata");
|
||||
}
|
||||
if config_settings.is_some() {
|
||||
masked_fields.push("config-settings");
|
||||
}
|
||||
if config_settings_package.is_some() {
|
||||
masked_fields.push("config-settings-package");
|
||||
}
|
||||
if no_build_isolation.is_some() {
|
||||
masked_fields.push("no-build-isolation");
|
||||
}
|
||||
if no_build_isolation_package.is_some() {
|
||||
masked_fields.push("no-build-isolation-package");
|
||||
}
|
||||
if extra_build_dependencies.is_some() {
|
||||
masked_fields.push("extra-build-dependencies");
|
||||
}
|
||||
if exclude_newer.is_some() {
|
||||
masked_fields.push("exclude-newer");
|
||||
}
|
||||
if link_mode.is_some() {
|
||||
masked_fields.push("link-mode");
|
||||
}
|
||||
if compile_bytecode.is_some() {
|
||||
masked_fields.push("compile-bytecode");
|
||||
}
|
||||
if no_sources.is_some() {
|
||||
masked_fields.push("no-sources");
|
||||
}
|
||||
if upgrade.is_some() {
|
||||
masked_fields.push("upgrade");
|
||||
}
|
||||
if upgrade_package.is_some() {
|
||||
masked_fields.push("upgrade-package");
|
||||
}
|
||||
if reinstall.is_some() {
|
||||
masked_fields.push("reinstall");
|
||||
}
|
||||
if reinstall_package.is_some() {
|
||||
masked_fields.push("reinstall-package");
|
||||
}
|
||||
if no_build.is_some() {
|
||||
masked_fields.push("no-build");
|
||||
}
|
||||
if no_build_package.is_some() {
|
||||
masked_fields.push("no-build-package");
|
||||
}
|
||||
if no_binary.is_some() {
|
||||
masked_fields.push("no-binary");
|
||||
}
|
||||
if no_binary_package.is_some() {
|
||||
masked_fields.push("no-binary-package");
|
||||
}
|
||||
if python_install_mirror.is_some() {
|
||||
masked_fields.push("python-install-mirror");
|
||||
}
|
||||
if pypy_install_mirror.is_some() {
|
||||
masked_fields.push("pypy-install-mirror");
|
||||
}
|
||||
if python_downloads_json_url.is_some() {
|
||||
masked_fields.push("python-downloads-json-url");
|
||||
}
|
||||
if publish_url.is_some() {
|
||||
masked_fields.push("publish-url");
|
||||
}
|
||||
if trusted_publishing.is_some() {
|
||||
masked_fields.push("trusted-publishing");
|
||||
}
|
||||
if check_url.is_some() {
|
||||
masked_fields.push("check-url");
|
||||
}
|
||||
if add_bounds.is_some() {
|
||||
masked_fields.push("add-bounds");
|
||||
}
|
||||
if pip.is_some() {
|
||||
masked_fields.push("pip");
|
||||
}
|
||||
if cache_keys.is_some() {
|
||||
masked_fields.push("cache_keys");
|
||||
}
|
||||
if override_dependencies.is_some() {
|
||||
masked_fields.push("override-dependencies");
|
||||
}
|
||||
if constraint_dependencies.is_some() {
|
||||
masked_fields.push("constraint-dependencies");
|
||||
}
|
||||
if build_constraint_dependencies.is_some() {
|
||||
masked_fields.push("build-constraint-dependencies");
|
||||
}
|
||||
if environments.is_some() {
|
||||
masked_fields.push("environments");
|
||||
}
|
||||
if required_environments.is_some() {
|
||||
masked_fields.push("required-environments");
|
||||
}
|
||||
if !masked_fields.is_empty() {
|
||||
let field_listing = masked_fields.join("\n- ");
|
||||
warn_user!(
|
||||
"Found both a `uv.toml` file and a `[tool.uv]` section in an adjacent `pyproject.toml`. The following fields from `[tool.uv]` will be ignored in favor of the `uv.toml` file:\n- {}",
|
||||
field_listing,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(thiserror::Error, Debug)]
|
||||
pub enum Error {
|
||||
#[error(transparent)]
|
||||
|
|
|
|||
|
|
@ -257,6 +257,10 @@ impl EnvVars {
|
|||
/// Specifies the "bin" directory for installing tool executables.
|
||||
pub const UV_TOOL_BIN_DIR: &'static str = "UV_TOOL_BIN_DIR";
|
||||
|
||||
/// Equivalent to the `--build-backend` argument for `uv init`. Determines the default backend
|
||||
/// to use when creating a new project.
|
||||
pub const UV_INIT_BUILD_BACKEND: &'static str = "UV_INIT_BUILD_BACKEND";
|
||||
|
||||
/// Specifies the path to the directory to use for a project virtual environment.
|
||||
///
|
||||
/// See the [project documentation](../concepts/projects/config.md#project-environment-path)
|
||||
|
|
@ -765,4 +769,11 @@ impl EnvVars {
|
|||
|
||||
/// Disable GitHub-specific requests that allow uv to skip `git fetch` in some circumstances.
|
||||
pub const UV_NO_GITHUB_FAST_PATH: &'static str = "UV_NO_GITHUB_FAST_PATH";
|
||||
|
||||
/// Authentication token for Hugging Face requests. When set, uv will use this token
|
||||
/// when making requests to `https://huggingface.co/` and any subdomains.
|
||||
pub const HF_TOKEN: &'static str = "HF_TOKEN";
|
||||
|
||||
/// Disable Hugging Face authentication, even if `HF_TOKEN` is set.
|
||||
pub const UV_NO_HF_TOKEN: &'static str = "UV_NO_HF_TOKEN";
|
||||
}
|
||||
|
|
|
|||
|
|
@ -41,6 +41,7 @@ const MAGIC_NUMBER_SIZE: usize = 4;
|
|||
pub struct Launcher {
|
||||
pub kind: LauncherKind,
|
||||
pub python_path: PathBuf,
|
||||
payload: Vec<u8>,
|
||||
}
|
||||
|
||||
impl Launcher {
|
||||
|
|
@ -109,11 +110,69 @@ impl Launcher {
|
|||
String::from_utf8(buffer).map_err(|err| Error::InvalidPath(err.utf8_error()))?,
|
||||
);
|
||||
|
||||
#[allow(clippy::cast_possible_truncation)]
|
||||
let file_size = {
|
||||
let raw_length = file
|
||||
.seek(io::SeekFrom::End(0))
|
||||
.map_err(|e| Error::InvalidLauncherSeek("size probe".into(), 0, e))?;
|
||||
|
||||
if raw_length > usize::MAX as u64 {
|
||||
return Err(Error::InvalidDataLength(raw_length));
|
||||
}
|
||||
|
||||
// SAFETY: Above we guarantee the length is less than uszie
|
||||
raw_length as usize
|
||||
};
|
||||
|
||||
// Read the payload
|
||||
file.seek(io::SeekFrom::Start(0))
|
||||
.map_err(|e| Error::InvalidLauncherSeek("rewind".into(), 0, e))?;
|
||||
let payload_len =
|
||||
file_size.saturating_sub(MAGIC_NUMBER_SIZE + PATH_LENGTH_SIZE + path_length);
|
||||
let mut buffer = vec![0u8; payload_len];
|
||||
file.read_exact(&mut buffer)
|
||||
.map_err(|err| Error::InvalidLauncherRead("payload".into(), err))?;
|
||||
|
||||
Ok(Some(Self {
|
||||
kind,
|
||||
payload: buffer,
|
||||
python_path: path,
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn write_to_file(self, file: &mut File) -> Result<(), Error> {
|
||||
let python_path = self.python_path.simplified_display().to_string();
|
||||
|
||||
if python_path.len() > MAX_PATH_LENGTH as usize {
|
||||
return Err(Error::InvalidPathLength(
|
||||
u32::try_from(python_path.len()).expect("path length already checked"),
|
||||
));
|
||||
}
|
||||
|
||||
let mut launcher: Vec<u8> = Vec::with_capacity(
|
||||
self.payload.len() + python_path.len() + PATH_LENGTH_SIZE + MAGIC_NUMBER_SIZE,
|
||||
);
|
||||
launcher.extend_from_slice(&self.payload);
|
||||
launcher.extend_from_slice(python_path.as_bytes());
|
||||
launcher.extend_from_slice(
|
||||
&u32::try_from(python_path.len())
|
||||
.expect("file path should be smaller than 4GB")
|
||||
.to_le_bytes(),
|
||||
);
|
||||
launcher.extend_from_slice(self.kind.magic_number());
|
||||
|
||||
file.write_all(&launcher)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_python_path(self, path: PathBuf) -> Self {
|
||||
Self {
|
||||
kind: self.kind,
|
||||
payload: self.payload,
|
||||
python_path: path,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The kind of trampoline launcher to create.
|
||||
|
|
@ -177,6 +236,8 @@ pub enum Error {
|
|||
Io(#[from] io::Error),
|
||||
#[error("Only paths with a length up to 32KB are supported but found a length of {0} bytes")]
|
||||
InvalidPathLength(u32),
|
||||
#[error("Only data with a length up to usize is supported but found a length of {0} bytes")]
|
||||
InvalidDataLength(u64),
|
||||
#[error("Failed to parse executable path")]
|
||||
InvalidPath(#[source] Utf8Error),
|
||||
#[error("Failed to seek to {0} at offset {1}")]
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "uv-version"
|
||||
version = "0.8.0"
|
||||
version = "0.8.2"
|
||||
edition = { workspace = true }
|
||||
rust-version = { workspace = true }
|
||||
homepage = { workspace = true }
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ use thiserror::Error;
|
|||
use uv_configuration::PreviewMode;
|
||||
use uv_python::{Interpreter, PythonEnvironment};
|
||||
|
||||
pub use virtualenv::OnExisting;
|
||||
pub use virtualenv::{OnExisting, remove_virtualenv};
|
||||
|
||||
mod virtualenv;
|
||||
|
||||
|
|
|
|||
|
|
@ -10,7 +10,7 @@ use fs_err as fs;
|
|||
use fs_err::File;
|
||||
use itertools::Itertools;
|
||||
use owo_colors::OwoColorize;
|
||||
use tracing::debug;
|
||||
use tracing::{debug, trace};
|
||||
|
||||
use uv_configuration::PreviewMode;
|
||||
use uv_fs::{CWD, Simplified, cachedir};
|
||||
|
|
@ -85,6 +85,18 @@ pub(crate) fn create(
|
|||
format!("File exists at `{}`", location.user_display()),
|
||||
)));
|
||||
}
|
||||
Ok(metadata)
|
||||
if metadata.is_dir()
|
||||
&& location
|
||||
.read_dir()
|
||||
.is_ok_and(|mut dir| dir.next().is_none()) =>
|
||||
{
|
||||
// If it's an empty directory, we can proceed
|
||||
trace!(
|
||||
"Using empty directory at `{}` for virtual environment",
|
||||
location.user_display()
|
||||
);
|
||||
}
|
||||
Ok(metadata) if metadata.is_dir() => {
|
||||
let name = if uv_fs::is_virtualenv_base(location) {
|
||||
"virtual environment"
|
||||
|
|
@ -97,20 +109,15 @@ pub(crate) fn create(
|
|||
}
|
||||
OnExisting::Remove => {
|
||||
debug!("Removing existing {name} due to `--clear`");
|
||||
remove_venv_directory(location)?;
|
||||
}
|
||||
OnExisting::Fail
|
||||
if location
|
||||
.read_dir()
|
||||
.is_ok_and(|mut dir| dir.next().is_none()) =>
|
||||
{
|
||||
debug!("Ignoring empty directory");
|
||||
remove_virtualenv(location)?;
|
||||
fs::create_dir_all(location)?;
|
||||
}
|
||||
OnExisting::Fail => {
|
||||
match confirm_clear(location, name)? {
|
||||
Some(true) => {
|
||||
debug!("Removing existing {name} due to confirmation");
|
||||
remove_venv_directory(location)?;
|
||||
remove_virtualenv(location)?;
|
||||
fs::create_dir_all(location)?;
|
||||
}
|
||||
Some(false) => {
|
||||
let hint = format!(
|
||||
|
|
@ -242,6 +249,16 @@ pub(crate) fn create(
|
|||
interpreter.python_minor(),
|
||||
)),
|
||||
)?;
|
||||
if interpreter.gil_disabled() {
|
||||
uv_fs::replace_symlink(
|
||||
"python",
|
||||
scripts.join(format!(
|
||||
"python{}.{}t",
|
||||
interpreter.python_major(),
|
||||
interpreter.python_minor(),
|
||||
)),
|
||||
)?;
|
||||
}
|
||||
|
||||
if interpreter.markers().implementation_name() == "pypy" {
|
||||
uv_fs::replace_symlink(
|
||||
|
|
@ -262,12 +279,21 @@ pub(crate) fn create(
|
|||
if cfg!(windows) {
|
||||
if using_minor_version_link {
|
||||
let target = scripts.join(WindowsExecutable::Python.exe(interpreter));
|
||||
create_link_to_executable(target.as_path(), executable_target.clone())
|
||||
create_link_to_executable(target.as_path(), &executable_target)
|
||||
.map_err(Error::Python)?;
|
||||
let targetw = scripts.join(WindowsExecutable::Pythonw.exe(interpreter));
|
||||
create_link_to_executable(targetw.as_path(), executable_target)
|
||||
create_link_to_executable(targetw.as_path(), &executable_target)
|
||||
.map_err(Error::Python)?;
|
||||
if interpreter.gil_disabled() {
|
||||
let targett = scripts.join(WindowsExecutable::PythonMajorMinort.exe(interpreter));
|
||||
create_link_to_executable(targett.as_path(), &executable_target)
|
||||
.map_err(Error::Python)?;
|
||||
let targetwt = scripts.join(WindowsExecutable::PythonwMajorMinort.exe(interpreter));
|
||||
create_link_to_executable(targetwt.as_path(), &executable_target)
|
||||
.map_err(Error::Python)?;
|
||||
}
|
||||
} else {
|
||||
// Always copy `python.exe`.
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::Python,
|
||||
interpreter,
|
||||
|
|
@ -276,81 +302,111 @@ pub(crate) fn create(
|
|||
python_home,
|
||||
)?;
|
||||
|
||||
if interpreter.markers().implementation_name() == "graalpy" {
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::GraalPy,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::PythonMajor,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
} else {
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::Pythonw,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
}
|
||||
match interpreter.implementation_name() {
|
||||
"graalpy" => {
|
||||
// For GraalPy, copy `graalpy.exe` and `python3.exe`.
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::GraalPy,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::PythonMajor,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
}
|
||||
"pypy" => {
|
||||
// For PyPy, copy all versioned executables and all PyPy-specific executables.
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::PythonMajor,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::PythonMajorMinor,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::Pythonw,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::PyPy,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::PyPyMajor,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::PyPyMajorMinor,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::PyPyw,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::PyPyMajorMinorw,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
}
|
||||
_ => {
|
||||
// For all other interpreters, copy `pythonw.exe`.
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::Pythonw,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
|
||||
if interpreter.markers().implementation_name() == "pypy" {
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::PythonMajor,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::PythonMajorMinor,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::PyPy,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::PyPyMajor,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::PyPyMajorMinor,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::PyPyw,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::PyPyMajorMinorw,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
// If the GIL is disabled, copy `venvlaunchert.exe` and `venvwlaunchert.exe`.
|
||||
if interpreter.gil_disabled() {
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::PythonMajorMinort,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
copy_launcher_windows(
|
||||
WindowsExecutable::PythonwMajorMinort,
|
||||
interpreter,
|
||||
&base_python,
|
||||
&scripts,
|
||||
python_home,
|
||||
)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -517,9 +573,10 @@ fn confirm_clear(location: &Path, name: &'static str) -> Result<Option<bool>, io
|
|||
}
|
||||
}
|
||||
|
||||
fn remove_venv_directory(location: &Path) -> Result<(), Error> {
|
||||
// On Windows, if the current executable is in the directory, guard against
|
||||
// self-deletion.
|
||||
/// Perform a safe removal of a virtual environment.
|
||||
pub fn remove_virtualenv(location: &Path) -> Result<(), Error> {
|
||||
// On Windows, if the current executable is in the directory, defer self-deletion since Windows
|
||||
// won't let you unlink a running executable.
|
||||
#[cfg(windows)]
|
||||
if let Ok(itself) = std::env::current_exe() {
|
||||
let target = std::path::absolute(location)?;
|
||||
|
|
@ -529,8 +586,27 @@ fn remove_venv_directory(location: &Path) -> Result<(), Error> {
|
|||
}
|
||||
}
|
||||
|
||||
// We defer removal of the `pyvenv.cfg` until the end, so if we fail to remove the environment,
|
||||
// uv can still identify it as a Python virtual environment that can be deleted.
|
||||
for entry in fs::read_dir(location)? {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
if path == location.join("pyvenv.cfg") {
|
||||
continue;
|
||||
}
|
||||
if path.is_dir() {
|
||||
fs::remove_dir_all(&path)?;
|
||||
} else {
|
||||
fs::remove_file(&path)?;
|
||||
}
|
||||
}
|
||||
|
||||
match fs::remove_file(location.join("pyvenv.cfg")) {
|
||||
Ok(()) => {}
|
||||
Err(err) if err.kind() == io::ErrorKind::NotFound => {}
|
||||
Err(err) => return Err(err.into()),
|
||||
}
|
||||
fs::remove_dir_all(location)?;
|
||||
fs::create_dir_all(location)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
@ -567,8 +643,12 @@ enum WindowsExecutable {
|
|||
PythonMajor,
|
||||
/// The `python3.<minor>.exe` executable (or `venvlauncher.exe` launcher shim).
|
||||
PythonMajorMinor,
|
||||
/// The `python3.<minor>t.exe` executable (or `venvlaunchert.exe` launcher shim).
|
||||
PythonMajorMinort,
|
||||
/// The `pythonw.exe` executable (or `venvwlauncher.exe` launcher shim).
|
||||
Pythonw,
|
||||
/// The `pythonw3.<minor>t.exe` executable (or `venvwlaunchert.exe` launcher shim).
|
||||
PythonwMajorMinort,
|
||||
/// The `pypy.exe` executable.
|
||||
PyPy,
|
||||
/// The `pypy3.exe` executable.
|
||||
|
|
@ -579,7 +659,7 @@ enum WindowsExecutable {
|
|||
PyPyw,
|
||||
/// The `pypy3.<minor>w.exe` executable.
|
||||
PyPyMajorMinorw,
|
||||
// The `graalpy.exe` executable
|
||||
/// The `graalpy.exe` executable.
|
||||
GraalPy,
|
||||
}
|
||||
|
||||
|
|
@ -598,7 +678,21 @@ impl WindowsExecutable {
|
|||
interpreter.python_minor()
|
||||
)
|
||||
}
|
||||
WindowsExecutable::PythonMajorMinort => {
|
||||
format!(
|
||||
"python{}.{}t.exe",
|
||||
interpreter.python_major(),
|
||||
interpreter.python_minor()
|
||||
)
|
||||
}
|
||||
WindowsExecutable::Pythonw => String::from("pythonw.exe"),
|
||||
WindowsExecutable::PythonwMajorMinort => {
|
||||
format!(
|
||||
"pythonw{}.{}t.exe",
|
||||
interpreter.python_major(),
|
||||
interpreter.python_minor()
|
||||
)
|
||||
}
|
||||
WindowsExecutable::PyPy => String::from("pypy.exe"),
|
||||
WindowsExecutable::PyPyMajor => {
|
||||
format!("pypy{}.exe", interpreter.python_major())
|
||||
|
|
@ -633,6 +727,8 @@ impl WindowsExecutable {
|
|||
Self::Python | Self::PythonMajor | Self::PythonMajorMinor => "venvlauncher.exe",
|
||||
Self::Pythonw if interpreter.gil_disabled() => "venvwlaunchert.exe",
|
||||
Self::Pythonw => "venvwlauncher.exe",
|
||||
Self::PythonMajorMinort => "venvlaunchert.exe",
|
||||
Self::PythonwMajorMinort => "venvwlaunchert.exe",
|
||||
// From 3.13 on these should replace the `python.exe` and `pythonw.exe` shims.
|
||||
// These are not relevant as of now for PyPy as it doesn't yet support Python 3.13.
|
||||
Self::PyPy | Self::PyPyMajor | Self::PyPyMajorMinor => "venvlauncher.exe",
|
||||
|
|
|
|||
|
|
@ -17,7 +17,8 @@ use std::str::FromStr;
|
|||
use glob::Pattern;
|
||||
use owo_colors::OwoColorize;
|
||||
use rustc_hash::{FxBuildHasher, FxHashSet};
|
||||
use serde::{Deserialize, Deserializer, Serialize, de::IntoDeserializer, de::SeqAccess};
|
||||
use serde::de::{IntoDeserializer, SeqAccess};
|
||||
use serde::{Deserialize, Deserializer, Serialize};
|
||||
use thiserror::Error;
|
||||
use uv_build_backend::BuildBackendSettings;
|
||||
use uv_distribution_types::{Index, IndexName, RequirementSource};
|
||||
|
|
@ -72,8 +73,8 @@ pub struct PyProjectToml {
|
|||
impl PyProjectToml {
|
||||
/// Parse a `PyProjectToml` from a raw TOML string.
|
||||
pub fn from_string(raw: String) -> Result<Self, PyprojectTomlError> {
|
||||
let pyproject: toml_edit::ImDocument<_> =
|
||||
toml_edit::ImDocument::from_str(&raw).map_err(PyprojectTomlError::TomlSyntax)?;
|
||||
let pyproject =
|
||||
toml_edit::Document::from_str(&raw).map_err(PyprojectTomlError::TomlSyntax)?;
|
||||
let pyproject = PyProjectToml::deserialize(pyproject.into_deserializer())
|
||||
.map_err(PyprojectTomlError::TomlSchema)?;
|
||||
Ok(PyProjectToml { raw, ..pyproject })
|
||||
|
|
|
|||
|
|
@ -392,6 +392,7 @@ impl PyProjectTomlMut {
|
|||
|
||||
/// Add an [`Index`] to `tool.uv.index`.
|
||||
pub fn add_index(&mut self, index: &Index) -> Result<(), Error> {
|
||||
let size = self.doc.len();
|
||||
let existing = self
|
||||
.doc
|
||||
.entry("tool")
|
||||
|
|
@ -472,8 +473,7 @@ impl PyProjectTomlMut {
|
|||
if table
|
||||
.get("url")
|
||||
.and_then(|item| item.as_str())
|
||||
.and_then(|url| DisplaySafeUrl::parse(url).ok())
|
||||
.is_none_or(|url| CanonicalUrl::new(&url) != CanonicalUrl::new(index.url.url()))
|
||||
.is_none_or(|url| url != index.url.without_credentials().as_str())
|
||||
{
|
||||
let mut formatted = Formatted::new(index.url.without_credentials().to_string());
|
||||
if let Some(value) = table.get("url").and_then(Item::as_value) {
|
||||
|
|
@ -552,6 +552,9 @@ impl PyProjectTomlMut {
|
|||
table.set_position(position + 1);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let position = isize::try_from(size).expect("TOML table size fits in `isize`");
|
||||
table.set_position(position);
|
||||
}
|
||||
|
||||
// Push the item to the table.
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "uv"
|
||||
version = "0.8.0"
|
||||
version = "0.8.2"
|
||||
edition = { workspace = true }
|
||||
rust-version = { workspace = true }
|
||||
homepage = { workspace = true }
|
||||
|
|
|
|||
|
|
@ -92,6 +92,15 @@ impl OperationDiagnostic {
|
|||
requested_dist_error(kind, dist, &chain, err, self.hint);
|
||||
None
|
||||
}
|
||||
pip::operations::Error::Resolve(uv_resolver::ResolveError::Dependencies(
|
||||
error,
|
||||
name,
|
||||
version,
|
||||
chain,
|
||||
)) => {
|
||||
dependencies_error(error, &name, &version, &chain, self.hint.clone());
|
||||
None
|
||||
}
|
||||
pip::operations::Error::Requirements(uv_requirements::Error::Dist(kind, dist, err)) => {
|
||||
dist_error(
|
||||
kind,
|
||||
|
|
@ -232,6 +241,54 @@ pub(crate) fn requested_dist_error(
|
|||
anstream::eprint!("{report:?}");
|
||||
}
|
||||
|
||||
/// Render an error in fetching a package's dependencies.
|
||||
pub(crate) fn dependencies_error(
|
||||
error: Box<uv_resolver::ResolveError>,
|
||||
name: &PackageName,
|
||||
version: &Version,
|
||||
chain: &DerivationChain,
|
||||
help: Option<String>,
|
||||
) {
|
||||
#[derive(Debug, miette::Diagnostic, thiserror::Error)]
|
||||
#[error("Failed to resolve dependencies for `{}` ({})", name.cyan(), format!("v{version}").cyan())]
|
||||
#[diagnostic()]
|
||||
struct Diagnostic {
|
||||
name: PackageName,
|
||||
version: Version,
|
||||
#[source]
|
||||
cause: Box<uv_resolver::ResolveError>,
|
||||
#[help]
|
||||
help: Option<String>,
|
||||
}
|
||||
|
||||
let help = help.or_else(|| {
|
||||
SUGGESTIONS
|
||||
.get(name)
|
||||
.map(|suggestion| {
|
||||
format!(
|
||||
"`{}` is often confused for `{}` Did you mean to install `{}` instead?",
|
||||
name.cyan(),
|
||||
suggestion.cyan(),
|
||||
suggestion.cyan(),
|
||||
)
|
||||
})
|
||||
.or_else(|| {
|
||||
if chain.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(format_chain(name, Some(version), chain))
|
||||
}
|
||||
})
|
||||
});
|
||||
let report = miette::Report::new(Diagnostic {
|
||||
name: name.clone(),
|
||||
version: version.clone(),
|
||||
cause: error,
|
||||
help,
|
||||
});
|
||||
anstream::eprint!("{report:?}");
|
||||
}
|
||||
|
||||
/// Render a [`uv_resolver::NoSolutionError`].
|
||||
pub(crate) fn no_solution(err: &uv_resolver::NoSolutionError) {
|
||||
let report = miette::Report::msg(format!("{err}")).context(err.header());
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
use std::collections::{BTreeMap, BTreeSet};
|
||||
use std::collections::BTreeSet;
|
||||
use std::env;
|
||||
use std::ffi::OsStr;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::path::Path;
|
||||
use std::str::FromStr;
|
||||
|
||||
use anyhow::{Result, anyhow};
|
||||
|
|
@ -26,7 +26,7 @@ use uv_distribution_types::{
|
|||
use uv_fs::{CWD, Simplified};
|
||||
use uv_git::ResolvedRepositoryReference;
|
||||
use uv_install_wheel::LinkMode;
|
||||
use uv_normalize::{GroupName, PackageName};
|
||||
use uv_normalize::PackageName;
|
||||
use uv_pypi_types::{Conflicts, SupportedEnvironments};
|
||||
use uv_python::{
|
||||
EnvironmentPreference, PythonEnvironment, PythonInstallation, PythonPreference, PythonRequest,
|
||||
|
|
@ -34,7 +34,8 @@ use uv_python::{
|
|||
};
|
||||
use uv_requirements::upgrade::{LockedRequirements, read_pylock_toml_requirements};
|
||||
use uv_requirements::{
|
||||
RequirementsSource, RequirementsSpecification, is_pylock_toml, upgrade::read_requirements_txt,
|
||||
GroupsSpecification, RequirementsSource, RequirementsSpecification, is_pylock_toml,
|
||||
upgrade::read_requirements_txt,
|
||||
};
|
||||
use uv_resolver::{
|
||||
AnnotationStyle, DependencyMode, DisplayResolutionGraph, ExcludeNewer, FlatIndex, ForkStrategy,
|
||||
|
|
@ -65,7 +66,7 @@ pub(crate) async fn pip_compile(
|
|||
build_constraints_from_workspace: Vec<Requirement>,
|
||||
environments: SupportedEnvironments,
|
||||
extras: ExtrasSpecification,
|
||||
groups: BTreeMap<PathBuf, Vec<GroupName>>,
|
||||
groups: GroupsSpecification,
|
||||
output_file: Option<&Path>,
|
||||
format: Option<ExportFormat>,
|
||||
resolution_mode: ResolutionMode,
|
||||
|
|
@ -215,7 +216,7 @@ pub(crate) async fn pip_compile(
|
|||
requirements,
|
||||
constraints,
|
||||
overrides,
|
||||
groups,
|
||||
Some(&groups),
|
||||
&client_builder,
|
||||
)
|
||||
.await?;
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
use std::collections::{BTreeMap, BTreeSet};
|
||||
use std::collections::BTreeSet;
|
||||
use std::fmt::Write;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use anyhow::Context;
|
||||
use itertools::Itertools;
|
||||
|
|
@ -23,14 +22,14 @@ use uv_distribution_types::{
|
|||
use uv_fs::Simplified;
|
||||
use uv_install_wheel::LinkMode;
|
||||
use uv_installer::{SatisfiesResult, SitePackages};
|
||||
use uv_normalize::GroupName;
|
||||
use uv_normalize::{DefaultExtras, DefaultGroups};
|
||||
use uv_pep508::PackageName;
|
||||
use uv_pypi_types::Conflicts;
|
||||
use uv_python::{
|
||||
EnvironmentPreference, Prefix, PythonEnvironment, PythonInstallation, PythonPreference,
|
||||
PythonRequest, PythonVersion, Target,
|
||||
};
|
||||
use uv_requirements::{RequirementsSource, RequirementsSpecification};
|
||||
use uv_requirements::{GroupsSpecification, RequirementsSource, RequirementsSpecification};
|
||||
use uv_resolver::{
|
||||
DependencyMode, ExcludeNewer, FlatIndex, OptionsBuilder, PrereleaseMode, PylockToml,
|
||||
PythonRequirement, ResolutionMode, ResolverEnvironment,
|
||||
|
|
@ -60,7 +59,7 @@ pub(crate) async fn pip_install(
|
|||
overrides_from_workspace: Vec<Requirement>,
|
||||
build_constraints_from_workspace: Vec<Requirement>,
|
||||
extras: &ExtrasSpecification,
|
||||
groups: BTreeMap<PathBuf, Vec<GroupName>>,
|
||||
groups: &GroupsSpecification,
|
||||
resolution_mode: ResolutionMode,
|
||||
prerelease_mode: PrereleaseMode,
|
||||
dependency_mode: DependencyMode,
|
||||
|
|
@ -136,7 +135,7 @@ pub(crate) async fn pip_install(
|
|||
constraints,
|
||||
overrides,
|
||||
extras,
|
||||
groups,
|
||||
Some(groups),
|
||||
&client_builder,
|
||||
)
|
||||
.await?;
|
||||
|
|
@ -453,11 +452,46 @@ pub(crate) async fn pip_install(
|
|||
let install_path = std::path::absolute(&pylock)?;
|
||||
let install_path = install_path.parent().unwrap();
|
||||
let content = fs_err::tokio::read_to_string(&pylock).await?;
|
||||
let lock = toml::from_str::<PylockToml>(&content)
|
||||
.with_context(|| format!("Not a valid pylock.toml file: {}", pylock.user_display()))?;
|
||||
let lock = toml::from_str::<PylockToml>(&content).with_context(|| {
|
||||
format!("Not a valid `pylock.toml` file: {}", pylock.user_display())
|
||||
})?;
|
||||
|
||||
let resolution =
|
||||
lock.to_resolution(install_path, marker_env.markers(), &tags, &build_options)?;
|
||||
// Verify that the Python version is compatible with the lock file.
|
||||
if let Some(requires_python) = lock.requires_python.as_ref() {
|
||||
if !requires_python.contains(interpreter.python_version()) {
|
||||
return Err(anyhow::anyhow!(
|
||||
"The requested interpreter resolved to Python {}, which is incompatible with the `pylock.toml`'s Python requirement: `{}`",
|
||||
interpreter.python_version(),
|
||||
requires_python,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
// Convert the extras and groups specifications into a concrete form.
|
||||
let extras = extras.with_defaults(DefaultExtras::default());
|
||||
let extras = extras
|
||||
.extra_names(lock.extras.iter())
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let groups = groups
|
||||
.get(&pylock)
|
||||
.cloned()
|
||||
.unwrap_or_default()
|
||||
.with_defaults(DefaultGroups::List(lock.default_groups.clone()));
|
||||
let groups = groups
|
||||
.group_names(lock.dependency_groups.iter())
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let resolution = lock.to_resolution(
|
||||
install_path,
|
||||
marker_env.markers(),
|
||||
&extras,
|
||||
&groups,
|
||||
&tags,
|
||||
&build_options,
|
||||
)?;
|
||||
let hasher = HashStrategy::from_resolution(&resolution, HashCheckingMode::Verify)?;
|
||||
|
||||
(resolution, hasher)
|
||||
|
|
|
|||
|
|
@ -27,14 +27,14 @@ use uv_distribution_types::{
|
|||
use uv_fs::Simplified;
|
||||
use uv_install_wheel::LinkMode;
|
||||
use uv_installer::{Plan, Planner, Preparer, SitePackages};
|
||||
use uv_normalize::{GroupName, PackageName};
|
||||
use uv_normalize::PackageName;
|
||||
use uv_pep508::{MarkerEnvironment, RequirementOrigin};
|
||||
use uv_platform_tags::Tags;
|
||||
use uv_pypi_types::{Conflicts, ResolverMarkerEnvironment};
|
||||
use uv_python::{PythonEnvironment, PythonInstallation};
|
||||
use uv_requirements::{
|
||||
LookaheadResolver, NamedRequirementsResolver, RequirementsSource, RequirementsSpecification,
|
||||
SourceTreeResolver,
|
||||
GroupsSpecification, LookaheadResolver, NamedRequirementsResolver, RequirementsSource,
|
||||
RequirementsSpecification, SourceTreeResolver,
|
||||
};
|
||||
use uv_resolver::{
|
||||
DependencyMode, Exclusions, FlatIndex, InMemoryIndex, Manifest, Options, Preference,
|
||||
|
|
@ -55,7 +55,7 @@ pub(crate) async fn read_requirements(
|
|||
constraints: &[RequirementsSource],
|
||||
overrides: &[RequirementsSource],
|
||||
extras: &ExtrasSpecification,
|
||||
groups: BTreeMap<PathBuf, Vec<GroupName>>,
|
||||
groups: Option<&GroupsSpecification>,
|
||||
client_builder: &BaseClientBuilder<'_>,
|
||||
) -> Result<RequirementsSpecification, Error> {
|
||||
// If the user requests `extras` but does not provide a valid source (e.g., a `pyproject.toml`),
|
||||
|
|
@ -70,7 +70,7 @@ pub(crate) async fn read_requirements(
|
|||
"Use `package[extra]` syntax instead."
|
||||
};
|
||||
return Err(anyhow!(
|
||||
"Requesting extras requires a `pyproject.toml`, `setup.cfg`, or `setup.py` file. {hint}"
|
||||
"Requesting extras requires a `pylock.toml`, `pyproject.toml`, `setup.cfg`, or `setup.py` file. {hint}"
|
||||
)
|
||||
.into());
|
||||
}
|
||||
|
|
@ -91,15 +91,11 @@ pub(crate) async fn read_constraints(
|
|||
constraints: &[RequirementsSource],
|
||||
client_builder: &BaseClientBuilder<'_>,
|
||||
) -> Result<Vec<NameRequirementSpecification>, Error> {
|
||||
Ok(RequirementsSpecification::from_sources(
|
||||
&[],
|
||||
constraints,
|
||||
&[],
|
||||
BTreeMap::default(),
|
||||
client_builder,
|
||||
Ok(
|
||||
RequirementsSpecification::from_sources(&[], constraints, &[], None, client_builder)
|
||||
.await?
|
||||
.constraints,
|
||||
)
|
||||
.await?
|
||||
.constraints)
|
||||
}
|
||||
|
||||
/// Resolve a set of requirements, similar to running `pip compile`.
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
use std::collections::{BTreeMap, BTreeSet};
|
||||
use std::collections::BTreeSet;
|
||||
use std::fmt::Write;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
|
|
@ -18,13 +18,14 @@ use uv_distribution_types::{DependencyMetadata, Index, IndexLocations, Origin, R
|
|||
use uv_fs::Simplified;
|
||||
use uv_install_wheel::LinkMode;
|
||||
use uv_installer::SitePackages;
|
||||
use uv_normalize::{DefaultExtras, DefaultGroups};
|
||||
use uv_pep508::PackageName;
|
||||
use uv_pypi_types::Conflicts;
|
||||
use uv_python::{
|
||||
EnvironmentPreference, Prefix, PythonEnvironment, PythonInstallation, PythonPreference,
|
||||
PythonRequest, PythonVersion, Target,
|
||||
};
|
||||
use uv_requirements::{RequirementsSource, RequirementsSpecification};
|
||||
use uv_requirements::{GroupsSpecification, RequirementsSource, RequirementsSpecification};
|
||||
use uv_resolver::{
|
||||
DependencyMode, ExcludeNewer, FlatIndex, OptionsBuilder, PrereleaseMode, PylockToml,
|
||||
PythonRequirement, ResolutionMode, ResolverEnvironment,
|
||||
|
|
@ -49,6 +50,8 @@ pub(crate) async fn pip_sync(
|
|||
requirements: &[RequirementsSource],
|
||||
constraints: &[RequirementsSource],
|
||||
build_constraints: &[RequirementsSource],
|
||||
extras: &ExtrasSpecification,
|
||||
groups: &GroupsSpecification,
|
||||
reinstall: Reinstall,
|
||||
link_mode: LinkMode,
|
||||
compile: bool,
|
||||
|
|
@ -99,8 +102,6 @@ pub(crate) async fn pip_sync(
|
|||
|
||||
// Initialize a few defaults.
|
||||
let overrides = &[];
|
||||
let extras = ExtrasSpecification::default();
|
||||
let groups = BTreeMap::default();
|
||||
let upgrade = Upgrade::default();
|
||||
let resolution_mode = ResolutionMode::default();
|
||||
let prerelease_mode = PrereleaseMode::default();
|
||||
|
|
@ -126,8 +127,8 @@ pub(crate) async fn pip_sync(
|
|||
requirements,
|
||||
constraints,
|
||||
overrides,
|
||||
&extras,
|
||||
groups,
|
||||
extras,
|
||||
Some(groups),
|
||||
&client_builder,
|
||||
)
|
||||
.await?;
|
||||
|
|
@ -389,11 +390,46 @@ pub(crate) async fn pip_sync(
|
|||
let install_path = std::path::absolute(&pylock)?;
|
||||
let install_path = install_path.parent().unwrap();
|
||||
let content = fs_err::tokio::read_to_string(&pylock).await?;
|
||||
let lock = toml::from_str::<PylockToml>(&content)
|
||||
.with_context(|| format!("Not a valid pylock.toml file: {}", pylock.user_display()))?;
|
||||
let lock = toml::from_str::<PylockToml>(&content).with_context(|| {
|
||||
format!("Not a valid `pylock.toml` file: {}", pylock.user_display())
|
||||
})?;
|
||||
|
||||
let resolution =
|
||||
lock.to_resolution(install_path, marker_env.markers(), &tags, &build_options)?;
|
||||
// Verify that the Python version is compatible with the lock file.
|
||||
if let Some(requires_python) = lock.requires_python.as_ref() {
|
||||
if !requires_python.contains(interpreter.python_version()) {
|
||||
return Err(anyhow::anyhow!(
|
||||
"The requested interpreter resolved to Python {}, which is incompatible with the `pylock.toml`'s Python requirement: `{}`",
|
||||
interpreter.python_version(),
|
||||
requires_python,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
// Convert the extras and groups specifications into a concrete form.
|
||||
let extras = extras.with_defaults(DefaultExtras::default());
|
||||
let extras = extras
|
||||
.extra_names(lock.extras.iter())
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let groups = groups
|
||||
.get(&pylock)
|
||||
.cloned()
|
||||
.unwrap_or_default()
|
||||
.with_defaults(DefaultGroups::List(lock.default_groups.clone()));
|
||||
let groups = groups
|
||||
.group_names(lock.dependency_groups.iter())
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let resolution = lock.to_resolution(
|
||||
install_path,
|
||||
marker_env.markers(),
|
||||
&extras,
|
||||
&groups,
|
||||
&tags,
|
||||
&build_options,
|
||||
)?;
|
||||
let hasher = HashStrategy::from_resolution(&resolution, HashCheckingMode::Verify)?;
|
||||
|
||||
(resolution, hasher)
|
||||
|
|
@ -418,7 +454,7 @@ pub(crate) async fn pip_sync(
|
|||
source_trees,
|
||||
project,
|
||||
BTreeSet::default(),
|
||||
&extras,
|
||||
extras,
|
||||
&groups,
|
||||
preferences,
|
||||
site_packages.clone(),
|
||||
|
|
|
|||
|
|
@ -351,7 +351,7 @@ pub(crate) async fn add(
|
|||
&requirements,
|
||||
&constraints,
|
||||
&[],
|
||||
BTreeMap::default(),
|
||||
None,
|
||||
&client_builder,
|
||||
)
|
||||
.await?;
|
||||
|
|
@ -663,7 +663,9 @@ pub(crate) async fn add(
|
|||
// Add any indexes that were provided on the command-line, in priority order.
|
||||
if !raw {
|
||||
let urls = IndexUrls::from_indexes(indexes);
|
||||
for index in urls.defined_indexes() {
|
||||
let mut indexes = urls.defined_indexes().collect::<Vec<_>>();
|
||||
indexes.reverse();
|
||||
for index in indexes {
|
||||
toml.add_index(index)?;
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -63,7 +63,7 @@ impl EphemeralEnvironment {
|
|||
/// environment's `site-packages` directory to Python's import search paths in addition to
|
||||
/// the ephemeral environment's `site-packages` directory. This works well at runtime, but
|
||||
/// is too dynamic for static analysis tools like ty to understand. As such, we
|
||||
/// additionally write the `sys.prefix` of the parent environment to to the
|
||||
/// additionally write the `sys.prefix` of the parent environment to the
|
||||
/// `extends-environment` key of the ephemeral environment's `pyvenv.cfg` file, making it
|
||||
/// easier for these tools to statically and reliably understand the relationship between
|
||||
/// the two environments.
|
||||
|
|
@ -78,6 +78,20 @@ impl EphemeralEnvironment {
|
|||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Returns the path to the environment's scripts directory.
|
||||
pub(crate) fn scripts(&self) -> &Path {
|
||||
self.0.scripts()
|
||||
}
|
||||
|
||||
/// Returns the path to the environment's Python executable.
|
||||
pub(crate) fn sys_executable(&self) -> &Path {
|
||||
self.0.interpreter().sys_executable()
|
||||
}
|
||||
|
||||
pub(crate) fn sys_prefix(&self) -> &Path {
|
||||
self.0.interpreter().sys_prefix()
|
||||
}
|
||||
}
|
||||
|
||||
/// A [`PythonEnvironment`] stored in the cache.
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
use anyhow::{Context, Result, anyhow};
|
||||
use owo_colors::OwoColorize;
|
||||
use std::fmt::Write;
|
||||
use std::iter;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::{Command, Stdio};
|
||||
use std::str::FromStr;
|
||||
|
|
@ -944,7 +945,13 @@ fn pyproject_build_system(package: &PackageName, build_backend: ProjectBuildBack
|
|||
min_version.release()[0] == 0,
|
||||
"migrate to major version bumps"
|
||||
);
|
||||
let max_version = Version::new([0, min_version.release()[1] + 1]);
|
||||
let max_version = Version::new(
|
||||
[0, min_version.release()[1] + 1]
|
||||
.into_iter()
|
||||
// Add trailing zeroes to match the version length, to use the same style
|
||||
// as `--bounds`.
|
||||
.chain(iter::repeat_n(0, min_version.release().len() - 2)),
|
||||
);
|
||||
indoc::formatdoc! {r#"
|
||||
[build-system]
|
||||
requires = ["uv_build>={min_version},<{max_version}"]
|
||||
|
|
|
|||
|
|
@ -43,6 +43,7 @@ use uv_scripts::Pep723ItemRef;
|
|||
use uv_settings::PythonInstallMirrors;
|
||||
use uv_static::EnvVars;
|
||||
use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy};
|
||||
use uv_virtualenv::remove_virtualenv;
|
||||
use uv_warnings::{warn_user, warn_user_once};
|
||||
use uv_workspace::dependency_groups::DependencyGroupError;
|
||||
use uv_workspace::pyproject::PyProjectToml;
|
||||
|
|
@ -939,13 +940,19 @@ impl ProjectInterpreter {
|
|||
));
|
||||
}
|
||||
InvalidEnvironmentKind::MissingExecutable(_) => {
|
||||
// If it's not an empty directory
|
||||
if fs_err::read_dir(&root).is_ok_and(|mut dir| dir.next().is_some()) {
|
||||
return Err(ProjectError::InvalidProjectEnvironmentDir(
|
||||
root,
|
||||
"it is not a valid Python environment (no Python executable was found)"
|
||||
.to_string(),
|
||||
));
|
||||
// ... and there's no `pyvenv.cfg`
|
||||
if !root.join("pyvenv.cfg").try_exists().unwrap_or_default() {
|
||||
// ... then it's not a valid Python environment
|
||||
return Err(ProjectError::InvalidProjectEnvironmentDir(
|
||||
root,
|
||||
"it is not a valid Python environment (no Python executable was found)"
|
||||
.to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
// Otherwise, we'll delete it
|
||||
}
|
||||
// If the environment is an empty directory, it's fine to use
|
||||
InvalidEnvironmentKind::Empty => {}
|
||||
|
|
@ -1373,7 +1380,7 @@ impl ProjectEnvironment {
|
|||
|
||||
// Remove the existing virtual environment if it doesn't meet the requirements.
|
||||
if replace {
|
||||
match fs_err::remove_dir_all(&root) {
|
||||
match remove_virtualenv(&root) {
|
||||
Ok(()) => {
|
||||
writeln!(
|
||||
printer.stderr(),
|
||||
|
|
@ -1381,8 +1388,9 @@ impl ProjectEnvironment {
|
|||
root.user_display().cyan()
|
||||
)?;
|
||||
}
|
||||
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
|
||||
Err(e) => return Err(e.into()),
|
||||
Err(uv_virtualenv::Error::Io(err))
|
||||
if err.kind() == std::io::ErrorKind::NotFound => {}
|
||||
Err(err) => return Err(err.into()),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -9,8 +9,9 @@ use anyhow::{Context, anyhow, bail};
|
|||
use futures::StreamExt;
|
||||
use itertools::Itertools;
|
||||
use owo_colors::OwoColorize;
|
||||
use thiserror::Error;
|
||||
use tokio::process::Command;
|
||||
use tracing::{debug, warn};
|
||||
use tracing::{debug, trace, warn};
|
||||
use url::Url;
|
||||
|
||||
use uv_cache::Cache;
|
||||
|
|
@ -22,7 +23,7 @@ use uv_configuration::{
|
|||
};
|
||||
use uv_distribution_types::Requirement;
|
||||
use uv_fs::which::is_executable;
|
||||
use uv_fs::{PythonExt, Simplified};
|
||||
use uv_fs::{PythonExt, Simplified, create_symlink};
|
||||
use uv_installer::{SatisfiesResult, SitePackages};
|
||||
use uv_normalize::{DefaultExtras, DefaultGroups, PackageName};
|
||||
use uv_python::{
|
||||
|
|
@ -1071,6 +1072,67 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
|
|||
requirements_site_packages.escape_for_python(),
|
||||
))?;
|
||||
|
||||
// N.B. The order here matters — earlier interpreters take precedence over the
|
||||
// later ones.
|
||||
for interpreter in [requirements_env.interpreter(), &base_interpreter] {
|
||||
// Copy each entrypoint from the base environments to the ephemeral environment,
|
||||
// updating the Python executable target to ensure they run in the ephemeral
|
||||
// environment.
|
||||
for entry in fs_err::read_dir(interpreter.scripts())? {
|
||||
let entry = entry?;
|
||||
if !entry.file_type()?.is_file() {
|
||||
continue;
|
||||
}
|
||||
match copy_entrypoint(
|
||||
&entry.path(),
|
||||
&ephemeral_env.scripts().join(entry.file_name()),
|
||||
interpreter.sys_executable(),
|
||||
ephemeral_env.sys_executable(),
|
||||
) {
|
||||
Ok(()) => {}
|
||||
// If the entrypoint already exists, skip it.
|
||||
Err(CopyEntrypointError::Io(err))
|
||||
if err.kind() == std::io::ErrorKind::AlreadyExists =>
|
||||
{
|
||||
trace!(
|
||||
"Skipping copy of entrypoint `{}`: already exists",
|
||||
&entry.path().display()
|
||||
);
|
||||
}
|
||||
Err(err) => return Err(err.into()),
|
||||
}
|
||||
}
|
||||
|
||||
// Link data directories from the base environment to the ephemeral environment.
|
||||
//
|
||||
// This is critical for Jupyter Lab, which cannot operate without the files it
|
||||
// writes to `<prefix>/share/jupyter`.
|
||||
//
|
||||
// See https://github.com/jupyterlab/jupyterlab/issues/17716
|
||||
for dir in &["etc/jupyter", "share/jupyter"] {
|
||||
let source = interpreter.sys_prefix().join(dir);
|
||||
if !matches!(source.try_exists(), Ok(true)) {
|
||||
continue;
|
||||
}
|
||||
if !source.is_dir() {
|
||||
continue;
|
||||
}
|
||||
let target = ephemeral_env.sys_prefix().join(dir);
|
||||
if let Some(parent) = target.parent() {
|
||||
fs_err::create_dir_all(parent)?;
|
||||
}
|
||||
match create_symlink(&source, &target) {
|
||||
Ok(()) => trace!(
|
||||
"Created link for {} -> {}",
|
||||
target.user_display(),
|
||||
source.user_display()
|
||||
),
|
||||
Err(err) if err.kind() == std::io::ErrorKind::AlreadyExists => {}
|
||||
Err(err) => return Err(err.into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Write the `sys.prefix` of the parent environment to the `extends-environment` key of the `pyvenv.cfg`
|
||||
// file. This helps out static-analysis tools such as ty (see docs on
|
||||
// `CachedEnvironment::set_parent_environment`).
|
||||
|
|
@ -1669,3 +1731,126 @@ fn read_recursion_depth_from_environment_variable() -> anyhow::Result<u32> {
|
|||
.parse::<u32>()
|
||||
.with_context(|| format!("invalid value for {}", EnvVars::UV_RUN_RECURSION_DEPTH))
|
||||
}
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
enum CopyEntrypointError {
|
||||
#[error(transparent)]
|
||||
Io(#[from] std::io::Error),
|
||||
#[cfg(windows)]
|
||||
#[error(transparent)]
|
||||
Trampoline(#[from] uv_trampoline_builder::Error),
|
||||
}
|
||||
|
||||
/// Create a copy of the entrypoint at `source` at `target`, if it has a Python shebang, replacing
|
||||
/// the previous Python executable with a new one.
|
||||
///
|
||||
/// This is a no-op if the target already exists.
|
||||
///
|
||||
/// Note on Windows, the entrypoints do not use shebangs and require a rewrite of the trampoline.
|
||||
#[cfg(unix)]
|
||||
fn copy_entrypoint(
|
||||
source: &Path,
|
||||
target: &Path,
|
||||
previous_executable: &Path,
|
||||
python_executable: &Path,
|
||||
) -> Result<(), CopyEntrypointError> {
|
||||
use std::io::{Seek, Write};
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
|
||||
use fs_err::os::unix::fs::OpenOptionsExt;
|
||||
|
||||
let mut file = fs_err::File::open(source)?;
|
||||
let mut buffer = [0u8; 2];
|
||||
if file.read_exact(&mut buffer).is_err() {
|
||||
// File is too small to have a shebang
|
||||
trace!(
|
||||
"Skipping copy of entrypoint `{}`: file is too small to contain a shebang",
|
||||
source.user_display()
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Check if it starts with `#!` to avoid reading binary files and such into memory
|
||||
if &buffer != b"#!" {
|
||||
trace!(
|
||||
"Skipping copy of entrypoint `{}`: does not start with #!",
|
||||
source.user_display()
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut contents = String::new();
|
||||
file.seek(std::io::SeekFrom::Start(0))?;
|
||||
match file.read_to_string(&mut contents) {
|
||||
Ok(_) => {}
|
||||
Err(err) if err.kind() == std::io::ErrorKind::InvalidData => {
|
||||
// If the file is not valid UTF-8, we skip it in case it was a binary file with `#!` at
|
||||
// the start (which seems pretty niche, but being defensive here seems safe)
|
||||
trace!(
|
||||
"Skipping copy of entrypoint `{}`: is not valid UTF-8",
|
||||
source.user_display()
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
Err(err) => return Err(err.into()),
|
||||
}
|
||||
|
||||
let Some(contents) = contents
|
||||
// Check for a relative path or relocatable shebang
|
||||
.strip_prefix(
|
||||
r#"#!/bin/sh
|
||||
'''exec' "$(dirname -- "$(realpath -- "$0")")"/'python' "$0" "$@"
|
||||
' '''
|
||||
"#,
|
||||
)
|
||||
// Or an absolute path shebang
|
||||
.or_else(|| contents.strip_prefix(&format!("#!{}\n", previous_executable.display())))
|
||||
else {
|
||||
// If it's not a Python shebang, we'll skip it
|
||||
trace!(
|
||||
"Skipping copy of entrypoint `{}`: does not start with expected shebang",
|
||||
source.user_display()
|
||||
);
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
let contents = format!("#!{}\n{}", python_executable.display(), contents);
|
||||
let mode = fs_err::metadata(source)?.permissions().mode();
|
||||
let mut file = fs_err::OpenOptions::new()
|
||||
.create_new(true)
|
||||
.write(true)
|
||||
.mode(mode)
|
||||
.open(target)?;
|
||||
file.write_all(contents.as_bytes())?;
|
||||
|
||||
trace!("Updated entrypoint at {}", target.user_display());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Create a copy of the entrypoint at `source` at `target`, if it's a Python script launcher,
|
||||
/// replacing the target Python executable with a new one.
|
||||
#[cfg(windows)]
|
||||
fn copy_entrypoint(
|
||||
source: &Path,
|
||||
target: &Path,
|
||||
_previous_executable: &Path,
|
||||
python_executable: &Path,
|
||||
) -> Result<(), CopyEntrypointError> {
|
||||
use uv_trampoline_builder::Launcher;
|
||||
|
||||
let Some(launcher) = Launcher::try_from_path(source)? else {
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
let launcher = launcher.with_python_path(python_executable.to_path_buf());
|
||||
let mut file = fs_err::OpenOptions::new()
|
||||
.create_new(true)
|
||||
.write(true)
|
||||
.open(target)?;
|
||||
launcher.write_to_file(&mut file)?;
|
||||
|
||||
trace!("Updated entrypoint at {}", target.user_display());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
|||
|
|
@ -276,7 +276,7 @@ pub(crate) async fn sync(
|
|||
dry_run: dry_run.enabled(),
|
||||
};
|
||||
if let Some(output) = report.format(output_format) {
|
||||
writeln!(printer.stdout(), "{output}")?;
|
||||
writeln!(printer.stdout_important(), "{output}")?;
|
||||
}
|
||||
return Ok(ExitStatus::Success);
|
||||
}
|
||||
|
|
@ -366,7 +366,7 @@ pub(crate) async fn sync(
|
|||
};
|
||||
|
||||
if let Some(output) = report.format(output_format) {
|
||||
writeln!(printer.stdout(), "{output}")?;
|
||||
writeln!(printer.stdout_important(), "{output}")?;
|
||||
}
|
||||
|
||||
// Identify the installation target.
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ use uv_python::{PythonDownloads, PythonPreference, PythonRequest};
|
|||
use uv_settings::PythonInstallMirrors;
|
||||
use uv_workspace::pyproject_mut::Error;
|
||||
use uv_workspace::{
|
||||
DiscoveryOptions, WorkspaceCache,
|
||||
DiscoveryOptions, WorkspaceCache, WorkspaceError,
|
||||
pyproject_mut::{DependencyTarget, PyProjectTomlMut},
|
||||
};
|
||||
use uv_workspace::{VirtualProject, Workspace};
|
||||
|
|
@ -59,6 +59,7 @@ pub(crate) async fn project_version(
|
|||
output_format: VersionFormat,
|
||||
project_dir: &Path,
|
||||
package: Option<PackageName>,
|
||||
explicit_project: bool,
|
||||
dry_run: bool,
|
||||
locked: bool,
|
||||
frozen: bool,
|
||||
|
|
@ -78,7 +79,7 @@ pub(crate) async fn project_version(
|
|||
preview: PreviewMode,
|
||||
) -> Result<ExitStatus> {
|
||||
// Read the metadata
|
||||
let project = find_target(project_dir, package.as_ref()).await?;
|
||||
let project = find_target(project_dir, package.as_ref(), explicit_project).await?;
|
||||
|
||||
let pyproject_path = project.root().join("pyproject.toml");
|
||||
let Some(name) = project.project_name().cloned() else {
|
||||
|
|
@ -325,10 +326,30 @@ pub(crate) async fn project_version(
|
|||
Ok(status)
|
||||
}
|
||||
|
||||
/// Add hint to use `uv self version` when workspace discovery fails due to missing pyproject.toml
|
||||
/// and --project was not explicitly passed
|
||||
fn hint_uv_self_version(err: WorkspaceError, explicit_project: bool) -> anyhow::Error {
|
||||
if matches!(err, WorkspaceError::MissingPyprojectToml) && !explicit_project {
|
||||
anyhow!(
|
||||
"{}\n\n{}{} If you meant to view uv's version, use `{}` instead",
|
||||
err,
|
||||
"hint".bold().cyan(),
|
||||
":".bold(),
|
||||
"uv self version".green()
|
||||
)
|
||||
} else {
|
||||
err.into()
|
||||
}
|
||||
}
|
||||
|
||||
/// Find the pyproject.toml we're modifying
|
||||
///
|
||||
/// Note that `uv version` never needs to support PEP 723 scripts, as those are unversioned.
|
||||
async fn find_target(project_dir: &Path, package: Option<&PackageName>) -> Result<VirtualProject> {
|
||||
async fn find_target(
|
||||
project_dir: &Path,
|
||||
package: Option<&PackageName>,
|
||||
explicit_project: bool,
|
||||
) -> Result<VirtualProject> {
|
||||
// Find the project in the workspace.
|
||||
// No workspace caching since `uv version` changes the workspace definition.
|
||||
let project = if let Some(package) = package {
|
||||
|
|
@ -338,7 +359,8 @@ async fn find_target(project_dir: &Path, package: Option<&PackageName>) -> Resul
|
|||
&DiscoveryOptions::default(),
|
||||
&WorkspaceCache::default(),
|
||||
)
|
||||
.await?
|
||||
.await
|
||||
.map_err(|err| hint_uv_self_version(err, explicit_project))?
|
||||
.with_current_project(package.clone())
|
||||
.with_context(|| format!("Package `{package}` not found in workspace"))?,
|
||||
)
|
||||
|
|
@ -348,7 +370,8 @@ async fn find_target(project_dir: &Path, package: Option<&PackageName>) -> Resul
|
|||
&DiscoveryOptions::default(),
|
||||
&WorkspaceCache::default(),
|
||||
)
|
||||
.await?
|
||||
.await
|
||||
.map_err(|err| hint_uv_self_version(err, explicit_project))?
|
||||
};
|
||||
Ok(project)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -768,7 +768,7 @@ fn create_bin_links(
|
|||
installation.executable(false)
|
||||
};
|
||||
|
||||
match create_link_to_executable(&target, executable.clone()) {
|
||||
match create_link_to_executable(&target, &executable) {
|
||||
Ok(()) => {
|
||||
debug!(
|
||||
"Installed executable at `{}` for {}",
|
||||
|
|
@ -925,7 +925,7 @@ fn create_bin_links(
|
|||
.remove(&target);
|
||||
}
|
||||
|
||||
if let Err(err) = create_link_to_executable(&target, executable) {
|
||||
if let Err(err) = create_link_to_executable(&target, &executable) {
|
||||
errors.push((
|
||||
InstallErrorKind::Bin,
|
||||
installation.key().clone(),
|
||||
|
|
@ -953,7 +953,7 @@ fn create_bin_links(
|
|||
errors.push((
|
||||
InstallErrorKind::Bin,
|
||||
installation.key().clone(),
|
||||
anyhow::Error::new(err),
|
||||
Error::new(err),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,3 @@
|
|||
use std::collections::BTreeMap;
|
||||
use std::fmt::Write;
|
||||
use std::str::FromStr;
|
||||
|
||||
|
|
@ -261,7 +260,7 @@ pub(crate) async fn install(
|
|||
with,
|
||||
constraints,
|
||||
overrides,
|
||||
BTreeMap::default(),
|
||||
None,
|
||||
&client_builder,
|
||||
)
|
||||
.await?;
|
||||
|
|
|
|||
|
|
@ -1,4 +1,3 @@
|
|||
use std::collections::BTreeMap;
|
||||
use std::fmt::Display;
|
||||
use std::fmt::Write;
|
||||
use std::path::Path;
|
||||
|
|
@ -871,7 +870,7 @@ async fn get_or_create_environment(
|
|||
with,
|
||||
constraints,
|
||||
overrides,
|
||||
BTreeMap::default(),
|
||||
None,
|
||||
&client_builder,
|
||||
)
|
||||
.await?;
|
||||
|
|
|
|||
|
|
@ -1,5 +1,4 @@
|
|||
use std::borrow::Cow;
|
||||
use std::collections::BTreeMap;
|
||||
use std::ffi::OsString;
|
||||
use std::fmt::Write;
|
||||
use std::io::stdout;
|
||||
|
|
@ -36,7 +35,7 @@ use uv_pep440::release_specifiers_to_ranges;
|
|||
use uv_pep508::VersionOrUrl;
|
||||
use uv_pypi_types::{ParsedDirectoryUrl, ParsedUrl};
|
||||
use uv_python::PythonRequest;
|
||||
use uv_requirements::RequirementsSource;
|
||||
use uv_requirements::{GroupsSpecification, RequirementsSource};
|
||||
use uv_requirements_txt::RequirementsTxtRequirement;
|
||||
use uv_scripts::{Pep723Error, Pep723Item, Pep723ItemRef, Pep723Metadata, Pep723Script};
|
||||
use uv_settings::{Combine, EnvironmentOptions, FilesystemOptions, Options};
|
||||
|
|
@ -477,20 +476,10 @@ async fn run(mut cli: Cli) -> Result<ExitStatus> {
|
|||
.into_iter()
|
||||
.map(RequirementsSource::from_constraints_txt)
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
let mut groups = BTreeMap::new();
|
||||
for group in args.settings.groups {
|
||||
// If there's no path provided, expect a pyproject.toml in the project-dir
|
||||
// (Which is typically the current working directory, matching pip's behaviour)
|
||||
let pyproject_path = group
|
||||
.path
|
||||
.clone()
|
||||
.unwrap_or_else(|| project_dir.join("pyproject.toml"));
|
||||
groups
|
||||
.entry(pyproject_path)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(group.name.clone());
|
||||
}
|
||||
let groups = GroupsSpecification {
|
||||
root: project_dir.to_path_buf(),
|
||||
groups: args.settings.groups,
|
||||
};
|
||||
|
||||
commands::pip_compile(
|
||||
&requirements,
|
||||
|
|
@ -588,11 +577,17 @@ async fn run(mut cli: Cli) -> Result<ExitStatus> {
|
|||
.into_iter()
|
||||
.map(RequirementsSource::from_constraints_txt)
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
let groups = GroupsSpecification {
|
||||
root: project_dir.to_path_buf(),
|
||||
groups: args.settings.groups,
|
||||
};
|
||||
|
||||
commands::pip_sync(
|
||||
&requirements,
|
||||
&constraints,
|
||||
&build_constraints,
|
||||
&args.settings.extras,
|
||||
&groups,
|
||||
args.settings.reinstall,
|
||||
args.settings.link_mode,
|
||||
args.settings.compile_bytecode,
|
||||
|
|
@ -674,20 +669,10 @@ async fn run(mut cli: Cli) -> Result<ExitStatus> {
|
|||
.into_iter()
|
||||
.map(RequirementsSource::from_overrides_txt)
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
let mut groups = BTreeMap::new();
|
||||
for group in args.settings.groups {
|
||||
// If there's no path provided, expect a pyproject.toml in the project-dir
|
||||
// (Which is typically the current working directory, matching pip's behaviour)
|
||||
let pyproject_path = group
|
||||
.path
|
||||
.clone()
|
||||
.unwrap_or_else(|| project_dir.join("pyproject.toml"));
|
||||
groups
|
||||
.entry(pyproject_path)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(group.name.clone());
|
||||
}
|
||||
let groups = GroupsSpecification {
|
||||
root: project_dir.to_path_buf(),
|
||||
groups: args.settings.groups,
|
||||
};
|
||||
|
||||
// Special-case: any source trees specified on the command-line are automatically
|
||||
// reinstalled. This matches user expectations: `uv pip install .` should always
|
||||
|
|
@ -747,7 +732,7 @@ async fn run(mut cli: Cli) -> Result<ExitStatus> {
|
|||
args.overrides_from_workspace,
|
||||
args.build_constraints_from_workspace,
|
||||
&args.settings.extras,
|
||||
groups,
|
||||
&groups,
|
||||
args.settings.resolution,
|
||||
args.settings.prerelease,
|
||||
args.settings.dependency_mode,
|
||||
|
|
@ -1091,6 +1076,7 @@ async fn run(mut cli: Cli) -> Result<ExitStatus> {
|
|||
script,
|
||||
globals,
|
||||
cli.top_level.no_config,
|
||||
cli.top_level.global_args.project.is_some(),
|
||||
filesystem,
|
||||
cache,
|
||||
printer,
|
||||
|
|
@ -1692,6 +1678,7 @@ async fn run_project(
|
|||
globals: GlobalSettings,
|
||||
// TODO(zanieb): Determine a better story for passing `no_config` in here
|
||||
no_config: bool,
|
||||
explicit_project: bool,
|
||||
filesystem: Option<FilesystemOptions>,
|
||||
cache: Cache,
|
||||
printer: Printer,
|
||||
|
|
@ -2083,6 +2070,7 @@ async fn run_project(
|
|||
args.output_format,
|
||||
project_dir,
|
||||
args.package,
|
||||
explicit_project,
|
||||
args.dry_run,
|
||||
args.locked,
|
||||
args.frozen,
|
||||
|
|
|
|||
|
|
@ -2058,6 +2058,10 @@ impl PipSyncSettings {
|
|||
src_file,
|
||||
constraints,
|
||||
build_constraints,
|
||||
extra,
|
||||
all_extras,
|
||||
no_all_extras,
|
||||
group,
|
||||
installer,
|
||||
refresh,
|
||||
require_hashes,
|
||||
|
|
@ -2122,6 +2126,9 @@ impl PipSyncSettings {
|
|||
python_version,
|
||||
python_platform,
|
||||
strict: flag(strict, no_strict, "strict"),
|
||||
extra,
|
||||
all_extras: flag(all_extras, no_all_extras, "all-extras"),
|
||||
group: Some(group),
|
||||
torch_backend,
|
||||
..PipOptions::from(installer)
|
||||
},
|
||||
|
|
|
|||
|
|
@ -61,16 +61,17 @@ fn branching_urls_overlapping() -> Result<()> {
|
|||
"# };
|
||||
make_project(context.temp_dir.path(), "a", deps)?;
|
||||
|
||||
uv_snapshot!(context.filters(), context.lock().current_dir(&context.temp_dir), @r###"
|
||||
uv_snapshot!(context.filters(), context.lock().current_dir(&context.temp_dir), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: Requirements contain conflicting URLs for package `iniconfig` in split `python_full_version == '3.11.*'`:
|
||||
- https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl
|
||||
- https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl
|
||||
"###
|
||||
× Failed to resolve dependencies for `a` (v0.1.0)
|
||||
╰─▶ Requirements contain conflicting URLs for package `iniconfig` in split `python_full_version == '3.11.*'`:
|
||||
- https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl
|
||||
- https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl
|
||||
"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
|
@ -128,16 +129,18 @@ fn root_package_splits_but_transitive_conflict() -> Result<()> {
|
|||
"# };
|
||||
make_project(&context.temp_dir.path().join("b2"), "b2", deps)?;
|
||||
|
||||
uv_snapshot!(context.filters(), context.lock().current_dir(&context.temp_dir), @r###"
|
||||
uv_snapshot!(context.filters(), context.lock().current_dir(&context.temp_dir), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: Requirements contain conflicting URLs for package `iniconfig` in split `python_full_version >= '3.12'`:
|
||||
- https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl
|
||||
- https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl
|
||||
"###
|
||||
× Failed to resolve dependencies for `b2` (v0.1.0)
|
||||
╰─▶ Requirements contain conflicting URLs for package `iniconfig` in split `python_full_version >= '3.12'`:
|
||||
- https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl
|
||||
- https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl
|
||||
help: `b2` (v0.1.0) was included because `a` (v0.1.0) depends on `b` (v0.1.0) which depends on `b2`
|
||||
"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
|
@ -727,16 +730,17 @@ fn branching_urls_of_different_sources_conflict() -> Result<()> {
|
|||
"# };
|
||||
make_project(context.temp_dir.path(), "a", deps)?;
|
||||
|
||||
uv_snapshot!(context.filters(), context.lock().current_dir(&context.temp_dir), @r###"
|
||||
uv_snapshot!(context.filters(), context.lock().current_dir(&context.temp_dir), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: Requirements contain conflicting URLs for package `iniconfig` in split `python_full_version == '3.11.*'`:
|
||||
- git+https://github.com/pytest-dev/iniconfig@93f5930e668c0d1ddf4597e38dd0dea4e2665e7a
|
||||
- https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl
|
||||
"###
|
||||
× Failed to resolve dependencies for `a` (v0.1.0)
|
||||
╰─▶ Requirements contain conflicting URLs for package `iniconfig` in split `python_full_version == '3.11.*'`:
|
||||
- git+https://github.com/pytest-dev/iniconfig@93f5930e668c0d1ddf4597e38dd0dea4e2665e7a
|
||||
- https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl
|
||||
"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
|
|
|||
|
|
@ -1754,13 +1754,14 @@ fn build_with_symlink() -> Result<()> {
|
|||
build-backend = "hatchling.build"
|
||||
"#})?;
|
||||
fs_err::os::unix::fs::symlink(
|
||||
context.temp_dir.child("pyproject.toml.real"),
|
||||
"pyproject.toml.real",
|
||||
context.temp_dir.child("pyproject.toml"),
|
||||
)?;
|
||||
context
|
||||
.temp_dir
|
||||
.child("src/softlinked/__init__.py")
|
||||
.touch()?;
|
||||
fs_err::remove_dir_all(&context.venv)?;
|
||||
uv_snapshot!(context.filters(), context.build(), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
|
|
@ -1799,6 +1800,7 @@ fn build_with_hardlink() -> Result<()> {
|
|||
.temp_dir
|
||||
.child("src/hardlinked/__init__.py")
|
||||
.touch()?;
|
||||
fs_err::remove_dir_all(&context.venv)?;
|
||||
uv_snapshot!(context.filters(), context.build(), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
|
|
|
|||
|
|
@ -3568,7 +3568,7 @@ fn add_update_git_reference_script() -> Result<()> {
|
|||
filters => context.filters(),
|
||||
}, {
|
||||
assert_snapshot!(
|
||||
script_content, @r###"
|
||||
script_content, @r##"
|
||||
# /// script
|
||||
# requires-python = ">=3.11"
|
||||
# dependencies = [
|
||||
|
|
@ -3581,7 +3581,7 @@ fn add_update_git_reference_script() -> Result<()> {
|
|||
|
||||
import time
|
||||
time.sleep(5)
|
||||
"###
|
||||
"##
|
||||
);
|
||||
});
|
||||
|
||||
|
|
@ -3601,7 +3601,7 @@ fn add_update_git_reference_script() -> Result<()> {
|
|||
filters => context.filters(),
|
||||
}, {
|
||||
assert_snapshot!(
|
||||
script_content, @r###"
|
||||
script_content, @r##"
|
||||
# /// script
|
||||
# requires-python = ">=3.11"
|
||||
# dependencies = [
|
||||
|
|
@ -3614,7 +3614,7 @@ fn add_update_git_reference_script() -> Result<()> {
|
|||
|
||||
import time
|
||||
time.sleep(5)
|
||||
"###
|
||||
"##
|
||||
);
|
||||
});
|
||||
|
||||
|
|
@ -10896,7 +10896,7 @@ fn add_preserves_empty_comment() -> Result<()> {
|
|||
filters => context.filters(),
|
||||
}, {
|
||||
assert_snapshot!(
|
||||
pyproject_toml, @r###"
|
||||
pyproject_toml, @r#"
|
||||
[project]
|
||||
name = "project"
|
||||
version = "0.1.0"
|
||||
|
|
@ -10906,7 +10906,7 @@ fn add_preserves_empty_comment() -> Result<()> {
|
|||
# Second line.
|
||||
"anyio==3.7.0",
|
||||
]
|
||||
"###
|
||||
"#
|
||||
);
|
||||
});
|
||||
|
||||
|
|
@ -11307,6 +11307,115 @@ fn remove_all_with_comments() -> Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
/// If multiple indexes are provided on the CLI, the first-provided index should take precedence
|
||||
/// during resolution, and should appear first in the `pyproject.toml` file.
|
||||
///
|
||||
/// See: <https://github.com/astral-sh/uv/issues/14817>
|
||||
#[test]
|
||||
fn multiple_index_cli() -> Result<()> {
|
||||
let context = TestContext::new("3.12");
|
||||
|
||||
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||
pyproject_toml.write_str(indoc! {r#"
|
||||
[project]
|
||||
name = "project"
|
||||
version = "0.1.0"
|
||||
requires-python = ">=3.12"
|
||||
dependencies = []
|
||||
"#})?;
|
||||
|
||||
uv_snapshot!(context.filters(), context
|
||||
.add()
|
||||
.arg("requests")
|
||||
.arg("--index")
|
||||
.arg("https://test.pypi.org/simple")
|
||||
.arg("--index")
|
||||
.arg("https://pypi.org/simple"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 2 packages in [TIME]
|
||||
Prepared 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
+ requests==2.5.4.1
|
||||
");
|
||||
|
||||
let pyproject_toml = context.read("pyproject.toml");
|
||||
|
||||
insta::with_settings!({
|
||||
filters => context.filters(),
|
||||
}, {
|
||||
assert_snapshot!(
|
||||
pyproject_toml, @r#"
|
||||
[project]
|
||||
name = "project"
|
||||
version = "0.1.0"
|
||||
requires-python = ">=3.12"
|
||||
dependencies = [
|
||||
"requests>=2.5.4.1",
|
||||
]
|
||||
|
||||
[[tool.uv.index]]
|
||||
url = "https://test.pypi.org/simple"
|
||||
|
||||
[[tool.uv.index]]
|
||||
url = "https://pypi.org/simple"
|
||||
"#
|
||||
);
|
||||
});
|
||||
|
||||
let lock = context.read("uv.lock");
|
||||
|
||||
insta::with_settings!({
|
||||
filters => context.filters(),
|
||||
}, {
|
||||
assert_snapshot!(
|
||||
lock, @r#"
|
||||
version = 1
|
||||
revision = 2
|
||||
requires-python = ">=3.12"
|
||||
|
||||
[options]
|
||||
exclude-newer = "2024-03-25T00:00:00Z"
|
||||
|
||||
[[package]]
|
||||
name = "project"
|
||||
version = "0.1.0"
|
||||
source = { virtual = "." }
|
||||
dependencies = [
|
||||
{ name = "requests" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
requires-dist = [{ name = "requests", specifier = ">=2.5.4.1" }]
|
||||
|
||||
[[package]]
|
||||
name = "requests"
|
||||
version = "2.5.4.1"
|
||||
source = { registry = "https://test.pypi.org/simple" }
|
||||
sdist = { url = "https://test-files.pythonhosted.org/packages/6e/93/638dbb5f2c1f4120edaad4f3d45ffb1718e463733ad07d68f59e042901d6/requests-2.5.4.1.tar.gz", hash = "sha256:b19df51fa3e52a2bd7fc80a1ac11fb6b2f51a7c0bf31ba9ff6b5d11ea8605ae9", size = 448691, upload-time = "2015-03-13T21:30:03.228Z" }
|
||||
wheels = [
|
||||
{ url = "https://test-files.pythonhosted.org/packages/6d/00/8ed1b6ea43b10bfe28d08e6af29fd6aa5d8dab5e45ead9394a6268a2d2ec/requests-2.5.4.1-py2.py3-none-any.whl", hash = "sha256:0a2c98e46121e7507afb0edc89d342641a1fb9e8d56f7d592d4975ee6b685f9a", size = 468942, upload-time = "2015-03-13T21:29:55.769Z" },
|
||||
]
|
||||
"#
|
||||
);
|
||||
});
|
||||
|
||||
// Install from the lockfile.
|
||||
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Audited 1 package in [TIME]
|
||||
"###);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// If an index is repeated by the CLI and an environment variable, the CLI value should take
|
||||
/// precedence.
|
||||
///
|
||||
|
|
@ -11418,7 +11527,7 @@ fn repeated_index_cli_environment_variable() -> Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
/// If an index is repeated on the CLI, the last-provided index should take precedence.
|
||||
/// If an index is repeated on the CLI, the first-provided index should take precedence.
|
||||
/// Newlines in `UV_INDEX` should be treated as separators.
|
||||
///
|
||||
/// The index that appears in the `pyproject.toml` should also be consistent with the index that
|
||||
|
|
@ -11524,7 +11633,7 @@ fn repeated_index_cli_environment_variable_newline() -> Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
/// If an index is repeated on the CLI, the last-provided index should take precedence.
|
||||
/// If an index is repeated on the CLI, the first-provided index should take precedence.
|
||||
///
|
||||
/// The index that appears in the `pyproject.toml` should also be consistent with the index that
|
||||
/// appears in the `uv.lock`.
|
||||
|
|
@ -11634,7 +11743,7 @@ fn repeated_index_cli() -> Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
/// If an index is repeated on the CLI, the last-provided index should take precedence.
|
||||
/// If an index is repeated on the CLI, the first-provided index should take precedence.
|
||||
///
|
||||
/// The index that appears in the `pyproject.toml` should also be consistent with the index that
|
||||
/// appears in the `uv.lock`.
|
||||
|
|
@ -13189,7 +13298,7 @@ fn add_path_with_existing_workspace() -> Result<()> {
|
|||
[tool.uv.workspace]
|
||||
members = [
|
||||
"project",
|
||||
"dep",
|
||||
"dep",
|
||||
]
|
||||
"#
|
||||
);
|
||||
|
|
|
|||
|
|
@ -11088,13 +11088,14 @@ fn lock_editable() -> Result<()> {
|
|||
|
||||
uv_snapshot!(context.filters(), context.lock(), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: Requirements contain conflicting URLs for package `library` in all marker environments:
|
||||
- file://[TEMP_DIR]/library
|
||||
- file://[TEMP_DIR]/library (editable)
|
||||
× Failed to resolve dependencies for `workspace` (v0.1.0)
|
||||
╰─▶ Requirements contain conflicting URLs for package `library` in all marker environments:
|
||||
- file://[TEMP_DIR]/library
|
||||
- file://[TEMP_DIR]/library (editable)
|
||||
");
|
||||
|
||||
Ok(())
|
||||
|
|
@ -18597,6 +18598,42 @@ fn lock_dependency_metadata() -> Result<()> {
|
|||
Removed sniffio v1.3.1
|
||||
"###);
|
||||
|
||||
// Update the static metadata.
|
||||
pyproject_toml.write_str(
|
||||
r#"
|
||||
[project]
|
||||
name = "project"
|
||||
version = "0.1.0"
|
||||
requires-python = ">=3.12"
|
||||
dependencies = ["anyio==3.7.0"]
|
||||
|
||||
[[tool.uv.dependency-metadata]]
|
||||
name = "anyio"
|
||||
version = "3.7.0"
|
||||
requires_dist = ["typing-extensions"]
|
||||
"#,
|
||||
)?;
|
||||
|
||||
// The operation should warn.
|
||||
uv_snapshot!(context.filters(), context.lock(), @r#"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
warning: Failed to parse `pyproject.toml` during settings discovery:
|
||||
TOML parse error at line 11, column 9
|
||||
|
|
||||
11 | requires_dist = ["typing-extensions"]
|
||||
| ^^^^^^^^^^^^^
|
||||
unknown field `requires_dist`, expected one of `name`, `version`, `requires-dist`, `requires-python`, `provides-extras`
|
||||
|
||||
Resolved 4 packages in [TIME]
|
||||
Added idna v3.6
|
||||
Removed iniconfig v2.0.0
|
||||
Added sniffio v1.3.1
|
||||
"#);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
|
@ -18868,7 +18905,7 @@ fn lock_duplicate_sources() -> Result<()> {
|
|||
"#,
|
||||
)?;
|
||||
|
||||
uv_snapshot!(context.filters(), context.lock(), @r###"
|
||||
uv_snapshot!(context.filters(), context.lock(), @r#"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
|
@ -18878,17 +18915,16 @@ fn lock_duplicate_sources() -> Result<()> {
|
|||
TOML parse error at line 9, column 9
|
||||
|
|
||||
9 | python-multipart = { url = "https://files.pythonhosted.org/packages/c0/3e/9fbfd74e7f5b54f653f7ca99d44ceb56e718846920162165061c4c22b71a/python_multipart-0.0.8-py3-none-any.whl" }
|
||||
| ^
|
||||
duplicate key `python-multipart` in table `tool.uv.sources`
|
||||
| ^^^^^^^^^^^^^^^^
|
||||
duplicate key
|
||||
|
||||
error: Failed to parse: `pyproject.toml`
|
||||
Caused by: TOML parse error at line 9, column 9
|
||||
|
|
||||
9 | python-multipart = { url = "https://files.pythonhosted.org/packages/c0/3e/9fbfd74e7f5b54f653f7ca99d44ceb56e718846920162165061c4c22b71a/python_multipart-0.0.8-py3-none-any.whl" }
|
||||
| ^
|
||||
duplicate key `python-multipart` in table `tool.uv.sources`
|
||||
|
||||
"###);
|
||||
| ^^^^^^^^^^^^^^^^
|
||||
duplicate key
|
||||
"#);
|
||||
|
||||
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||
pyproject_toml.write_str(
|
||||
|
|
@ -20693,16 +20729,17 @@ fn lock_multiple_sources_index_overlapping_extras() -> Result<()> {
|
|||
"#,
|
||||
)?;
|
||||
|
||||
uv_snapshot!(context.filters(), context.lock(), @r###"
|
||||
uv_snapshot!(context.filters(), context.lock(), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: Requirements contain conflicting indexes for package `jinja2` in all marker environments:
|
||||
- https://astral-sh.github.io/pytorch-mirror/whl/cu118
|
||||
- https://astral-sh.github.io/pytorch-mirror/whl/cu124
|
||||
"###);
|
||||
× Failed to resolve dependencies for `project` (v0.1.0)
|
||||
╰─▶ Requirements contain conflicting indexes for package `jinja2` in all marker environments:
|
||||
- https://astral-sh.github.io/pytorch-mirror/whl/cu118
|
||||
- https://astral-sh.github.io/pytorch-mirror/whl/cu124
|
||||
");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
@ -28776,8 +28813,7 @@ fn lock_trailing_slash_index_url_in_pyproject_not_index_argument() -> Result<()>
|
|||
let context = TestContext::new("3.12");
|
||||
|
||||
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||
pyproject_toml.write_str(
|
||||
r#"
|
||||
pyproject_toml.write_str(indoc! {r#"
|
||||
[project]
|
||||
name = "project"
|
||||
version = "0.1.0"
|
||||
|
|
@ -28787,8 +28823,7 @@ fn lock_trailing_slash_index_url_in_pyproject_not_index_argument() -> Result<()>
|
|||
[[tool.uv.index]]
|
||||
name = "pypi-proxy"
|
||||
url = "https://pypi-proxy.fly.dev/simple/"
|
||||
"#,
|
||||
)?;
|
||||
"#})?;
|
||||
|
||||
let no_trailing_slash_url = "https://pypi-proxy.fly.dev/simple";
|
||||
|
||||
|
|
@ -28806,6 +28841,28 @@ fn lock_trailing_slash_index_url_in_pyproject_not_index_argument() -> Result<()>
|
|||
+ sniffio==1.3.1
|
||||
");
|
||||
|
||||
let pyproject_toml = context.read("pyproject.toml");
|
||||
|
||||
insta::with_settings!({
|
||||
filters => context.filters(),
|
||||
}, {
|
||||
assert_snapshot!(
|
||||
pyproject_toml, @r#"
|
||||
[project]
|
||||
name = "project"
|
||||
version = "0.1.0"
|
||||
requires-python = ">=3.12"
|
||||
dependencies = [
|
||||
"anyio>=4.3.0",
|
||||
]
|
||||
|
||||
[[tool.uv.index]]
|
||||
name = "pypi-proxy"
|
||||
url = "https://pypi-proxy.fly.dev/simple"
|
||||
"#
|
||||
);
|
||||
});
|
||||
|
||||
let lock = context.read("uv.lock");
|
||||
|
||||
insta::with_settings!({
|
||||
|
|
@ -28867,13 +28924,12 @@ fn lock_trailing_slash_index_url_in_pyproject_not_index_argument() -> Result<()>
|
|||
|
||||
// Re-run with `--locked`.
|
||||
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Resolved 4 packages in [TIME]
|
||||
The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
|
||||
");
|
||||
|
||||
Ok(())
|
||||
|
|
|
|||
|
|
@ -12186,7 +12186,7 @@ requires-python = ">3.8"
|
|||
fn prerelease_path_requirement() -> Result<()> {
|
||||
let context = TestContext::new("3.12");
|
||||
|
||||
// Create an a package that requires a pre-release version of `flask`.
|
||||
// Create a package that requires a pre-release version of `flask`.
|
||||
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||
pyproject_toml.write_str(
|
||||
r#"[project]
|
||||
|
|
@ -12240,7 +12240,7 @@ requires-python = ">3.8"
|
|||
fn prerelease_editable_requirement() -> Result<()> {
|
||||
let context = TestContext::new("3.12");
|
||||
|
||||
// Create an a package that requires a pre-release version of `flask`.r
|
||||
// Create a package that requires a pre-release version of `flask`.r
|
||||
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||
pyproject_toml.write_str(
|
||||
r#"[project]
|
||||
|
|
@ -14860,16 +14860,17 @@ fn universal_conflicting_override_urls() -> Result<()> {
|
|||
.arg("requirements.in")
|
||||
.arg("--overrides")
|
||||
.arg("overrides.txt")
|
||||
.arg("--universal"), @r###"
|
||||
.arg("--universal"), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: Requirements contain conflicting URLs for package `sniffio` in split `sys_platform == 'win32'`:
|
||||
- https://files.pythonhosted.org/packages/c3/a0/5dba8ed157b0136607c7f2151db695885606968d1fae123dc3391e0cfdbf/sniffio-1.3.0-py3-none-any.whl
|
||||
- https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl
|
||||
"###
|
||||
× Failed to resolve dependencies for `anyio` (v4.3.0)
|
||||
╰─▶ Requirements contain conflicting URLs for package `sniffio` in split `sys_platform == 'win32'`:
|
||||
- https://files.pythonhosted.org/packages/c3/a0/5dba8ed157b0136607c7f2151db695885606968d1fae123dc3391e0cfdbf/sniffio-1.3.0-py3-none-any.whl
|
||||
- https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl
|
||||
"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
|
|
|||
|
|
@ -118,7 +118,7 @@ fn invalid_pyproject_toml_syntax() -> Result<()> {
|
|||
|
||||
uv_snapshot!(context.pip_install()
|
||||
.arg("-r")
|
||||
.arg("pyproject.toml"), @r###"
|
||||
.arg("pyproject.toml"), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
|
@ -129,16 +129,15 @@ fn invalid_pyproject_toml_syntax() -> Result<()> {
|
|||
|
|
||||
1 | 123 - 456
|
||||
| ^
|
||||
expected `.`, `=`
|
||||
key with no value, expected `=`
|
||||
|
||||
error: Failed to parse: `pyproject.toml`
|
||||
Caused by: TOML parse error at line 1, column 5
|
||||
|
|
||||
1 | 123 - 456
|
||||
| ^
|
||||
expected `.`, `=`
|
||||
|
||||
"###
|
||||
key with no value, expected `=`
|
||||
"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
|
@ -1298,27 +1297,27 @@ fn install_extras() -> Result<()> {
|
|||
uv_snapshot!(context.filters(), context.pip_install()
|
||||
.arg("--all-extras")
|
||||
.arg("-e")
|
||||
.arg(context.workspace_root.join("scripts/packages/poetry_editable")), @r###"
|
||||
.arg(context.workspace_root.join("scripts/packages/poetry_editable")), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: Requesting extras requires a `pyproject.toml`, `setup.cfg`, or `setup.py` file. Use `<dir>[extra]` syntax or `-r <file>` instead.
|
||||
"###
|
||||
error: Requesting extras requires a `pylock.toml`, `pyproject.toml`, `setup.cfg`, or `setup.py` file. Use `<dir>[extra]` syntax or `-r <file>` instead.
|
||||
"
|
||||
);
|
||||
|
||||
// Request extras for a source tree
|
||||
uv_snapshot!(context.filters(), context.pip_install()
|
||||
.arg("--all-extras")
|
||||
.arg(context.workspace_root.join("scripts/packages/poetry_editable")), @r###"
|
||||
.arg(context.workspace_root.join("scripts/packages/poetry_editable")), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: Requesting extras requires a `pyproject.toml`, `setup.cfg`, or `setup.py` file. Use `package[extra]` syntax instead.
|
||||
"###
|
||||
error: Requesting extras requires a `pylock.toml`, `pyproject.toml`, `setup.cfg`, or `setup.py` file. Use `package[extra]` syntax instead.
|
||||
"
|
||||
);
|
||||
|
||||
let requirements_txt = context.temp_dir.child("requirements.txt");
|
||||
|
|
@ -1327,14 +1326,14 @@ fn install_extras() -> Result<()> {
|
|||
// Request extras for a requirements file
|
||||
uv_snapshot!(context.filters(), context.pip_install()
|
||||
.arg("--all-extras")
|
||||
.arg("-r").arg("requirements.txt"), @r###"
|
||||
.arg("-r").arg("requirements.txt"), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: Requesting extras requires a `pyproject.toml`, `setup.cfg`, or `setup.py` file. Use `package[extra]` syntax instead.
|
||||
"###
|
||||
error: Requesting extras requires a `pylock.toml`, `pyproject.toml`, `setup.cfg`, or `setup.py` file. Use `package[extra]` syntax instead.
|
||||
"
|
||||
);
|
||||
|
||||
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||
|
|
@ -11392,6 +11391,250 @@ fn pep_751_multiple_sources() -> Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn pep_751_groups() -> Result<()> {
|
||||
let context = TestContext::new("3.13");
|
||||
|
||||
let pylock_toml = context.temp_dir.child("pylock.toml");
|
||||
pylock_toml.write_str(
|
||||
r#"
|
||||
lock-version = "1.0"
|
||||
requires-python = "==3.13.*"
|
||||
environments = [
|
||||
"python_version == \"3.13\"",
|
||||
]
|
||||
extras = ["async", "dev"]
|
||||
dependency-groups = ["default", "test"]
|
||||
default-groups = ["default"]
|
||||
created-by = "pdm"
|
||||
[[packages]]
|
||||
name = "anyio"
|
||||
version = "4.9.0"
|
||||
requires-python = ">=3.9"
|
||||
sdist = {name = "anyio-4.9.0.tar.gz", url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hashes = {sha256 = "673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}}
|
||||
wheels = [
|
||||
{name = "anyio-4.9.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl",hashes = {sha256 = "9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}},
|
||||
]
|
||||
marker = "\"async\" in extras"
|
||||
|
||||
[packages.tool.pdm]
|
||||
dependencies = [
|
||||
"exceptiongroup>=1.0.2; python_version < \"3.11\"",
|
||||
"idna>=2.8",
|
||||
"sniffio>=1.1",
|
||||
"typing-extensions>=4.5; python_version < \"3.13\"",
|
||||
]
|
||||
|
||||
[[packages]]
|
||||
name = "blinker"
|
||||
version = "1.9.0"
|
||||
requires-python = ">=3.9"
|
||||
sdist = {name = "blinker-1.9.0.tar.gz", url = "https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hashes = {sha256 = "b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf"}}
|
||||
wheels = [
|
||||
{name = "blinker-1.9.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl",hashes = {sha256 = "ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc"}},
|
||||
]
|
||||
marker = "\"dev\" in extras"
|
||||
|
||||
[packages.tool.pdm]
|
||||
dependencies = []
|
||||
|
||||
[[packages]]
|
||||
name = "idna"
|
||||
version = "3.10"
|
||||
requires-python = ">=3.6"
|
||||
sdist = {name = "idna-3.10.tar.gz", url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hashes = {sha256 = "12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}}
|
||||
wheels = [
|
||||
{name = "idna-3.10-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl",hashes = {sha256 = "946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}},
|
||||
]
|
||||
marker = "\"async\" in extras"
|
||||
|
||||
[packages.tool.pdm]
|
||||
dependencies = []
|
||||
|
||||
[[packages]]
|
||||
name = "iniconfig"
|
||||
version = "2.1.0"
|
||||
requires-python = ">=3.8"
|
||||
sdist = {name = "iniconfig-2.1.0.tar.gz", url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hashes = {sha256 = "3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}}
|
||||
wheels = [
|
||||
{name = "iniconfig-2.1.0-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl",hashes = {sha256 = "9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}},
|
||||
]
|
||||
marker = "\"default\" in dependency_groups"
|
||||
|
||||
[packages.tool.pdm]
|
||||
dependencies = []
|
||||
|
||||
[[packages]]
|
||||
name = "pygments"
|
||||
version = "2.19.2"
|
||||
requires-python = ">=3.8"
|
||||
sdist = {name = "pygments-2.19.2.tar.gz", url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hashes = {sha256 = "636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}}
|
||||
wheels = [
|
||||
{name = "pygments-2.19.2-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl",hashes = {sha256 = "86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}},
|
||||
]
|
||||
marker = "\"test\" in dependency_groups"
|
||||
|
||||
[packages.tool.pdm]
|
||||
dependencies = []
|
||||
|
||||
[[packages]]
|
||||
name = "sniffio"
|
||||
version = "1.3.1"
|
||||
requires-python = ">=3.7"
|
||||
sdist = {name = "sniffio-1.3.1.tar.gz", url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hashes = {sha256 = "f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}}
|
||||
wheels = [
|
||||
{name = "sniffio-1.3.1-py3-none-any.whl",url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl",hashes = {sha256 = "2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}},
|
||||
]
|
||||
marker = "\"async\" in extras"
|
||||
|
||||
[packages.tool.pdm]
|
||||
dependencies = []
|
||||
|
||||
[tool.pdm]
|
||||
hashes = {sha256 = "51795362d337720c28bd6c3a26eb33751f2b69590261f599ffb4172ee2c441c6"}
|
||||
|
||||
[[tool.pdm.targets]]
|
||||
requires_python = "==3.13.*"
|
||||
"#,
|
||||
)?;
|
||||
|
||||
// By default, only `iniconfig` should be installed, since it's in the default group.
|
||||
uv_snapshot!(context.filters(), context.pip_install()
|
||||
.arg("--preview")
|
||||
.arg("-r")
|
||||
.arg("pylock.toml"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Prepared 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
+ iniconfig==2.1.0
|
||||
"
|
||||
);
|
||||
|
||||
// With `--extra async`, `anyio` should be installed.
|
||||
uv_snapshot!(context.filters(), context.pip_install()
|
||||
.arg("--preview")
|
||||
.arg("-r")
|
||||
.arg("pylock.toml")
|
||||
.arg("--extra")
|
||||
.arg("async"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Prepared 3 packages in [TIME]
|
||||
Installed 3 packages in [TIME]
|
||||
+ anyio==4.9.0
|
||||
+ idna==3.10
|
||||
+ sniffio==1.3.1
|
||||
"
|
||||
);
|
||||
|
||||
// With `--group test`, `pygments` should be installed.
|
||||
uv_snapshot!(context.filters(), context.pip_install()
|
||||
.arg("--preview")
|
||||
.arg("-r")
|
||||
.arg("pylock.toml")
|
||||
.arg("--group")
|
||||
.arg("test"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Prepared 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
+ pygments==2.19.2
|
||||
"
|
||||
);
|
||||
|
||||
// With `--all-extras`, `blinker` should be installed.
|
||||
uv_snapshot!(context.filters(), context.pip_install()
|
||||
.arg("--preview")
|
||||
.arg("-r")
|
||||
.arg("pylock.toml")
|
||||
.arg("--all-extras"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Prepared 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
+ blinker==1.9.0
|
||||
"
|
||||
);
|
||||
|
||||
// `--group pylock.toml:test` should be rejeceted.
|
||||
uv_snapshot!(context.filters(), context.pip_install()
|
||||
.arg("--preview")
|
||||
.arg("-r")
|
||||
.arg("pylock.toml")
|
||||
.arg("--group")
|
||||
.arg("pylock.toml:test"), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: invalid value 'pylock.toml:test' for '--group <GROUP>': The `--group` path is required to end in 'pyproject.toml' for compatibility with pip; got: pylock.toml
|
||||
|
||||
For more information, try '--help'.
|
||||
"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn pep_751_requires_python() -> Result<()> {
|
||||
let context = TestContext::new_with_versions(&["3.12", "3.13"]);
|
||||
|
||||
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||
pyproject_toml.write_str(
|
||||
r#"
|
||||
[project]
|
||||
name = "project"
|
||||
version = "0.1.0"
|
||||
requires-python = ">=3.13"
|
||||
dependencies = ["iniconfig"]
|
||||
"#,
|
||||
)?;
|
||||
|
||||
context
|
||||
.export()
|
||||
.arg("-o")
|
||||
.arg("pylock.toml")
|
||||
.assert()
|
||||
.success();
|
||||
|
||||
context
|
||||
.venv()
|
||||
.arg("--python")
|
||||
.arg("3.12")
|
||||
.assert()
|
||||
.success();
|
||||
|
||||
uv_snapshot!(context.filters(), context.pip_install()
|
||||
.arg("--preview")
|
||||
.arg("-r")
|
||||
.arg("pylock.toml"), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: The requested interpreter resolved to Python 3.12.[X], which is incompatible with the `pylock.toml`'s Python requirement: `>=3.13`
|
||||
"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test that uv doesn't hang if an index returns a distribution for the wrong package.
|
||||
#[tokio::test]
|
||||
async fn bogus_redirect() -> Result<()> {
|
||||
|
|
|
|||
|
|
@ -1087,6 +1087,65 @@ fn python_install_freethreaded() {
|
|||
----- stderr -----
|
||||
"###);
|
||||
|
||||
// Create a virtual environment with the freethreaded Python
|
||||
uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.13t"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Using CPython 3.13.5
|
||||
Creating virtual environment at: .venv
|
||||
Activate with: source .venv/[BIN]/activate
|
||||
");
|
||||
|
||||
// `python`, `python3`, `python3.13`, and `python3.13t` should all be present
|
||||
let scripts = context
|
||||
.venv
|
||||
.join(if cfg!(windows) { "Scripts" } else { "bin" });
|
||||
assert!(
|
||||
scripts
|
||||
.join(format!("python{}", std::env::consts::EXE_SUFFIX))
|
||||
.exists()
|
||||
);
|
||||
|
||||
#[cfg(windows)]
|
||||
assert!(
|
||||
scripts
|
||||
.join(format!("pythonw{}", std::env::consts::EXE_SUFFIX))
|
||||
.exists()
|
||||
);
|
||||
|
||||
#[cfg(unix)]
|
||||
assert!(
|
||||
scripts
|
||||
.join(format!("python3{}", std::env::consts::EXE_SUFFIX))
|
||||
.exists()
|
||||
);
|
||||
|
||||
#[cfg(unix)]
|
||||
assert!(
|
||||
scripts
|
||||
.join(format!("python3.13{}", std::env::consts::EXE_SUFFIX))
|
||||
.exists()
|
||||
);
|
||||
|
||||
assert!(
|
||||
scripts
|
||||
.join(format!("python3.13t{}", std::env::consts::EXE_SUFFIX))
|
||||
.exists()
|
||||
);
|
||||
|
||||
#[cfg(windows)]
|
||||
assert!(
|
||||
scripts
|
||||
.join(format!("pythonw3.13t{}", std::env::consts::EXE_SUFFIX))
|
||||
.exists()
|
||||
);
|
||||
|
||||
// Remove the virtual environment
|
||||
fs_err::remove_dir_all(&context.venv).unwrap();
|
||||
|
||||
// Should be distinct from 3.13
|
||||
uv_snapshot!(context.filters(), context.python_install().arg("3.13"), @r"
|
||||
success: true
|
||||
|
|
@ -1099,14 +1158,14 @@ fn python_install_freethreaded() {
|
|||
");
|
||||
|
||||
// Should not work with older Python versions
|
||||
uv_snapshot!(context.filters(), context.python_install().arg("3.12t"), @r###"
|
||||
uv_snapshot!(context.filters(), context.python_install().arg("3.12t"), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: No download found for request: cpython-3.12t-[PLATFORM]
|
||||
"###);
|
||||
");
|
||||
|
||||
uv_snapshot!(context.filters(), context.python_uninstall().arg("--all"), @r"
|
||||
success: true
|
||||
|
|
@ -1996,8 +2055,8 @@ fn python_install_314() {
|
|||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Installed Python 3.14.0b4 in [TIME]
|
||||
+ cpython-3.14.0b4-[PLATFORM] (python3.14)
|
||||
Installed Python 3.14.0rc1 in [TIME]
|
||||
+ cpython-3.14.0rc1-[PLATFORM] (python3.14)
|
||||
");
|
||||
|
||||
// Install a specific pre-release
|
||||
|
|
@ -2028,7 +2087,7 @@ fn python_install_314() {
|
|||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
[TEMP_DIR]/managed/cpython-3.14.0b4-[PLATFORM]/[INSTALL-BIN]/[PYTHON]
|
||||
[TEMP_DIR]/managed/cpython-3.14.0rc1-[PLATFORM]/[INSTALL-BIN]/[PYTHON]
|
||||
|
||||
----- stderr -----
|
||||
");
|
||||
|
|
@ -2038,7 +2097,7 @@ fn python_install_314() {
|
|||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
[TEMP_DIR]/managed/cpython-3.14.0b4-[PLATFORM]/[INSTALL-BIN]/[PYTHON]
|
||||
[TEMP_DIR]/managed/cpython-3.14.0rc1-[PLATFORM]/[INSTALL-BIN]/[PYTHON]
|
||||
|
||||
----- stderr -----
|
||||
");
|
||||
|
|
@ -2047,7 +2106,7 @@ fn python_install_314() {
|
|||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
[TEMP_DIR]/managed/cpython-3.14.0b4-[PLATFORM]/[INSTALL-BIN]/[PYTHON]
|
||||
[TEMP_DIR]/managed/cpython-3.14.0rc1-[PLATFORM]/[INSTALL-BIN]/[PYTHON]
|
||||
|
||||
----- stderr -----
|
||||
");
|
||||
|
|
|
|||
|
|
@ -1319,6 +1319,181 @@ fn run_with_pyvenv_cfg_file() -> Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn run_with_overlay_interpreter() -> Result<()> {
|
||||
let context = TestContext::new("3.12").with_filtered_exe_suffix();
|
||||
|
||||
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||
pyproject_toml.write_str(indoc! { r#"
|
||||
[project]
|
||||
name = "foo"
|
||||
version = "1.0.0"
|
||||
requires-python = ">=3.8"
|
||||
dependencies = ["anyio"]
|
||||
|
||||
[build-system]
|
||||
requires = ["setuptools>=42"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project.scripts]
|
||||
main = "foo:main"
|
||||
"#
|
||||
})?;
|
||||
|
||||
let foo = context.temp_dir.child("src").child("foo");
|
||||
foo.create_dir_all()?;
|
||||
let init_py = foo.child("__init__.py");
|
||||
init_py.write_str(indoc! { r#"
|
||||
import sys
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
def show_python():
|
||||
print(sys.executable)
|
||||
|
||||
def copy_entrypoint():
|
||||
base = Path(sys.executable)
|
||||
shutil.copyfile(base.with_name("main").with_suffix(base.suffix), sys.argv[1])
|
||||
|
||||
def main():
|
||||
show_python()
|
||||
if len(sys.argv) > 1:
|
||||
copy_entrypoint()
|
||||
"#
|
||||
})?;
|
||||
|
||||
// The project's entrypoint should be rewritten to use the overlay interpreter.
|
||||
uv_snapshot!(context.filters(), context.run().arg("--with").arg("iniconfig").arg("main").arg(context.temp_dir.child("main").as_os_str()), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
[CACHE_DIR]/builds-v0/[TMP]/python
|
||||
|
||||
----- stderr -----
|
||||
Resolved 6 packages in [TIME]
|
||||
Prepared 4 packages in [TIME]
|
||||
Installed 4 packages in [TIME]
|
||||
+ anyio==4.3.0
|
||||
+ foo==1.0.0 (from file://[TEMP_DIR]/)
|
||||
+ idna==3.6
|
||||
+ sniffio==1.3.1
|
||||
Resolved 1 package in [TIME]
|
||||
Prepared 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
+ iniconfig==2.0.0
|
||||
");
|
||||
|
||||
#[cfg(unix)]
|
||||
insta::with_settings!({
|
||||
filters => context.filters(),
|
||||
}, {
|
||||
assert_snapshot!(
|
||||
context.read("main"), @r##"
|
||||
#![CACHE_DIR]/builds-v0/[TMP]/python
|
||||
# -*- coding: utf-8 -*-
|
||||
import sys
|
||||
from foo import main
|
||||
if __name__ == "__main__":
|
||||
if sys.argv[0].endswith("-script.pyw"):
|
||||
sys.argv[0] = sys.argv[0][:-11]
|
||||
elif sys.argv[0].endswith(".exe"):
|
||||
sys.argv[0] = sys.argv[0][:-4]
|
||||
sys.exit(main())
|
||||
"##
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
// The package, its dependencies, and the overlay dependencies should be available.
|
||||
context
|
||||
.run()
|
||||
.arg("--with")
|
||||
.arg("iniconfig")
|
||||
.arg("python")
|
||||
.arg("-c")
|
||||
.arg("import foo; import anyio; import iniconfig")
|
||||
.assert()
|
||||
.success();
|
||||
|
||||
// When layering the project on top (via `--with`), the overlay interpreter also should be used.
|
||||
uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("--with").arg(".").arg("main"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
[CACHE_DIR]/builds-v0/[TMP]/python
|
||||
|
||||
----- stderr -----
|
||||
Resolved 4 packages in [TIME]
|
||||
Prepared 1 package in [TIME]
|
||||
Installed 4 packages in [TIME]
|
||||
+ anyio==4.3.0
|
||||
+ foo==1.0.0 (from file://[TEMP_DIR]/)
|
||||
+ idna==3.6
|
||||
+ sniffio==1.3.1
|
||||
");
|
||||
|
||||
// Switch to a relocatable virtual environment.
|
||||
context.venv().arg("--relocatable").assert().success();
|
||||
|
||||
// The project's entrypoint should be rewritten to use the overlay interpreter.
|
||||
uv_snapshot!(context.filters(), context.run().arg("--with").arg("iniconfig").arg("main").arg(context.temp_dir.child("main").as_os_str()), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
[CACHE_DIR]/builds-v0/[TMP]/python
|
||||
|
||||
----- stderr -----
|
||||
Resolved 6 packages in [TIME]
|
||||
Audited 4 packages in [TIME]
|
||||
Resolved 1 package in [TIME]
|
||||
");
|
||||
|
||||
// The package, its dependencies, and the overlay dependencies should be available.
|
||||
context
|
||||
.run()
|
||||
.arg("--with")
|
||||
.arg("iniconfig")
|
||||
.arg("python")
|
||||
.arg("-c")
|
||||
.arg("import foo; import anyio; import iniconfig")
|
||||
.assert()
|
||||
.success();
|
||||
|
||||
#[cfg(unix)]
|
||||
insta::with_settings!({
|
||||
filters => context.filters(),
|
||||
}, {
|
||||
assert_snapshot!(
|
||||
context.read("main"), @r##"
|
||||
#![CACHE_DIR]/builds-v0/[TMP]/python
|
||||
# -*- coding: utf-8 -*-
|
||||
import sys
|
||||
from foo import main
|
||||
if __name__ == "__main__":
|
||||
if sys.argv[0].endswith("-script.pyw"):
|
||||
sys.argv[0] = sys.argv[0][:-11]
|
||||
elif sys.argv[0].endswith(".exe"):
|
||||
sys.argv[0] = sys.argv[0][:-4]
|
||||
sys.exit(main())
|
||||
"##
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
// When layering the project on top (via `--with`), the overlay interpreter also should be used.
|
||||
uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("--with").arg(".").arg("main"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
[CACHE_DIR]/builds-v0/[TMP]/python
|
||||
|
||||
----- stderr -----
|
||||
Resolved 4 packages in [TIME]
|
||||
");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn run_with_build_constraints() -> Result<()> {
|
||||
let context = TestContext::new("3.9");
|
||||
|
|
|
|||
|
|
@ -3530,6 +3530,8 @@ fn resolve_poetry_toml() -> anyhow::Result<()> {
|
|||
}
|
||||
|
||||
/// Read from both a `uv.toml` and `pyproject.toml` file in the current directory.
|
||||
///
|
||||
/// Some fields in `[tool.uv]` are masked by `uv.toml` being defined, and should be warned about.
|
||||
#[test]
|
||||
#[cfg_attr(
|
||||
windows,
|
||||
|
|
@ -3554,6 +3556,10 @@ fn resolve_both() -> anyhow::Result<()> {
|
|||
name = "example"
|
||||
version = "0.0.0"
|
||||
|
||||
[tool.uv]
|
||||
offline = true
|
||||
dev-dependencies = ["pytest"]
|
||||
|
||||
[tool.uv.pip]
|
||||
resolution = "highest"
|
||||
extra-index-url = ["https://test.pypi.org/simple"]
|
||||
|
|
@ -3744,7 +3750,236 @@ fn resolve_both() -> anyhow::Result<()> {
|
|||
}
|
||||
|
||||
----- stderr -----
|
||||
warning: Found both a `uv.toml` file and a `[tool.uv]` section in an adjacent `pyproject.toml`. The `[tool.uv]` section will be ignored in favor of the `uv.toml` file.
|
||||
warning: Found both a `uv.toml` file and a `[tool.uv]` section in an adjacent `pyproject.toml`. The following fields from `[tool.uv]` will be ignored in favor of the `uv.toml` file:
|
||||
- offline
|
||||
- pip
|
||||
"#
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Read from both a `uv.toml` and `pyproject.toml` file in the current directory.
|
||||
///
|
||||
/// But the fields `[tool.uv]` defines aren't allowed in `uv.toml` so there's no warning.
|
||||
#[test]
|
||||
#[cfg_attr(
|
||||
windows,
|
||||
ignore = "Configuration tests are not yet supported on Windows"
|
||||
)]
|
||||
fn resolve_both_special_fields() -> anyhow::Result<()> {
|
||||
let context = TestContext::new("3.12");
|
||||
|
||||
// Write a `uv.toml` file to the directory.
|
||||
let config = context.temp_dir.child("uv.toml");
|
||||
config.write_str(indoc::indoc! {r#"
|
||||
[pip]
|
||||
resolution = "lowest-direct"
|
||||
generate-hashes = true
|
||||
index-url = "https://pypi.org/simple"
|
||||
"#})?;
|
||||
|
||||
// Write a `pyproject.toml` file to the directory
|
||||
let config = context.temp_dir.child("pyproject.toml");
|
||||
config.write_str(indoc::indoc! {r#"
|
||||
[project]
|
||||
name = "example"
|
||||
version = "0.0.0"
|
||||
|
||||
[dependency-groups]
|
||||
mygroup = ["iniconfig"]
|
||||
|
||||
[tool.uv]
|
||||
dev-dependencies = ["pytest"]
|
||||
|
||||
[tool.uv.dependency-groups]
|
||||
mygroup = {requires-python = ">=3.12"}
|
||||
"#})?;
|
||||
|
||||
let requirements_in = context.temp_dir.child("requirements.in");
|
||||
requirements_in.write_str("anyio>3.0.0")?;
|
||||
|
||||
// Resolution should succeed, but warn that the `pip` section in `pyproject.toml` is ignored.
|
||||
uv_snapshot!(context.filters(), add_shared_args(context.pip_compile(), context.temp_dir.path())
|
||||
.arg("--show-settings")
|
||||
.arg("requirements.in"), @r#"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
GlobalSettings {
|
||||
required_version: None,
|
||||
quiet: 0,
|
||||
verbose: 0,
|
||||
color: Auto,
|
||||
network_settings: NetworkSettings {
|
||||
connectivity: Online,
|
||||
native_tls: false,
|
||||
allow_insecure_host: [],
|
||||
},
|
||||
concurrency: Concurrency {
|
||||
downloads: 50,
|
||||
builds: 16,
|
||||
installs: 8,
|
||||
},
|
||||
show_settings: true,
|
||||
preview: Disabled,
|
||||
python_preference: Managed,
|
||||
python_downloads: Automatic,
|
||||
no_progress: false,
|
||||
installer_metadata: true,
|
||||
}
|
||||
CacheSettings {
|
||||
no_cache: false,
|
||||
cache_dir: Some(
|
||||
"[CACHE_DIR]/",
|
||||
),
|
||||
}
|
||||
PipCompileSettings {
|
||||
format: None,
|
||||
src_file: [
|
||||
"requirements.in",
|
||||
],
|
||||
constraints: [],
|
||||
overrides: [],
|
||||
build_constraints: [],
|
||||
constraints_from_workspace: [],
|
||||
overrides_from_workspace: [],
|
||||
build_constraints_from_workspace: [],
|
||||
environments: SupportedEnvironments(
|
||||
[],
|
||||
),
|
||||
refresh: None(
|
||||
Timestamp(
|
||||
SystemTime {
|
||||
tv_sec: [TIME],
|
||||
tv_nsec: [TIME],
|
||||
},
|
||||
),
|
||||
),
|
||||
settings: PipSettings {
|
||||
index_locations: IndexLocations {
|
||||
indexes: [
|
||||
Index {
|
||||
name: None,
|
||||
url: Pypi(
|
||||
VerbatimUrl {
|
||||
url: DisplaySafeUrl {
|
||||
scheme: "https",
|
||||
cannot_be_a_base: false,
|
||||
username: "",
|
||||
password: None,
|
||||
host: Some(
|
||||
Domain(
|
||||
"pypi.org",
|
||||
),
|
||||
),
|
||||
port: None,
|
||||
path: "/simple",
|
||||
query: None,
|
||||
fragment: None,
|
||||
},
|
||||
given: Some(
|
||||
"https://pypi.org/simple",
|
||||
),
|
||||
},
|
||||
),
|
||||
explicit: false,
|
||||
default: true,
|
||||
origin: None,
|
||||
format: Simple,
|
||||
publish_url: None,
|
||||
authenticate: Auto,
|
||||
ignore_error_codes: None,
|
||||
cache_control: None,
|
||||
},
|
||||
],
|
||||
flat_index: [],
|
||||
no_index: false,
|
||||
},
|
||||
python: None,
|
||||
install_mirrors: PythonInstallMirrors {
|
||||
python_install_mirror: None,
|
||||
pypy_install_mirror: None,
|
||||
python_downloads_json_url: None,
|
||||
},
|
||||
system: false,
|
||||
extras: ExtrasSpecification(
|
||||
ExtrasSpecificationInner {
|
||||
include: Some(
|
||||
[],
|
||||
),
|
||||
exclude: [],
|
||||
only_extras: false,
|
||||
history: ExtrasSpecificationHistory {
|
||||
extra: [],
|
||||
only_extra: [],
|
||||
no_extra: [],
|
||||
all_extras: false,
|
||||
no_default_extras: false,
|
||||
defaults: List(
|
||||
[],
|
||||
),
|
||||
},
|
||||
},
|
||||
),
|
||||
groups: [],
|
||||
break_system_packages: false,
|
||||
target: None,
|
||||
prefix: None,
|
||||
index_strategy: FirstIndex,
|
||||
keyring_provider: Disabled,
|
||||
torch_backend: None,
|
||||
no_build_isolation: false,
|
||||
no_build_isolation_package: [],
|
||||
build_options: BuildOptions {
|
||||
no_binary: None,
|
||||
no_build: None,
|
||||
},
|
||||
allow_empty_requirements: false,
|
||||
strict: false,
|
||||
dependency_mode: Transitive,
|
||||
resolution: LowestDirect,
|
||||
prerelease: IfNecessaryOrExplicit,
|
||||
fork_strategy: RequiresPython,
|
||||
dependency_metadata: DependencyMetadata(
|
||||
{},
|
||||
),
|
||||
output_file: None,
|
||||
no_strip_extras: false,
|
||||
no_strip_markers: false,
|
||||
no_annotate: false,
|
||||
no_header: false,
|
||||
custom_compile_command: None,
|
||||
generate_hashes: true,
|
||||
config_setting: ConfigSettings(
|
||||
{},
|
||||
),
|
||||
config_settings_package: PackageConfigSettings(
|
||||
{},
|
||||
),
|
||||
python_version: None,
|
||||
python_platform: None,
|
||||
universal: false,
|
||||
exclude_newer: None,
|
||||
no_emit_package: [],
|
||||
emit_index_url: false,
|
||||
emit_find_links: false,
|
||||
emit_build_options: false,
|
||||
emit_marker_expression: false,
|
||||
emit_index_annotation: false,
|
||||
annotation_style: Split,
|
||||
link_mode: Clone,
|
||||
compile_bytecode: false,
|
||||
sources: Enabled,
|
||||
hash_checking: Some(
|
||||
Verify,
|
||||
),
|
||||
upgrade: None,
|
||||
reinstall: None,
|
||||
},
|
||||
}
|
||||
|
||||
----- stderr -----
|
||||
"#
|
||||
);
|
||||
|
||||
|
|
@ -4109,7 +4344,7 @@ fn resolve_config_file() -> anyhow::Result<()> {
|
|||
.arg("--show-settings")
|
||||
.arg("--config-file")
|
||||
.arg(config.path())
|
||||
.arg("requirements.in"), @r###"
|
||||
.arg("requirements.in"), @r#"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
|
@ -4121,9 +4356,8 @@ fn resolve_config_file() -> anyhow::Result<()> {
|
|||
|
|
||||
9 | ""
|
||||
| ^
|
||||
expected `.`, `=`
|
||||
|
||||
"###
|
||||
key with no value, expected `=`
|
||||
"#
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
|
|
|||
|
|
@ -60,14 +60,14 @@ fn locked() -> Result<()> {
|
|||
)?;
|
||||
|
||||
// Running with `--locked` should error, if no lockfile is present.
|
||||
uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r"
|
||||
uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r###"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: Unable to find lockfile at `uv.lock`. To create a lockfile, run `uv lock` or `uv sync`.
|
||||
");
|
||||
"###);
|
||||
|
||||
// Lock the initial requirements.
|
||||
context.lock().assert().success();
|
||||
|
|
@ -86,7 +86,7 @@ fn locked() -> Result<()> {
|
|||
)?;
|
||||
|
||||
// Running with `--locked` should error.
|
||||
uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r"
|
||||
uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
|
@ -94,7 +94,7 @@ fn locked() -> Result<()> {
|
|||
----- stderr -----
|
||||
Resolved 2 packages in [TIME]
|
||||
The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
|
||||
");
|
||||
"###);
|
||||
|
||||
let updated = context.read("uv.lock");
|
||||
|
||||
|
|
@ -120,14 +120,14 @@ fn frozen() -> Result<()> {
|
|||
)?;
|
||||
|
||||
// Running with `--frozen` should error, if no lockfile is present.
|
||||
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r"
|
||||
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: Unable to find lockfile at `uv.lock`. To create a lockfile, run `uv lock` or `uv sync`.
|
||||
");
|
||||
"###);
|
||||
|
||||
context.lock().assert().success();
|
||||
|
||||
|
|
@ -422,7 +422,7 @@ fn sync_json() -> Result<()> {
|
|||
|
||||
uv_snapshot!(context.filters(), context.sync()
|
||||
.arg("--locked")
|
||||
.arg("--output-format").arg("json"), @r"
|
||||
.arg("--output-format").arg("json"), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
|
@ -430,7 +430,47 @@ fn sync_json() -> Result<()> {
|
|||
----- stderr -----
|
||||
Resolved 2 packages in [TIME]
|
||||
The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
|
||||
");
|
||||
"###);
|
||||
|
||||
// Test that JSON output is shown even with --quiet flag
|
||||
uv_snapshot!(context.filters(), context.sync()
|
||||
.arg("--quiet")
|
||||
.arg("--frozen")
|
||||
.arg("--output-format").arg("json"), @r#"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
{
|
||||
"schema": {
|
||||
"version": "preview"
|
||||
},
|
||||
"target": "project",
|
||||
"project": {
|
||||
"path": "[TEMP_DIR]/",
|
||||
"workspace": {
|
||||
"path": "[TEMP_DIR]/"
|
||||
}
|
||||
},
|
||||
"sync": {
|
||||
"environment": {
|
||||
"path": "[VENV]/",
|
||||
"python": {
|
||||
"path": "[VENV]/[BIN]/[PYTHON]",
|
||||
"version": "3.12.[X]",
|
||||
"implementation": "cpython"
|
||||
}
|
||||
},
|
||||
"action": "check"
|
||||
},
|
||||
"lock": {
|
||||
"path": "[TEMP_DIR]/uv.lock",
|
||||
"action": "use"
|
||||
},
|
||||
"dry_run": false
|
||||
}
|
||||
|
||||
----- stderr -----
|
||||
"#);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
@ -892,7 +932,7 @@ fn check() -> Result<()> {
|
|||
)?;
|
||||
|
||||
// Running `uv sync --check` should fail.
|
||||
uv_snapshot!(context.filters(), context.sync().arg("--check"), @r"
|
||||
uv_snapshot!(context.filters(), context.sync().arg("--check"), @r###"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
|
|
@ -905,7 +945,7 @@ fn check() -> Result<()> {
|
|||
Would install 1 package
|
||||
+ iniconfig==2.0.0
|
||||
The environment is outdated; run `uv sync` to update the environment
|
||||
");
|
||||
"###);
|
||||
|
||||
// Sync the environment.
|
||||
uv_snapshot!(context.filters(), context.sync(), @r"
|
||||
|
|
@ -2108,7 +2148,7 @@ fn sync_environment() -> Result<()> {
|
|||
"#,
|
||||
)?;
|
||||
|
||||
uv_snapshot!(context.filters(), context.sync(), @r"
|
||||
uv_snapshot!(context.filters(), context.sync(), @r###"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
|
@ -2116,7 +2156,7 @@ fn sync_environment() -> Result<()> {
|
|||
----- stderr -----
|
||||
Resolved 2 packages in [TIME]
|
||||
error: The current Python platform is not compatible with the lockfile's supported environments: `python_full_version < '3.11'`
|
||||
");
|
||||
"###);
|
||||
|
||||
assert!(context.temp_dir.child("uv.lock").exists());
|
||||
|
||||
|
|
@ -5140,7 +5180,7 @@ fn sync_active_project_environment() -> Result<()> {
|
|||
)?;
|
||||
|
||||
// Running `uv sync` with `VIRTUAL_ENV` should warn
|
||||
uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo"), @r"
|
||||
uv_snapshot!(context.filters(), context.sync().env(EnvVars::VIRTUAL_ENV, "foo"), @r###"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
|
@ -5153,7 +5193,7 @@ fn sync_active_project_environment() -> Result<()> {
|
|||
Prepared 1 package in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
+ iniconfig==2.0.0
|
||||
");
|
||||
"###);
|
||||
|
||||
context
|
||||
.temp_dir
|
||||
|
|
@ -7140,16 +7180,21 @@ fn sync_invalid_environment() -> Result<()> {
|
|||
");
|
||||
}
|
||||
|
||||
// But if the Python executable is missing entirely we should also fail
|
||||
// If the Python executable is missing entirely, we'll delete and use it
|
||||
fs_err::remove_dir_all(&bin)?;
|
||||
uv_snapshot!(context.filters(), context.sync(), @r###"
|
||||
success: false
|
||||
exit_code: 2
|
||||
uv_snapshot!(context.filters(), context.sync(), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: Project virtual environment directory `[VENV]/` cannot be used because it is not a valid Python environment (no Python executable was found)
|
||||
"###);
|
||||
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
|
||||
Removed virtual environment at: .venv
|
||||
Creating virtual environment at: .venv
|
||||
Resolved 2 packages in [TIME]
|
||||
Installed 1 package in [TIME]
|
||||
+ iniconfig==2.0.0
|
||||
");
|
||||
|
||||
// But if it's not a virtual environment...
|
||||
fs_err::remove_dir_all(context.temp_dir.join(".venv"))?;
|
||||
|
|
@ -7182,6 +7227,17 @@ fn sync_invalid_environment() -> Result<()> {
|
|||
error: Project virtual environment directory `[VENV]/` cannot be used because it is not a compatible environment but cannot be recreated because it is not a virtual environment
|
||||
"###);
|
||||
|
||||
// Even if there's no Python executable
|
||||
fs_err::remove_dir_all(&bin)?;
|
||||
uv_snapshot!(context.filters(), context.sync(), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: Project virtual environment directory `[VENV]/` cannot be used because it is not a valid Python environment (no Python executable was found)
|
||||
");
|
||||
|
||||
context
|
||||
.temp_dir
|
||||
.child(".venv")
|
||||
|
|
@ -7196,6 +7252,74 @@ fn sync_invalid_environment() -> Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
#[test]
|
||||
fn sync_partial_environment_delete() -> Result<()> {
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
|
||||
let context = TestContext::new_with_versions(&["3.13", "3.12"]);
|
||||
|
||||
context.init().arg("-p").arg("3.12").assert().success();
|
||||
uv_snapshot!(context.filters(), context.sync().arg("-p").arg("3.13"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Using CPython 3.13.[X] interpreter at: [PYTHON-3.13]
|
||||
Creating virtual environment at: .venv
|
||||
Resolved 1 package in [TIME]
|
||||
Audited in [TIME]
|
||||
");
|
||||
|
||||
// Create a directory that's unreadable, erroring on trying to delete its children.
|
||||
// This relies on our implementation listing directory entries before deleting them — which is a
|
||||
// bit of a hack but accomplishes the goal here.
|
||||
let unreadable2 = context.temp_dir.child(".venv/z2.txt");
|
||||
fs_err::create_dir(&unreadable2)?;
|
||||
let perms = std::fs::Permissions::from_mode(0o000);
|
||||
fs_err::set_permissions(&unreadable2, perms)?;
|
||||
|
||||
uv_snapshot!(context.filters(), context.sync().arg("-p").arg("3.12"), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
|
||||
error: failed to remove directory `[VENV]/z2.txt`: Permission denied (os error 13)
|
||||
");
|
||||
|
||||
uv_snapshot!(context.filters(), context.sync().arg("-p").arg("3.12"), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
|
||||
error: failed to remove directory `[VENV]/z2.txt`: Permission denied (os error 13)
|
||||
");
|
||||
|
||||
// Remove the unreadable directory
|
||||
fs_err::remove_dir(unreadable2)?;
|
||||
|
||||
// We should be able to remove the venv now
|
||||
uv_snapshot!(context.filters(), context.sync().arg("-p").arg("3.12"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
|
||||
Removed virtual environment at: .venv
|
||||
Creating virtual environment at: .venv
|
||||
Resolved 1 package in [TIME]
|
||||
Audited in [TIME]
|
||||
");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Avoid validating workspace members when `--no-sources` is provided. Rather than reporting that
|
||||
/// `./anyio` is missing, install `anyio` from the registry.
|
||||
#[test]
|
||||
|
|
@ -7955,7 +8079,7 @@ fn sync_derivation_chain() -> Result<()> {
|
|||
[[tool.uv.dependency-metadata]]
|
||||
name = "wsgiref"
|
||||
version = "0.1.2"
|
||||
dependencies = []
|
||||
requires-dist = []
|
||||
"#,
|
||||
)?;
|
||||
|
||||
|
|
@ -8018,7 +8142,7 @@ fn sync_derivation_chain_extra() -> Result<()> {
|
|||
[[tool.uv.dependency-metadata]]
|
||||
name = "wsgiref"
|
||||
version = "0.1.2"
|
||||
dependencies = []
|
||||
requires-dist = []
|
||||
"#,
|
||||
)?;
|
||||
|
||||
|
|
@ -8083,7 +8207,7 @@ fn sync_derivation_chain_group() -> Result<()> {
|
|||
[[tool.uv.dependency-metadata]]
|
||||
name = "wsgiref"
|
||||
version = "0.1.2"
|
||||
dependencies = []
|
||||
requires-dist = []
|
||||
"#,
|
||||
)?;
|
||||
|
||||
|
|
@ -10889,7 +11013,7 @@ fn sync_required_environment_hint() -> Result<()> {
|
|||
[project]
|
||||
name = "example"
|
||||
version = "0.1.0"
|
||||
requires-python = ">=3.13.2"
|
||||
requires-python = ">=3.13"
|
||||
dependencies = ["no-sdist-no-wheels-with-matching-platform-a"]
|
||||
|
||||
[[tool.uv.index]]
|
||||
|
|
@ -10939,7 +11063,7 @@ fn sync_url_with_query_parameters() -> Result<()> {
|
|||
[project]
|
||||
name = "example"
|
||||
version = "0.1.0"
|
||||
requires-python = ">=3.13.2"
|
||||
requires-python = ">=3.13"
|
||||
dependencies = ["source-distribution @ https://files.pythonhosted.org/packages/1f/e5/5b016c945d745f8b108e759d428341488a6aee8f51f07c6c4e33498bb91f/source_distribution-0.0.3.tar.gz?foo=bar"]
|
||||
"#
|
||||
)?;
|
||||
|
|
@ -11664,3 +11788,49 @@ fn sync_config_settings_package() -> Result<()> {
|
|||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Ensure that when we sync to an empty virtual environment directory, we don't attempt to remove
|
||||
/// it, which breaks Docker volume mounts.
|
||||
#[test]
|
||||
#[cfg(unix)]
|
||||
fn sync_does_not_remove_empty_virtual_environment_directory() -> Result<()> {
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
|
||||
let context = TestContext::new_with_versions(&["3.12"]);
|
||||
|
||||
let project_dir = context.temp_dir.child("project");
|
||||
fs_err::create_dir(&project_dir)?;
|
||||
|
||||
let pyproject_toml = project_dir.child("pyproject.toml");
|
||||
pyproject_toml.write_str(
|
||||
r#"
|
||||
[project]
|
||||
name = "project"
|
||||
version = "0.1.0"
|
||||
requires-python = ">=3.12"
|
||||
dependencies = ["iniconfig"]
|
||||
"#,
|
||||
)?;
|
||||
|
||||
let venv_dir = project_dir.child(".venv");
|
||||
fs_err::create_dir(&venv_dir)?;
|
||||
|
||||
// Ensure the parent is read-only, to prevent deletion of the virtual environment
|
||||
fs_err::set_permissions(&project_dir, std::fs::Permissions::from_mode(0o555))?;
|
||||
|
||||
// Note we do _not_ fail to create the virtual environment — we fail later when writing to the
|
||||
// project directory
|
||||
uv_snapshot!(context.filters(), context.sync().current_dir(&project_dir), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
|
||||
Creating virtual environment at: .venv
|
||||
Resolved 2 packages in [TIME]
|
||||
error: failed to write to file `[TEMP_DIR]/project/uv.lock`: Permission denied (os error 13)
|
||||
");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
|
|||
|
|
@ -718,7 +718,7 @@ fn create_venv_warns_user_on_requires_python_discovery_error() -> Result<()> {
|
|||
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||
pyproject_toml.write_str(indoc! { r"invalid toml" })?;
|
||||
|
||||
uv_snapshot!(context.filters(), context.venv(), @r###"
|
||||
uv_snapshot!(context.filters(), context.venv(), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
|
|
@ -729,19 +729,19 @@ fn create_venv_warns_user_on_requires_python_discovery_error() -> Result<()> {
|
|||
|
|
||||
1 | invalid toml
|
||||
| ^
|
||||
expected `.`, `=`
|
||||
key with no value, expected `=`
|
||||
|
||||
warning: Failed to parse `pyproject.toml` during environment creation:
|
||||
TOML parse error at line 1, column 9
|
||||
|
|
||||
1 | invalid toml
|
||||
| ^
|
||||
expected `.`, `=`
|
||||
key with no value, expected `=`
|
||||
|
||||
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
|
||||
Creating virtual environment at: .venv
|
||||
Activate with: source .venv/[BIN]/activate
|
||||
"###
|
||||
"
|
||||
);
|
||||
|
||||
context.venv.assert(predicates::path::is_dir());
|
||||
|
|
|
|||
|
|
@ -1545,86 +1545,6 @@ fn git_version_info_expected() -> bool {
|
|||
git_dir.exists()
|
||||
}
|
||||
|
||||
// version_get_fallback with `--json`
|
||||
#[test]
|
||||
fn version_get_fallback_unmanaged_json() -> Result<()> {
|
||||
let context = TestContext::new("3.12");
|
||||
|
||||
let pyproject_toml = context.temp_dir.child("pyproject.toml");
|
||||
pyproject_toml.write_str(
|
||||
r#"
|
||||
[project]
|
||||
name = "myapp"
|
||||
version = "0.1.2"
|
||||
|
||||
[tool.uv]
|
||||
managed = false
|
||||
"#,
|
||||
)?;
|
||||
|
||||
let filters = context
|
||||
.filters()
|
||||
.into_iter()
|
||||
.chain([
|
||||
(
|
||||
r#"version": "\d+\.\d+\.\d+(-(alpha|beta|rc)\.\d+)?(\+\d+)?""#,
|
||||
r#"version": "[VERSION]""#,
|
||||
),
|
||||
(
|
||||
r#"short_commit_hash": ".*""#,
|
||||
r#"short_commit_hash": "[HASH]""#,
|
||||
),
|
||||
(r#"commit_hash": ".*""#, r#"commit_hash": "[LONGHASH]""#),
|
||||
(r#"commit_date": ".*""#, r#"commit_date": "[DATE]""#),
|
||||
(r#"last_tag": (".*"|null)"#, r#"last_tag": "[TAG]""#),
|
||||
(
|
||||
r#"commits_since_last_tag": .*"#,
|
||||
r#"commits_since_last_tag": [COUNT]"#,
|
||||
),
|
||||
])
|
||||
.collect::<Vec<_>>();
|
||||
if git_version_info_expected() {
|
||||
uv_snapshot!(filters, context.version()
|
||||
.arg("--output-format").arg("json"), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: The project is marked as unmanaged: `[TEMP_DIR]/`
|
||||
");
|
||||
} else {
|
||||
uv_snapshot!(filters, context.version()
|
||||
.arg("--output-format").arg("json"), @r#"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
{
|
||||
"package_name": "uv",
|
||||
"version": "[VERSION]",
|
||||
"commit_info": null
|
||||
}
|
||||
|
||||
----- stderr -----
|
||||
warning: Failed to read project metadata (The project is marked as unmanaged: `[TEMP_DIR]/`). Running `uv self version` for compatibility. This fallback will be removed in the future; pass `--preview` to force an error.
|
||||
"#);
|
||||
}
|
||||
|
||||
let pyproject = fs_err::read_to_string(&pyproject_toml)?;
|
||||
assert_snapshot!(
|
||||
pyproject,
|
||||
@r#"
|
||||
[project]
|
||||
name = "myapp"
|
||||
version = "0.1.2"
|
||||
|
||||
[tool.uv]
|
||||
managed = false
|
||||
"#
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Should error if this pyproject.toml isn't usable for whatever reason
|
||||
// and --project was passed explicitly.
|
||||
#[test]
|
||||
|
|
@ -1687,20 +1607,20 @@ fn version_get_fallback_missing_strict() -> Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
// Should error if this pyproject.toml is missing
|
||||
// and --preview was passed explicitly.
|
||||
/// Should error with hint if pyproject.toml is missing in normal mode
|
||||
#[test]
|
||||
fn version_get_fallback_missing_strict_preview() -> Result<()> {
|
||||
fn version_get_missing_with_hint() -> Result<()> {
|
||||
let context = TestContext::new("3.12");
|
||||
|
||||
uv_snapshot!(context.filters(), context.version()
|
||||
.arg("--preview"), @r"
|
||||
uv_snapshot!(context.filters(), context.version(), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: No `pyproject.toml` found in current directory or any parent directory
|
||||
|
||||
hint: If you meant to view uv's version, use `uv self version` instead
|
||||
");
|
||||
|
||||
Ok(())
|
||||
|
|
@ -2073,7 +1993,7 @@ fn version_set_workspace() -> Result<()> {
|
|||
);
|
||||
});
|
||||
|
||||
// Set the other child's version, refereshing the lock and sync
|
||||
// Set the other child's version, refreshing the lock and sync
|
||||
let mut version_cmd = context.version();
|
||||
version_cmd
|
||||
.arg("--package")
|
||||
|
|
|
|||
|
|
@ -151,3 +151,18 @@ insecure.
|
|||
|
||||
Use `allow-insecure-host` with caution and only in trusted environments, as it can expose you to
|
||||
security risks due to the lack of certificate verification.
|
||||
|
||||
## Hugging Face support
|
||||
|
||||
uv supports automatic authentication for the Hugging Face Hub. Specifically, if the `HF_TOKEN`
|
||||
environment variable is set, uv will propagate it to requests to `huggingface.co`.
|
||||
|
||||
This is particularly useful for accessing private scripts in Hugging Face Datasets. For example, you
|
||||
can run the following command to execute the script `main.py` script from a private dataset:
|
||||
|
||||
```console
|
||||
$ HF_TOKEN=hf_... uv run https://huggingface.co/datasets/<user>/<name>/resolve/<branch>/main.py
|
||||
```
|
||||
|
||||
You can disable automatic Hugging Face authentication by setting the `UV_NO_HF_TOKEN=1` environment
|
||||
variable.
|
||||
|
|
|
|||
|
|
@ -31,7 +31,7 @@ To use uv as a build backend in an existing project, add `uv_build` to the
|
|||
|
||||
```toml title="pyproject.toml"
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.8.0,<0.9.0"]
|
||||
requires = ["uv_build>=0.8.2,<0.9.0"]
|
||||
build-backend = "uv_build"
|
||||
```
|
||||
|
||||
|
|
|
|||
|
|
@ -367,9 +367,9 @@ in the deployed environment without a dependency on the originating source code.
|
|||
|
||||
## Conflicting dependencies
|
||||
|
||||
uv requires resolves all project dependencies together, including optional dependencies ("extras")
|
||||
and dependency groups. If dependencies declared in one section are not compatible with those in
|
||||
another section, uv will fail to resolve the requirements of the project with an error.
|
||||
uv resolves all project dependencies together, including optional dependencies ("extras") and
|
||||
dependency groups. If dependencies declared in one section are not compatible with those in another
|
||||
section, uv will fail to resolve the requirements of the project with an error.
|
||||
|
||||
uv supports explicit declaration of conflicting dependency groups. For example, to declare that the
|
||||
`optional-dependency` groups `extra1` and `extra2` are incompatible:
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue