mirror of https://github.com/astral-sh/uv
Compare commits
No commits in common. "main" and "0.8.21" have entirely different histories.
@@ -1,81 +0,0 @@
-# /// script
-# requires-python = ">=3.12"
-# dependencies = []
-# ///
-
-"""Post-edit hook to auto-format files after Claude edits."""
-
-import json
-import subprocess
-import sys
-from pathlib import Path
-
-
-def format_rust(file_path: str, cwd: str) -> None:
-    """Format Rust files with cargo fmt."""
-    try:
-        subprocess.run(
-            ["cargo", "fmt", "--", file_path],
-            cwd=cwd,
-            capture_output=True,
-        )
-    except FileNotFoundError:
-        pass
-
-
-def format_python(file_path: str, cwd: str) -> None:
-    """Format Python files with ruff."""
-    try:
-        subprocess.run(
-            ["uvx", "ruff", "format", file_path],
-            cwd=cwd,
-            capture_output=True,
-        )
-    except FileNotFoundError:
-        pass
-
-
-def format_prettier(file_path: str, cwd: str, prose_wrap: bool = False) -> None:
-    """Format files with prettier."""
-    args = ["npx", "prettier", "--write"]
-    if prose_wrap:
-        args.extend(["--prose-wrap", "always"])
-    args.append(file_path)
-    try:
-        subprocess.run(args, cwd=cwd, capture_output=True)
-    except FileNotFoundError:
-        pass
-
-
-def main() -> None:
-    import os
-
-    input_data = json.load(sys.stdin)
-
-    tool_name = input_data.get("tool_name")
-    tool_input = input_data.get("tool_input", {})
-    file_path = tool_input.get("file_path")
-
-    # Only process Write, Edit, and MultiEdit tools
-    if tool_name not in ("Write", "Edit", "MultiEdit"):
-        return
-
-    if not file_path:
-        return
-
-    cwd = os.environ.get("CLAUDE_PROJECT_DIR", os.getcwd())
-    path = Path(file_path)
-    ext = path.suffix
-
-    if ext == ".rs":
-        format_rust(file_path, cwd)
-    elif ext in (".py", ".pyi"):
-        format_python(file_path, cwd)
-    elif ext in (".json5", ".yaml", ".yml"):
-        format_prettier(file_path, cwd)
-    elif ext == ".md":
-        format_prettier(file_path, cwd, prose_wrap=True)
-
-
-if __name__ == "__main__":
-    main()
@@ -1,15 +0,0 @@
-{
-  "hooks": {
-    "PostToolUse": [
-      {
-        "matcher": "Edit|Write|MultiEdit",
-        "hooks": [
-          {
-            "type": "command",
-            "command": "uv run .claude/hooks/post-edit-format.py"
-          }
-        ]
-      }
-    ]
-  }
-}
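Taken together, the two deleted files above form a complete Claude Code formatting hook: the settings entry registers the script as a PostToolUse command for the Edit, Write, and MultiEdit tools, and the editor pipes a JSON description of the edit to the script's stdin. A minimal sketch of driving the hook by hand, assuming both files are present; the payload values are made up for illustration, but the field names are exactly the ones the script reads:

    import json
    import subprocess

    # Simulate the PostToolUse event that Claude Code would write to stdin.
    event = {
        "tool_name": "Edit",  # matched by "Edit|Write|MultiEdit" above
        "tool_input": {"file_path": "crates/uv/src/main.rs"},  # hypothetical file
    }

    # Invoke the hook the same way the settings entry does.
    subprocess.run(
        ["uv", "run", ".claude/hooks/post-edit-format.py"],
        input=json.dumps(event).encode(),
        check=True,
    )

Since the path ends in .rs, the script would dispatch to cargo fmt; files with unrecognized extensions fall through untouched, and a missing formatter is silently ignored via the FileNotFoundError handlers.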
@@ -3,19 +3,20 @@
   dependencyDashboard: true,
   suppressNotifications: ["prEditedNotification"],
   extends: [
     "github>astral-sh/renovate-config",
     "config:recommended",
     // For tool versions defined in GitHub Actions:
     "customManagers:githubActionsVersions",
   ],
   labels: ["internal"],
-  schedule: ["* 0-3 * * 1"],
+  schedule: ["before 4am on Monday"],
   semanticCommits: "disabled",
   separateMajorMinor: false,
   prHourlyLimit: 10,
   enabledManagers: ["github-actions", "pre-commit", "cargo", "custom.regex"],
   cargo: {
     // See https://docs.renovatebot.com/configuration-options/#rangestrategy
     rangeStrategy: "update-lockfile",
-    managerFilePatterns: ["/^Cargo\\.toml$/", "/^crates/.*Cargo\\.toml$/"],
+    managerFilePatterns: ["/^crates/.*Cargo\\.toml$/"],
   },
   "pre-commit": {
     enabled: true,
@@ -85,12 +86,6 @@
       description: "Weekly update of pyo3 dependencies",
       enabled: false,
     },
-    {
-      groupName: "pubgrub",
-      matchManagers: ["cargo"],
-      matchDepNames: ["pubgrub", "version-ranges"],
-      description: "version-ranges and pubgrub are in the same Git repository",
-    },
     {
       commitMessageTopic: "MSRV",
       matchManagers: ["custom.regex"],
@@ -61,7 +61,6 @@ jobs:
       - name: "Build sdist"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           command: sdist
           args: --out dist
       - name: "Test sdist"
@@ -82,7 +81,6 @@ jobs:
       - name: "Build sdist uv-build"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           command: sdist
           args: --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
       - name: "Test sdist uv-build"
@@ -98,7 +96,7 @@ jobs:
 
   macos-x86_64:
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
-    runs-on: depot-macos-14
+    runs-on: macos-14
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
@@ -115,7 +113,6 @@ jobs:
       - name: "Build wheels - x86_64"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: x86_64
           args: --release --locked --out dist --features self-update
       - name: "Upload wheels"
@@ -146,7 +143,6 @@ jobs:
       - name: "Build wheels uv-build - x86_64"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: x86_64
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
       - name: "Upload wheels uv-build"
@@ -157,7 +153,7 @@ jobs:
 
   macos-aarch64:
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
-    runs-on: depot-macos-14
+    runs-on: macos-14
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
@@ -174,7 +170,6 @@ jobs:
       - name: "Build wheels - aarch64"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: aarch64
           args: --release --locked --out dist --features self-update
       - name: "Test wheel - aarch64"
@@ -211,7 +206,6 @@ jobs:
       - name: "Build wheels uv-build - aarch64"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: aarch64
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
       - name: "Test wheel - aarch64"
@@ -253,7 +247,6 @@ jobs:
       - name: "Build wheels"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           args: --release --locked --out dist --features self-update,windows-gui-bin
       - name: "Test wheel"
@@ -292,7 +285,6 @@ jobs:
       - name: "Build wheels uv-build"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
       - name: "Test wheel uv-build"
@@ -332,7 +324,6 @@ jobs:
       - name: "Build wheels"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.target }}
           # Generally, we try to build in a target docker container. In this case however, a
           # 32-bit compiler runs out of memory (4GB memory limit for 32-bit), so we cross compile
@@ -399,7 +390,6 @@ jobs:
       - name: "Build wheels uv-build"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.target }}
           manylinux: auto
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
@@ -417,7 +407,7 @@ jobs:
 
   linux-arm:
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
-    runs-on: depot-ubuntu-22.04-8
+    runs-on: ubuntu-latest
     timeout-minutes: 30
     strategy:
       matrix:
@@ -447,7 +437,6 @@ jobs:
       - name: "Build wheels"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           # On `aarch64`, use `manylinux: 2_28`; otherwise, use `manylinux: auto`.
           manylinux: ${{ matrix.platform.arch == 'aarch64' && '2_28' || 'auto' }}
@@ -501,7 +490,6 @@ jobs:
       - name: "Build wheels uv-build"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           # On `aarch64`, use `manylinux: 2_28`; otherwise, use `manylinux: auto`.
           manylinux: ${{ matrix.platform.arch == 'aarch64' && '2_28' || 'auto' }}
@@ -556,12 +544,13 @@ jobs:
       - name: "Build wheels"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}
           args: --release --locked --out dist --features self-update
-          rust-toolchain: ${{ matrix.platform.toolchain || null }}
+          # Until the llvm updates hit stable
+          # https://github.com/rust-lang/rust/issues/141287
+          rust-toolchain: nightly-2025-05-25
       - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
         if: matrix.platform.arch != 'ppc64'
         name: "Test wheel"
@@ -611,7 +600,6 @@ jobs:
       - name: "Build wheels uv-build"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}
@@ -671,7 +659,6 @@ jobs:
       - name: "Build wheels"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}
@@ -730,7 +717,6 @@ jobs:
       - name: "Build wheels uv-build"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}
@@ -775,7 +761,6 @@ jobs:
       - name: "Build wheels"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}
@@ -829,7 +814,6 @@ jobs:
       - name: "Build wheels uv-build"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}
@@ -882,7 +866,6 @@ jobs:
       - name: "Build wheels"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.target }}
           manylinux: musllinux_1_1
           args: --release --locked --out dist --features self-update
@@ -931,7 +914,6 @@ jobs:
       - name: "Build wheels uv-build"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.target }}
           manylinux: musllinux_1_1
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
@@ -956,7 +938,7 @@ jobs:
 
   musllinux-cross:
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
-    runs-on: depot-ubuntu-22.04-8
+    runs-on: ubuntu-latest
     strategy:
       matrix:
         platform:
@@ -982,7 +964,6 @@ jobs:
       - name: "Build wheels"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: musllinux_1_1
           args: --release --locked --out dist --features self-update ${{ matrix.platform.arch == 'aarch64' && '--compatibility 2_17' || ''}}
@@ -1055,7 +1036,6 @@ jobs:
       - name: "Build wheels"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: musllinux_1_1
           args: --profile minimal-size --locked ${{ matrix.platform.arch == 'aarch64' && '--compatibility 2_17' || ''}} --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
@@ -178,45 +178,45 @@ jobs:
         # Mapping of base image followed by a comma followed by one or more base tags (comma separated)
         # Note, org.opencontainers.image.version label will use the first base tag (use the most specific tag first)
         image-mapping:
-          - alpine:3.22,alpine3.22,alpine
-          - alpine:3.21,alpine3.21
-          - debian:trixie-slim,trixie-slim,debian-slim
-          - buildpack-deps:trixie,trixie,debian
-          - debian:bookworm-slim,bookworm-slim
-          - buildpack-deps:bookworm,bookworm
-          - python:3.14-alpine3.23,python3.14-alpine3.23,python3.14-alpine
-          - python:3.13-alpine3.23,python3.13-alpine3.23,python3.13-alpine
-          - python:3.12-alpine3.23,python3.12-alpine3.23,python3.12-alpine
-          - python:3.11-alpine3.23,python3.11-alpine3.23,python3.11-alpine
-          - python:3.10-alpine3.23,python3.10-alpine3.23,python3.10-alpine
-          - python:3.9-alpine3.22,python3.9-alpine3.22,python3.9-alpine
-          - python:3.8-alpine3.20,python3.8-alpine3.20,python3.8-alpine
-          - python:3.14-trixie,python3.14-trixie
-          - python:3.13-trixie,python3.13-trixie
-          - python:3.12-trixie,python3.12-trixie
-          - python:3.11-trixie,python3.11-trixie
-          - python:3.10-trixie,python3.10-trixie
-          - python:3.9-trixie,python3.9-trixie
-          - python:3.14-slim-trixie,python3.14-trixie-slim
-          - python:3.13-slim-trixie,python3.13-trixie-slim
-          - python:3.12-slim-trixie,python3.12-trixie-slim
-          - python:3.11-slim-trixie,python3.11-trixie-slim
-          - python:3.10-slim-trixie,python3.10-trixie-slim
-          - python:3.9-slim-trixie,python3.9-trixie-slim
-          - python:3.14-bookworm,python3.14-bookworm
+          - alpine:3.21,alpine3.21,alpine
+          - alpine:3.22,alpine3.22
+          - debian:bookworm-slim,bookworm-slim,debian-slim
+          - buildpack-deps:bookworm,bookworm,debian
+          - debian:trixie-slim,trixie-slim
+          - buildpack-deps:trixie,trixie
+          - python:3.14-rc-alpine,python3.14-rc-alpine
+          - python:3.13-alpine,python3.13-alpine
+          - python:3.12-alpine,python3.12-alpine
+          - python:3.11-alpine,python3.11-alpine
+          - python:3.10-alpine,python3.10-alpine
+          - python:3.9-alpine,python3.9-alpine
+          - python:3.8-alpine,python3.8-alpine
+          - python:3.14-rc-bookworm,python3.14-rc-bookworm
+          - python:3.13-bookworm,python3.13-bookworm
+          - python:3.12-bookworm,python3.12-bookworm
+          - python:3.11-bookworm,python3.11-bookworm
+          - python:3.10-bookworm,python3.10-bookworm
+          - python:3.9-bookworm,python3.9-bookworm
+          - python:3.8-bookworm,python3.8-bookworm
-          - python:3.14-slim-bookworm,python3.14-bookworm-slim
+          - python:3.14-rc-slim-bookworm,python3.14-rc-bookworm-slim
+          - python:3.13-slim-bookworm,python3.13-bookworm-slim
+          - python:3.12-slim-bookworm,python3.12-bookworm-slim
+          - python:3.11-slim-bookworm,python3.11-bookworm-slim
+          - python:3.10-slim-bookworm,python3.10-bookworm-slim
+          - python:3.9-slim-bookworm,python3.9-bookworm-slim
+          - python:3.8-slim-bookworm,python3.8-bookworm-slim
+          - python:3.14-rc-trixie,python3.14-rc-trixie
+          - python:3.13-trixie,python3.13-trixie
+          - python:3.12-trixie,python3.12-trixie
+          - python:3.11-trixie,python3.11-trixie
+          - python:3.10-trixie,python3.10-trixie
+          - python:3.9-trixie,python3.9-trixie
+          - python:3.14-rc-slim-trixie,python3.14-rc-trixie-slim
+          - python:3.13-slim-trixie,python3.13-trixie-slim
+          - python:3.12-slim-trixie,python3.12-trixie-slim
+          - python:3.11-slim-trixie,python3.11-trixie-slim
+          - python:3.10-slim-trixie,python3.10-trixie-slim
+          - python:3.9-slim-trixie,python3.9-trixie-slim
     steps:
       # Login to DockerHub (when not pushing, it's to avoid rate-limiting)
      - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
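Each image-mapping entry packs a base image and its derived uv tags into a single comma-separated string, as the two comments at the top of the matrix explain: the base image comes first, then one or more tags, with the most specific tag first because it feeds the org.opencontainers.image.version label. A small illustrative sketch of that format in Python (the workflow itself consumes these strings in shell, and the function name here is hypothetical):

    def parse_mapping(entry: str) -> tuple[str, list[str]]:
        # "<base-image>,<tag>[,<tag>...]" -> (base image, derived tags)
        base_image, *tags = entry.split(",")
        return base_image, tags

    base, tags = parse_mapping("alpine:3.21,alpine3.21,alpine")
    assert base == "alpine:3.21"
    assert tags == ["alpine3.21", "alpine"]  # "alpine3.21" sets the version label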
@@ -27,8 +27,6 @@ jobs:
     outputs:
       # Flag that is raised when any code is changed
       code: ${{ steps.changed.outputs.code_any_changed }}
-      # Flag that is raised when uv.schema.json is changed (e.g., in a release PR)
-      schema: ${{ steps.changed.outputs.schema_changed }}
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
@@ -42,16 +40,10 @@ jobs:
           CHANGED_FILES=$(git diff --name-only ${{ github.event.pull_request.base.sha || 'origin/main' }}...HEAD)
 
           CODE_CHANGED=false
-          SCHEMA_CHANGED=false
 
           while IFS= read -r file; do
-            # Check if the schema file changed (e.g., in a release PR)
-            if [[ "${file}" == "uv.schema.json" ]]; then
-              echo "Detected schema change: ${file}"
-              SCHEMA_CHANGED=true
-            fi
-
-            if [[ "${file}" =~ ^docs/ ]]; then
+            # Generated markdown and JSON files are checked during test runs.
+            if [[ "${file}" =~ ^docs/ && ! "${file}" =~ ^docs/reference/(cli|settings).md && ! "${file}" =~ ^docs/reference/environment.md ]]; then
               echo "Skipping ${file} (matches docs/ pattern)"
               continue
             fi
@@ -78,7 +70,6 @@ jobs:
 
           done <<< "${CHANGED_FILES}"
           echo "code_any_changed=${CODE_CHANGED}" >> "${GITHUB_OUTPUT}"
-          echo "schema_changed=${SCHEMA_CHANGED}" >> "${GITHUB_OUTPUT}"
   lint:
     timeout-minutes: 10
     name: "lint"
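The determine_changes logic above is the gatekeeper for most of the jobs that follow: it walks the diffed file list, skips paths that cannot affect the build, and raises output flags that later jobs consult in their if: conditions. A Python mirror of the main-branch shell logic, for readability only — CI runs the bash version, and the hunk elides some of the skip patterns between these lines, so this sketch is deliberately partial:

    import re

    def classify(changed_files: list[str]) -> dict[str, bool]:
        flags = {"code": False, "schema": False}
        for file in changed_files:
            if file == "uv.schema.json":
                flags["schema"] = True  # e.g., flipped in a release PR
            if re.match(r"^docs/", file):
                continue  # docs-only edits never raise the code flag
            # ... further skip patterns elided in the hunk ...
            flags["code"] = True
        return flags

    assert classify(["docs/guides/install.md"]) == {"code": False, "schema": False}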
@@ -96,9 +87,7 @@ jobs:
         run: rustup component add rustfmt
 
       - name: "Install uv"
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
-        with:
-          version: "0.9.13"
+        uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0
 
       - name: "rustfmt"
         run: cargo fmt --all --check
@@ -144,7 +133,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
           save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Check uv_build dependencies"
@@ -176,7 +165,7 @@ jobs:
         run: |
           Copy-Item -Path "${{ github.workspace }}" -Destination "$Env:UV_WORKSPACE" -Recurse
 
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
           workspaces: ${{ env.UV_WORKSPACE }}
 
@@ -187,22 +176,6 @@ jobs:
         working-directory: ${{ env.UV_WORKSPACE }}
         run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
 
-  cargo-publish-dry-run:
-    timeout-minutes: 20
-    needs: determine_changes
-    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
-    runs-on: depot-ubuntu-22.04-8
-    name: "cargo publish dry-run"
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
-      - name: "cargo publish dry-run"
-        run: cargo publish --workspace --dry-run
-
   cargo-dev-generate-all:
     timeout-minutes: 10
     needs: determine_changes
@@ -213,16 +186,11 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
           save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Generate all"
-        run: cargo dev generate-all --mode dry-run
-      - name: "Check sysconfig mappings"
-        run: cargo dev generate-sysconfig-metadata --mode check
-      - name: "Check JSON schema"
-        if: ${{ needs.determine_changes.outputs.schema == 'true' }}
-        run: cargo dev generate-json-schema --mode check
+        run: cargo dev generate-all --mode check
 
   cargo-shear:
     timeout-minutes: 10
@@ -233,7 +201,7 @@ jobs:
         with:
           persist-credentials: false
       - name: "Install cargo shear"
-        uses: taiki-e/install-action@d850aa816998e5cf15f67a78c7b933f2a5033f8a # v2.63.3
+        uses: taiki-e/install-action@a416ddeedbd372e614cc1386e8b642692f66865e # v2.57.1
         with:
           tool: cargo-shear
      - run: cargo shear
@@ -255,15 +223,12 @@ jobs:
 
       - uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
 
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
 
       - name: "Install Rust toolchain"
         run: rustup show
 
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
-        with:
-          version: "0.9.13"
-
+      - uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0
       - name: "Install required Python versions"
         run: uv python install
 
@@ -289,13 +254,12 @@ jobs:
           UV_HTTP_RETRIES: 5
         run: |
           cargo nextest run \
-            --cargo-profile fast-build \
             --features python-patch,native-auth,secret-service \
             --workspace \
             --status-level skip --failure-output immediate-final --no-fail-fast -j 20 --final-status-level slow
 
   cargo-test-macos:
-    timeout-minutes: 20
+    timeout-minutes: 15
     needs: determine_changes
     # Only run macOS tests on main without opt-in
     if: ${{ contains(github.event.pull_request.labels.*.name, 'test:macos') || github.ref == 'refs/heads/main' }}
@@ -308,15 +272,12 @@ jobs:
 
       - uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
 
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
 
       - name: "Install Rust toolchain"
         run: rustup show
 
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
-        with:
-          version: "0.9.13"
-
+      - uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0
       - name: "Install required Python versions"
         run: uv python install
 
@@ -331,9 +292,8 @@ jobs:
           UV_HTTP_RETRIES: 5
         run: |
           cargo nextest run \
-            --cargo-profile fast-build \
             --no-default-features \
-            --features python,python-managed,pypi,git,git-lfs,performance,crates-io,native-auth,apple-native \
+            --features python,python-managed,pypi,git,performance,crates-io,native-auth,apple-native \
             --workspace \
             --status-level skip --failure-output immediate-final --no-fail-fast -j 12 --final-status-level slow
 
@@ -356,14 +316,11 @@ jobs:
         run: |
           Copy-Item -Path "${{ github.workspace }}" -Destination "$Env:UV_WORKSPACE" -Recurse
 
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
-        with:
-          version: "0.9.13"
-
+      - uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0
       - name: "Install required Python versions"
         run: uv python install
 
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
           workspaces: ${{ env.UV_WORKSPACE }}
 
@@ -387,7 +344,6 @@ jobs:
         shell: bash
         run: |
           cargo nextest run \
-            --cargo-profile fast-build \
             --no-default-features \
             --features python,pypi,python-managed,native-auth,windows-native \
             --workspace \
@@ -417,7 +373,7 @@ jobs:
         run: |
           Copy-Item -Path "${{ github.workspace }}" -Destination "$Env:UV_WORKSPACE" -Recurse
 
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
           workspaces: ${{ env.UV_WORKSPACE }}/crates/uv-trampoline
 
@@ -477,7 +433,7 @@ jobs:
       - name: Copy Git Repo to Dev Drive
         run: |
           Copy-Item -Path "${{ github.workspace }}" -Destination "$Env:UV_WORKSPACE" -Recurse
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
           workspaces: ${{ env.UV_WORKSPACE }}/crates/uv-trampoline
       - name: "Install Rust toolchain"
@@ -495,8 +451,8 @@ jobs:
         working-directory: ${{ env.UV_WORKSPACE }}/crates/uv-trampoline
         run: |
           cargo build --target ${{ matrix.target-arch }}-pc-windows-msvc
-          cp target/${{ matrix.target-arch }}-pc-windows-msvc/debug/uv-trampoline-console.exe ../uv-trampoline-builder/trampolines/uv-trampoline-${{ matrix.target-arch }}-console.exe
-          cp target/${{ matrix.target-arch }}-pc-windows-msvc/debug/uv-trampoline-gui.exe ../uv-trampoline-builder/trampolines/uv-trampoline-${{ matrix.target-arch }}-gui.exe
+          cp target/${{ matrix.target-arch }}-pc-windows-msvc/debug/uv-trampoline-console.exe trampolines/uv-trampoline-${{ matrix.target-arch }}-console.exe
+          cp target/${{ matrix.target-arch }}-pc-windows-msvc/debug/uv-trampoline-gui.exe trampolines/uv-trampoline-${{ matrix.target-arch }}-gui.exe
       - name: "Test new binaries"
         working-directory: ${{ env.UV_WORKSPACE }}
         run: |
@@ -509,7 +465,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: crate-ci/typos@64e4db431eb262bb5c6baa19dce280d78532830c # v1.37.3
+      - uses: crate-ci/typos@85f62a8a84f939ae994ab3763f01a0296d61a7ee # v1.36.2
 
   docs:
     timeout-minutes: 10
@@ -522,19 +478,8 @@ jobs:
         with:
           fetch-depth: 0
           persist-credentials: false
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
-        with:
-          version: "0.9.13"
-
+      - uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0
       - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
-      - name: "Generate reference documentation"
-        run: |
-          cargo dev generate-options-reference
-          cargo dev generate-cli-reference
-          cargo dev generate-env-vars-reference
       - name: "Add SSH key"
         if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
         uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
@@ -561,18 +506,18 @@ jobs:
 
       - uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
 
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
 
       - name: "Build"
-        run: cargo build --profile no-debug
+        run: cargo build
 
       - name: "Upload binary"
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: uv-linux-libc-${{ github.sha }}
           path: |
-            ./target/no-debug/uv
-            ./target/no-debug/uvx
+            ./target/debug/uv
+            ./target/debug/uvx
           retention-days: 1
 
   build-binary-linux-aarch64:
@@ -588,18 +533,18 @@ jobs:
 
       - uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
 
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
 
       - name: "Build"
-        run: cargo build --profile no-debug
+        run: cargo build
 
       - name: "Upload binary"
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: uv-linux-aarch64-${{ github.sha }}
           path: |
-            ./target/no-debug/uv
-            ./target/no-debug/uvx
+            ./target/debug/uv
+            ./target/debug/uvx
           retention-days: 1
 
   build-binary-linux-musl:
@@ -620,18 +565,18 @@ jobs:
           sudo apt-get install musl-tools
           rustup target add x86_64-unknown-linux-musl
 
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
 
       - name: "Build"
-        run: cargo build --profile no-debug --target x86_64-unknown-linux-musl --bin uv --bin uvx
+        run: cargo build --target x86_64-unknown-linux-musl --bin uv --bin uvx
 
       - name: "Upload binary"
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: uv-linux-musl-${{ github.sha }}
           path: |
-            ./target/x86_64-unknown-linux-musl/no-debug/uv
-            ./target/x86_64-unknown-linux-musl/no-debug/uvx
+            ./target/x86_64-unknown-linux-musl/debug/uv
+            ./target/x86_64-unknown-linux-musl/debug/uvx
           retention-days: 1
 
   build-binary-macos-aarch64:
@@ -647,17 +592,17 @@ jobs:
 
       - uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
 
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
       - name: "Build"
-        run: cargo build --profile no-debug --bin uv --bin uvx
+        run: cargo build --bin uv --bin uvx
 
       - name: "Upload binary"
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: uv-macos-aarch64-${{ github.sha }}
           path: |
-            ./target/no-debug/uv
-            ./target/no-debug/uvx
+            ./target/debug/uv
+            ./target/debug/uvx
           retention-days: 1
 
   build-binary-macos-x86_64:
@@ -673,17 +618,17 @@ jobs:
 
       - uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
 
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
       - name: "Build"
-        run: cargo build --profile no-debug --bin uv --bin uvx
+        run: cargo build --bin uv --bin uvx
 
       - name: "Upload binary"
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: uv-macos-x86_64-${{ github.sha }}
           path: |
-            ./target/no-debug/uv
-            ./target/no-debug/uvx
+            ./target/debug/uv
+            ./target/debug/uvx
           retention-days: 1
 
   build-binary-windows-x86_64:
@@ -705,21 +650,21 @@ jobs:
         run: |
           Copy-Item -Path "${{ github.workspace }}" -Destination "$Env:UV_WORKSPACE" -Recurse
 
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
           workspaces: ${{ env.UV_WORKSPACE }}
 
       - name: "Build"
         working-directory: ${{ env.UV_WORKSPACE }}
-        run: cargo build --profile no-debug --bin uv --bin uvx
+        run: cargo build --bin uv --bin uvx
 
       - name: "Upload binary"
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: uv-windows-x86_64-${{ github.sha }}
           path: |
-            ${{ env.UV_WORKSPACE }}/target/no-debug/uv.exe
-            ${{ env.UV_WORKSPACE }}/target/no-debug/uvx.exe
+            ${{ env.UV_WORKSPACE }}/target/debug/uv.exe
+            ${{ env.UV_WORKSPACE }}/target/debug/uvx.exe
           retention-days: 1
 
   build-binary-windows-aarch64:
@@ -742,7 +687,7 @@ jobs:
         run: |
           Copy-Item -Path "${{ github.workspace }}" -Destination "$Env:UV_WORKSPACE" -Recurse
 
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
           workspaces: ${{ env.UV_WORKSPACE }}
 
@@ -751,15 +696,15 @@ jobs:
 
       - name: "Build"
         working-directory: ${{ env.UV_WORKSPACE }}
-        run: cargo build --profile no-debug --target aarch64-pc-windows-msvc
+        run: cargo build --target aarch64-pc-windows-msvc
 
       - name: "Upload binary"
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: uv-windows-aarch64-${{ github.sha }}
           path: |
-            ${{ env.UV_WORKSPACE }}/target/aarch64-pc-windows-msvc/no-debug/uv.exe
-            ${{ env.UV_WORKSPACE }}/target/aarch64-pc-windows-msvc/no-debug/uvx.exe
+            ${{ env.UV_WORKSPACE }}/target/aarch64-pc-windows-msvc/debug/uv.exe
+            ${{ env.UV_WORKSPACE }}/target/aarch64-pc-windows-msvc/debug/uvx.exe
           retention-days: 1
 
   build-binary-msrv:
@@ -783,11 +728,11 @@ jobs:
           MSRV: ${{ steps.msrv.outputs.value }}
       - name: "Install mold"
         uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-      - run: cargo +${MSRV} build --profile no-debug
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
+      - run: cargo +${MSRV} build
         env:
           MSRV: ${{ steps.msrv.outputs.value }}
-      - run: ./target/no-debug/uv --version
+      - run: ./target/debug/uv --version
 
   build-binary-freebsd:
     needs: determine_changes
@@ -800,7 +745,7 @@ jobs:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
       - name: "Cross build"
         run: |
           # Install cross from `freebsd-firecracker`
@@ -808,7 +753,7 @@ jobs:
           chmod +x cross
           mv cross /usr/local/bin/cross
 
-          cross build --target x86_64-unknown-freebsd --profile no-debug
+          cross build --target x86_64-unknown-freebsd
 
       - name: Test in Firecracker VM
         uses: acj/freebsd-firecracker-action@a5a3fc1709c5b5368141a5699f10259aca3cd965 # v0.6.0
@@ -822,8 +767,8 @@ jobs:
           cat <<EOF > $include_path
           target
           target/x86_64-unknown-freebsd
-          target/x86_64-unknown-freebsd/no-debug
-          target/x86_64-unknown-freebsd/no-debug/uv
+          target/x86_64-unknown-freebsd/debug
+          target/x86_64-unknown-freebsd/debug/uv
           EOF
 
           rsync -r -e "ssh" \
@@ -833,7 +778,7 @@ jobs:
             --exclude "*" \
             . firecracker:
         run-in-vm: |
-          mv target/x86_64-unknown-freebsd/no-debug/uv uv
+          mv target/x86_64-unknown-freebsd/debug/uv uv
           chmod +x uv
           ./uv --version
@@ -1330,30 +1275,6 @@ jobs:
           ./uv run python -c ""
           ./uv run -p 3.13 python -c ""
 
-  integration-test-windows-python-install-manager:
-    timeout-minutes: 10
-    needs: build-binary-windows-x86_64
-    name: "integration test | windows python install manager"
-    runs-on: windows-latest
-
-    steps:
-      - name: "Download binary"
-        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
-        with:
-          name: uv-windows-x86_64-${{ github.sha }}
-
-      - name: "Install Python via Python Install manager"
-        run: |
-          # https://www.python.org/downloads/release/pymanager-250/
-          winget install --accept-package-agreements --accept-source-agreements 9NQ7512CXL7T
-          # Call Python Install Manager's py.exe by full path to avoid legacy py.exe
-          & "$env:LOCALAPPDATA\Microsoft\WindowsApps\py.exe" install 3.14
-
-      # https://github.com/astral-sh/uv/issues/16204
-      - name: "Check temporary environment creation"
-        run: |
-          ./uv run -p $env:LOCALAPPDATA\Python\pythoncore-3.14-64\python.exe --with numpy python -c "import sys; print(sys.executable)"
-
   integration-test-pypy-linux:
     timeout-minutes: 10
     needs: build-binary-linux-libc
@@ -1773,7 +1694,7 @@ jobs:
           name: uv-linux-musl-${{ github.sha }}
 
       - name: "Setup WSL"
-        uses: Vampire/setup-wsl@6a8db447be7ed35f2f499c02c6e60ff77ef11278 # v6.0.0
+        uses: Vampire/setup-wsl@6a8db447be7ed35f2f499c02c6e60ff77ef11278 # v6
         with:
           distribution: Ubuntu-22.04
 
@@ -1873,7 +1794,7 @@ jobs:
         run: chmod +x ./uv
 
       - name: "Configure AWS credentials"
-        uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708 # v5.1.1
+        uses: aws-actions/configure-aws-credentials@351d894493fd3289754a5471c0892ba92fa0abe2
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -1999,7 +1920,7 @@ jobs:
           ../uv build
 
       - name: "Publish astral-test-pypa-gh-action"
-        uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
+        uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # release/v1
         with:
           # With this GitHub action, we can't do as rigid checks as with our custom Python script, so we publish more
           # leniently
@@ -2032,7 +1953,6 @@ jobs:
          UV_TEST_PUBLISH_GITLAB_PAT: ${{ secrets.UV_TEST_PUBLISH_GITLAB_PAT }}
          UV_TEST_PUBLISH_CODEBERG_TOKEN: ${{ secrets.UV_TEST_PUBLISH_CODEBERG_TOKEN }}
          UV_TEST_PUBLISH_CLOUDSMITH_TOKEN: ${{ secrets.UV_TEST_PUBLISH_CLOUDSMITH_TOKEN }}
-          UV_TEST_PUBLISH_PYX_TOKEN: ${{ secrets.UV_TEST_PUBLISH_PYX_TOKEN }}
          UV_TEST_PUBLISH_PYTHON_VERSION: ${{ env.PYTHON_VERSION }}
 
   integration-uv-build-backend:
@@ -2067,22 +1987,22 @@ jobs:
 
           # Test the main path (`build_wheel`) through pip
           ./uv venv -v --seed
-          ./uv run --no-project python -m pip install -v test/packages/built-by-uv --find-links crates/uv-build/dist --no-index --no-deps
+          ./uv run --no-project python -m pip install -v scripts/packages/built-by-uv --find-links crates/uv-build/dist --no-index --no-deps
           ./uv run --no-project python -c "from built_by_uv import greet; print(greet())"
 
           # Test both `build_wheel` and `build_sdist` through uv
           ./uv venv -c -v
-          ./uv build -v --force-pep517 test/packages/built-by-uv --find-links crates/uv-build/dist --offline
-          ./uv pip install -v test/packages/built-by-uv/dist/*.tar.gz --find-links crates/uv-build/dist --offline --no-deps
+          ./uv build -v --force-pep517 scripts/packages/built-by-uv --find-links crates/uv-build/dist --offline
+          ./uv pip install -v scripts/packages/built-by-uv/dist/*.tar.gz --find-links crates/uv-build/dist --offline --no-deps
           ./uv run --no-project python -c "from built_by_uv import greet; print(greet())"
 
           # Test both `build_wheel` and `build_sdist` through the official `build`
-          rm -rf test/packages/built-by-uv/dist/
+          rm -rf scripts/packages/built-by-uv/dist/
           ./uv venv -c -v
           ./uv pip install build
           # Add the uv binary to PATH for `build` to find
-          PATH="$(pwd):$PATH" UV_OFFLINE=1 UV_FIND_LINKS=crates/uv-build/dist ./uv run --no-project python -m build -v --installer uv test/packages/built-by-uv
-          ./uv pip install -v test/packages/built-by-uv/dist/*.tar.gz --find-links crates/uv-build/dist --offline --no-deps
+          PATH="$(pwd):$PATH" UV_OFFLINE=1 UV_FIND_LINKS=crates/uv-build/dist ./uv run --no-project python -m build -v --installer uv scripts/packages/built-by-uv
+          ./uv pip install -v scripts/packages/built-by-uv/dist/*.tar.gz --find-links crates/uv-build/dist --offline --no-deps
           ./uv run --no-project python -c "from built_by_uv import greet; print(greet())"
 
   cache-test-ubuntu:
@@ -2268,11 +2188,11 @@ jobs:
    needs: build-binary-linux-musl
    name: "check system | python on rocky linux ${{ matrix.rocky-version }}"
    runs-on: ubuntu-latest
-    container: rockylinux/rockylinux:${{ matrix.rocky-version }}
+    container: rockylinux:${{ matrix.rocky-version }}
    strategy:
      fail-fast: false
      matrix:
-        rocky-version: ["8", "9", "10"]
+        rocky-version: ["8", "9"]
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
@@ -2281,29 +2201,12 @@ jobs:
       - name: "Install Python"
         if: matrix.rocky-version == '8'
         run: |
-          for i in {1..5}; do
-            dnf install python39 python39-pip which -y && break || { echo "Attempt $i failed, retrying in 10 seconds..."; sleep 10; }
-            if [ $i -eq 5 ]; then
-              echo "Failed to install Python after 5 attempts"
-              exit 1
-            fi
-          done
+          dnf install python39 python39-pip which -y
 
       - name: "Install Python"
         if: matrix.rocky-version == '9'
         run: |
-          for i in {1..5}; do
-            dnf install python3.9 python3.9-pip which -y && break || { echo "Attempt $i failed, retrying in 10 seconds..."; sleep 10; }
-            if [ $i -eq 5 ]; then
-              echo "Failed to install Python after 5 attempts"
-              exit 1
-            fi
-          done
-
-      - name: "Install Python"
-        if: matrix.rocky-version == '10'
-        run: |
-          dnf install python3 python3-pip which -y
+          dnf install python3.9 python3.9-pip which -y
 
       - name: "Download binary"
         uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
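Note the pattern main added around these dnf calls: each install is attempted up to five times, breaking out on the first success and sleeping ten seconds between failures, which hardens the job against transient mirror errors. The same idiom sketched generically in Python for consistency with the other examples (the workflow itself uses bash, and the helper name is made up):

    import subprocess
    import time

    def run_with_retries(cmd: list[str], attempts: int = 5, delay: float = 10.0) -> None:
        # Mirror of the shell loop: break on first success, sleep between
        # failures, give up after the final attempt.
        for i in range(1, attempts + 1):
            if subprocess.run(cmd).returncode == 0:
                return
            if i == attempts:
                raise RuntimeError(f"command failed after {attempts} attempts: {cmd!r}")
            print(f"Attempt {i} failed, retrying in {delay} seconds...")
            time.sleep(delay)

    run_with_retries(["dnf", "install", "python39", "python39-pip", "which", "-y"])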
@@ -2516,7 +2419,7 @@ jobs:
     timeout-minutes: 10
     needs: build-binary-macos-x86_64
     name: "check system | python on macos x86-64"
-    runs-on: macos-15-intel # github-macos-15-x86_64-4
+    runs-on: macos-13 # github-macos-13-x86_64-4
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
@@ -2538,7 +2441,7 @@ jobs:
         run: echo $(which python3)
 
       - name: "Validate global Python install"
-        run: python3 scripts/check_system_python.py --uv ./uv --externally-managed
+        run: python3 scripts/check_system_python.py --uv ./uv
 
   system-test-windows-python-310:
     timeout-minutes: 10
@@ -2924,14 +2827,14 @@ jobs:
     runs-on: codspeed-macro
     needs: determine_changes
     if: ${{ github.repository == 'astral-sh/uv' && !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
-    timeout-minutes: 25
+    timeout-minutes: 20
     steps:
       - name: "Checkout Branch"
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
 
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
 
       - name: "Install Rust toolchain"
         run: rustup show
@@ -2946,17 +2849,16 @@ jobs:
           sudo apt-get update
           sudo apt-get install -y libsasl2-dev libldap2-dev libkrb5-dev
           cargo run --bin uv -- venv --cache-dir .cache
-          cargo run --bin uv -- pip compile test/requirements/jupyter.in --universal --exclude-newer 2024-08-08 --cache-dir .cache
-          cargo run --bin uv -- pip compile test/requirements/airflow.in --universal --exclude-newer 2024-08-08 --cache-dir .cache
+          cargo run --bin uv -- pip compile scripts/requirements/jupyter.in --universal --exclude-newer 2024-08-08 --cache-dir .cache
+          cargo run --bin uv -- pip compile scripts/requirements/airflow.in --universal --exclude-newer 2024-08-08 --cache-dir .cache
 
       - name: "Build benchmarks"
-        run: cargo codspeed build --profile profiling -p uv-bench
+        run: cargo codspeed build --profile profiling --features codspeed -p uv-bench
 
       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
+        uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
         with:
           run: cargo codspeed run
-          mode: walltime
           token: ${{ secrets.CODSPEED_TOKEN }}
 
   benchmarks-instrumented:
@@ -2971,7 +2873,7 @@ jobs:
         with:
           persist-credentials: false
 
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
 
       - name: "Install Rust toolchain"
         run: rustup show
@@ -2986,15 +2888,14 @@ jobs:
           sudo apt-get update
           sudo apt-get install -y libsasl2-dev libldap2-dev libkrb5-dev
           cargo run --bin uv -- venv --cache-dir .cache
-          cargo run --bin uv -- pip compile test/requirements/jupyter.in --universal --exclude-newer 2024-08-08 --cache-dir .cache
-          cargo run --bin uv -- pip compile test/requirements/airflow.in --universal --exclude-newer 2024-08-08 --cache-dir .cache
+          cargo run --bin uv -- pip compile scripts/requirements/jupyter.in --universal --exclude-newer 2024-08-08 --cache-dir .cache
+          cargo run --bin uv -- pip compile scripts/requirements/airflow.in --universal --exclude-newer 2024-08-08 --cache-dir .cache
 
       - name: "Build benchmarks"
-        run: cargo codspeed build --profile profiling -p uv-bench
+        run: cargo codspeed build --profile profiling --features codspeed -p uv-bench
 
       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
+        uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
         with:
           run: cargo codspeed run
-          mode: instrumentation
           token: ${{ secrets.CODSPEED_TOKEN }}
@@ -1,33 +0,0 @@
-# Publish a release to crates.io.
-#
-# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish job
-# within `cargo-dist`.
-name: "Publish to crates.io"
-
-on:
-  workflow_call:
-    inputs:
-      plan:
-        required: true
-        type: string
-
-jobs:
-  crates-publish-uv:
-    name: Upload uv to crates.io
-    runs-on: ubuntu-latest
-    environment:
-      name: release
-    permissions:
-      contents: read
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
-      # TODO(zanieb): Switch to trusted publishing once published
-      # - uses: rust-lang/crates-io-auth-action@v1
-      #   id: auth
-      - name: Publish workspace crates
-        # Note `--no-verify` is safe because we do a publish dry-run elsewhere in CI
-        run: cargo publish --workspace --no-verify
-        env:
-          CARGO_REGISTRY_TOKEN: ${{ secrets.CRATES_TOKEN }}
@@ -36,14 +36,6 @@ jobs:
         with:
           python-version: 3.12
 
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-
-      - name: "Generate reference documentation"
-        run: |
-          cargo dev generate-options-reference
-          cargo dev generate-cli-reference
-          cargo dev generate-env-vars-reference
-
       - name: "Set docs display name"
         run: |
           version="${VERSION}"
@@ -18,10 +18,11 @@ jobs:
     environment:
       name: release
     permissions:
-      id-token: write # For PyPI's trusted publishing
+      # For PyPI's trusted publishing.
+      id-token: write
     steps:
       - name: "Install uv"
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0
       - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
         with:
           pattern: wheels_uv-*
@@ -36,10 +37,11 @@ jobs:
     environment:
       name: release
     permissions:
-      id-token: write # For PyPI's trusted publishing
+      # For PyPI's trusted publishing.
+      id-token: write
     steps:
       - name: "Install uv"
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0
       - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
         with:
           pattern: wheels_uv_build-*
@@ -68,7 +68,7 @@ jobs:
         # we specify bash to get pipefail; it guards against the `curl` command
         # failing. otherwise `sh` won't catch that `curl` returned non-0
         shell: bash
-        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.2/cargo-dist-installer.sh | sh"
+        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.0/cargo-dist-installer.sh | sh"
       - name: Cache dist
         uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
         with:
@@ -168,8 +168,8 @@ jobs:
       - custom-build-binaries
       - custom-build-docker
       - build-global-artifacts
-    # Only run if we're "publishing", and only if plan, local and global didn't fail (skipped is fine)
-    if: ${{ always() && needs.plan.result == 'success' && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
+    # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
+    if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
     env:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     runs-on: "depot-ubuntu-latest-4"
@@ -222,36 +222,17 @@ jobs:
       "id-token": "write"
       "packages": "write"
 
-  custom-publish-crates:
-    needs:
-      - plan
-      - host
-      - custom-publish-pypi # DIRTY: see #16989
-    if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
-    uses: ./.github/workflows/publish-crates.yml
-    with:
-      plan: ${{ needs.plan.outputs.val }}
-    secrets: inherit
-    # publish jobs get escalated permissions
-    permissions:
-      "contents": "read"
-
   # Create a GitHub Release while uploading all files to it
   announce:
     needs:
       - plan
       - host
       - custom-publish-pypi
-      - custom-publish-crates
     # use "always() && ..." to allow us to wait for all publish jobs while
     # still allowing individual publish jobs to skip themselves (for prereleases).
     # "host" however must run to completion, no skipping allowed!
-    if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') && (needs.custom-publish-crates.result == 'skipped' || needs.custom-publish-crates.result == 'success') }}
+    if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') }}
     runs-on: "depot-ubuntu-latest-4"
     permissions:
-      "attestations": "write"
       "contents": "write"
-      "id-token": "write"
     env:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     steps:
@@ -270,15 +251,6 @@ jobs:
         run: |
          # Remove the granular manifests
          rm -f artifacts/*-dist-manifest.json
-      - name: Attest
-        uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2
-        with:
-          subject-path: |
-            artifacts/*.json
-            artifacts/*.sh
-            artifacts/*.ps1
-            artifacts/*.zip
-            artifacts/*.tar.gz
      - name: Create GitHub Release
        env:
          PRERELEASE_FLAG: "${{ fromJson(needs.host.outputs.val).announcement_is_prerelease && '--prerelease' || '' }}"
@@ -20,7 +20,7 @@ jobs:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+      - uses: astral-sh/setup-uv@b75a909f75acd358c2196fb9a5f1299a9a8868a4 # v6.7.0
        with:
          version: "latest"
          enable-cache: true
@@ -49,4 +49,3 @@ jobs:
          title: "Sync latest Python releases"
          body: "Automated update for Python releases."
          base: "main"
-          draft: true
@@ -37,11 +37,6 @@ profile.json.gz
 # MkDocs
 /site
-
-# Generated reference docs (use `cargo dev generate-all` to regenerate)
-/docs/reference/cli.md
-/docs/reference/environment.md
-/docs/reference/settings.md
 
 # macOS
 **/.DS_Store
 
@@ -12,7 +12,7 @@ repos:
      - id: validate-pyproject
 
  - repo: https://github.com/crate-ci/typos
-    rev: v1.37.2
+    rev: v1.36.2
    hooks:
      - id: typos
 
@@ -42,7 +42,7 @@ repos:
        types_or: [yaml, json5]
 
  - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.13.3
+    rev: v0.13.1
    hooks:
      - id: ruff-format
      - id: ruff
@@ -4,5 +4,5 @@ PREVIEW-CHANGELOG.md
 docs/reference/cli.md
 docs/reference/settings.md
 docs/reference/environment.md
-test/ecosystem/home-assistant-core/LICENSE.md
+ecosystem/home-assistant-core/LICENSE.md
 docs/guides/integration/gitlab.md
CHANGELOG.md (1084): file diff suppressed because it is too large.
@@ -1,125 +0,0 @@
# Contributor Covenant Code of Conduct

- [Our Pledge](#our-pledge)
- [Our Standards](#our-standards)
- [Enforcement Responsibilities](#enforcement-responsibilities)
- [Scope](#scope)
- [Enforcement](#enforcement)
- [Enforcement Guidelines](#enforcement-guidelines)
  - [1. Correction](#1-correction)
  - [2. Warning](#2-warning)
  - [3. Temporary Ban](#3-temporary-ban)
  - [4. Permanent Ban](#4-permanent-ban)
- [Attribution](#attribution)

## Our Pledge

We as members, contributors, and leaders pledge to make participation in our community a
harassment-free experience for everyone, regardless of age, body size, visible or invisible
disability, ethnicity, sex characteristics, gender identity and expression, level of experience,
education, socio-economic status, nationality, personal appearance, race, religion, or sexual
identity and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and
healthy community.

## Our Standards

Examples of behavior that contributes to a positive environment for our community include:

- Demonstrating empathy and kindness toward other people
- Being respectful of differing opinions, viewpoints, and experiences
- Giving and gracefully accepting constructive feedback
- Accepting responsibility and apologizing to those affected by our mistakes, and learning from the
  experience
- Focusing on what is best not just for us as individuals, but for the overall community

Examples of unacceptable behavior include:

- The use of sexualized language or imagery, and sexual attention or advances of any kind
- Trolling, insulting or derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or email address, without their
  explicit permission
- Other conduct which could reasonably be considered inappropriate in a professional setting

## Enforcement Responsibilities

Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior
and will take appropriate and fair corrective action in response to any behavior that they deem
inappropriate, threatening, offensive, or harmful.

Community leaders have the right and responsibility to remove, edit, or reject comments, commits,
code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and
will communicate reasons for moderation decisions when appropriate.

## Scope

This Code of Conduct applies within all community spaces, and also applies when an individual is
officially representing the community in public spaces. Examples of representing our community
include using an official e-mail address, posting via an official social media account, or acting as
an appointed representative at an online or offline event.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community
leaders responsible for enforcement at <hey@astral.sh>. All complaints will be reviewed and
investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the reporter of any
incident.

## Enforcement Guidelines

Community leaders will follow these Community Impact Guidelines in determining the consequences for
any action they deem in violation of this Code of Conduct:

### 1. Correction

**Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or
unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing clarity around the
nature of the violation and an explanation of why the behavior was inappropriate. A public apology
may be requested.

### 2. Warning

**Community Impact**: A violation through a single incident or series of actions.

**Consequence**: A warning with consequences for continued behavior. No interaction with the people
involved, including unsolicited interaction with those enforcing the Code of Conduct, for a
specified period of time. This includes avoiding interactions in community spaces as well as
external channels like social media. Violating these terms may lead to a temporary or permanent ban.

### 3. Temporary Ban

**Community Impact**: A serious violation of community standards, including sustained inappropriate
behavior.

**Consequence**: A temporary ban from any sort of interaction or public communication with the
community for a specified period of time. No public or private interaction with the people involved,
including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this
period. Violating these terms may lead to a permanent ban.

### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community standards, including
sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement
of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within the community.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, available
[here](https://www.contributor-covenant.org/version/2/0/code_of_conduct.html).

Community Impact Guidelines were inspired by
[Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity).

For answers to common questions about this code of conduct, see the
[FAQ](https://www.contributor-covenant.org/faq). Translations are available
[here](https://www.contributor-covenant.org/translations).

[homepage]: https://www.contributor-covenant.org
@@ -86,13 +86,6 @@ cargo test --package <package> --test <test> -- <test_name> -- --exact
cargo insta review
```

### Git and Git LFS

A subset of uv tests require both [Git](https://git-scm.com) and [Git LFS](https://git-lfs.com/) to
execute properly.

These tests can be disabled by turning off either the `git` or `git-lfs` uv feature.

### Local testing

You can invoke your development version of uv with `cargo run -- <args>`. For example:
@@ -102,15 +95,6 @@ cargo run -- venv
cargo run -- pip install requests
```

## Crate structure

Rust does not allow circular dependencies between crates. To visualize the crate hierarchy, install
[cargo-depgraph](https://github.com/jplatte/cargo-depgraph) and graphviz, then run:

```shell
cargo depgraph --dedup-transitive-deps --workspace-only | dot -Tpng > graph.png
```

## Running inside a Docker container

Source distributions can run arbitrary code on build and can make unwanted modifications to your
@@ -136,7 +120,7 @@ Please refer to Ruff's
it applies to uv, too.

We provide diverse sets of requirements for testing and benchmarking the resolver in
`test/requirements` and for the installer in `test/requirements/compiled`.
`scripts/requirements` and for the installer in `scripts/requirements/compiled`.

You can use `scripts/benchmark` to benchmark predefined workloads between uv versions and with other
tools, e.g., from the `scripts/benchmark` directory:
@@ -147,7 +131,7 @@ uv run resolver \
    --poetry \
    --benchmark \
    resolve-cold \
    ../test/requirements/trio.in
    ../scripts/requirements/trio.in
```

### Analyzing concurrency
@@ -157,7 +141,7 @@ visualize parallel requests and find any spots where uv is CPU-bound. Example us
`uv-dev` respectively:

```shell
RUST_LOG=uv=info TRACING_DURATIONS_FILE=target/traces/jupyter.ndjson cargo run --features tracing-durations-export --profile profiling -- pip compile test/requirements/jupyter.in
RUST_LOG=uv=info TRACING_DURATIONS_FILE=target/traces/jupyter.ndjson cargo run --features tracing-durations-export --profile profiling -- pip compile scripts/requirements/jupyter.in
```

```shell
File diff suppressed because it is too large.

Cargo.toml (178 lines changed):
@@ -4,88 +4,90 @@ exclude = [
    "scripts",
    # Needs nightly
    "crates/uv-trampoline",
    # Only used to pull in features, allocators, etc. — we specifically don't want them
    # to be part of a workspace-wide cargo check, cargo clippy, etc.
    "crates/uv-performance-memory-allocator",
]
resolver = "2"

[workspace.package]
edition = "2024"
rust-version = "1.89"
rust-version = "1.88"
homepage = "https://pypi.org/project/uv/"
documentation = "https://pypi.org/project/uv/"
repository = "https://github.com/astral-sh/uv"
authors = ["uv"]
license = "MIT OR Apache-2.0"

[workspace.dependencies]
uv-auth = { version = "0.0.8", path = "crates/uv-auth" }
uv-bin-install = { version = "0.0.8", path = "crates/uv-bin-install" }
uv-build-backend = { version = "0.0.8", path = "crates/uv-build-backend" }
uv-build-frontend = { version = "0.0.8", path = "crates/uv-build-frontend" }
uv-cache = { version = "0.0.8", path = "crates/uv-cache" }
uv-cache-info = { version = "0.0.8", path = "crates/uv-cache-info" }
uv-cache-key = { version = "0.0.8", path = "crates/uv-cache-key" }
uv-cli = { version = "0.0.8", path = "crates/uv-cli" }
uv-client = { version = "0.0.8", path = "crates/uv-client" }
uv-configuration = { version = "0.0.8", path = "crates/uv-configuration" }
uv-console = { version = "0.0.8", path = "crates/uv-console" }
uv-dirs = { version = "0.0.8", path = "crates/uv-dirs" }
uv-dispatch = { version = "0.0.8", path = "crates/uv-dispatch" }
uv-distribution = { version = "0.0.8", path = "crates/uv-distribution" }
uv-distribution-filename = { version = "0.0.8", path = "crates/uv-distribution-filename" }
uv-distribution-types = { version = "0.0.8", path = "crates/uv-distribution-types" }
uv-extract = { version = "0.0.8", path = "crates/uv-extract" }
uv-flags = { version = "0.0.8", path = "crates/uv-flags" }
uv-fs = { version = "0.0.8", path = "crates/uv-fs", features = ["serde", "tokio"] }
uv-git = { version = "0.0.8", path = "crates/uv-git" }
uv-git-types = { version = "0.0.8", path = "crates/uv-git-types" }
uv-globfilter = { version = "0.0.8", path = "crates/uv-globfilter" }
uv-install-wheel = { version = "0.0.8", path = "crates/uv-install-wheel", default-features = false }
uv-installer = { version = "0.0.8", path = "crates/uv-installer" }
uv-keyring = { version = "0.0.8", path = "crates/uv-keyring" }
uv-logging = { version = "0.0.8", path = "crates/uv-logging" }
uv-macros = { version = "0.0.8", path = "crates/uv-macros" }
uv-metadata = { version = "0.0.8", path = "crates/uv-metadata" }
uv-normalize = { version = "0.0.8", path = "crates/uv-normalize" }
uv-once-map = { version = "0.0.8", path = "crates/uv-once-map" }
uv-options-metadata = { version = "0.0.8", path = "crates/uv-options-metadata" }
uv-performance-memory-allocator = { version = "0.0.8", path = "crates/uv-performance-memory-allocator" }
uv-pep440 = { version = "0.0.8", path = "crates/uv-pep440", features = ["tracing", "rkyv", "version-ranges"] }
uv-pep508 = { version = "0.0.8", path = "crates/uv-pep508", features = ["non-pep508-extensions"] }
uv-platform = { version = "0.0.8", path = "crates/uv-platform" }
uv-platform-tags = { version = "0.0.8", path = "crates/uv-platform-tags" }
uv-preview = { version = "0.0.8", path = "crates/uv-preview" }
uv-publish = { version = "0.0.8", path = "crates/uv-publish" }
uv-pypi-types = { version = "0.0.8", path = "crates/uv-pypi-types" }
uv-python = { version = "0.0.8", path = "crates/uv-python" }
uv-redacted = { version = "0.0.8", path = "crates/uv-redacted" }
uv-requirements = { version = "0.0.8", path = "crates/uv-requirements" }
uv-requirements-txt = { version = "0.0.8", path = "crates/uv-requirements-txt" }
uv-resolver = { version = "0.0.8", path = "crates/uv-resolver" }
uv-scripts = { version = "0.0.8", path = "crates/uv-scripts" }
uv-settings = { version = "0.0.8", path = "crates/uv-settings" }
uv-shell = { version = "0.0.8", path = "crates/uv-shell" }
uv-small-str = { version = "0.0.8", path = "crates/uv-small-str" }
uv-state = { version = "0.0.8", path = "crates/uv-state" }
uv-static = { version = "0.0.8", path = "crates/uv-static" }
uv-tool = { version = "0.0.8", path = "crates/uv-tool" }
uv-torch = { version = "0.0.8", path = "crates/uv-torch" }
uv-trampoline-builder = { version = "0.0.8", path = "crates/uv-trampoline-builder" }
uv-types = { version = "0.0.8", path = "crates/uv-types" }
uv-version = { version = "0.9.18", path = "crates/uv-version" }
uv-virtualenv = { version = "0.0.8", path = "crates/uv-virtualenv" }
uv-warnings = { version = "0.0.8", path = "crates/uv-warnings" }
uv-workspace = { version = "0.0.8", path = "crates/uv-workspace" }
uv-auth = { path = "crates/uv-auth" }
uv-bin-install = { path = "crates/uv-bin-install" }
uv-build-backend = { path = "crates/uv-build-backend" }
uv-build-frontend = { path = "crates/uv-build-frontend" }
uv-cache = { path = "crates/uv-cache" }
uv-cache-info = { path = "crates/uv-cache-info" }
uv-cache-key = { path = "crates/uv-cache-key" }
uv-cli = { path = "crates/uv-cli" }
uv-client = { path = "crates/uv-client" }
uv-configuration = { path = "crates/uv-configuration" }
uv-console = { path = "crates/uv-console" }
uv-dirs = { path = "crates/uv-dirs" }
uv-dispatch = { path = "crates/uv-dispatch" }
uv-distribution = { path = "crates/uv-distribution" }
uv-distribution-filename = { path = "crates/uv-distribution-filename" }
uv-distribution-types = { path = "crates/uv-distribution-types" }
uv-extract = { path = "crates/uv-extract" }
uv-fs = { path = "crates/uv-fs", features = ["serde", "tokio"] }
uv-git = { path = "crates/uv-git" }
uv-git-types = { path = "crates/uv-git-types" }
uv-globfilter = { path = "crates/uv-globfilter" }
uv-install-wheel = { path = "crates/uv-install-wheel", default-features = false }
uv-installer = { path = "crates/uv-installer" }
uv-keyring = { path = "crates/uv-keyring" }
uv-logging = { path = "crates/uv-logging" }
uv-macros = { path = "crates/uv-macros" }
uv-metadata = { path = "crates/uv-metadata" }
uv-normalize = { path = "crates/uv-normalize" }
uv-once-map = { path = "crates/uv-once-map" }
uv-options-metadata = { path = "crates/uv-options-metadata" }
uv-pep440 = { path = "crates/uv-pep440", features = ["tracing", "rkyv", "version-ranges"] }
uv-pep508 = { path = "crates/uv-pep508", features = ["non-pep508-extensions"] }
uv-platform = { path = "crates/uv-platform" }
uv-platform-tags = { path = "crates/uv-platform-tags" }
uv-preview = { path = "crates/uv-preview" }
uv-publish = { path = "crates/uv-publish" }
uv-pypi-types = { path = "crates/uv-pypi-types" }
uv-python = { path = "crates/uv-python" }
uv-redacted = { path = "crates/uv-redacted" }
uv-requirements = { path = "crates/uv-requirements" }
uv-requirements-txt = { path = "crates/uv-requirements-txt" }
uv-resolver = { path = "crates/uv-resolver" }
uv-scripts = { path = "crates/uv-scripts" }
uv-settings = { path = "crates/uv-settings" }
uv-shell = { path = "crates/uv-shell" }
uv-small-str = { path = "crates/uv-small-str" }
uv-state = { path = "crates/uv-state" }
uv-static = { path = "crates/uv-static" }
uv-tool = { path = "crates/uv-tool" }
uv-torch = { path = "crates/uv-torch" }
uv-trampoline-builder = { path = "crates/uv-trampoline-builder" }
uv-types = { path = "crates/uv-types" }
uv-version = { path = "crates/uv-version" }
uv-virtualenv = { path = "crates/uv-virtualenv" }
uv-warnings = { path = "crates/uv-warnings" }
uv-workspace = { path = "crates/uv-workspace" }

ambient-id = { version = "0.0.7", default-features = false, features = ["astral-reqwest-middleware"] }
ambient-id = { version = "0.0.5" }
anstream = { version = "0.6.15" }
anyhow = { version = "1.0.89" }
arcstr = { version = "1.2.0" }
arrayvec = { version = "0.7.6" }
astral-tokio-tar = { version = "0.5.6" }
astral-tokio-tar = { version = "0.5.3" }
async-channel = { version = "2.3.1" }
async-compression = { version = "0.4.12", features = ["bzip2", "gzip", "xz", "zstd"] }
async-trait = { version = "0.1.82" }
async_http_range_reader = { version = "0.9.1", package = "astral_async_http_range_reader" }
async_zip = { version = "0.0.17", package = "astral_async_zip", features = ["bzip2", "deflate", "lzma", "tokio", "xz", "zstd"] }
async_http_range_reader = { version = "0.9.1" }
async_zip = { git = "https://github.com/astral-sh/rs-async-zip", rev = "285e48742b74ab109887d62e1ae79e7c15fd4878", features = ["bzip2", "deflate", "lzma", "tokio", "xz", "zstd"] }
axoupdater = { version = "0.9.0", default-features = false }
backon = { version = "1.3.0" }
base64 = { version = "0.22.1" }
@@ -100,19 +102,16 @@ configparser = { version = "3.1.0" }
console = { version = "0.16.0", default-features = false, features = ["std"] }
csv = { version = "1.3.0" }
ctrlc = { version = "3.4.5" }
cyclonedx-bom = { version = "0.8.0" }
dashmap = { version = "6.1.0" }
data-encoding = { version = "2.6.0" }
diskus = { version = "0.9.0", default-features = false }
dotenvy = { version = "0.15.7" }
dunce = { version = "1.0.5" }
either = { version = "1.13.0" }
encoding_rs_io = { version = "0.1.7" }
embed-manifest = { version = "1.5.0" }
etcetera = { version = "0.11.0" }
fastrand = { version = "2.3.0" }
etcetera = { version = "0.10.0" }
flate2 = { version = "1.0.33", default-features = false, features = ["zlib-rs"] }
fs-err = { version = "3.0.0", features = ["tokio"] }
fs2 = { version = "0.4.3" }
futures = { version = "0.3.30" }
glob = { version = "0.3.1" }
globset = { version = "0.4.15" }
@@ -121,6 +120,7 @@ goblin = { version = "0.10.0", default-features = false, features = ["std", "elf
h2 = { version = "0.4.7" }
hashbrown = { version = "0.16.0" }
hex = { version = "0.4.3" }
home = { version = "0.5.9" }
html-escape = { version = "0.2.13" }
http = { version = "1.1.0" }
indexmap = { version = "2.5.0" }
@@ -135,6 +135,7 @@ memchr = { version = "2.7.4" }
miette = { version = "7.2.0", features = ["fancy-no-backtrace"] }
nanoid = { version = "0.4.0" }
nix = { version = "0.30.0", features = ["signal"] }
once_cell = { version = "1.20.2" }
open = { version = "5.3.2" }
owo-colors = { version = "4.1.0" }
path-slash = { version = "0.2.1" }
@@ -143,17 +144,17 @@ percent-encoding = { version = "2.3.1" }
petgraph = { version = "0.8.0" }
proc-macro2 = { version = "1.0.86" }
procfs = { version = "0.17.0", default-features = false, features = ["flate2"] }
pubgrub = { version = "0.3.3", package = "astral-pubgrub" }
pubgrub = { git = "https://github.com/astral-sh/pubgrub", rev = "d8efd77673c9a90792da9da31b6c0da7ea8a324b" }
quote = { version = "1.0.37" }
rayon = { version = "1.10.0" }
ref-cast = { version = "1.0.24" }
reflink-copy = { version = "0.1.19" }
regex = { version = "1.10.6" }
regex-automata = { version = "0.4.8", default-features = false, features = ["dfa-build", "dfa-search", "perf", "std", "syntax"] }
reqsign = { version = "0.18.0", features = ["aws", "default-context"], default-features = false }
reqsign = { version = "0.17.0", features = ["aws", "default-context"], default-features = false }
reqwest = { version = "0.12.22", default-features = false, features = ["json", "gzip", "deflate", "zstd", "stream", "system-proxy", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart", "http2", "blocking"] }
reqwest-middleware = { version = "0.4.2", package = "astral-reqwest-middleware", features = ["multipart"] }
reqwest-retry = { version = "0.7.0", package = "astral-reqwest-retry" }
reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "7650ed76215a962a96d94a79be71c27bffde7ab2", features = ["multipart"] }
reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "7650ed76215a962a96d94a79be71c27bffde7ab2" }
rkyv = { version = "0.8.8", features = ["bytecheck"] }
rmp-serde = { version = "1.3.0" }
rust-netrc = { version = "0.1.2" }
@@ -170,7 +171,7 @@ serde-untagged = { version = "0.1.6" }
serde_json = { version = "1.0.128" }
sha2 = { version = "0.10.8" }
smallvec = { version = "1.13.2" }
spdx = { version = "0.13.0" }
spdx = { version = "0.10.6" }
syn = { version = "2.0.77" }
sys-info = { version = "0.9.1" }
tar = { version = "0.4.43" }
@@ -178,8 +179,8 @@ target-lexicon = { version = "0.13.0" }
tempfile = { version = "3.14.0" }
textwrap = { version = "0.16.1" }
thiserror = { version = "2.0.0" }
astral-tl = { version = "0.7.11" }
tokio = { version = "1.40.0", features = ["fs", "io-util", "macros", "process", "rt", "signal", "sync", "time"] }
tl = { git = "https://github.com/astral-sh/tl.git", rev = "6e25b2ee2513d75385101a8ff9f591ef51f314ec" }
tokio = { version = "1.40.0", features = ["fs", "io-util", "macros", "process", "rt", "signal", "sync"] }
tokio-stream = { version = "0.1.16" }
tokio-util = { version = "0.7.12", features = ["compat", "io"] }
toml = { version = "0.9.2", features = ["fast_hash"] }
@@ -193,13 +194,12 @@ unicode-width = { version = "0.2.0" }
unscanny = { version = "0.1.0" }
url = { version = "2.5.2", features = ["serde"] }
uuid = { version = "1.16.0" }
version-ranges = { version = "0.1.3", package = "astral-version-ranges" }
version-ranges = { git = "https://github.com/astral-sh/pubgrub", rev = "d8efd77673c9a90792da9da31b6c0da7ea8a324b" }
walkdir = { version = "2.5.0" }
which = { version = "8.0.0", features = ["regex"] }
windows = { version = "0.59.0", features = ["std", "Win32_Globalization", "Win32_System_LibraryLoader", "Win32_System_Console", "Win32_System_Kernel", "Win32_System_Diagnostics_Debug", "Win32_Storage_FileSystem", "Win32_Security", "Win32_System_Registry", "Win32_System_IO", "Win32_System_Ioctl"] }
windows = { version = "0.59.0", features = ["Win32_Globalization", "Win32_Security", "Win32_System_Console", "Win32_System_Kernel", "Win32_System_Diagnostics_Debug", "Win32_Storage_FileSystem", "Win32_System_Registry", "Win32_System_IO", "Win32_System_Ioctl"] }
windows-registry = { version = "0.5.0" }
wiremock = { version = "0.6.4" }
wmi = { version = "0.16.0", default-features = false }
xz2 = { version = "0.1.7" }
zeroize = { version = "1.8.1" }
zip = { version = "2.2.3", default-features = false, features = ["deflate", "zstd", "bzip2", "lzma", "xz"] }
@@ -212,19 +212,19 @@ byteorder = { version = "1.5.0" }
filetime = { version = "0.2.25" }
http-body-util = { version = "0.1.2" }
hyper = { version = "1.4.1", features = ["server", "http1"] }
hyper-util = { version = "0.1.8", features = ["tokio", "server", "http1"] }
hyper-util = { version = "0.1.8", features = ["tokio"] }
ignore = { version = "0.4.23" }
insta = { version = "1.40.0", features = ["json", "filters", "redactions"] }
predicates = { version = "3.1.2" }
rcgen = { version = "0.14.5", features = ["crypto", "pem", "ring"], default-features = false }
rustls = { version = "0.23.29", default-features = false }
similar = { version = "2.6.0" }
temp-env = { version = "0.3.6" }
test-case = { version = "3.3.1" }
test-log = { version = "0.2.16", features = ["trace"], default-features = false }
tokio-rustls = { version = "0.26.2", default-features = false }
whoami = { version = "1.6.0" }

[workspace.metadata.cargo-shear]
ignored = ["flate2", "xz2", "h2"]

[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"
@@ -310,18 +310,8 @@ strip = false
debug = "full"
lto = false

# Profile for fast test execution: Skip debug info generation, and
# apply basic optimization, which speeds up building and running tests.
[profile.fast-build]
inherits = "dev"
opt-level = 1
debug = 0
strip = "debuginfo"

# Profile for faster builds: Skip debug info generation, for faster
# builds of smaller binaries.
[profile.no-debug]
inherits = "dev"
debug = 0
strip = "debuginfo"

@@ -336,3 +326,7 @@ codegen-units = 1
# The profile that 'cargo dist' will build with.
[profile.dist]
inherits = "release"

[patch.crates-io]
reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "7650ed76215a962a96d94a79be71c27bffde7ab2" }
reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "7650ed76215a962a96d94a79be71c27bffde7ab2" }
@@ -23,15 +23,8 @@ RUN case "$TARGETPLATFORM" in \
    *) exit 1 ;; \
    esac

# Temporarily using nightly-2025-11-02 for bundled musl v1.2.5
# Ref: https://github.com/rust-lang/rust/pull/142682
# TODO(samypr100): Remove when toolchain updates to 1.93
COPY <<EOF rust-toolchain.toml
[toolchain]
channel = "nightly-2025-11-02"
EOF
# Update rustup whenever we bump the rust version
# COPY rust-toolchain.toml rust-toolchain.toml
COPY rust-toolchain.toml rust-toolchain.toml
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --target $(cat rust_target.txt) --profile minimal --default-toolchain none
ENV PATH="$HOME/.cargo/bin:$PATH"
# Install the toolchain then the musl target
README.md (33 lines changed):
@@ -42,7 +42,7 @@ An extremely fast Python package and project manager, written in Rust.
- 🖥️ Supports macOS, Linux, and Windows.

uv is backed by [Astral](https://astral.sh), the creators of
[Ruff](https://github.com/astral-sh/ruff) and [ty](https://github.com/astral-sh/ty).
[Ruff](https://github.com/astral-sh/ruff).

## Installation

@@ -192,12 +192,14 @@ uv installs Python and allows quickly switching between versions.
Install multiple Python versions:

```console
$ uv python install 3.12 3.13 3.14
Installed 3 versions in 972ms
+ cpython-3.12.12-macos-aarch64-none (python3.12)
+ cpython-3.13.9-macos-aarch64-none (python3.13)
+ cpython-3.14.0-macos-aarch64-none (python3.14)

$ uv python install 3.10 3.11 3.12
Searching for Python versions matching: Python 3.10
Searching for Python versions matching: Python 3.11
Searching for Python versions matching: Python 3.12
Installed 3 versions in 3.42s
+ cpython-3.10.14-macos-aarch64-none
+ cpython-3.11.9-macos-aarch64-none
+ cpython-3.12.4-macos-aarch64-none
```

Download Python versions as needed:
@@ -268,6 +270,14 @@ Installed 43 packages in 208ms

See the [pip interface documentation](https://docs.astral.sh/uv/pip/index/) to get started.

## Platform support

See uv's [platform support](https://docs.astral.sh/uv/reference/platforms/) document.

## Versioning policy

See uv's [versioning policy](https://docs.astral.sh/uv/reference/versioning/) document.

## Contributing

We are passionate about supporting contributors of all levels of experience and would love to see
@@ -284,15 +294,6 @@ It's pronounced as "you - vee" ([`/juː viː/`](https://en.wikipedia.org/wiki/He

Just "uv", please. See the [style guide](./STYLE.md#styling-uv) for details.

#### What platforms does uv support?

See uv's [platform support](https://docs.astral.sh/uv/reference/platforms/) document.

#### Is uv ready for production?

Yes, uv is stable and widely used in production. See uv's
[versioning policy](https://docs.astral.sh/uv/reference/versioning/) document for details.

## Acknowledgements

uv's dependency resolver uses [PubGrub](https://github.com/pubgrub-rs/pubgrub) under the hood. We're
STYLE.md (2 lines changed):
@@ -16,7 +16,7 @@ documentation_.
1. If a message ends with a single relevant value, precede it with a colon, e.g.,
   `This is the value: value`. If the value is a literal, wrap it in backticks.
1. Markdown files should be wrapped at 100 characters.
1. Use a space, not an equals sign, for command-line arguments with a value, e.g.
1. Use a space, not an equals sign, for command line arguments with a value, e.g.
   `--resolution lowest`, not `--resolution=lowest`.

## Styling uv
@@ -1,8 +1,8 @@
[files]
extend-exclude = [
    "**/snapshots/",
    "test/ecosystem/**",
    "test/requirements/**/*.in",
    "ecosystem/**",
    "scripts/**/*.in",
    "crates/uv-build-frontend/src/pipreqs/mapping",
]
ignore-hidden = false
@@ -982,7 +982,7 @@ for more details.
  ([#9135](https://github.com/astral-sh/uv/pull/9135))
- Tweak script `--no-project` comment ([#10331](https://github.com/astral-sh/uv/pull/10331))
- Update copyright year ([#10297](https://github.com/astral-sh/uv/pull/10297))
- Add instructions for installing with Scoop ([#10332](https://github.com/astral-sh/uv/pull/10332))
- Add instructinos for installing with Scoop ([#10332](https://github.com/astral-sh/uv/pull/10332))

## 0.5.16

changelogs/0.8.x.md (1108 lines changed): file diff suppressed because it is too large.
@@ -1,13 +1,7 @@
[package]
name = "uv-auth"
version = "0.0.8"
description = "This is an internal component crate of uv"
version = "0.0.1"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

[lib]
doctest = false
@@ -1,13 +0,0 @@
<!-- This file is generated. DO NOT EDIT -->

# uv-auth

This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
is unstable and will have frequent breaking changes.

This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-auth).

See uv's
[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
for details on versioning.
@@ -29,6 +29,6 @@ impl AsRef<[u8]> for AccessToken {

impl std::fmt::Display for AccessToken {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "****")
        write!(f, "{}", self.0)
    }
}
@@ -11,8 +11,8 @@ use url::Url;
use uv_once_map::OnceMap;
use uv_redacted::DisplaySafeUrl;

use crate::Realm;
use crate::credentials::{Authentication, Username};
use crate::{Credentials, Realm};

type FxOnceMap<K, V> = OnceMap<K, V, BuildHasherDefault<FxHasher>>;

@@ -33,7 +33,6 @@ impl Display for FetchUrl {
    }
}

#[derive(Debug)] // All internal types are redacted.
pub struct CredentialsCache {
    /// A cache per realm and username
    realms: RwLock<FxHashMap<(Realm, Username), Arc<Authentication>>>,
@@ -59,27 +58,6 @@ impl CredentialsCache {
        }
    }

    /// Populate the global authentication store with credentials on a URL, if there are any.
    ///
    /// Returns `true` if the store was updated.
    pub fn store_credentials_from_url(&self, url: &DisplaySafeUrl) -> bool {
        if let Some(credentials) = Credentials::from_url(url) {
            trace!("Caching credentials for {url}");
            self.insert(url, Arc::new(Authentication::from(credentials)));
            true
        } else {
            false
        }
    }

    /// Populate the global authentication store with the given credentials for a URL.
    pub fn store_credentials(&self, url: &DisplaySafeUrl, credentials: Credentials) {
        trace!("Caching credentials for {url}");
        self.insert(url, Arc::new(Authentication::from(credentials)));
    }

    /// Return the credentials that should be used for a realm and username, if any.
    pub(crate) fn get_realm(
        &self,
@@ -20,17 +20,15 @@ use uv_static::EnvVars;

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Credentials {
    /// RFC 7617 HTTP Basic Authentication
    Basic {
        /// The username to use for authentication.
        username: Username,
        /// The password to use for authentication.
        password: Option<Password>,
    },
    /// RFC 6750 Bearer Token Authentication
    Bearer {
        /// The token to use for authentication.
        token: Token,
        token: Vec<u8>,
    },
}

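As an aside on the hunk above: the enum distinguishes RFC 7617 Basic and RFC 6750 Bearer credentials, which differ in how they are rendered into an `Authorization` header. A minimal, self-contained sketch of that rendering — illustrative only, not uv's implementation; it assumes the `base64` crate (0.22), which the workspace already depends on:

```rust
// Illustrative sketch, not uv's code: render an `Authorization` header value.
use base64::Engine as _;
use base64::engine::general_purpose::STANDARD;

enum Credentials {
    Basic { username: String, password: Option<String> },
    Bearer { token: Vec<u8> },
}

impl Credentials {
    fn authorization_header(&self) -> Vec<u8> {
        match self {
            // RFC 7617: `Basic base64(username:password)`
            Credentials::Basic { username, password } => {
                let pair = format!("{username}:{}", password.as_deref().unwrap_or(""));
                format!("Basic {}", STANDARD.encode(pair)).into_bytes()
            }
            // RFC 6750: `Bearer <token>` (the token is opaque bytes)
            Credentials::Bearer { token } => {
                let mut value = b"Bearer ".to_vec();
                value.extend_from_slice(token);
                value
            }
        }
    }
}

fn main() {
    let creds = Credentials::Basic {
        username: "user".to_string(),
        password: Some("pass".to_string()),
    };
    assert_eq!(creds.authorization_header(), b"Basic dXNlcjpwYXNz");
}
```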
@@ -102,36 +100,6 @@ impl fmt::Debug for Password {
    }
}

#[derive(Clone, PartialEq, Eq, Ord, PartialOrd, Hash, Default, Deserialize)]
#[serde(transparent)]
pub struct Token(Vec<u8>);

impl Token {
    pub fn new(token: Vec<u8>) -> Self {
        Self(token)
    }

    /// Return the [`Token`] as a byte slice.
    pub fn as_slice(&self) -> &[u8] {
        self.0.as_slice()
    }

    /// Convert the [`Token`] into its underlying [`Vec<u8>`].
    pub fn into_bytes(self) -> Vec<u8> {
        self.0
    }

    /// Return whether the [`Token`] is empty.
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
}

impl fmt::Debug for Token {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "****")
    }
}

impl Credentials {
    /// Create a set of HTTP Basic Authentication credentials.
    #[allow(dead_code)]
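The `Token` newtype in this hunk shows a common Rust pattern for secrets: wrap the raw bytes so the `Debug` form can never leak them into logs or panic messages. A standalone sketch of the same idea (illustrative names, not uv's code):

```rust
use std::fmt;

/// A secret wrapper whose `Debug` output is always redacted.
struct Secret(Vec<u8>);

impl fmt::Debug for Secret {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Never print the inner bytes, regardless of content or length.
        write!(f, "****")
    }
}

fn main() {
    let secret = Secret(b"super_secret_token".to_vec());
    let debugged = format!("{secret:?}");
    assert_eq!(debugged, "****");
    assert!(!debugged.contains("super_secret_token"));
}
```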
@@ -145,9 +113,7 @@ impl Credentials {
    /// Create a set of Bearer Authentication credentials.
    #[allow(dead_code)]
    pub fn bearer(token: Vec<u8>) -> Self {
        Self::Bearer {
            token: Token::new(token),
        }
        Self::Bearer { token }
    }

    pub fn username(&self) -> Option<&str> {
@@ -318,7 +284,7 @@ impl Credentials {
        // Parse a `Bearer` authentication header.
        if let Some(token) = header.as_bytes().strip_prefix(b"Bearer ") {
            return Some(Self::Bearer {
                token: Token::new(token.to_vec()),
                token: token.to_vec(),
            });
        }

@@ -623,15 +589,4 @@ mod tests {
            "Basic { username: Username(Some(\"user\")), password: Some(****) }"
        );
    }

    #[test]
    fn test_bearer_token_obfuscation() {
        let token = "super_secret_token";
        let credentials = Credentials::bearer(token.into());
        let debugged = format!("{credentials:?}");
        assert!(
            !debugged.contains(token),
            "Token should be obfuscated in Debug impl: {debugged}"
        );
    }
}
@@ -404,13 +404,12 @@ mod tests {
        let url = Url::parse("file:/etc/bin/").unwrap();
        let keyring = KeyringProvider::empty();
        // Panics due to debug assertion; returns `None` in production
        let fetch = keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some("user"));
        if cfg!(debug_assertions) {
            let result = std::panic::AssertUnwindSafe(fetch).catch_unwind().await;
            assert!(result.is_err());
        } else {
            assert_eq!(fetch.await, None);
        }
        let result = std::panic::AssertUnwindSafe(
            keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some("user")),
        )
        .catch_unwind()
        .await;
        assert!(result.is_err());
    }

    #[tokio::test]
@@ -418,13 +417,12 @@ mod tests {
        let url = Url::parse("https://user:password@example.com").unwrap();
        let keyring = KeyringProvider::empty();
        // Panics due to debug assertion; returns `None` in production
        let fetch = keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some(url.username()));
        if cfg!(debug_assertions) {
            let result = std::panic::AssertUnwindSafe(fetch).catch_unwind().await;
            assert!(result.is_err());
        } else {
            assert_eq!(fetch.await, None);
        }
        let result = std::panic::AssertUnwindSafe(
            keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some(url.username())),
        )
        .catch_unwind()
        .await;
        assert!(result.is_err());
    }

    #[tokio::test]
@@ -432,13 +430,12 @@ mod tests {
        let url = Url::parse("https://example.com").unwrap();
        let keyring = KeyringProvider::empty();
        // Panics due to debug assertion; returns `None` in production
        let fetch = keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some(url.username()));
        if cfg!(debug_assertions) {
            let result = std::panic::AssertUnwindSafe(fetch).catch_unwind().await;
            assert!(result.is_err());
        } else {
            assert_eq!(fetch.await, None);
        }
        let result = std::panic::AssertUnwindSafe(
            keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some(url.username())),
        )
        .catch_unwind()
        .await;
        assert!(result.is_err());
    }

    #[tokio::test]
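All three test hunks above use the same idiom: wrap the future in `std::panic::AssertUnwindSafe`, `catch_unwind` it via `futures::FutureExt`, and branch on `cfg!(debug_assertions)` because `debug_assert!` only panics in debug builds. A reduced sketch of the idiom under the assumption that `futures` and `tokio` are available (function names are illustrative):

```rust
use futures::FutureExt;
use std::panic::AssertUnwindSafe;

async fn fetch(valid: bool) -> Option<&'static str> {
    // Stand-in for a lookup that rejects malformed input with a debug assertion.
    debug_assert!(valid, "input failed a debug-only invariant");
    None
}

#[tokio::main]
async fn main() {
    let result = AssertUnwindSafe(fetch(false)).catch_unwind().await;
    if cfg!(debug_assertions) {
        // `debug_assert!` panics in debug builds, so the future unwinds...
        assert!(result.is_err());
    } else {
        // ...and is compiled out in release builds, so the value comes through.
        assert_eq!(result.unwrap(), None);
    }
}
```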
@@ -1,5 +1,12 @@
use std::sync::{Arc, LazyLock};

use tracing::trace;

use uv_redacted::DisplaySafeUrl;

use crate::credentials::Authentication;
pub use access_token::AccessToken;
pub use cache::CredentialsCache;
use cache::CredentialsCache;
pub use credentials::{Credentials, Username};
pub use index::{AuthPolicy, Index, Indexes};
pub use keyring::KeyringProvider;
@@ -7,7 +14,7 @@ pub use middleware::AuthMiddleware;
pub use pyx::{
    DEFAULT_TOLERANCE_SECS, PyxJwt, PyxOAuthTokens, PyxTokenStore, PyxTokens, TokenStoreError,
};
pub use realm::{Realm, RealmRef};
pub use realm::Realm;
pub use service::{Service, ServiceParseError};
pub use store::{AuthBackend, AuthScheme, TextCredentialStore, TomlCredentialError};

@@ -22,3 +29,32 @@ mod pyx;
mod realm;
mod service;
mod store;

// TODO(zanieb): Consider passing a cache explicitly throughout

/// Global authentication cache for a uv invocation
///
/// This is used to share credentials across uv clients.
pub(crate) static CREDENTIALS_CACHE: LazyLock<CredentialsCache> =
    LazyLock::new(CredentialsCache::default);

/// Populate the global authentication store with credentials on a URL, if there are any.
///
/// Returns `true` if the store was updated.
pub fn store_credentials_from_url(url: &DisplaySafeUrl) -> bool {
    if let Some(credentials) = Credentials::from_url(url) {
        trace!("Caching credentials for {url}");
        CREDENTIALS_CACHE.insert(url, Arc::new(Authentication::from(credentials)));
        true
    } else {
        false
    }
}

/// Populate the global authentication store with the given credentials for a URL.
pub fn store_credentials(url: &DisplaySafeUrl, credentials: Credentials) {
    trace!("Caching credentials for {url}");
    CREDENTIALS_CACHE.insert(url, Arc::new(Authentication::from(credentials)));
}
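The new `CREDENTIALS_CACHE` static relies on `std::sync::LazyLock` (stable since Rust 1.80, well within the workspace's pinned toolchain) for a process-wide singleton that is constructed on first use. A generic sketch of the pattern with illustrative names:

```rust
use std::collections::HashMap;
use std::sync::{LazyLock, Mutex};

// A process-wide cache, built lazily on first access and shared by all callers.
static CACHE: LazyLock<Mutex<HashMap<String, String>>> =
    LazyLock::new(|| Mutex::new(HashMap::new()));

fn store(url: &str, token: &str) {
    CACHE.lock().unwrap().insert(url.to_string(), token.to_string());
}

fn main() {
    store("https://example.com", "token");
    assert!(CACHE.lock().unwrap().contains_key("https://example.com"));
}
```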
@@ -10,24 +10,19 @@ use tracing::{debug, trace, warn};

use uv_preview::{Preview, PreviewFeatures};
use uv_redacted::DisplaySafeUrl;
use uv_static::EnvVars;
use uv_warnings::owo_colors::OwoColorize;

use crate::credentials::Authentication;
use crate::providers::{HuggingFaceProvider, S3EndpointProvider};
use crate::pyx::{DEFAULT_TOLERANCE_SECS, PyxTokenStore};
use crate::{
    AccessToken, CredentialsCache, KeyringProvider,
    AccessToken, CREDENTIALS_CACHE, CredentialsCache, KeyringProvider,
    cache::FetchUrl,
    credentials::{Credentials, Username},
    index::{AuthPolicy, Indexes},
    realm::Realm,
};
use crate::{Index, TextCredentialStore};

/// Cached check for whether we're running in Dependabot.
static IS_DEPENDABOT: LazyLock<bool> =
    LazyLock::new(|| std::env::var(EnvVars::DEPENDABOT).is_ok_and(|value| value == "true"));
use crate::{Index, TextCredentialStore, TomlCredentialError};

/// Strategy for loading netrc files.
enum NetrcMode {
@@ -65,55 +60,49 @@ impl NetrcMode {

/// Strategy for loading text-based credential files.
enum TextStoreMode {
    Automatic(tokio::sync::OnceCell<Option<TextCredentialStore>>),
    Automatic(LazyLock<Option<TextCredentialStore>>),
    Enabled(TextCredentialStore),
    Disabled,
}

impl Default for TextStoreMode {
    fn default() -> Self {
        Self::Automatic(tokio::sync::OnceCell::new())
        // TODO(zanieb): Reconsider this pattern. We're just mirroring the [`NetrcMode`]
        // implementation for now.
        Self::Automatic(LazyLock::new(|| {
            let path = TextCredentialStore::default_file()
                .inspect_err(|err| {
                    warn!("Failed to determine credentials file path: {}", err);
                })
                .ok()?;

            match TextCredentialStore::read(&path) {
                Ok((store, _lock)) => {
                    debug!("Loaded credential file {}", path.display());
                    Some(store)
                }
                Err(TomlCredentialError::Io(err)) if err.kind() == std::io::ErrorKind::NotFound => {
                    debug!("No credentials file found at {}", path.display());
                    None
                }
                Err(err) => {
                    warn!(
                        "Failed to load credentials from {}: {}",
                        path.display(),
                        err
                    );
                    None
                }
            }
        }))
    }
}

impl TextStoreMode {
    async fn load_default_store() -> Option<TextCredentialStore> {
        let path = TextCredentialStore::default_file()
            .inspect_err(|err| {
                warn!("Failed to determine credentials file path: {}", err);
            })
            .ok()?;

        match TextCredentialStore::read(&path).await {
            Ok((store, _lock)) => {
                debug!("Loaded credential file {}", path.display());
                Some(store)
            }
            Err(err)
                if err
                    .as_io_error()
                    .is_some_and(|err| err.kind() == std::io::ErrorKind::NotFound) =>
            {
                debug!("No credentials file found at {}", path.display());
                None
            }
            Err(err) => {
                warn!(
                    "Failed to load credentials from {}: {}",
                    path.display(),
                    err
                );
                None
            }
        }
    }

    /// Get the parsed credential store, if enabled.
    async fn get(&self) -> Option<&TextCredentialStore> {
    fn get(&self) -> Option<&TextCredentialStore> {
        match self {
            // TODO(zanieb): Reconsider this pattern. We're just mirroring the [`NetrcMode`]
            // implementation for now.
            Self::Automatic(lock) => lock.get_or_init(Self::load_default_store).await.as_ref(),
            Self::Automatic(lock) => lock.as_ref(),
            Self::Enabled(store) => Some(store),
            Self::Disabled => None,
        }
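The switch in this hunk from `LazyLock` to `tokio::sync::OnceCell` lets the credentials file be loaded by an async function while still running at most once per process. A self-contained sketch of `get_or_init` with an async loader (the loader here is an illustrative stand-in for reading a file from disk):

```rust
use tokio::sync::OnceCell;

// Stand-in for asynchronously reading and parsing a credentials file.
async fn load() -> Option<String> {
    Some("credentials-from-disk".to_string())
}

struct Store {
    cell: OnceCell<Option<String>>,
}

impl Store {
    // `get_or_init` awaits the loader on the first call and caches its result;
    // every later call returns the cached value without re-reading the file.
    async fn get(&self) -> Option<&String> {
        self.cell.get_or_init(load).await.as_ref()
    }
}

#[tokio::main]
async fn main() {
    let store = Store { cell: OnceCell::new() };
    assert_eq!(store.get().await.map(String::as_str), Some("credentials-from-disk"));
    assert_eq!(store.get().await.map(String::as_str), Some("credentials-from-disk"));
}
```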
@@ -129,15 +118,6 @@ enum TokenState {
    Initialized(Option<AccessToken>),
}

#[derive(Clone)]
enum S3CredentialState {
    /// The S3 credential state has not yet been initialized.
    Uninitialized,
    /// The S3 credential state has been initialized, with either a signer or `None` if
    /// no S3 endpoint is configured.
    Initialized(Option<Arc<Authentication>>),
}

/// A middleware that adds basic authentication to requests.
///
/// Uses a cache to propagate credentials from previously seen requests and
@@ -146,8 +126,7 @@ pub struct AuthMiddleware {
    netrc: NetrcMode,
    text_store: TextStoreMode,
    keyring: Option<KeyringProvider>,
    /// Global authentication cache for a uv invocation to share credentials across uv clients.
    cache: Arc<CredentialsCache>,
    cache: Option<CredentialsCache>,
    /// Auth policies for specific URLs.
    indexes: Indexes,
    /// Set all endpoints as needing authentication. We never try to send an
@@ -159,31 +138,21 @@ pub struct AuthMiddleware {
    pyx_token_store: Option<PyxTokenStore>,
    /// Tokens to use for persistent credentials.
    pyx_token_state: Mutex<TokenState>,
    /// Cached S3 credentials to avoid running the credential helper multiple times.
    s3_credential_state: Mutex<S3CredentialState>,
    preview: Preview,
}

impl Default for AuthMiddleware {
    fn default() -> Self {
        Self::new()
    }
}

impl AuthMiddleware {
    pub fn new() -> Self {
        Self {
            netrc: NetrcMode::default(),
            text_store: TextStoreMode::default(),
            keyring: None,
            // TODO(konsti): There shouldn't be a credential cache without that in the initializer.
            cache: Arc::new(CredentialsCache::default()),
            cache: None,
            indexes: Indexes::new(),
            only_authenticated: false,
            base_client: None,
            pyx_token_store: None,
            pyx_token_state: Mutex::new(TokenState::Uninitialized),
            s3_credential_state: Mutex::new(S3CredentialState::Uninitialized),
            preview: Preview::default(),
        }
    }
@@ -231,14 +200,7 @@ impl AuthMiddleware {
    /// Configure the [`CredentialsCache`] to use.
    #[must_use]
    pub fn with_cache(mut self, cache: CredentialsCache) -> Self {
        self.cache = Arc::new(cache);
        self
    }

    /// Configure the [`CredentialsCache`] to use from an existing [`Arc`].
    #[must_use]
    pub fn with_cache_arc(mut self, cache: Arc<CredentialsCache>) -> Self {
        self.cache = cache;
        self.cache = Some(cache);
        self
    }

@@ -271,9 +233,17 @@ impl AuthMiddleware {
        self
    }

    /// Global authentication cache for a uv invocation to share credentials across uv clients.
    /// Get the configured authentication store.
    ///
    /// If not set, the global store is used.
    fn cache(&self) -> &CredentialsCache {
        &self.cache
        self.cache.as_ref().unwrap_or(&CREDENTIALS_CACHE)
    }
}

impl Default for AuthMiddleware {
    fn default() -> Self {
        Self::new()
    }
}

@@ -382,15 +352,11 @@ impl Middleware for AuthMiddleware {
            .is_some_and(|token_store| token_store.is_known_url(request.url()));

        let must_authenticate = self.only_authenticated
            || (match auth_policy {
                AuthPolicy::Auto => is_known_url,
                AuthPolicy::Always => true,
                AuthPolicy::Never => false,
            }
            // Dependabot intercepts HTTP requests and injects credentials, which means that we
            // cannot eagerly enforce an `AuthPolicy` as we don't know whether credentials will be
            // added outside of uv.
            && !*IS_DEPENDABOT);
            || match auth_policy {
                AuthPolicy::Auto => is_known_url,
                AuthPolicy::Always => true,
                AuthPolicy::Never => false,
            };

        let (mut retry_request, response) = if !must_authenticate {
            let url = tracing_url(&request, credentials.as_deref());
@@ -536,7 +502,7 @@ impl AuthMiddleware {
            // Nothing to insert into the cache if we don't have credentials
            return next.run(request, extensions).await;
        };
        let url = DisplaySafeUrl::from_url(request.url().clone());
        let url = DisplaySafeUrl::from(request.url().clone());
        if matches!(auth_policy, AuthPolicy::Always) && credentials.password().is_none() {
            return Err(Error::Middleware(format_err!("Missing password for {url}")));
        }
@@ -690,26 +656,13 @@ impl AuthMiddleware {
            return Some(credentials);
        }

        if S3EndpointProvider::is_s3_endpoint(url, self.preview) {
            let mut s3_state = self.s3_credential_state.lock().await;

            // If the S3 credential state is uninitialized, initialize it.
            let credentials = match &*s3_state {
                S3CredentialState::Uninitialized => {
                    trace!("Initializing S3 credentials for {url}");
                    let signer = S3EndpointProvider::create_signer();
                    let credentials = Arc::new(Authentication::from(signer));
                    *s3_state = S3CredentialState::Initialized(Some(credentials.clone()));
                    Some(credentials)
                }
                S3CredentialState::Initialized(credentials) => credentials.clone(),
            };

            if let Some(credentials) = credentials {
                debug!("Found S3 credentials for {url}");
                self.cache().fetches.done(key, Some(credentials.clone()));
                return Some(credentials);
            }
        }
        if let Some(credentials) = S3EndpointProvider::credentials_for(url, self.preview)
            .map(Authentication::from)
            .map(Arc::new)
        {
            debug!("Found S3 credentials for {url}");
            self.cache().fetches.done(key, Some(credentials.clone()));
            return Some(credentials);
        }

        // If this is a known URL, authenticate it via the token store.
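The `main`-side code in this hunk caches the result of an expensive initialization behind a `tokio::sync::Mutex`-guarded state enum, so the credential helper runs at most once per process and even a `None` outcome is remembered. The same shape in a reduced, illustrative form (names and the "signer" payload are stand-ins):

```rust
use std::sync::Arc;
use tokio::sync::Mutex;

#[derive(Clone)]
enum State {
    Uninitialized,
    Initialized(Option<Arc<String>>),
}

struct Provider {
    state: Mutex<State>,
}

impl Provider {
    async fn credentials(&self) -> Option<Arc<String>> {
        let mut state = self.state.lock().await;
        match &*state {
            State::Uninitialized => {
                // The expensive step (e.g., invoking a credential helper) runs once;
                // its result, even `None`, is cached for the rest of the process.
                let credentials = Some(Arc::new("signer".to_string()));
                *state = State::Initialized(credentials.clone());
                credentials
            }
            State::Initialized(credentials) => credentials.clone(),
        }
    }
}

#[tokio::main]
async fn main() {
    let provider = Provider { state: Mutex::new(State::Uninitialized) };
    // Both calls return the same cached value; the init step runs only once.
    assert!(provider.credentials().await.is_some());
    assert!(provider.credentials().await.is_some());
}
```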
@@ -767,16 +720,9 @@ impl AuthMiddleware {
            Some(credentials)

        // Text credential store support.
        } else if let Some(credentials) = self.text_store.get().await.and_then(|text_store| {
        } else if let Some(credentials) = self.text_store.get().and_then(|text_store| {
            debug!("Checking text store for credentials for {url}");
            text_store
                .get_credentials(
                    url,
                    credentials
                        .as_ref()
                        .and_then(|credentials| credentials.username()),
                )
                .cloned()
            text_store.get_credentials(url, credentials.as_ref().and_then(|credentials| credentials.username())).cloned()
        }) {
            debug!("Found credentials in plaintext store for {url}");
            Some(credentials)
@@ -792,16 +738,10 @@ impl AuthMiddleware {
            if let Some(index) = index {
                // N.B. The native store performs an exact look up right now, so we use the root
                // URL of the index instead of relying on prefix-matching.
                debug!(
                    "Checking native store for credentials for index URL {}{}",
                    display_username, index.root_url
                );
                debug!("Checking native store for credentials for index URL {}{}", display_username, index.root_url);
                native_store.fetch(&index.root_url, username).await
            } else {
                debug!(
                    "Checking native store for credentials for URL {}{}",
                    display_username, url
                );
                debug!("Checking native store for credentials for URL {}{}", display_username, url);
                native_store.fetch(url, username).await
            }
            // TODO(zanieb): We should have a realm fallback here too
@@ -822,18 +762,10 @@ impl AuthMiddleware {
        // always authenticate.
        if let Some(username) = credentials.and_then(|credentials| credentials.username()) {
            if let Some(index) = index {
                debug!(
                    "Checking keyring for credentials for index URL {}@{}",
                    username, index.url
                );
                keyring
                    .fetch(DisplaySafeUrl::ref_cast(&index.url), Some(username))
                    .await
                debug!("Checking keyring for credentials for index URL {}@{}", username, index.url);
                keyring.fetch(DisplaySafeUrl::ref_cast(&index.url), Some(username)).await
            } else {
                debug!(
                    "Checking keyring for credentials for full URL {}@{}",
                    username, url
                );
                debug!("Checking keyring for credentials for full URL {}@{}", username, url);
                keyring.fetch(url, Some(username)).await
            }
        } else if matches!(auth_policy, AuthPolicy::Always) {
@@ -842,16 +774,12 @@ impl AuthMiddleware {
                    "Checking keyring for credentials for index URL {} without username due to `authenticate = always`",
                    index.url
                );
                keyring
                    .fetch(DisplaySafeUrl::ref_cast(&index.url), None)
                    .await
                keyring.fetch(DisplaySafeUrl::ref_cast(&index.url), None).await
            } else {
                None
            }
        } else {
            debug!(
                "Skipping keyring fetch for {url} without username; use `authenticate = always` to force"
            );
            debug!("Skipping keyring fetch for {url} without username; use `authenticate = always` to force");
            None
        }
    }
@@ -861,9 +789,9 @@ impl AuthMiddleware {
            Some(credentials)
        } else {
            None
        };

        let credentials = credentials.map(Authentication::from).map(Arc::new);
        }
        .map(Authentication::from)
        .map(Arc::new);

        // Register the fetch for this key
        self.cache().fetches.done(key, credentials.clone());
|
|||
}
|
||||
|
||||
fn tracing_url(request: &Request, credentials: Option<&Authentication>) -> DisplaySafeUrl {
|
||||
let mut url = DisplaySafeUrl::from_url(request.url().clone());
|
||||
let mut url = DisplaySafeUrl::from(request.url().clone());
|
||||
if let Some(Authentication::Credentials(creds)) = credentials {
|
||||
if let Some(username) = creds.username() {
|
||||
let _ = url.set_username(username);
|
||||
|
|
@@ -2062,13 +1990,13 @@ mod tests {
        let base_url_2 = base_url.join("prefix_2")?;
        let indexes = Indexes::from_indexes(vec![
            Index {
                url: DisplaySafeUrl::from_url(base_url_1.clone()),
                root_url: DisplaySafeUrl::from_url(base_url_1.clone()),
                url: DisplaySafeUrl::from(base_url_1.clone()),
                root_url: DisplaySafeUrl::from(base_url_1.clone()),
                auth_policy: AuthPolicy::Auto,
            },
            Index {
                url: DisplaySafeUrl::from_url(base_url_2.clone()),
                root_url: DisplaySafeUrl::from_url(base_url_2.clone()),
                url: DisplaySafeUrl::from(base_url_2.clone()),
                root_url: DisplaySafeUrl::from(base_url_2.clone()),
                auth_policy: AuthPolicy::Auto,
            },
        ]);
@ -2170,8 +2098,8 @@ mod tests {
|
|||
let base_url = Url::parse(&server.uri())?;
|
||||
let index_url = base_url.join("prefix_1")?;
|
||||
let indexes = Indexes::from_indexes(vec![Index {
|
||||
url: DisplaySafeUrl::from_url(index_url.clone()),
|
||||
root_url: DisplaySafeUrl::from_url(index_url.clone()),
|
||||
url: DisplaySafeUrl::from(index_url.clone()),
|
||||
root_url: DisplaySafeUrl::from(index_url.clone()),
|
||||
auth_policy: AuthPolicy::Auto,
|
||||
}]);
|
||||
|
||||
|
|
@ -2225,7 +2153,7 @@ mod tests {
|
|||
}
|
||||
|
||||
fn indexes_for(url: &Url, policy: AuthPolicy) -> Indexes {
|
||||
let mut url = DisplaySafeUrl::from_url(url.clone());
|
||||
let mut url = DisplaySafeUrl::from(url.clone());
|
||||
url.set_password(None).ok();
|
||||
url.set_username("").ok();
|
||||
Indexes::from_indexes(vec![Index {
|
||||
|
|
|
|||
|
|
@@ -10,7 +10,6 @@ use uv_static::EnvVars;
 use uv_warnings::warn_user_once;

 use crate::Credentials;
-use crate::credentials::Token;
 use crate::realm::{Realm, RealmRef};

 /// The [`Realm`] for the Hugging Face platform.

@@ -46,7 +45,7 @@ impl HuggingFaceProvider {
         if RealmRef::from(url) == *HUGGING_FACE_REALM {
             if let Some(token) = HUGGING_FACE_TOKEN.as_ref() {
                 return Some(Credentials::Bearer {
-                    token: Token::new(token.clone()),
+                    token: token.clone(),
                 });
             }
         }

@@ -66,8 +65,8 @@ static S3_ENDPOINT_REALM: LazyLock<Option<Realm>> = LazyLock::new(|| {
 pub(crate) struct S3EndpointProvider;

 impl S3EndpointProvider {
-    /// Returns `true` if the URL matches the configured S3 endpoint.
-    pub(crate) fn is_s3_endpoint(url: &Url, preview: Preview) -> bool {
+    /// Returns the credentials for the S3 endpoint, if available.
+    pub(crate) fn credentials_for(url: &Url, preview: Preview) -> Option<DefaultSigner> {
         if let Some(s3_endpoint_realm) = S3_ENDPOINT_REALM.as_ref().map(RealmRef::from) {
             if !preview.is_enabled(PreviewFeatures::S3_ENDPOINT) {
                 warn_user_once!(

@@ -79,26 +78,19 @@ impl S3EndpointProvider {
             // Treat any URL on the same domain or subdomain as available for S3 signing.
             let realm = RealmRef::from(url);
             if realm == s3_endpoint_realm || realm.is_subdomain_of(s3_endpoint_realm) {
-                return true;
+                // TODO(charlie): Can `reqsign` infer the region for us? Profiles, for example,
+                // often have a region set already.
+                let region = std::env::var(EnvVars::AWS_REGION)
+                    .map(Cow::Owned)
+                    .unwrap_or_else(|_| {
+                        std::env::var(EnvVars::AWS_DEFAULT_REGION)
+                            .map(Cow::Owned)
+                            .unwrap_or_else(|_| Cow::Borrowed("us-east-1"))
+                    });
+                let signer = reqsign::aws::default_signer("s3", &region);
+                return Some(signer);
             }
         }
-        false
-    }
-
-    /// Creates a new S3 signer with the configured region.
-    ///
-    /// This is potentially expensive as it may invoke credential helpers, so the result
-    /// should be cached.
-    pub(crate) fn create_signer() -> DefaultSigner {
-        // TODO(charlie): Can `reqsign` infer the region for us? Profiles, for example,
-        // often have a region set already.
-        let region = std::env::var(EnvVars::AWS_REGION)
-            .map(Cow::Owned)
-            .unwrap_or_else(|_| {
-                std::env::var(EnvVars::AWS_DEFAULT_REGION)
-                    .map(Cow::Owned)
-                    .unwrap_or_else(|_| Cow::Borrowed("us-east-1"))
-            });
-        reqsign::aws::default_signer("s3", &region)
+        None
     }
 }
@@ -10,12 +10,11 @@ use tracing::debug;
 use url::Url;

 use uv_cache_key::CanonicalUrl;
-use uv_redacted::{DisplaySafeUrl, DisplaySafeUrlError};
+use uv_redacted::DisplaySafeUrl;
 use uv_small_str::SmallString;
 use uv_state::{StateBucket, StateStore};
 use uv_static::EnvVars;

-use crate::credentials::Token;
 use crate::{AccessToken, Credentials, Realm};

 /// Retrieve the pyx API key from the environment variable, or return `None`.

@@ -85,7 +84,7 @@ impl From<PyxTokens> for Credentials {
 impl From<AccessToken> for Credentials {
     fn from(access_token: AccessToken) -> Self {
         Self::Bearer {
-            token: Token::new(access_token.into_bytes()),
+            token: access_token.into_bytes(),
         }
     }
 }

@@ -284,6 +283,7 @@ impl PyxTokenStore {

     /// Read the tokens from the store.
     pub async fn read(&self) -> Result<Option<PyxTokens>, TokenStoreError> {
+        // Retrieve the API URL from the environment variable, or error if unset.
         if let Some(api_key) = read_pyx_api_key() {
             // Read the API key tokens from a file based on the API key.
             let digest = uv_cache_key::cache_digest(&api_key);

@@ -473,7 +473,7 @@ impl PyxTokenStore {
 #[derive(thiserror::Error, Debug)]
 pub enum TokenStoreError {
     #[error(transparent)]
-    Url(#[from] DisplaySafeUrlError),
+    Url(#[from] url::ParseError),
     #[error(transparent)]
     Io(#[from] io::Error),
     #[error(transparent)]

@@ -591,7 +591,7 @@ mod tests {

     #[test]
     fn test_is_known_url() {
-        let api_url = DisplaySafeUrl::parse("https://api.pyx.dev").unwrap();
+        let api_url = DisplaySafeUrl::from(Url::parse("https://api.pyx.dev").unwrap());
         let cdn_domain = "astralhosted.com";

         // Same realm as API.

@@ -646,7 +646,7 @@ mod tests {

     #[test]
     fn test_is_known_domain() {
-        let api_url = DisplaySafeUrl::parse("https://api.pyx.dev").unwrap();
+        let api_url = DisplaySafeUrl::from(Url::parse("https://api.pyx.dev").unwrap());
         let cdn_domain = "astralhosted.com";

         // Same realm as API.
@@ -1,7 +1,6 @@
 use std::hash::{Hash, Hasher};
 use std::{fmt::Display, fmt::Formatter};
 use url::Url;
-use uv_redacted::DisplaySafeUrl;
 use uv_small_str::SmallString;

 /// Used to determine if authentication information should be retained on a new URL.

@@ -30,12 +29,6 @@ pub struct Realm {
     port: Option<u16>,
 }

-impl From<&DisplaySafeUrl> for Realm {
-    fn from(url: &DisplaySafeUrl) -> Self {
-        Self::from(&**url)
-    }
-}
-
 impl From<&Url> for Realm {
     fn from(url: &Url) -> Self {
         Self {

@@ -82,7 +75,7 @@ impl Hash for Realm {

 /// A reference to a [`Realm`] that can be used for zero-allocation comparisons.
 #[derive(Debug, Copy, Clone)]
-pub struct RealmRef<'a> {
+pub(crate) struct RealmRef<'a> {
     scheme: &'a str,
     host: Option<&'a str>,
     port: Option<u16>,
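The `Realm` hunks above are the core of uv's credential scoping: two URLs share credentials when their scheme, host, and port agree, regardless of path. As a rough, self-contained sketch of that comparison, using the `url` crate directly (illustrative only; the real `Realm`/`RealmRef` types shown above additionally support hashing and zero-allocation comparison):

```rust
use url::Url;

/// Illustrative only: two URLs are in the same realm when scheme, host, and
/// port all agree; the path plays no role.
fn same_realm(a: &Url, b: &Url) -> bool {
    a.scheme() == b.scheme()
        && a.host_str() == b.host_str()
        && a.port_or_known_default() == b.port_or_known_default()
}

fn main() {
    let a = Url::parse("https://example.com/simple/pkg/").unwrap();
    let b = Url::parse("https://example.com/other/path").unwrap();
    let c = Url::parse("https://example.com:8443/simple/pkg/").unwrap();
    assert!(same_realm(&a, &b)); // same server, different path
    assert!(!same_realm(&a, &c)); // different port, different realm
}
```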
@@ -2,12 +2,12 @@ use serde::{Deserialize, Serialize};
 use std::str::FromStr;
 use thiserror::Error;
 use url::Url;
-use uv_redacted::{DisplaySafeUrl, DisplaySafeUrlError};
+use uv_redacted::DisplaySafeUrl;

 #[derive(Error, Debug)]
 pub enum ServiceParseError {
     #[error(transparent)]
-    InvalidUrl(#[from] DisplaySafeUrlError),
+    InvalidUrl(#[from] url::ParseError),
     #[error("Unsupported scheme: {0}")]
     UnsupportedScheme(String),
     #[error("HTTPS is required for non-local hosts")]

@@ -51,7 +51,7 @@ impl FromStr for Service {
         // First try parsing as-is
         let url = match DisplaySafeUrl::parse(s) {
             Ok(url) => url,
-            Err(DisplaySafeUrlError::Url(url::ParseError::RelativeUrlWithoutBase)) => {
+            Err(url::ParseError::RelativeUrlWithoutBase) => {
                 // If it's a relative URL, try prepending https://
                 let with_https = format!("https://{s}");
                 DisplaySafeUrl::parse(&with_https)?
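Both sides of the `FromStr` hunk above keep the same `https://` fallback for bare hosts; only the error variant being matched changes. A minimal sketch of that fallback against the plain `url` crate (illustrative; this is not the crate's `Service` type):

```rust
use url::Url;

/// Parse a service identifier, retrying with an `https://` prefix when the
/// input is a bare host like `example.com`.
fn parse_service(s: &str) -> Result<Url, url::ParseError> {
    match Url::parse(s) {
        Ok(url) => Ok(url),
        // A bare host has no scheme, so retry as an HTTPS URL.
        Err(url::ParseError::RelativeUrlWithoutBase) => Url::parse(&format!("https://{s}")),
        Err(err) => Err(err),
    }
}

fn main() {
    assert_eq!(
        parse_service("example.com").unwrap().as_str(),
        "https://example.com/"
    );
}
```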
@@ -5,14 +5,15 @@ use fs_err as fs;
 use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};
 use thiserror::Error;
-use uv_fs::{LockedFile, LockedFileError, LockedFileMode, with_added_extension};
+use url::Url;
+use uv_fs::{LockedFile, with_added_extension};
 use uv_preview::{Preview, PreviewFeatures};
 use uv_redacted::DisplaySafeUrl;

 use uv_state::{StateBucket, StateStore};
 use uv_static::EnvVars;

-use crate::credentials::{Password, Token, Username};
+use crate::credentials::{Password, Username};
 use crate::realm::Realm;
 use crate::service::Service;
 use crate::{Credentials, KeyringProvider};

@@ -28,7 +29,7 @@ pub enum AuthBackend {
 }

 impl AuthBackend {
-    pub async fn from_settings(preview: Preview) -> Result<Self, TomlCredentialError> {
+    pub fn from_settings(preview: Preview) -> Result<Self, TomlCredentialError> {
         // If preview is enabled, we'll use the system-native store
         if preview.is_enabled(PreviewFeatures::NATIVE_AUTH) {
             return Ok(Self::System(KeyringProvider::native()));

@@ -36,16 +37,12 @@ impl AuthBackend {

         // Otherwise, we'll use the plaintext credential store
         let path = TextCredentialStore::default_file()?;
-        match TextCredentialStore::read(&path).await {
+        match TextCredentialStore::read(&path) {
             Ok((store, lock)) => Ok(Self::TextStore(store, lock)),
-            Err(err)
-                if err
-                    .as_io_error()
-                    .is_some_and(|err| err.kind() == std::io::ErrorKind::NotFound) =>
-            {
+            Err(TomlCredentialError::Io(err)) if err.kind() == std::io::ErrorKind::NotFound => {
                 Ok(Self::TextStore(
                     TextCredentialStore::default(),
-                    TextCredentialStore::lock(&path).await?,
+                    TextCredentialStore::lock(&path)?,
                 ))
             }
             Err(err) => Err(err),

@@ -73,8 +70,6 @@ pub enum AuthScheme {
 pub enum TomlCredentialError {
     #[error(transparent)]
     Io(#[from] std::io::Error),
-    #[error(transparent)]
-    LockedFile(#[from] LockedFileError),
     #[error("Failed to parse TOML credential file: {0}")]
     ParseError(#[from] toml::de::Error),
     #[error("Failed to serialize credentials to TOML")]

@@ -89,21 +84,6 @@ pub enum TomlCredentialError {
     TokenNotUnicode(#[from] std::string::FromUtf8Error),
 }

-impl TomlCredentialError {
-    pub fn as_io_error(&self) -> Option<&std::io::Error> {
-        match self {
-            Self::Io(err) => Some(err),
-            Self::LockedFile(err) => err.as_io_error(),
-            Self::ParseError(_)
-            | Self::SerializeError(_)
-            | Self::BasicAuthError(_)
-            | Self::BearerAuthError(_)
-            | Self::CredentialsDirError
-            | Self::TokenNotUnicode(_) => None,
-        }
-    }
-}
-
 #[derive(Debug, Error)]
 pub enum BasicAuthError {
     #[error("`username` is required with `scheme = basic`")]

@@ -162,7 +142,7 @@ impl From<TomlCredential> for TomlCredentialWire {
             username: Username::new(None),
             scheme: AuthScheme::Bearer,
             password: None,
-            token: Some(String::from_utf8(token.into_bytes()).expect("Token is valid UTF-8")),
+            token: Some(String::from_utf8(token).expect("Token is valid UTF-8")),
         },
     }
 }

@@ -210,7 +190,7 @@ impl TryFrom<TomlCredentialWire> for TomlCredential {
             ));
         }
         let credentials = Credentials::Bearer {
-            token: Token::new(value.token.unwrap().into_bytes()),
+            token: value.token.unwrap().into_bytes(),
         };
         Ok(Self {
             service: value.service,

@@ -254,12 +234,12 @@ impl TextCredentialStore {
     }

     /// Acquire a lock on the credentials file at the given path.
-    pub async fn lock(path: &Path) -> Result<LockedFile, TomlCredentialError> {
+    pub fn lock(path: &Path) -> Result<LockedFile, TomlCredentialError> {
         if let Some(parent) = path.parent() {
             fs::create_dir_all(parent)?;
         }
         let lock = with_added_extension(path, ".lock");
-        Ok(LockedFile::acquire(lock, LockedFileMode::Exclusive, "credentials store").await?)
+        Ok(LockedFile::acquire_blocking(lock, "credentials store")?)
     }

     /// Read credentials from a file.

@@ -290,8 +270,8 @@ impl TextCredentialStore {
     /// Returns [`TextCredentialStore`] and a [`LockedFile`] to hold if mutating the store.
     ///
     /// If the store will not be written to following the read, the lock can be dropped.
-    pub async fn read<P: AsRef<Path>>(path: P) -> Result<(Self, LockedFile), TomlCredentialError> {
-        let lock = Self::lock(path.as_ref()).await?;
+    pub fn read<P: AsRef<Path>>(path: P) -> Result<(Self, LockedFile), TomlCredentialError> {
+        let lock = Self::lock(path.as_ref())?;
         let store = Self::from_file(path)?;
         Ok((store, lock))
     }

@@ -330,17 +310,13 @@ impl TextCredentialStore {
     /// Get credentials for a given URL and username.
     ///
     /// The most specific URL prefix match in the same [`Realm`] is returned, if any.
-    pub fn get_credentials(
-        &self,
-        url: &DisplaySafeUrl,
-        username: Option<&str>,
-    ) -> Option<&Credentials> {
+    pub fn get_credentials(&self, url: &Url, username: Option<&str>) -> Option<&Credentials> {
         let request_realm = Realm::from(url);

         // Perform an exact lookup first
-        // TODO(zanieb): Consider adding `DisplaySafeUrlRef` so we can avoid this clone
         // TODO(zanieb): We could also return early here if we can't normalize to a `Service`
-        if let Ok(url_service) = Service::try_from(url.clone()) {
+        if let Ok(url_service) = Service::try_from(DisplaySafeUrl::from(url.clone())) {
             if let Some(credential) = self.credentials.get(&(
                 url_service.clone(),
                 Username::from(username.map(str::to_string)),

@@ -454,10 +430,10 @@ mod tests {

         let service = Service::from_str("https://example.com").unwrap();
         store.insert(service.clone(), credentials.clone());
-        let url = DisplaySafeUrl::parse("https://example.com/").unwrap();
+        let url = Url::parse("https://example.com/").unwrap();
         assert!(store.get_credentials(&url, None).is_some());

-        let url = DisplaySafeUrl::parse("https://example.com/path").unwrap();
+        let url = Url::parse("https://example.com/path").unwrap();
         let retrieved = store.get_credentials(&url, None).unwrap();
         assert_eq!(retrieved.username(), Some("user"));
         assert_eq!(retrieved.password(), Some("pass"));

@@ -467,12 +443,12 @@ mod tests {
                 .remove(&service, Username::from(Some("user".to_string())))
                 .is_some()
         );
-        let url = DisplaySafeUrl::parse("https://example.com/").unwrap();
+        let url = Url::parse("https://example.com/").unwrap();
         assert!(store.get_credentials(&url, None).is_none());
     }

-    #[tokio::test]
-    async fn test_file_operations() {
+    #[test]
+    fn test_file_operations() {
         let mut temp_file = NamedTempFile::new().unwrap();
         writeln!(
             temp_file,

@@ -493,12 +469,12 @@ password = "pass2"

         let store = TextCredentialStore::from_file(temp_file.path()).unwrap();

-        let url = DisplaySafeUrl::parse("https://example.com/").unwrap();
+        let url = Url::parse("https://example.com/").unwrap();
         assert!(store.get_credentials(&url, None).is_some());
-        let url = DisplaySafeUrl::parse("https://test.org/").unwrap();
+        let url = Url::parse("https://test.org/").unwrap();
         assert!(store.get_credentials(&url, None).is_some());

-        let url = DisplaySafeUrl::parse("https://example.com").unwrap();
+        let url = Url::parse("https://example.com").unwrap();
         let cred = store.get_credentials(&url, None).unwrap();
         assert_eq!(cred.username(), Some("testuser"));
         assert_eq!(cred.password(), Some("testpass"));

@@ -508,7 +484,7 @@ password = "pass2"
         store
             .write(
                 temp_output.path(),
-                TextCredentialStore::lock(temp_file.path()).await.unwrap(),
+                TextCredentialStore::lock(temp_file.path()).unwrap(),
             )
             .unwrap();

@@ -534,7 +510,7 @@ password = "pass2"
         ];

         for url_str in matching_urls {
-            let url = DisplaySafeUrl::parse(url_str).unwrap();
+            let url = Url::parse(url_str).unwrap();
             let cred = store.get_credentials(&url, None);
             assert!(cred.is_some(), "Failed to match URL with prefix: {url_str}");
         }

@@ -547,7 +523,7 @@ password = "pass2"
         ];

         for url_str in non_matching_urls {
-            let url = DisplaySafeUrl::parse(url_str).unwrap();
+            let url = Url::parse(url_str).unwrap();
             let cred = store.get_credentials(&url, None);
             assert!(cred.is_none(), "Should not match non-prefix URL: {url_str}");
         }

@@ -571,7 +547,7 @@ password = "pass2"
         ];

         for url_str in matching_urls {
-            let url = DisplaySafeUrl::parse(url_str).unwrap();
+            let url = Url::parse(url_str).unwrap();
             let cred = store.get_credentials(&url, None);
             assert!(
                 cred.is_some(),

@@ -587,7 +563,7 @@ password = "pass2"
         ];

         for url_str in non_matching_urls {
-            let url = DisplaySafeUrl::parse(url_str).unwrap();
+            let url = Url::parse(url_str).unwrap();
             let cred = store.get_credentials(&url, None);
             assert!(
                 cred.is_none(),

@@ -611,12 +587,12 @@ password = "pass2"
         store.insert(specific_service.clone(), specific_cred);

         // Should match the most specific prefix
-        let url = DisplaySafeUrl::parse("https://example.com/api/v1/users").unwrap();
+        let url = Url::parse("https://example.com/api/v1/users").unwrap();
         let cred = store.get_credentials(&url, None).unwrap();
         assert_eq!(cred.username(), Some("specific"));

         // Should match the general prefix for non-specific paths
-        let url = DisplaySafeUrl::parse("https://example.com/api/v2").unwrap();
+        let url = Url::parse("https://example.com/api/v2").unwrap();
         let cred = store.get_credentials(&url, None).unwrap();
         assert_eq!(cred.username(), Some("general"));
     }

@@ -624,7 +600,7 @@ password = "pass2"
     #[test]
     fn test_username_exact_url_match() {
         let mut store = TextCredentialStore::default();
-        let url = DisplaySafeUrl::parse("https://example.com").unwrap();
+        let url = Url::parse("https://example.com").unwrap();
         let service = Service::from_str("https://example.com").unwrap();
         let user1_creds = Credentials::basic(Some("user1".to_string()), Some("pass1".to_string()));
         store.insert(service.clone(), user1_creds.clone());

@@ -665,7 +641,7 @@ password = "pass2"
         store.insert(general_service, general_creds);
         store.insert(specific_service, specific_creds);

-        let url = DisplaySafeUrl::parse("https://example.com/api/v1/users").unwrap();
+        let url = Url::parse("https://example.com/api/v1/users").unwrap();

         // Should match specific credentials when username matches
         let result = store.get_credentials(&url, Some("specific_user"));
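Both sides of the `get_credentials` hunk implement the same lookup policy: an exact `Service` match first, then the most specific URL-prefix match within the request's realm. A toy sketch of the prefix-selection step (names are illustrative, not the crate's API; the real store also keys on username and realm):

```rust
/// Illustrative only: among candidate path prefixes, the longest one that
/// prefixes the request path wins.
fn best_match<'a>(request_path: &str, candidates: &[(&'a str, &'a str)]) -> Option<&'a str> {
    candidates
        .iter()
        .filter(|(prefix, _)| request_path.starts_with(prefix))
        .max_by_key(|(prefix, _)| prefix.len())
        .map(|(_, credential)| *credential)
}

fn main() {
    // Mirrors the `general`/`specific` cases in the tests above.
    let candidates = [("/api", "general"), ("/api/v1", "specific")];
    assert_eq!(best_match("/api/v1/users", &candidates), Some("specific"));
    assert_eq!(best_match("/api/v2", &candidates), Some("general"));
}
```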
@@ -1,12 +1,13 @@
 [package]
 name = "uv-bench"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.0"
+description = "uv Micro-benchmarks"
 publish = false
 authors = { workspace = true }
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
 documentation = { workspace = true }
 repository = { workspace = true }
 license = { workspace = true }
+

@@ -22,14 +23,14 @@ name = "uv"
 path = "benches/uv.rs"
 harness = false

-[dev-dependencies]
+[dependencies]
 uv-cache = { workspace = true }
 uv-client = { workspace = true }
 uv-configuration = { workspace = true }
 uv-dispatch = { workspace = true }
 uv-distribution = { workspace = true }
 uv-distribution-types = { workspace = true }
-uv-extract = { workspace = true }
+uv-extract = { workspace = true, optional = true }
 uv-install-wheel = { workspace = true }
 uv-pep440 = { workspace = true }
 uv-pep508 = { workspace = true }

@@ -42,7 +43,10 @@ uv-types = { workspace = true }
 uv-workspace = { workspace = true }

 anyhow = { workspace = true }
-criterion = { version = "4.0.3", default-features = false, package = "codspeed-criterion-compat", features = ["async_tokio"] }
+codspeed-criterion-compat = { version = "3.0.2", default-features = false, optional = true }
+criterion = { version = "0.7.0", default-features = false, features = [
+    "async_tokio",
+] }
 jiff = { workspace = true }
 tokio = { workspace = true }

@@ -50,4 +54,5 @@ tokio = { workspace = true }
 ignored = ["uv-extract"]

 [features]
 codspeed = ["codspeed-criterion-compat"]
+static = ["uv-extract/static"]
@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-bench
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-bench).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.
@@ -1,7 +1,7 @@
 use std::hint::black_box;
 use std::str::FromStr;

-use criterion::{Criterion, criterion_group, criterion_main, measurement::WallTime};
+use uv_bench::criterion::{Criterion, criterion_group, criterion_main, measurement::WallTime};
 use uv_cache::Cache;
 use uv_client::{BaseClientBuilder, RegistryClientBuilder};
 use uv_distribution_types::Requirement;

@@ -59,10 +59,7 @@ fn setup(manifest: Manifest) -> impl Fn(bool) {
         .build()
         .unwrap();

-    let cache = Cache::from_path("../../.cache")
-        .init_no_wait()
-        .expect("No cache contention when running benchmarks")
-        .unwrap();
+    let cache = Cache::from_path("../../.cache").init().unwrap();
     let interpreter = PythonEnvironment::from_root("../../.venv", &cache)
         .unwrap()
         .into_interpreter();

@@ -134,7 +131,7 @@ mod resolver {
     );

     static TAGS: LazyLock<Tags> = LazyLock::new(|| {
-        Tags::from_env(&PLATFORM, (3, 11), "cpython", (3, 11), false, false, false).unwrap()
+        Tags::from_env(&PLATFORM, (3, 11), "cpython", (3, 11), false, false).unwrap()
     });

     pub(crate) async fn resolve(
@@ -1 +1,10 @@
+pub mod criterion {
+    //! This module re-exports the criterion API but picks the right backend depending on whether
+    //! the benchmarks are built to run locally or with codspeed
+
+    #[cfg(not(feature = "codspeed"))]
+    pub use criterion::*;
+
+    #[cfg(feature = "codspeed")]
+    pub use codspeed_criterion_compat::*;
+}
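The re-export shim above is what lets a single benchmark source compile against either backend: benchmarks import through `uv_bench::criterion`, and the `codspeed` Cargo feature decides which crate actually backs it. A minimal benchmark as it would look against this shim (illustrative; `bench_noop` is not a real uv benchmark):

```rust
use uv_bench::criterion::{Criterion, criterion_group, criterion_main};

fn bench_noop(c: &mut Criterion) {
    // Runs against plain `criterion` locally, or the codspeed-compatible
    // backend when built with `--features codspeed`.
    c.bench_function("noop", |b| b.iter(|| std::hint::black_box(1 + 1)));
}

criterion_group!(benches, bench_noop);
criterion_main!(benches);
```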
@@ -1,13 +1,14 @@
 [package]
 name = "uv-bin-install"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.1"
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
 documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }
+description = "Binary download and installation utilities for uv"
+
 [lib]
 doctest = false

@@ -22,8 +23,6 @@ uv-distribution-filename = { workspace = true }
 uv-extract = { workspace = true }
 uv-pep440 = { workspace = true }
 uv-platform = { workspace = true }
-uv-redacted = { workspace = true }
-
 fs-err = { workspace = true, features = ["tokio"] }
 futures = { workspace = true }
 reqwest = { workspace = true }

@@ -35,3 +34,4 @@ tokio = { workspace = true }
 tokio-util = { workspace = true }
 tracing = { workspace = true }
+url = { workspace = true }

@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-bin-install
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-bin-install).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.
@@ -19,12 +19,11 @@ use tracing::debug;
 use url::Url;
 use uv_distribution_filename::SourceDistExtension;

-use uv_cache::{Cache, CacheBucket, CacheEntry, Error as CacheError};
+use uv_cache::{Cache, CacheBucket, CacheEntry};
 use uv_client::{BaseClient, is_transient_network_error};
 use uv_extract::{Error as ExtractError, stream};
 use uv_pep440::Version;
 use uv_platform::Platform;
-use uv_redacted::DisplaySafeUrl;

 /// Binary tools that can be installed.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]

@@ -135,13 +134,10 @@ pub enum Error {
     #[error(transparent)]
     Io(#[from] std::io::Error),

-    #[error(transparent)]
-    Cache(#[from] CacheError),
-
     #[error("Failed to detect platform")]
     Platform(#[from] uv_platform::Error),

-    #[error("Attempt failed after {retries} {subject}", subject = if *retries > 1 { "retries" } else { "retry" })]
+    #[error("Attempt failed after {retries} retries")]
     RetriedError {
         #[source]
         err: Box<Error>,

@@ -315,7 +311,7 @@ async fn download_and_unpack(
     let temp_dir = tempfile::tempdir_in(cache.bucket(CacheBucket::Binaries))?;

     let response = client
-        .for_host(&DisplaySafeUrl::from_url(download_url.clone()))
+        .for_host(&download_url.clone().into())
         .get(download_url.clone())
         .send()
         .await
@@ -1,10 +1,10 @@
 [package]
 name = "uv-build-backend"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.1.0"
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
 documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }
@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-build-backend
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-build-backend).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.
@@ -1,4 +1,3 @@
-use itertools::Itertools;
 mod metadata;
 mod serde_verbatim;
 mod settings;

@@ -8,10 +7,8 @@ mod wheel;
 pub use metadata::{PyProjectToml, check_direct_build};
 pub use settings::{BuildBackendSettings, WheelDataIncludes};
 pub use source_dist::{build_source_dist, list_source_dist};
-use uv_warnings::warn_user_once;
 pub use wheel::{build_editable, build_wheel, list_wheel, metadata};

-use std::collections::HashSet;
 use std::ffi::OsStr;
 use std::io;
 use std::path::{Path, PathBuf};

@@ -32,9 +29,9 @@ use crate::settings::ModuleName;
 pub enum Error {
     #[error(transparent)]
     Io(#[from] io::Error),
-    #[error("Invalid metadata format in: {}", _0.user_display())]
-    Toml(PathBuf, #[source] toml::de::Error),
-    #[error("Invalid project metadata")]
+    #[error("Invalid pyproject.toml")]
+    Toml(#[from] toml::de::Error),
+    #[error("Invalid pyproject.toml")]
     Validation(#[from] ValidationError),
     #[error("Invalid module name: {0}")]
     InvalidModuleName(String, #[source] IdentifierParseError),

@@ -194,60 +191,6 @@ fn check_metadata_directory(
     Ok(())
 }

-/// Returns the list of module names without names which would be included twice
-///
-/// In normal cases it should do nothing:
-///
-/// * `["aaa"] -> ["aaa"]`
-/// * `["aaa", "bbb"] -> ["aaa", "bbb"]`
-///
-/// Duplicate elements are removed:
-///
-/// * `["aaa", "aaa"] -> ["aaa"]`
-/// * `["bbb", "aaa", "bbb"] -> ["aaa", "bbb"]`
-///
-/// Names with more specific paths are removed in favour of more general paths:
-///
-/// * `["aaa.foo", "aaa"] -> ["aaa"]`
-/// * `["bbb", "aaa", "bbb.foo", "ccc.foo", "ccc.foo.bar", "aaa"] -> ["aaa", "bbb.foo", "ccc.foo"]`
-///
-/// This does not preserve the order of the elements.
-fn prune_redundant_modules(mut names: Vec<String>) -> Vec<String> {
-    names.sort();
-    let mut pruned = Vec::with_capacity(names.len());
-    for name in names {
-        if let Some(last) = pruned.last() {
-            if name == *last {
-                continue;
-            }
-            // This is a more specific (narrow) module name than what came before
-            if name
-                .strip_prefix(last)
-                .is_some_and(|suffix| suffix.starts_with('.'))
-            {
-                continue;
-            }
-        }
-        pruned.push(name);
-    }
-    pruned
-}
-
-/// Wraps [`prune_redundant_modules`] with a conditional warning when modules are ignored
-fn prune_redundant_modules_warn(names: &[String], show_warnings: bool) -> Vec<String> {
-    let pruned = prune_redundant_modules(names.to_vec());
-    if show_warnings && names.len() != pruned.len() {
-        let mut pruned: HashSet<_> = pruned.iter().collect();
-        let ignored: Vec<_> = names.iter().filter(|name| !pruned.remove(name)).collect();
-        let s = if ignored.len() == 1 { "" } else { "s" };
-        warn_user_once!(
-            "Ignoring redundant module name{s} in `tool.uv.build-backend.module-name`: `{}`",
-            ignored.into_iter().join("`, `")
-        );
-    }
-    pruned
-}
-
 /// Returns the source root and the module path(s) with the `__init__.py[i]` below to it while
 /// checking the project layout and names.
 ///

@@ -270,7 +213,6 @@ fn find_roots(
     relative_module_root: &Path,
     module_name: Option<&ModuleName>,
     namespace: bool,
-    show_warnings: bool,
 ) -> Result<(PathBuf, Vec<PathBuf>), Error> {
     let relative_module_root = uv_fs::normalize_path(relative_module_root);
     // Check that even if a path contains `..`, we only include files below the module root.

@@ -289,8 +231,8 @@ fn find_roots(
         ModuleName::Name(name) => {
             vec![name.split('.').collect::<PathBuf>()]
         }
-        ModuleName::Names(names) => prune_redundant_modules_warn(names, show_warnings)
-            .into_iter()
+        ModuleName::Names(names) => names
+            .iter()
             .map(|name| name.split('.').collect::<PathBuf>())
             .collect(),
     }

@@ -308,9 +250,9 @@ fn find_roots(
     let modules_relative = if let Some(module_name) = module_name {
         match module_name {
             ModuleName::Name(name) => vec![module_path_from_module_name(&src_root, name)?],
-            ModuleName::Names(names) => prune_redundant_modules_warn(names, show_warnings)
-                .into_iter()
-                .map(|name| module_path_from_module_name(&src_root, &name))
+            ModuleName::Names(names) => names
+                .iter()
+                .map(|name| module_path_from_module_name(&src_root, name))
                 .collect::<Result<_, _>>()?,
         }
     } else {

@@ -478,20 +420,19 @@ mod tests {
     fn build(source_root: &Path, dist: &Path) -> Result<BuildResults, Error> {
         // Build a direct wheel, capture all its properties to compare it with the indirect wheel
        // latest and remove it since it has the same filename as the indirect wheel.
-        let (_name, direct_wheel_list_files) = list_wheel(source_root, MOCK_UV_VERSION, false)?;
-        let direct_wheel_filename = build_wheel(source_root, dist, None, MOCK_UV_VERSION, false)?;
+        let (_name, direct_wheel_list_files) = list_wheel(source_root, MOCK_UV_VERSION)?;
+        let direct_wheel_filename = build_wheel(source_root, dist, None, MOCK_UV_VERSION)?;
         let direct_wheel_path = dist.join(direct_wheel_filename.to_string());
         let direct_wheel_contents = wheel_contents(&direct_wheel_path);
         let direct_wheel_hash = sha2::Sha256::digest(fs_err::read(&direct_wheel_path)?);
         fs_err::remove_file(&direct_wheel_path)?;

         // Build a source distribution.
-        let (_name, source_dist_list_files) =
-            list_source_dist(source_root, MOCK_UV_VERSION, false)?;
+        let (_name, source_dist_list_files) = list_source_dist(source_root, MOCK_UV_VERSION)?;
         // TODO(konsti): This should run in the unpacked source dist tempdir, but we need to
         // normalize the path.
-        let (_name, wheel_list_files) = list_wheel(source_root, MOCK_UV_VERSION, false)?;
-        let source_dist_filename = build_source_dist(source_root, dist, MOCK_UV_VERSION, false)?;
+        let (_name, wheel_list_files) = list_wheel(source_root, MOCK_UV_VERSION)?;
+        let source_dist_filename = build_source_dist(source_root, dist, MOCK_UV_VERSION)?;
         let source_dist_path = dist.join(source_dist_filename.to_string());
         let source_dist_contents = sdist_contents(&source_dist_path);

@@ -505,13 +446,7 @@ mod tests {
             source_dist_filename.name.as_dist_info_name(),
             source_dist_filename.version
         ));
-        let wheel_filename = build_wheel(
-            &sdist_top_level_directory,
-            dist,
-            None,
-            MOCK_UV_VERSION,
-            false,
-        )?;
+        let wheel_filename = build_wheel(&sdist_top_level_directory, dist, None, MOCK_UV_VERSION)?;
         let wheel_contents = wheel_contents(&dist.join(wheel_filename.to_string()));

         // Check that direct and indirect wheels are identical.

@@ -599,7 +534,7 @@ mod tests {
     /// platform-independent deterministic builds.
     #[test]
     fn built_by_uv_building() {
-        let built_by_uv = Path::new("../../test/packages/built-by-uv");
+        let built_by_uv = Path::new("../../scripts/packages/built-by-uv");
         let src = TempDir::new().unwrap();
         for dir in [
             "src",

@@ -662,7 +597,7 @@ mod tests {
         // Check that the source dist is reproducible across platforms.
         assert_snapshot!(
             format!("{:x}", sha2::Sha256::digest(fs_err::read(&source_dist_path).unwrap())),
-            @"bb74bff575b135bb39e5c9bce56349441fb0923bb8857e32a5eaf34ec1843967"
+            @"871d1f859140721b67cbeaca074e7a2740c88c38028d0509eba87d1285f1da9e"
         );
         // Check both the files we report and the actual files
         assert_snapshot!(format_file_list(build.source_dist_list_files, src.path()), @r"

@@ -821,7 +756,7 @@ mod tests {

         // Build a wheel from a source distribution
         let output_dir = TempDir::new().unwrap();
-        build_source_dist(src.path(), output_dir.path(), "0.5.15", false).unwrap();
+        build_source_dist(src.path(), output_dir.path(), "0.5.15").unwrap();
         let sdist_tree = TempDir::new().unwrap();
         let source_dist_path = output_dir.path().join("pep_pep639_license-1.0.0.tar.gz");
         let sdist_reader = BufReader::new(File::open(&source_dist_path).unwrap());

@@ -832,7 +767,6 @@ mod tests {
             output_dir.path(),
             None,
             "0.5.15",
-            false,
         )
         .unwrap();
         let wheel = output_dir

@@ -897,7 +831,6 @@ mod tests {
             output_dir.path(),
             Some(&metadata_dir.path().join(&dist_info_dir)),
             "0.5.15",
-            false,
         )
         .unwrap();
         let wheel = output_dir

@@ -1481,114 +1414,4 @@ mod tests {
         simple_namespace_part-1.0.0.dist-info/WHEEL
         ");
     }
-
-    /// `prune_redundant_modules` should remove modules which are already
-    /// included (either directly or via their parent)
-    #[test]
-    fn test_prune_redundant_modules() {
-        fn check(input: &[&str], expect: &[&str]) {
-            let input = input.iter().map(|s| (*s).to_string()).collect();
-            let expect: Vec<_> = expect.iter().map(|s| (*s).to_string()).collect();
-            assert_eq!(prune_redundant_modules(input), expect);
-        }
-
-        // Basic cases
-        check(&[], &[]);
-        check(&["foo"], &["foo"]);
-        check(&["foo", "bar"], &["bar", "foo"]);
-
-        // Deshadowing
-        check(&["foo", "foo.bar"], &["foo"]);
-        check(&["foo.bar", "foo"], &["foo"]);
-        check(
-            &["foo.bar.a", "foo.bar.b", "foo.bar", "foo", "foo.bar.a.c"],
-            &["foo"],
-        );
-        check(
-            &["bar.one", "bar.two", "baz", "bar", "baz.one"],
-            &["bar", "baz"],
-        );
-
-        // Potential false positives
-        check(&["foo", "foobar"], &["foo", "foobar"]);
-        check(
-            &["foo", "foobar", "foo.bar", "foobar.baz"],
-            &["foo", "foobar"],
-        );
-        check(&["foo.bar", "foo.baz"], &["foo.bar", "foo.baz"]);
-        check(&["foo", "foo", "foo.bar", "foo.bar"], &["foo"]);
-
-        // Everything
-        check(
-            &[
-                "foo.inner",
-                "foo.inner.deeper",
-                "foo",
-                "bar",
-                "bar.sub",
-                "bar.sub.deep",
-                "foobar",
-                "baz.baz.bar",
-                "baz.baz",
-                "qux",
-            ],
-            &["bar", "baz.baz", "foo", "foobar", "qux"],
-        );
-    }
-
-    /// A package with duplicate module names.
-    #[test]
-    fn duplicate_module_names() {
-        let src = TempDir::new().unwrap();
-        let pyproject_toml = indoc! {r#"
-            [project]
-            name = "duplicate"
-            version = "1.0.0"
-
-            [tool.uv.build-backend]
-            module-name = ["foo", "foo", "bar.baz", "bar.baz.submodule"]
-
-            [build-system]
-            requires = ["uv_build>=0.5.15,<0.6.0"]
-            build-backend = "uv_build"
-            "#
-        };
-        fs_err::write(src.path().join("pyproject.toml"), pyproject_toml).unwrap();
-        fs_err::create_dir_all(src.path().join("src").join("foo")).unwrap();
-        File::create(src.path().join("src").join("foo").join("__init__.py")).unwrap();
-        fs_err::create_dir_all(src.path().join("src").join("bar").join("baz")).unwrap();
-        File::create(
-            src.path()
-                .join("src")
-                .join("bar")
-                .join("baz")
-                .join("__init__.py"),
-        )
-        .unwrap();
-
-        let dist = TempDir::new().unwrap();
-        let build = build(src.path(), dist.path()).unwrap();
-        assert_snapshot!(build.source_dist_contents.join("\n"), @r"
-        duplicate-1.0.0/
-        duplicate-1.0.0/PKG-INFO
-        duplicate-1.0.0/pyproject.toml
-        duplicate-1.0.0/src
-        duplicate-1.0.0/src/bar
-        duplicate-1.0.0/src/bar/baz
-        duplicate-1.0.0/src/bar/baz/__init__.py
-        duplicate-1.0.0/src/foo
-        duplicate-1.0.0/src/foo/__init__.py
-        ");
-        assert_snapshot!(build.wheel_contents.join("\n"), @r"
-        bar/
-        bar/baz/
-        bar/baz/__init__.py
-        duplicate-1.0.0.dist-info/
-        duplicate-1.0.0.dist-info/METADATA
-        duplicate-1.0.0.dist-info/RECORD
-        duplicate-1.0.0.dist-info/WHEEL
-        foo/
-        foo/__init__.py
-        ");
-    }
 }
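The `prune_redundant_modules` function removed above leans on one property of lexicographic order: after `names.sort()`, a parent module sorts before every one of its children, so comparing each name against the last name kept is enough. The `.`-boundary check is what keeps sibling names with a shared string prefix apart. A sketch of just that check, extracted from the removed code for illustration:

```rust
/// Returns `true` if `child` names a module nested under `parent`.
fn is_submodule(parent: &str, child: &str) -> bool {
    child
        .strip_prefix(parent)
        .is_some_and(|suffix| suffix.starts_with('.'))
}

fn main() {
    assert!(is_submodule("foo", "foo.bar"));
    // A shared string prefix without a `.` boundary is a different module.
    assert!(!is_submodule("foo", "foobar"));
}
```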
@ -3,10 +3,10 @@ use std::ffi::OsStr;
|
|||
use std::fmt::Display;
|
||||
use std::fmt::Write;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::str::{self, FromStr};
|
||||
use std::str::FromStr;
|
||||
|
||||
use itertools::Itertools;
|
||||
use serde::{Deserialize, Deserializer};
|
||||
use serde::Deserialize;
|
||||
use tracing::{debug, trace, warn};
|
||||
use version_ranges::Ranges;
|
||||
use walkdir::WalkDir;
|
||||
|
|
@ -60,10 +60,6 @@ pub enum ValidationError {
|
|||
ReservedGuiScripts,
|
||||
#[error("`project.license` is not a valid SPDX expression: {0}")]
|
||||
InvalidSpdx(String, #[source] spdx::error::ParseError),
|
||||
#[error("`{field}` glob `{glob}` did not match any files")]
|
||||
LicenseGlobNoMatches { field: String, glob: String },
|
||||
#[error("License file `{}` must be UTF-8 encoded", _0)]
|
||||
LicenseFileNotUtf8(String),
|
||||
}
|
||||
|
||||
/// Check if the build backend is matching the currently running uv version.
|
||||
|
|
@ -109,26 +105,6 @@ pub fn check_direct_build(source_tree: &Path, name: impl Display) -> bool {
|
|||
}
|
||||
}
|
||||
|
||||
/// A package name as provided in a `pyproject.toml`.
|
||||
#[derive(Debug, Clone)]
|
||||
struct VerbatimPackageName {
|
||||
/// The package name as given in the `pyproject.toml`.
|
||||
given: String,
|
||||
/// The normalized package name.
|
||||
normalized: PackageName,
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for VerbatimPackageName {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let given = String::deserialize(deserializer)?;
|
||||
let normalized = PackageName::from_str(&given).map_err(serde::de::Error::custom)?;
|
||||
Ok(Self { given, normalized })
|
||||
}
|
||||
}
|
||||
|
||||
/// A `pyproject.toml` as specified in PEP 517.
|
||||
#[derive(Deserialize, Debug, Clone)]
|
||||
#[serde(
|
||||
|
|
@ -147,18 +123,15 @@ pub struct PyProjectToml {
|
|||
|
||||
impl PyProjectToml {
|
||||
pub(crate) fn name(&self) -> &PackageName {
|
||||
&self.project.name.normalized
|
||||
&self.project.name
|
||||
}
|
||||
|
||||
pub(crate) fn version(&self) -> &Version {
|
||||
&self.project.version
|
||||
}
|
||||
|
||||
pub(crate) fn parse(path: &Path) -> Result<Self, Error> {
|
||||
let contents = fs_err::read_to_string(path)?;
|
||||
let pyproject_toml =
|
||||
toml::from_str(&contents).map_err(|err| Error::Toml(path.to_path_buf(), err))?;
|
||||
Ok(pyproject_toml)
|
||||
pub(crate) fn parse(contents: &str) -> Result<Self, Error> {
|
||||
Ok(toml::from_str(contents)?)
|
||||
}
|
||||
|
||||
pub(crate) fn readme(&self) -> Option<&Readme> {
|
||||
|
|
@ -346,7 +319,99 @@ impl PyProjectToml {
|
|||
"2.3"
|
||||
};
|
||||
|
||||
let (license, license_expression, license_files) = self.license_metadata(root)?;
|
||||
// TODO(konsti): Issue a warning on old license metadata once PEP 639 is universal.
|
||||
let (license, license_expression, license_files) =
|
||||
if let Some(license_globs) = &self.project.license_files {
|
||||
let license_expression = match &self.project.license {
|
||||
None => None,
|
||||
Some(License::Spdx(license_expression)) => Some(license_expression.clone()),
|
||||
Some(License::Text { .. } | License::File { .. }) => {
|
||||
return Err(ValidationError::MixedLicenseGenerations.into());
|
||||
}
|
||||
};
|
||||
|
||||
let mut license_files = Vec::new();
|
||||
let mut license_globs_parsed = Vec::new();
|
||||
for license_glob in license_globs {
|
||||
let pep639_glob =
|
||||
PortableGlobParser::Pep639
|
||||
.parse(license_glob)
|
||||
.map_err(|err| Error::PortableGlob {
|
||||
field: license_glob.to_string(),
|
||||
source: err,
|
||||
})?;
|
||||
license_globs_parsed.push(pep639_glob);
|
||||
}
|
||||
let license_globs =
|
||||
GlobDirFilter::from_globs(&license_globs_parsed).map_err(|err| {
|
||||
Error::GlobSetTooLarge {
|
||||
field: "tool.uv.build-backend.source-include".to_string(),
|
||||
source: err,
|
||||
}
|
||||
})?;
|
||||
|
||||
for entry in WalkDir::new(root)
|
||||
.sort_by_file_name()
|
||||
.into_iter()
|
||||
.filter_entry(|entry| {
|
||||
license_globs.match_directory(
|
||||
entry
|
||||
.path()
|
||||
.strip_prefix(root)
|
||||
.expect("walkdir starts with root"),
|
||||
)
|
||||
})
|
||||
{
|
||||
let entry = entry.map_err(|err| Error::WalkDir {
|
||||
root: root.to_path_buf(),
|
||||
err,
|
||||
})?;
|
||||
let relative = entry
|
||||
.path()
|
||||
.strip_prefix(root)
|
||||
.expect("walkdir starts with root");
|
||||
if !license_globs.match_path(relative) {
|
||||
trace!("Not a license files match: {}", relative.user_display());
|
||||
continue;
|
||||
}
|
||||
if !entry.file_type().is_file() {
|
||||
trace!(
|
||||
"Not a file in license files match: {}",
|
||||
relative.user_display()
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
error_on_venv(entry.file_name(), entry.path())?;
|
||||
|
||||
debug!("License files match: {}", relative.user_display());
|
||||
license_files.push(relative.portable_display().to_string());
|
||||
}
|
||||
|
||||
// The glob order may be unstable
|
||||
license_files.sort();
|
||||
|
||||
(None, license_expression, license_files)
|
||||
} else {
|
||||
match &self.project.license {
|
||||
None => (None, None, Vec::new()),
|
||||
Some(License::Spdx(license_expression)) => {
|
||||
(None, Some(license_expression.clone()), Vec::new())
|
||||
}
|
||||
Some(License::Text { text }) => (Some(text.clone()), None, Vec::new()),
|
||||
Some(License::File { file }) => {
|
||||
let text = fs_err::read_to_string(root.join(file))?;
|
||||
(Some(text), None, Vec::new())
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Check that the license expression is a valid SPDX identifier.
|
||||
if let Some(license_expression) = &license_expression {
|
||||
if let Err(err) = spdx::Expression::parse(license_expression) {
|
||||
return Err(ValidationError::InvalidSpdx(license_expression.clone(), err).into());
|
||||
}
|
||||
}
|
||||
|
||||
// TODO(konsti): https://peps.python.org/pep-0753/#label-normalization (Draft)
|
||||
let project_urls = self
|
||||
|
|
@ -391,7 +456,7 @@ impl PyProjectToml {
|
|||
|
||||
Ok(Metadata23 {
|
||||
metadata_version: metadata_version.to_string(),
|
||||
name: self.project.name.given.clone(),
|
||||
name: self.project.name.to_string(),
|
||||
version: self.project.version.to_string(),
|
||||
// Not supported.
|
||||
platforms: vec![],
|
||||
|
|
@ -433,156 +498,6 @@ impl PyProjectToml {
|
|||
})
|
||||
}
|
||||
|
||||
/// Parse and validate the old (PEP 621) and new (PEP 639) license files.
|
||||
#[allow(clippy::type_complexity)]
|
||||
fn license_metadata(
|
||||
&self,
|
||||
root: &Path,
|
||||
) -> Result<(Option<String>, Option<String>, Vec<String>), Error> {
|
||||
// TODO(konsti): Issue a warning on old license metadata once PEP 639 is universal.
|
||||
let (license, license_expression, license_files) = if let Some(license_globs) =
|
||||
&self.project.license_files
|
||||
{
|
||||
let license_expression = match &self.project.license {
|
||||
None => None,
|
||||
Some(License::Spdx(license_expression)) => Some(license_expression.clone()),
|
||||
Some(License::Text { .. } | License::File { .. }) => {
|
||||
return Err(ValidationError::MixedLicenseGenerations.into());
|
||||
}
|
||||
};
|
||||
|
||||
let mut license_files = Vec::new();
|
||||
let mut license_globs_parsed = Vec::with_capacity(license_globs.len());
|
||||
let mut license_glob_matchers = Vec::with_capacity(license_globs.len());
|
||||
|
||||
for license_glob in license_globs {
|
||||
let pep639_glob =
|
||||
PortableGlobParser::Pep639
|
||||
.parse(license_glob)
|
||||
.map_err(|err| Error::PortableGlob {
|
||||
field: license_glob.to_owned(),
|
||||
source: err,
|
||||
})?;
|
||||
license_glob_matchers.push(pep639_glob.compile_matcher());
|
||||
license_globs_parsed.push(pep639_glob);
|
||||
}
|
||||
|
||||
// Track whether each user-specified glob matched so we can flag the unmatched ones.
|
||||
let mut license_globs_matched = vec![false; license_globs_parsed.len()];
|
||||
|
||||
let license_globs =
|
||||
GlobDirFilter::from_globs(&license_globs_parsed).map_err(|err| {
|
||||
Error::GlobSetTooLarge {
|
||||
field: "project.license-files".to_string(),
|
||||
source: err,
|
||||
}
|
||||
})?;
|
||||
|
||||
for entry in WalkDir::new(root)
|
||||
.sort_by_file_name()
|
||||
.into_iter()
|
||||
.filter_entry(|entry| {
|
||||
license_globs.match_directory(
|
||||
entry
|
||||
.path()
|
||||
.strip_prefix(root)
|
||||
.expect("walkdir starts with root"),
|
||||
)
|
||||
})
|
||||
{
|
||||
let entry = entry.map_err(|err| Error::WalkDir {
|
||||
root: root.to_path_buf(),
|
||||
err,
|
||||
})?;
|
||||
|
||||
let relative = entry
|
||||
.path()
|
||||
.strip_prefix(root)
|
||||
.expect("walkdir starts with root");
|
||||
|
||||
if !license_globs.match_path(relative) {
|
||||
trace!("Not a license files match: {}", relative.user_display());
|
||||
continue;
|
||||
}
|
||||
|
||||
let file_type = entry.file_type();
|
||||
|
||||
if !(file_type.is_file() || file_type.is_symlink()) {
|
||||
trace!(
|
||||
"Not a file or symlink in license files match: {}",
|
||||
relative.user_display()
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
error_on_venv(entry.file_name(), entry.path())?;
|
||||
|
||||
debug!("License files match: {}", relative.user_display());
|
||||
|
||||
for (matched, matcher) in license_globs_matched
|
||||
.iter_mut()
|
||||
.zip(license_glob_matchers.iter())
|
||||
{
|
||||
if *matched {
|
||||
continue;
|
||||
}
|
||||
|
||||
if matcher.is_match(relative) {
|
||||
*matched = true;
|
||||
}
|
||||
}
|
||||
|
||||
license_files.push(relative.portable_display().to_string());
|
||||
}
|
||||
|
||||
if let Some((pattern, _)) = license_globs_parsed
|
||||
.into_iter()
|
||||
.zip(license_globs_matched)
|
||||
.find(|(_, matched)| !matched)
|
||||
{
|
||||
return Err(ValidationError::LicenseGlobNoMatches {
|
||||
field: "project.license-files".to_string(),
|
||||
glob: pattern.to_string(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
|
||||
for license_file in &license_files {
|
||||
let file_path = root.join(license_file);
|
||||
let bytes = fs_err::read(&file_path)?;
|
||||
if str::from_utf8(&bytes).is_err() {
|
||||
return Err(ValidationError::LicenseFileNotUtf8(license_file.clone()).into());
|
||||
}
|
||||
}
|
||||
|
||||
// The glob order may be unstable
|
||||
license_files.sort();
|
||||
|
||||
(None, license_expression, license_files)
|
||||
} else {
|
||||
match &self.project.license {
|
||||
None => (None, None, Vec::new()),
|
||||
Some(License::Spdx(license_expression)) => {
|
||||
(None, Some(license_expression.clone()), Vec::new())
|
||||
}
|
||||
Some(License::Text { text }) => (Some(text.clone()), None, Vec::new()),
|
||||
Some(License::File { file }) => {
|
||||
let text = fs_err::read_to_string(root.join(file))?;
|
||||
(Some(text), None, Vec::new())
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Check that the license expression is a valid SPDX identifier.
|
||||
if let Some(license_expression) = &license_expression {
|
||||
if let Err(err) = spdx::Expression::parse(license_expression) {
|
||||
return Err(ValidationError::InvalidSpdx(license_expression.clone(), err).into());
|
||||
}
|
||||
}
|
||||
|
||||
Ok((license, license_expression, license_files))
|
||||
}
|
||||
|
||||
/// Validate and convert the entrypoints in `pyproject.toml`, including console and GUI scripts,
|
||||
/// to an `entry_points.txt`.
|
||||
///
|
||||
|
|
@ -664,7 +579,7 @@ impl PyProjectToml {
|
|||
#[serde(rename_all = "kebab-case")]
|
||||
struct Project {
|
||||
/// The name of the project.
|
||||
name: VerbatimPackageName,
|
||||
name: PackageName,
|
||||
/// The version of the project.
|
||||
version: Version,
|
||||
/// The summary description of the project in one line.
|
||||
|
|
@ -941,28 +856,6 @@ mod tests {
|
|||
formatted
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn uppercase_package_name() {
|
||||
let contents = r#"
|
||||
[project]
|
||||
name = "Hello-World"
|
||||
version = "0.1.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["uv_build>=0.4.15,<0.5.0"]
|
||||
build-backend = "uv_build"
|
||||
"#;
|
||||
let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
|
||||
let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();
|
||||
assert_snapshot!(metadata.core_metadata_format(), @r"
|
||||
Metadata-Version: 2.3
|
||||
Name: Hello-World
|
||||
Version: 0.1.0
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn valid() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
|
|
@ -1037,7 +930,7 @@ mod tests {
|
|||
"#
|
||||
};
|
||||
|
||||
let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
|
||||
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
|
||||
let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();
|
||||
|
||||
assert_snapshot!(metadata.core_metadata_format(), @r###"
|
||||
|
|
@ -1131,7 +1024,7 @@ mod tests {
|
|||
"#
|
||||
};
|
||||
|
||||
let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
|
||||
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
|
||||
let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();
|
||||
|
||||
assert_snapshot!(metadata.core_metadata_format(), @r"
|
||||
|
|
@ -1223,7 +1116,7 @@ mod tests {
|
|||
"#
|
||||
};
|
||||
|
||||
let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
|
||||
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
|
||||
let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();
|
||||
|
||||
assert_snapshot!(metadata.core_metadata_format(), @r###"
|
||||
|
|
@ -1284,7 +1177,7 @@ mod tests {
|
|||
#[test]
|
||||
fn build_system_valid() {
|
||||
let contents = extend_project("");
|
||||
let pyproject_toml: PyProjectToml = toml::from_str(&contents).unwrap();
|
||||
let pyproject_toml = PyProjectToml::parse(&contents).unwrap();
|
||||
assert_snapshot!(
|
||||
pyproject_toml.check_build_system("0.4.15+test").join("\n"),
|
||||
@""
|
||||
|
|
@ -1302,7 +1195,7 @@ mod tests {
|
|||
requires = ["uv_build"]
|
||||
build-backend = "uv_build"
|
||||
"#};
|
||||
let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
|
||||
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
|
||||
assert_snapshot!(
|
||||
pyproject_toml.check_build_system("0.4.15+test").join("\n"),
|
||||
@r###"`build_system.requires = ["uv_build"]` is missing an upper bound on the `uv_build` version such as `<0.5`. Without bounding the `uv_build` version, the source distribution will break when a future, breaking version of `uv_build` is released."###
|
||||
|
|
@ -1320,7 +1213,7 @@ mod tests {
|
|||
requires = ["uv_build>=0.4.15,<0.5.0", "wheel"]
|
||||
build-backend = "uv_build"
|
||||
"#};
|
||||
let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
|
||||
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
|
||||
assert_snapshot!(
|
||||
pyproject_toml.check_build_system("0.4.15+test").join("\n"),
|
||||
@"Expected a single uv requirement in `build-system.requires`, found ``"
|
||||
|
|
@ -1338,7 +1231,7 @@ mod tests {
|
|||
requires = ["setuptools"]
|
||||
build-backend = "uv_build"
|
||||
"#};
|
||||
let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
|
||||
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
|
||||
assert_snapshot!(
|
||||
pyproject_toml.check_build_system("0.4.15+test").join("\n"),
|
||||
@"Expected a single uv requirement in `build-system.requires`, found ``"
|
||||
|
|
@ -1356,7 +1249,7 @@ mod tests {
|
|||
requires = ["uv_build>=0.4.15,<0.5.0"]
|
||||
build-backend = "setuptools"
|
||||
"#};
|
||||
let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
|
||||
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
|
||||
assert_snapshot!(
|
||||
pyproject_toml.check_build_system("0.4.15+test").join("\n"),
|
||||
@r###"The value for `build_system.build-backend` should be `"uv_build"`, not `"setuptools"`"###
|
||||
|
|
@@ -1367,7 +1260,7 @@ mod tests {
    fn minimal() {
        let contents = extend_project("");

-       let metadata = toml::from_str::<PyProjectToml>(&contents)
+       let metadata = PyProjectToml::parse(&contents)
            .unwrap()
            .to_metadata(Path::new("/do/not/read"))
            .unwrap();

@@ -1386,14 +1279,15 @@ mod tests {
        "#
        });

-       let err = toml::from_str::<PyProjectToml>(&contents).unwrap_err();
-       assert_snapshot!(format_err(err), @r#"
-       TOML parse error at line 4, column 10
+       let err = PyProjectToml::parse(&contents).unwrap_err();
+       assert_snapshot!(format_err(err), @r###"
+       Invalid pyproject.toml
+       Caused by: TOML parse error at line 4, column 10
          |
        4 | readme = { path = "Readme.md" }
          |          ^^^^^^^^^^^^^^^^^^^^^^
        data did not match any variant of untagged enum Readme
-       "#);
+       "###);
    }

    #[test]

@@ -1403,7 +1297,7 @@ mod tests {
        "#
        });

-       let err = toml::from_str::<PyProjectToml>(&contents)
+       let err = PyProjectToml::parse(&contents)
            .unwrap()
            .to_metadata(Path::new("/do/not/read"))
            .unwrap_err();

@@ -1425,14 +1319,14 @@ mod tests {
        "#
        });

-       let err = toml::from_str::<PyProjectToml>(&contents)
+       let err = PyProjectToml::parse(&contents)
            .unwrap()
            .to_metadata(Path::new("/do/not/read"))
            .unwrap_err();
-       assert_snapshot!(format_err(err), @r"
-       Invalid project metadata
+       assert_snapshot!(format_err(err), @r###"
+       Invalid pyproject.toml
        Caused by: `project.description` must be a single line
-       ");
+       "###);
    }

    #[test]

@@ -1443,14 +1337,14 @@ mod tests {
        "#
        });

-       let err = toml::from_str::<PyProjectToml>(&contents)
+       let err = PyProjectToml::parse(&contents)
            .unwrap()
            .to_metadata(Path::new("/do/not/read"))
            .unwrap_err();
-       assert_snapshot!(format_err(err), @r"
-       Invalid project metadata
+       assert_snapshot!(format_err(err), @r###"
+       Invalid pyproject.toml
        Caused by: When `project.license-files` is defined, `project.license` must be an SPDX expression string
-       ");
+       "###);
    }

    #[test]

@@ -1459,7 +1353,7 @@ mod tests {
        license = "MIT OR Apache-2.0"
        "#
        });
-       let metadata = toml::from_str::<PyProjectToml>(&contents)
+       let metadata = PyProjectToml::parse(&contents)
            .unwrap()
            .to_metadata(Path::new("/do/not/read"))
            .unwrap();

@@ -1477,13 +1371,13 @@ mod tests {
        license = "MIT XOR Apache-2"
        "#
        });
-       let err = toml::from_str::<PyProjectToml>(&contents)
+       let err = PyProjectToml::parse(&contents)
            .unwrap()
            .to_metadata(Path::new("/do/not/read"))
            .unwrap_err();
        // TODO(konsti): We mess up the indentation in the error.
        assert_snapshot!(format_err(err), @r"
-       Invalid project metadata
+       Invalid pyproject.toml
        Caused by: `project.license` is not a valid SPDX expression: MIT XOR Apache-2
        Caused by: MIT XOR Apache-2
            ^^^ unknown term

@@ -1497,18 +1391,18 @@ mod tests {
        "#
        });

-       let err = toml::from_str::<PyProjectToml>(&contents)
+       let err = PyProjectToml::parse(&contents)
            .unwrap()
            .to_metadata(Path::new("/do/not/read"))
            .unwrap_err();
-       assert_snapshot!(format_err(err), @r"
-       Invalid project metadata
+       assert_snapshot!(format_err(err), @r###"
+       Invalid pyproject.toml
        Caused by: Dynamic metadata is not supported
-       ");
+       "###);
    }

    fn script_error(contents: &str) -> String {
-       let err = toml::from_str::<PyProjectToml>(contents)
+       let err = PyProjectToml::parse(contents)
            .unwrap()
            .to_entry_points()
            .unwrap_err();

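Both sides of these test hunks parse the same documents; the difference is that 0.8.21 funnels parsing through a `PyProjectToml::parse` wrapper that prefixes failures with an `Invalid pyproject.toml` context line, which is what the changed snapshots reflect. A minimal sketch of that wrapper pattern, with a hypothetical `Error` type that is not taken from the uv source:

use serde::Deserialize;

#[derive(Debug, thiserror::Error)]
enum Error {
    // Attach a stable, user-facing context line; the raw TOML error
    // becomes the "Caused by:" chain in the rendered message.
    #[error("Invalid pyproject.toml")]
    Toml(#[from] toml::de::Error),
}

#[derive(Deserialize)]
struct PyProjectToml {
    // Project metadata fields elided for the sketch.
}

impl PyProjectToml {
    // Parsing from already-read contents keeps file I/O at the call site.
    fn parse(contents: &str) -> Result<Self, Error> {
        Ok(toml::from_str(contents)?)
    }
}
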
@@ -70,9 +70,6 @@ pub struct BuildBackendSettings {
    pub default_excludes: bool,

    /// Glob expressions which files and directories to exclude from the source distribution.
-   ///
-   /// These exclusions are also applied to wheels to ensure that a wheel built from a source tree
-   /// is consistent with a wheel built from a source distribution.
    #[option(
        default = r#"[]"#,
        value_type = "list[str]",

@@ -24,9 +24,9 @@ pub fn build_source_dist(
    source_tree: &Path,
    source_dist_directory: &Path,
    uv_version: &str,
-   show_warnings: bool,
) -> Result<SourceDistFilename, Error> {
-   let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
+   let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
+   let pyproject_toml = PyProjectToml::parse(&contents)?;
    let filename = SourceDistFilename {
        name: pyproject_toml.name().clone(),
        version: pyproject_toml.version().clone(),

@@ -34,7 +34,7 @@ pub fn build_source_dist(
    };
    let source_dist_path = source_dist_directory.join(filename.to_string());
    let writer = TarGzWriter::new(&source_dist_path)?;
-   write_source_dist(source_tree, writer, uv_version, show_warnings)?;
+   write_source_dist(source_tree, writer, uv_version)?;
    Ok(filename)
}

@@ -42,9 +42,9 @@ pub fn build_source_dist(
pub fn list_source_dist(
    source_tree: &Path,
    uv_version: &str,
-   show_warnings: bool,
) -> Result<(SourceDistFilename, FileList), Error> {
-   let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
+   let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
+   let pyproject_toml = PyProjectToml::parse(&contents)?;
    let filename = SourceDistFilename {
        name: pyproject_toml.name().clone(),
        version: pyproject_toml.version().clone(),

@@ -52,7 +52,7 @@ pub fn list_source_dist(
    };
    let mut files = FileList::new();
    let writer = ListWriter::new(&mut files);
-   write_source_dist(source_tree, writer, uv_version, show_warnings)?;
+   write_source_dist(source_tree, writer, uv_version)?;
    Ok((filename, files))
}

@@ -61,7 +61,6 @@ fn source_dist_matcher(
    source_tree: &Path,
    pyproject_toml: &PyProjectToml,
    settings: BuildBackendSettings,
-   show_warnings: bool,
) -> Result<(GlobDirFilter, GlobSet), Error> {
    // File and directories to include in the source directory
    let mut include_globs = Vec::new();

@@ -76,7 +75,6 @@ fn source_dist_matcher(
        &settings.module_root,
        settings.module_name.as_ref(),
        settings.namespace,
-       show_warnings,
    )?;
    for module_relative in modules_relative {
        // The wheel must not include any files included by the source distribution (at least until we

@@ -184,9 +182,9 @@ fn write_source_dist(
    source_tree: &Path,
    mut writer: impl DirectoryWriter,
    uv_version: &str,
-   show_warnings: bool,
) -> Result<SourceDistFilename, Error> {
-   let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
+   let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
+   let pyproject_toml = PyProjectToml::parse(&contents)?;
    for warning in pyproject_toml.check_build_system(uv_version) {
        warn_user_once!("{warning}");
    }

@@ -220,7 +218,7 @@ fn write_source_dist(
    )?;

    let (include_matcher, exclude_matcher) =
-       source_dist_matcher(source_tree, &pyproject_toml, settings, show_warnings)?;
+       source_dist_matcher(source_tree, &pyproject_toml, settings)?;

    let mut files_visited = 0;
    for entry in WalkDir::new(source_tree)

@@ -299,10 +297,6 @@ impl TarGzWriter {
impl DirectoryWriter for TarGzWriter {
    fn write_bytes(&mut self, path: &str, bytes: &[u8]) -> Result<(), Error> {
        let mut header = Header::new_gnu();
-       // Work around bug in Python's std tar module
-       // https://github.com/python/cpython/issues/141707
-       // https://github.com/astral-sh/uv/pull/17043#issuecomment-3636841022
-       header.set_entry_type(EntryType::Regular);
        header.set_size(bytes.len() as u64);
        // Reasonable default to avoid 0o000 permissions, the user's umask will be applied on
        // unpacking.

@@ -316,10 +310,6 @@ impl DirectoryWriter for TarGzWriter {
    fn write_file(&mut self, path: &str, file: &Path) -> Result<(), Error> {
        let metadata = fs_err::metadata(file)?;
        let mut header = Header::new_gnu();
-       // Work around bug in Python's std tar module
-       // https://github.com/python/cpython/issues/141707
-       // https://github.com/astral-sh/uv/pull/17043#issuecomment-3636841022
-       header.set_entry_type(EntryType::Regular);
        // Preserve the executable bit, especially for scripts
        #[cfg(unix)]
        let executable_bit = {

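Both `write_bytes` and `write_file` on main pin the tar entry type to `Regular` to work around the linked CPython `tarfile` bug, rather than relying on the header default. A hedged sketch of writing one in-memory entry with the `tar` crate; `append_bytes` is a hypothetical helper, not uv's API:

use std::io::Write;
use tar::{Builder, EntryType, Header};

// Write a single in-memory file into a tar archive, setting the entry type
// explicitly for readers that mishandle defaulted GNU headers.
fn append_bytes<W: Write>(
    builder: &mut Builder<W>,
    path: &str,
    bytes: &[u8],
) -> std::io::Result<()> {
    let mut header = Header::new_gnu();
    header.set_entry_type(EntryType::Regular);
    header.set_size(bytes.len() as u64);
    // Reasonable default to avoid 0o000 permissions; the unpacking umask still applies.
    header.set_mode(0o644);
    header.set_cksum();
    builder.append_data(&mut header, path, bytes)
}
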
@@ -29,9 +29,9 @@ pub fn build_wheel(
    wheel_dir: &Path,
    metadata_directory: Option<&Path>,
    uv_version: &str,
-   show_warnings: bool,
) -> Result<WheelFilename, Error> {
-   let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
+   let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
+   let pyproject_toml = PyProjectToml::parse(&contents)?;
    for warning in pyproject_toml.check_build_system(uv_version) {
        warn_user_once!("{warning}");
    }

@@ -58,7 +58,6 @@ pub fn build_wheel(
        &filename,
        uv_version,
        wheel_writer,
-       show_warnings,
    )?;

    Ok(filename)

@@ -68,9 +67,9 @@ pub fn build_wheel(
pub fn list_wheel(
    source_tree: &Path,
    uv_version: &str,
-   show_warnings: bool,
) -> Result<(WheelFilename, FileList), Error> {
-   let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
+   let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
+   let pyproject_toml = PyProjectToml::parse(&contents)?;
    for warning in pyproject_toml.check_build_system(uv_version) {
        warn_user_once!("{warning}");
    }

@@ -88,14 +87,7 @@ pub fn list_wheel(

    let mut files = FileList::new();
    let writer = ListWriter::new(&mut files);
-   write_wheel(
-       source_tree,
-       &pyproject_toml,
-       &filename,
-       uv_version,
-       writer,
-       show_warnings,
-   )?;
+   write_wheel(source_tree, &pyproject_toml, &filename, uv_version, writer)?;
    Ok((filename, files))
}

@@ -105,7 +97,6 @@ fn write_wheel(
    filename: &WheelFilename,
    uv_version: &str,
    mut wheel_writer: impl DirectoryWriter,
-   show_warnings: bool,
) -> Result<(), Error> {
    let settings = pyproject_toml
        .settings()

@@ -141,7 +132,6 @@ fn write_wheel(
        &settings.module_root,
        settings.module_name.as_ref(),
        settings.namespace,
-       show_warnings,
    )?;

    let mut files_visited = 0;

@@ -269,9 +259,9 @@ pub fn build_editable(
    wheel_dir: &Path,
    metadata_directory: Option<&Path>,
    uv_version: &str,
-   show_warnings: bool,
) -> Result<WheelFilename, Error> {
-   let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
+   let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
+   let pyproject_toml = PyProjectToml::parse(&contents)?;
    for warning in pyproject_toml.check_build_system(uv_version) {
        warn_user_once!("{warning}");
    }

@@ -305,7 +295,6 @@ pub fn build_editable(
        &settings.module_root,
        settings.module_name.as_ref(),
        settings.namespace,
-       show_warnings,
    )?;

    wheel_writer.write_bytes(

@@ -332,7 +321,8 @@ pub fn metadata(
    metadata_directory: &Path,
    uv_version: &str,
) -> Result<String, Error> {
-   let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
+   let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
+   let pyproject_toml = PyProjectToml::parse(&contents)?;
    for warning in pyproject_toml.check_build_system(uv_version) {
        warn_user_once!("{warning}");
    }

@@ -840,7 +830,7 @@ mod test {
    #[test]
    fn test_prepare_metadata() {
        let metadata_dir = TempDir::new().unwrap();
-       let built_by_uv = Path::new("../../test/packages/built-by-uv");
+       let built_by_uv = Path::new("../../scripts/packages/built-by-uv");
        metadata(built_by_uv, metadata_dir.path(), "1.0.0+test").unwrap();

        let mut files: Vec<_> = WalkDir::new(metadata_dir.path())

@@ -1,10 +1,11 @@
 [package]
 name = "uv-build-frontend"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.1"
+description = "Build wheels from source distributions"
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
 documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }

@@ -16,7 +17,6 @@ doctest = false
 workspace = true

 [dependencies]
-uv-auth = { workspace = true }
 uv-cache-key = { workspace = true }
 uv-configuration = { workspace = true }
 uv-distribution = { workspace = true }

@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-build-frontend
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-build-frontend).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.

@@ -28,7 +28,7 @@ use tokio::io::AsyncBufReadExt;
 use tokio::process::Command;
 use tokio::sync::{Mutex, Semaphore};
 use tracing::{Instrument, debug, info_span, instrument, warn};
-use uv_auth::CredentialsCache;
+
 use uv_cache_key::cache_digest;
 use uv_configuration::{BuildKind, BuildOutput, SourceStrategy};
 use uv_distribution::BuildRequires;

@@ -36,7 +36,7 @@ use uv_distribution_types::{
     ConfigSettings, ExtraBuildRequirement, ExtraBuildRequires, IndexLocations, Requirement,
     Resolution,
 };
-use uv_fs::{LockedFile, LockedFileMode};
+use uv_fs::LockedFile;
 use uv_fs::{PythonExt, Simplified};
 use uv_normalize::PackageName;
 use uv_pep440::Version;

@@ -292,7 +292,6 @@ impl SourceBuild {
        mut environment_variables: FxHashMap<OsString, OsString>,
        level: BuildOutput,
        concurrent_builds: usize,
-       credentials_cache: &CredentialsCache,
        preview: Preview,
    ) -> Result<Self, Error> {
        let temp_dir = build_context.cache().venv_dir()?;

@@ -303,6 +302,7 @@ impl SourceBuild {
            source.to_path_buf()
        };

+       let default_backend: Pep517Backend = DEFAULT_BACKEND.clone();
        // Check if we have a PEP 517 build backend.
        let (pep517_backend, project) = Self::extract_pep517_backend(
            &source_tree,

@@ -311,7 +311,7 @@ impl SourceBuild {
            locations,
            source_strategy,
            workspace_cache,
-           credentials_cache,
+           &default_backend,
        )
        .await
        .map_err(|err| *err)?;

@@ -383,6 +383,7 @@ impl SourceBuild {
        let resolved_requirements = Self::get_resolved_requirements(
            build_context,
            source_build_context,
+           &default_backend,
            &pep517_backend,
            extra_build_dependencies,
            build_stack,

@@ -454,7 +455,6 @@ impl SourceBuild {
            &environment_variables,
            &modified_path,
            &temp_dir,
-           credentials_cache,
        )
        .await?;
    }

@@ -493,16 +493,12 @@ impl SourceBuild {
                "uv-setuptools-{}.lock",
                cache_digest(&canonical_source_path)
            ));
-           source_tree_lock = LockedFile::acquire(
-               lock_path,
-               LockedFileMode::Exclusive,
-               self.source_tree.to_string_lossy(),
-           )
-           .await
-           .inspect_err(|err| {
-               warn!("Failed to acquire build lock: {err}");
-           })
-           .ok();
+           source_tree_lock = LockedFile::acquire(lock_path, self.source_tree.to_string_lossy())
+               .await
+               .inspect_err(|err| {
+                   warn!("Failed to acquire build lock: {err}");
+               })
+               .ok();
        }
        Ok(source_tree_lock)
    }

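On main, `LockedFile::acquire` takes an explicit `LockedFileMode`, where 0.8.21 exposed separate acquire functions per mode. The shape below is inferred from the call sites in these hunks, not copied from `uv-fs`; it sketches a blocking variant on top of the `fs2` crate's advisory locks:

use std::fmt::Display;
use std::io;
use std::path::Path;

use fs2::FileExt; // advisory file locks (flock on Unix, LockFileEx on Windows)
use tracing::debug;

// Inferred from the call sites above; not uv's actual definition.
pub enum LockedFileMode {
    Shared,    // many concurrent holders, e.g. regular cache use
    Exclusive, // single holder, e.g. `uv cache clean`
}

pub struct LockedFile(std::fs::File);

impl LockedFile {
    pub fn acquire_blocking(
        path: impl AsRef<Path>,
        mode: LockedFileMode,
        resource: impl Display,
    ) -> io::Result<Self> {
        let file = std::fs::File::create(path.as_ref())?;
        // `resource` only labels log messages; the path is the actual lock.
        debug!("Waiting to acquire lock for `{resource}`");
        match mode {
            LockedFileMode::Shared => file.lock_shared()?,
            LockedFileMode::Exclusive => file.lock_exclusive()?,
        }
        Ok(Self(file))
    }
}
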
@@ -510,12 +506,13 @@ impl SourceBuild {
    async fn get_resolved_requirements(
        build_context: &impl BuildContext,
        source_build_context: SourceBuildContext,
+       default_backend: &Pep517Backend,
        pep517_backend: &Pep517Backend,
        extra_build_dependencies: Vec<Requirement>,
        build_stack: &BuildStack,
    ) -> Result<Resolution, Error> {
        Ok(
-           if pep517_backend.requirements == DEFAULT_BACKEND.requirements
+           if pep517_backend.requirements == default_backend.requirements
                && extra_build_dependencies.is_empty()
            {
                let mut resolution = source_build_context.default_resolution.lock().await;

@@ -523,7 +520,7 @@ impl SourceBuild {
                resolved_requirements.clone()
            } else {
                let resolved_requirements = build_context
-                   .resolve(&DEFAULT_BACKEND.requirements, build_stack)
+                   .resolve(&default_backend.requirements, build_stack)
                    .await
                    .map_err(|err| {
                        Error::RequirementsResolve("`setup.py` build", err.into())

@@ -563,7 +560,7 @@ impl SourceBuild {
        locations: &IndexLocations,
        source_strategy: SourceStrategy,
        workspace_cache: &WorkspaceCache,
-       credentials_cache: &CredentialsCache,
+       default_backend: &Pep517Backend,
    ) -> Result<(Pep517Backend, Option<Project>), Box<Error>> {
        match fs::read_to_string(source_tree.join("pyproject.toml")) {
            Ok(toml) => {

@@ -592,7 +589,6 @@ impl SourceBuild {
                    locations,
                    source_strategy,
                    workspace_cache,
-                   credentials_cache,
                )
                .await
                .map_err(Error::Lowering)?;

@@ -662,7 +658,7 @@ impl SourceBuild {
                }
            }

-           DEFAULT_BACKEND.clone()
+           default_backend.clone()
        };
        Ok((backend, pyproject_toml.project))
    }

@@ -678,7 +674,7 @@ impl SourceBuild {
                // the default backend, to match `build`. `pip` uses `setup.py` directly in this
                // case, but plans to make PEP 517 builds the default in the future.
                // See: https://github.com/pypa/pip/issues/9175.
-               Ok((DEFAULT_BACKEND.clone(), None))
+               Ok((default_backend.clone(), None))
            }
            Err(err) => Err(Box::new(err.into())),
        }

@@ -965,7 +961,6 @@ async fn create_pep517_build_environment(
    environment_variables: &FxHashMap<OsString, OsString>,
    modified_path: &OsString,
    temp_dir: &TempDir,
-   credentials_cache: &CredentialsCache,
) -> Result<(), Error> {
    // Write the hook output to a file so that we can read it back reliably.
    let outfile = temp_dir

@@ -1060,7 +1055,6 @@ async fn create_pep517_build_environment(
        locations,
        source_strategy,
        workspace_cache,
-       credentials_cache,
    )
    .await
    .map_err(Error::Lowering)?;

@@ -1,10 +1,10 @@
 [package]
 name = "uv-build"
-version = "0.9.18"
-description = "A Python build backend"
+version = "0.8.21"
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
 documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }

@@ -1,6 +1,6 @@
 [project]
 name = "uv-build"
-version = "0.9.18"
+version = "0.8.21"
 description = "The uv build backend"
 authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
 requires-python = ">=3.8"

@@ -7,7 +7,7 @@ def main():
        "Use `uv build` or another build frontend instead.",
        file=sys.stderr,
    )
-   if "--help" in sys.argv or "-h" in sys.argv:
+   if "--help" in sys.argv:
        sys.exit(0)
    else:
        sys.exit(1)

@@ -44,7 +44,6 @@ fn main() -> Result<()> {
        &env::current_dir()?,
        &sdist_directory,
        uv_version::version(),
-       false,
    )?;
    // Tell the build frontend about the name of the artifact we built
    writeln!(&mut std::io::stdout(), "{filename}").context("stdout is closed")?;

@@ -57,7 +56,6 @@ fn main() -> Result<()> {
        &wheel_directory,
        metadata_directory.as_deref(),
        uv_version::version(),
-       false,
    )?;
    // Tell the build frontend about the name of the artifact we built
    writeln!(&mut std::io::stdout(), "{filename}").context("stdout is closed")?;

@@ -70,7 +68,6 @@ fn main() -> Result<()> {
        &wheel_directory,
        metadata_directory.as_deref(),
        uv_version::version(),
-       false,
    )?;
    // Tell the build frontend about the name of the artifact we built
    writeln!(&mut std::io::stdout(), "{filename}").context("stdout is closed")?;

@@ -1,10 +1,10 @@
 [package]
 name = "uv-cache-info"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.1"
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
 documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }

@@ -16,8 +16,6 @@ doctest = false
 workspace = true

 [dependencies]
-uv-fs = { workspace = true }
-
 fs-err = { workspace = true }
 globwalk = { workspace = true }
 schemars = { workspace = true, optional = true }

@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-cache-info
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-cache-info).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.

@@ -1,12 +1,11 @@
 use std::borrow::Cow;
+use std::cmp::max;
 use std::collections::BTreeMap;
 use std::path::{Path, PathBuf};

 use serde::Deserialize;
 use tracing::{debug, warn};

-use uv_fs::Simplified;
-
 use crate::git_info::{Commit, Tags};
 use crate::glob::cluster_globs;
 use crate::timestamp::Timestamp;

@@ -64,7 +63,7 @@ impl CacheInfo {
    pub fn from_directory(directory: &Path) -> Result<Self, CacheInfoError> {
        let mut commit = None;
        let mut tags = None;
-       let mut last_changed: Option<(PathBuf, Timestamp)> = None;
+       let mut timestamp = None;
        let mut directories = BTreeMap::new();
        let mut env = BTreeMap::new();

@@ -129,12 +128,7 @@ impl CacheInfo {
                        );
                        continue;
                    }
-                   let timestamp = Timestamp::from_metadata(&metadata);
-                   if last_changed.as_ref().is_none_or(|(_, prev_timestamp)| {
-                       *prev_timestamp < Timestamp::from_metadata(&metadata)
-                   }) {
-                       last_changed = Some((path, timestamp));
-                   }
+                   timestamp = max(timestamp, Some(Timestamp::from_metadata(&metadata)));
                }
                CacheKey::Directory { dir } => {
                    // Treat the path as a directory.

@@ -264,25 +258,14 @@ impl CacheInfo {
                        }
                        continue;
                    }
-                   let timestamp = Timestamp::from_metadata(&metadata);
-                   if last_changed.as_ref().is_none_or(|(_, prev_timestamp)| {
-                       *prev_timestamp < Timestamp::from_metadata(&metadata)
-                   }) {
-                       last_changed = Some((entry.into_path(), timestamp));
-                   }
+                   timestamp = max(timestamp, Some(Timestamp::from_metadata(&metadata)));
                }
            }
        }

-       let timestamp = if let Some((path, timestamp)) = last_changed {
-           debug!(
-               "Computed cache info: {timestamp:?}, {commit:?}, {tags:?}, {env:?}, {directories:?}. Most recently modified: {}",
-               path.user_display()
-           );
-           Some(timestamp)
-       } else {
-           None
-       };
+       debug!(
+           "Computed cache info: {timestamp:?}, {commit:?}, {tags:?}, {env:?}, {directories:?}"
+       );

        Ok(Self {
            timestamp,

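The `CacheInfo` hunks replace 0.8.21's `max` fold over modification times with a fold that also remembers which path produced the newest timestamp, so the debug log can name it. A reduced sketch of that bookkeeping over plain `std::fs` metadata (free-standing, not uv's `Timestamp` type):

use std::fs;
use std::path::PathBuf;
use std::time::SystemTime;

fn newest(paths: &[PathBuf]) -> std::io::Result<Option<(PathBuf, SystemTime)>> {
    let mut last_changed: Option<(PathBuf, SystemTime)> = None;
    for path in paths {
        let modified = fs::metadata(path)?.modified()?;
        // Keep the pair so the caller can report *which* file drove the
        // timestamp, instead of folding with `max` and losing the path.
        if last_changed
            .as_ref()
            .is_none_or(|(_, prev)| *prev < modified)
        {
            last_changed = Some((path.clone(), modified));
        }
    }
    Ok(last_changed)
}
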
@@ -1,10 +1,11 @@
 [package]
 name = "uv-cache-key"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.1"
+description = "Generic functionality for caching paths, URLs, and other resources across platforms."
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
 documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }

@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-cache-key
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-cache-key).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.

@@ -4,7 +4,7 @@ use std::hash::{Hash, Hasher};
 use std::ops::Deref;

 use url::Url;
-use uv_redacted::{DisplaySafeUrl, DisplaySafeUrlError};
+use uv_redacted::DisplaySafeUrl;

 use crate::cache_key::{CacheKey, CacheKeyHasher};

@@ -98,7 +98,7 @@ impl CanonicalUrl {
        Self(url)
    }

-   pub fn parse(url: &str) -> Result<Self, DisplaySafeUrlError> {
+   pub fn parse(url: &str) -> Result<Self, url::ParseError> {
        Ok(Self::new(&DisplaySafeUrl::parse(url)?))
    }
}

@@ -139,18 +139,8 @@ impl std::fmt::Display for CanonicalUrl {
/// `https://github.com/pypa/package.git#subdirectory=pkg_b` would map to different
/// [`CanonicalUrl`] values, but the same [`RepositoryUrl`], since they map to the same
/// resource.
-///
-/// The additional information it holds should only be used to discriminate between
-/// sources that hold the exact same commit in their canonical representation,
-/// but may differ in the contents such as when Git LFS is enabled.
-///
-/// A different cache key will be computed when Git LFS is enabled.
-/// When Git LFS is `false` or `None`, the cache key remains unchanged.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
-pub struct RepositoryUrl {
-    repo_url: DisplaySafeUrl,
-    with_lfs: Option<bool>,
-}
+pub struct RepositoryUrl(DisplaySafeUrl);

impl RepositoryUrl {
    pub fn new(url: &DisplaySafeUrl) -> Self {

@@ -171,31 +161,19 @@ impl RepositoryUrl {
        url.set_fragment(None);
        url.set_query(None);

-       Self {
-           repo_url: url,
-           with_lfs: None,
-       }
+       Self(url)
    }

-   pub fn parse(url: &str) -> Result<Self, DisplaySafeUrlError> {
+   pub fn parse(url: &str) -> Result<Self, url::ParseError> {
        Ok(Self::new(&DisplaySafeUrl::parse(url)?))
    }
-
-   #[must_use]
-   pub fn with_lfs(mut self, lfs: Option<bool>) -> Self {
-       self.with_lfs = lfs;
-       self
-   }
}

impl CacheKey for RepositoryUrl {
    fn cache_key(&self, state: &mut CacheKeyHasher) {
        // `as_str` gives the serialisation of a url (which has a spec) and so insulates against
        // possible changes in how the URL crate does hashing.
-       self.repo_url.as_str().cache_key(state);
-       if let Some(true) = self.with_lfs {
-           1u8.cache_key(state);
-       }
+       self.0.as_str().cache_key(state);
    }
}

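On main, Git LFS state feeds the repository cache key only when it is `Some(true)`; `None` and `Some(false)` hash identically, so keys computed before the field existed stay valid. A sketch of that rule using `std::hash` in place of uv's `CacheKeyHasher` (the struct here is illustrative, not uv's):

use std::hash::{Hash, Hasher};

struct RepositoryKey {
    repo_url: String,       // canonicalized repository URL
    with_lfs: Option<bool>, // `None`/`Some(false)` must hash like the pre-LFS key
}

impl Hash for RepositoryKey {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.repo_url.hash(state);
        // Only mix in a marker when LFS is explicitly enabled, keeping old
        // cache entries (created before the field existed) addressable.
        if let Some(true) = self.with_lfs {
            1u8.hash(state);
        }
    }
}
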
@@ -203,10 +181,7 @@ impl Hash for RepositoryUrl {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // `as_str` gives the serialisation of a url (which has a spec) and so insulates against
        // possible changes in how the URL crate does hashing.
-       self.repo_url.as_str().hash(state);
-       if let Some(true) = self.with_lfs {
-           1u8.hash(state);
-       }
+       self.0.as_str().hash(state);
    }
}

@@ -214,13 +189,13 @@ impl Deref for RepositoryUrl {
    type Target = Url;

    fn deref(&self) -> &Self::Target {
-       &self.repo_url
+       &self.0
    }
}

impl std::fmt::Display for RepositoryUrl {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-       std::fmt::Display::fmt(&self.repo_url, f)
+       std::fmt::Display::fmt(&self.0, f)
    }
}

@@ -229,7 +204,7 @@ mod tests {
    use super::*;

    #[test]
-   fn user_credential_does_not_affect_cache_key() -> Result<(), DisplaySafeUrlError> {
+   fn user_credential_does_not_affect_cache_key() -> Result<(), url::ParseError> {
        let mut hasher = CacheKeyHasher::new();
        CanonicalUrl::parse("https://example.com/pypa/sample-namespace-packages.git@2.0.0")?
            .cache_key(&mut hasher);

@@ -279,7 +254,7 @@ mod tests {
    }

    #[test]
-   fn canonical_url() -> Result<(), DisplaySafeUrlError> {
+   fn canonical_url() -> Result<(), url::ParseError> {
        // Two URLs should be considered equal regardless of the `.git` suffix.
        assert_eq!(
            CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,

@@ -308,14 +283,6 @@ mod tests {
            )?,
        );

-       // Two URLs should _not_ be considered equal if they differ in Git LFS enablement.
-       assert_ne!(
-           CanonicalUrl::parse(
-               "git+https://github.com/pypa/sample-namespace-packages.git#lfs=true"
-           )?,
-           CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
-       );
-
        // Two URLs should _not_ be considered equal if they request different commit tags.
        assert_ne!(
            CanonicalUrl::parse(

@@ -368,7 +335,7 @@ mod tests {
    }

    #[test]
-   fn repository_url() -> Result<(), DisplaySafeUrlError> {
+   fn repository_url() -> Result<(), url::ParseError> {
        // Two URLs should be considered equal regardless of the `.git` suffix.
        assert_eq!(
            RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,

@@ -411,76 +378,6 @@ mod tests {
            )?,
        );

-       // Two URLs should be considered equal if they map to the same repository, even if they
-       // differ in Git LFS enablement.
-       assert_eq!(
-           RepositoryUrl::parse(
-               "git+https://github.com/pypa/sample-namespace-packages.git#lfs=true"
-           )?,
-           RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
-       );
-
-       Ok(())
-   }
-
-   #[test]
-   fn repository_url_with_lfs() -> Result<(), DisplaySafeUrlError> {
-       let mut hasher = CacheKeyHasher::new();
-       RepositoryUrl::parse("https://example.com/pypa/sample-namespace-packages.git@2.0.0")?
-           .cache_key(&mut hasher);
-       let repo_url_basic = hasher.finish();
-
-       let mut hasher = CacheKeyHasher::new();
-       RepositoryUrl::parse(
-           "https://user:foo@example.com/pypa/sample-namespace-packages.git@2.0.0#foo=bar",
-       )?
-       .cache_key(&mut hasher);
-       let repo_url_with_fragments = hasher.finish();
-
-       assert_eq!(
-           repo_url_basic, repo_url_with_fragments,
-           "repository urls should have the exact cache keys as fragments are removed",
-       );
-
-       let mut hasher = CacheKeyHasher::new();
-       RepositoryUrl::parse(
-           "https://user:foo@example.com/pypa/sample-namespace-packages.git@2.0.0#foo=bar",
-       )?
-       .with_lfs(None)
-       .cache_key(&mut hasher);
-       let git_url_with_fragments = hasher.finish();
-
-       assert_eq!(
-           repo_url_with_fragments, git_url_with_fragments,
-           "both structs should have the exact cache keys as fragments are still removed",
-       );
-
-       let mut hasher = CacheKeyHasher::new();
-       RepositoryUrl::parse(
-           "https://user:foo@example.com/pypa/sample-namespace-packages.git@2.0.0#foo=bar",
-       )?
-       .with_lfs(Some(false))
-       .cache_key(&mut hasher);
-       let git_url_with_fragments_and_lfs_false = hasher.finish();
-
-       assert_eq!(
-           git_url_with_fragments, git_url_with_fragments_and_lfs_false,
-           "both structs should have the exact cache keys as lfs false should not influence them",
-       );
-
-       let mut hasher = CacheKeyHasher::new();
-       RepositoryUrl::parse(
-           "https://user:foo@example.com/pypa/sample-namespace-packages.git@2.0.0#foo=bar",
-       )?
-       .with_lfs(Some(true))
-       .cache_key(&mut hasher);
-       let git_url_with_fragments_and_lfs_true = hasher.finish();
-
-       assert_ne!(
-           git_url_with_fragments, git_url_with_fragments_and_lfs_true,
-           "both structs should have different cache keys as one has Git LFS enabled",
-       );
-
        Ok(())
    }
}

@@ -1,10 +1,11 @@
 [package]
 name = "uv-cache"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.1"
+description = "Generate stable hash digests across versions and platforms."
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
 documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }

@@ -34,6 +35,5 @@ rustc-hash = { workspace = true }
 same-file = { workspace = true }
 serde = { workspace = true, features = ["derive"] }
-tempfile = { workspace = true }
 thiserror = { workspace = true }
 tracing = { workspace = true }
 walkdir = { workspace = true }

@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-cache
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-cache).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.

@@ -3,7 +3,7 @@ use std::path::{Path, PathBuf};
 use uv_static::EnvVars;

 use crate::Cache;
-use clap::{Parser, ValueHint};
+use clap::Parser;
 use tracing::{debug, warn};

 #[derive(Parser, Debug, Clone)]

@@ -27,7 +27,7 @@ pub struct CacheArgs {
    /// `%LOCALAPPDATA%\uv\cache` on Windows.
    ///
    /// To view the location of the cache directory, run `uv cache dir`.
-   #[arg(global = true, long, env = EnvVars::UV_CACHE_DIR, value_hint = ValueHint::DirPath)]
+   #[arg(global = true, long, env = EnvVars::UV_CACHE_DIR)]
    pub cache_dir: Option<PathBuf>,
}

@@ -10,7 +10,7 @@ use rustc_hash::FxHashMap;
 use tracing::{debug, trace, warn};

 use uv_cache_info::Timestamp;
-use uv_fs::{LockedFile, LockedFileError, LockedFileMode, Simplified, cachedir, directories};
+use uv_fs::{LockedFile, Simplified, cachedir, directories};
 use uv_normalize::PackageName;
 use uv_pypi_types::ResolutionMetadata;

@@ -35,17 +35,6 @@ mod wheel;
/// Must be kept in-sync with the version in [`CacheBucket::to_str`].
pub const ARCHIVE_VERSION: u8 = 0;

-/// Error locking a cache entry or shard
-#[derive(Debug, thiserror::Error)]
-pub enum Error {
-    #[error(transparent)]
-    Io(#[from] io::Error),
-    #[error("Could not make the path absolute")]
-    Absolute(#[source] io::Error),
-    #[error("Could not acquire lock")]
-    Acquire(#[from] LockedFileError),
-}
-
/// A [`CacheEntry`] which may or may not exist yet.
#[derive(Debug, Clone)]
pub struct CacheEntry(PathBuf);

@@ -91,14 +80,9 @@ impl CacheEntry {
    }

    /// Acquire the [`CacheEntry`] as an exclusive lock.
-   pub async fn lock(&self) -> Result<LockedFile, Error> {
+   pub async fn lock(&self) -> Result<LockedFile, io::Error> {
        fs_err::create_dir_all(self.dir())?;
-       Ok(LockedFile::acquire(
-           self.path(),
-           LockedFileMode::Exclusive,
-           self.path().display(),
-       )
-       .await?)
+       LockedFile::acquire(self.path(), self.path().display()).await
    }
}

@@ -125,14 +109,9 @@ impl CacheShard {
    }

    /// Acquire the cache entry as an exclusive lock.
-   pub async fn lock(&self) -> Result<LockedFile, Error> {
+   pub async fn lock(&self) -> Result<LockedFile, io::Error> {
        fs_err::create_dir_all(self.as_ref())?;
-       Ok(LockedFile::acquire(
-           self.join(".lock"),
-           LockedFileMode::Exclusive,
-           self.display(),
-       )
-       .await?)
+       LockedFile::acquire(self.join(".lock"), self.display()).await
    }

    /// Return the [`CacheShard`] as a [`PathBuf`].

@@ -203,7 +182,7 @@ impl Cache {
    }

    /// Acquire a lock that allows removing entries from the cache.
-   pub async fn with_exclusive_lock(self) -> Result<Self, LockedFileError> {
+   pub fn with_exclusive_lock(self) -> Result<Self, io::Error> {
        let Self {
            root,
            refresh,

@@ -219,12 +198,8 @@ impl Cache {
                ),
            );
        }
-       let lock_file = LockedFile::acquire(
-           root.join(".lock"),
-           LockedFileMode::Exclusive,
-           root.simplified_display(),
-       )
-       .await?;
+       let lock_file =
+           LockedFile::acquire_blocking(root.join(".lock"), root.simplified_display())?;

        Ok(Self {
            root,

@@ -245,11 +220,7 @@ impl Cache {
            lock_file,
        } = self;

-       match LockedFile::acquire_no_wait(
-           root.join(".lock"),
-           LockedFileMode::Exclusive,
-           root.simplified_display(),
-       ) {
+       match LockedFile::acquire_no_wait(root.join(".lock"), root.simplified_display()) {
            Some(lock_file) => Ok(Self {
                root,
                refresh,

@@ -401,8 +372,10 @@ impl Cache {
        self.temp_dir.is_some()
    }

-   /// Populate the cache scaffold.
-   fn create_base_files(root: &PathBuf) -> io::Result<()> {
+   /// Initialize the [`Cache`].
+   pub fn init(self) -> Result<Self, io::Error> {
+       let root = &self.root;
+
        // Create the cache directory, if it doesn't exist.
        fs_err::create_dir_all(root)?;

@@ -448,66 +421,29 @@ impl Cache {
                .join(".git"),
        )?;

-       Ok(())
-   }
-
-   /// Initialize the [`Cache`].
-   pub async fn init(self) -> Result<Self, Error> {
-       let root = &self.root;
-
-       Self::create_base_files(root)?;
-
        // Block cache removal operations from interfering.
-       let lock_file = match LockedFile::acquire(
+       let lock_file = match LockedFile::acquire_shared_blocking(
            root.join(".lock"),
-           LockedFileMode::Shared,
            root.simplified_display(),
-       )
-       .await
-       {
+       ) {
            Ok(lock_file) => Some(Arc::new(lock_file)),
-           Err(err)
-               if err
-                   .as_io_error()
-                   .is_some_and(|err| err.kind() == io::ErrorKind::Unsupported) =>
-           {
+           Err(err) if err.kind() == io::ErrorKind::Unsupported => {
                warn!(
                    "Shared locking is not supported by the current platform or filesystem, \
-                   reduced parallel process safety with `uv cache clean` and `uv cache prune`."
+                    reduced parallel process safety with `uv cache clean` and `uv cache prune`."
                );
                None
            }
-           Err(err) => return Err(err.into()),
+           Err(err) => return Err(err),
        };

        Ok(Self {
-           root: std::path::absolute(root).map_err(Error::Absolute)?,
+           root: std::path::absolute(root)?,
            lock_file,
            ..self
        })
    }

-   /// Initialize the [`Cache`], assuming that there are no other uv processes running.
-   pub fn init_no_wait(self) -> Result<Option<Self>, Error> {
-       let root = &self.root;
-
-       Self::create_base_files(root)?;
-
-       // Block cache removal operations from interfering.
-       let Some(lock_file) = LockedFile::acquire_no_wait(
-           root.join(".lock"),
-           LockedFileMode::Shared,
-           root.simplified_display(),
-       ) else {
-           return Ok(None);
-       };
-       Ok(Some(Self {
-           root: std::path::absolute(root).map_err(Error::Absolute)?,
-           lock_file: Some(Arc::new(lock_file)),
-           ..self
-       }))
-   }
-
    /// Clear the cache, removing all entries.
    pub fn clear(self, reporter: Box<dyn CleanReporter>) -> Result<Removal, io::Error> {
        // Remove everything but `.lock`, Windows does not allow removal of a locked file

@@ -542,7 +478,7 @@ impl Cache {
    /// Remove a package from the cache.
    ///
    /// Returns the number of entries removed from the cache.
-   pub fn remove(&self, name: &PackageName) -> io::Result<Removal> {
+   pub fn remove(&self, name: &PackageName) -> Result<Removal, io::Error> {
        // Collect the set of referenced archives.
        let references = self.find_archive_references()?;

@@ -15,7 +15,7 @@ pub enum WheelCache<'a> {
    Path(&'a DisplaySafeUrl),
    /// An editable dependency, which we key by URL.
    Editable(&'a DisplaySafeUrl),
-   /// A Git dependency, which we key by URL (including LFS state), SHA.
+   /// A Git dependency, which we key by URL and SHA.
    ///
    /// Note that this variant only exists for source distributions; wheels can't be delivered
    /// through Git.

@@ -1,10 +1,11 @@
 [package]
 name = "uv-cli"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.1"
+description = "The command line interface for the uv binary."
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
 documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }

@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-cli
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-cli).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.

File diff suppressed because it is too large.

@@ -366,7 +366,6 @@ pub fn resolver_options(
            exclude_newer_package.unwrap_or_default(),
        ),
        link_mode,
-       torch_backend: None,
        no_build: flag(no_build, build, "build"),
        no_build_package: Some(no_build_package),
        no_binary: flag(no_binary, binary, "binary"),

@@ -496,6 +495,5 @@ pub fn resolver_installer_options(
            Some(no_binary_package)
        },
        no_sources: if no_sources { Some(true) } else { None },
-       torch_backend: None,
    }
}

@@ -1,13 +1,7 @@
 [package]
 name = "uv-client"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.1"
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }

 [lib]
 doctest = false

@@ -38,7 +32,6 @@ uv-version = { workspace = true }
 uv-warnings = { workspace = true }

 anyhow = { workspace = true }
-astral-tl = { workspace = true }
 async-trait = { workspace = true }
 async_http_range_reader = { workspace = true }
 async_zip = { workspace = true }

|
|||
serde_json = { workspace = true }
|
||||
sys-info = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
tl = { workspace = true }
|
||||
tokio = { workspace = true }
|
||||
tokio-util = { workspace = true }
|
||||
tracing = { workspace = true }
|
||||
|
|
@@ -72,9 +66,5 @@ http-body-util = { workspace = true }
 hyper = { workspace = true }
 hyper-util = { workspace = true }
 insta = { workspace = true }
-rcgen = { workspace = true }
-rustls = { workspace = true }
-tokio = { workspace = true }
-tokio-rustls = { workspace = true }
 wiremock = { workspace = true }
 tempfile = { workspace = true }

@@ -1,13 +1,5 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-client
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-client).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.
+# `pypi-client`
+
+A general-use client for interacting with PyPI.
+
+Loosely modeled after Orogene's `oro-client`.

@@ -28,14 +28,13 @@ use tracing::{debug, trace};
 use url::ParseError;
 use url::Url;

-use uv_auth::{AuthMiddleware, Credentials, CredentialsCache, Indexes, PyxTokenStore};
+use uv_auth::{AuthMiddleware, Credentials, Indexes, PyxTokenStore};
 use uv_configuration::{KeyringProviderType, TrustedHost};
 use uv_fs::Simplified;
 use uv_pep508::MarkerEnvironment;
 use uv_platform_tags::Platform;
 use uv_preview::Preview;
 use uv_redacted::DisplaySafeUrl;
-use uv_redacted::DisplaySafeUrlError;
 use uv_static::EnvVars;
 use uv_version::version;
 use uv_warnings::warn_user_once;

@@ -45,12 +44,13 @@ use crate::middleware::OfflineMiddleware;
 use crate::tls::read_identity;
 use crate::{Connectivity, WrappedReqwestError};

+/// Do not use this value directly outside tests, use [`retries_from_env`] instead.
 pub const DEFAULT_RETRIES: u32 = 3;

 /// Maximum number of redirects to follow before giving up.
 ///
 /// This is the default used by [`reqwest`].
-pub const DEFAULT_MAX_REDIRECTS: u32 = 10;
+const DEFAULT_MAX_REDIRECTS: u32 = 10;

 /// Selectively skip parts or the entire auth middleware.
 #[derive(Debug, Clone, Copy, Default)]

@@ -78,10 +78,8 @@ pub struct BaseClientBuilder<'a> {
    markers: Option<&'a MarkerEnvironment>,
    platform: Option<&'a Platform>,
    auth_integration: AuthIntegration,
-   /// Global authentication cache for a uv invocation to share credentials across uv clients.
-   credentials_cache: Arc<CredentialsCache>,
    indexes: Indexes,
-   timeout: Duration,
+   default_timeout: Duration,
    extra_middleware: Option<ExtraMiddleware>,
    proxies: Vec<Proxy>,
    redirect_policy: RedirectPolicy,

@@ -91,8 +89,6 @@ pub struct BaseClientBuilder<'a> {
    cross_origin_credential_policy: CrossOriginCredentialsPolicy,
    /// Optional custom reqwest client to use instead of creating a new one.
    custom_client: Option<Client>,
-   /// uv subcommand in which this client is being used
-   subcommand: Option<Vec<String>>,
}

/// The policy for handling HTTP redirects.

@@ -104,8 +100,6 @@ pub enum RedirectPolicy {
    BypassMiddleware,
    /// Handle redirects manually, re-triggering our custom middleware for each request.
    RetriggerMiddleware,
-   /// No redirect for non-cloneable (e.g., streaming) requests with custom redirect logic.
-   NoRedirect,
}

impl RedirectPolicy {

@@ -113,7 +107,6 @@ impl RedirectPolicy {
        match self {
            Self::BypassMiddleware => reqwest::redirect::Policy::default(),
            Self::RetriggerMiddleware => reqwest::redirect::Policy::none(),
-           Self::NoRedirect => reqwest::redirect::Policy::none(),
        }
    }
}

@@ -143,39 +136,35 @@ impl Default for BaseClientBuilder<'_> {
            markers: None,
            platform: None,
            auth_integration: AuthIntegration::default(),
-           credentials_cache: Arc::new(CredentialsCache::default()),
            indexes: Indexes::new(),
-           timeout: Duration::from_secs(30),
+           default_timeout: Duration::from_secs(30),
            extra_middleware: None,
            proxies: vec![],
            redirect_policy: RedirectPolicy::default(),
            cross_origin_credential_policy: CrossOriginCredentialsPolicy::Secure,
            custom_client: None,
-           subcommand: None,
        }
    }
}

-impl<'a> BaseClientBuilder<'a> {
+impl BaseClientBuilder<'_> {
    pub fn new(
        connectivity: Connectivity,
        native_tls: bool,
        allow_insecure_host: Vec<TrustedHost>,
        preview: Preview,
-       timeout: Duration,
-       retries: u32,
    ) -> Self {
        Self {
            preview,
            allow_insecure_host,
            native_tls,
-           retries,
            connectivity,
-           timeout,
            ..Self::default()
        }
    }
}

+impl<'a> BaseClientBuilder<'a> {
    /// Use a custom reqwest client instead of creating a new one.
    ///
    /// This allows you to provide your own reqwest client with custom configuration.

@@ -211,6 +200,15 @@ impl<'a> BaseClientBuilder<'a> {
        self
    }

+   /// Read the retry count from [`EnvVars::UV_HTTP_RETRIES`] if set, otherwise use the default
+   /// retries.
+   ///
+   /// Errors when [`EnvVars::UV_HTTP_RETRIES`] is not a valid u32.
+   pub fn retries_from_env(mut self) -> Result<Self, RetryParsingError> {
+       self.retries = retries_from_env()?;
+       Ok(self)
+   }
+
    #[must_use]
    pub fn native_tls(mut self, native_tls: bool) -> Self {
        self.native_tls = native_tls;

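The `retries_from_env` builder hook on the 0.8.21 side reads the retry count from `UV_HTTP_RETRIES`, treating an unset variable as the default but a malformed value as a hard error. A standalone sketch of that parse; the error type is simplified to `ParseIntError` rather than uv's `RetryParsingError`:

use std::env;
use std::num::ParseIntError;

const DEFAULT_RETRIES: u32 = 3;

fn retries_from_env() -> Result<u32, ParseIntError> {
    match env::var("UV_HTTP_RETRIES") {
        // Unset (or non-UTF-8) means "use the default", not an error.
        Err(_) => Ok(DEFAULT_RETRIES),
        // A set-but-invalid value is a hard error, surfacing typos to the user.
        Ok(value) => value.parse::<u32>(),
    }
}
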
@@ -248,8 +246,8 @@ impl<'a> BaseClientBuilder<'a> {
    }

    #[must_use]
-   pub fn timeout(mut self, timeout: Duration) -> Self {
-       self.timeout = timeout;
+   pub fn default_timeout(mut self, default_timeout: Duration) -> Self {
+       self.default_timeout = default_timeout;
        self
    }

@@ -283,26 +281,6 @@ impl<'a> BaseClientBuilder<'a> {
        self
    }

-   #[must_use]
-   pub fn subcommand(mut self, subcommand: Vec<String>) -> Self {
-       self.subcommand = Some(subcommand);
-       self
-   }
-
-   pub fn credentials_cache(&self) -> &CredentialsCache {
-       &self.credentials_cache
-   }
-
-   /// See [`CredentialsCache::store_credentials_from_url`].
-   pub fn store_credentials_from_url(&self, url: &DisplaySafeUrl) -> bool {
-       self.credentials_cache.store_credentials_from_url(url)
-   }
-
-   /// See [`CredentialsCache::store_credentials`].
-   pub fn store_credentials(&self, url: &DisplaySafeUrl, credentials: Credentials) {
-       self.credentials_cache.store_credentials(url, credentials);
-   }
-
    pub fn is_native_tls(&self) -> bool {
        self.native_tls
    }

@@ -312,7 +290,7 @@ impl<'a> BaseClientBuilder<'a> {
    }

    /// Create a [`RetryPolicy`] for the client.
-   pub fn retry_policy(&self) -> ExponentialBackoff {
+   fn retry_policy(&self) -> ExponentialBackoff {
        let mut builder = ExponentialBackoff::builder();
        if env::var_os(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY).is_some() {
            builder = builder.retry_bounds(Duration::from_millis(0), Duration::from_millis(0));

@@ -321,7 +299,21 @@ impl<'a> BaseClientBuilder<'a> {
    }

    pub fn build(&self) -> BaseClient {
-       let timeout = self.timeout;
+       // Timeout options, matching https://doc.rust-lang.org/nightly/cargo/reference/config.html#httptimeout
+       // `UV_REQUEST_TIMEOUT` is provided for backwards compatibility with v0.1.6
+       let timeout = env::var(EnvVars::UV_HTTP_TIMEOUT)
+           .or_else(|_| env::var(EnvVars::UV_REQUEST_TIMEOUT))
+           .or_else(|_| env::var(EnvVars::HTTP_TIMEOUT))
+           .and_then(|value| {
+               value.parse::<u64>()
+                   .map(Duration::from_secs)
+                   .or_else(|_| {
+                       // On parse error, warn and use the default timeout
+                       warn_user_once!("Ignoring invalid value from environment for `UV_HTTP_TIMEOUT`. Expected an integer number of seconds, got \"{value}\".");
+                       Ok(self.default_timeout)
+                   })
+           })
+           .unwrap_or(self.default_timeout);
        debug!("Using request timeout of {}s", timeout.as_secs());

        // Use the custom client if provided, otherwise create a new one

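The 0.8.21 `build` path resolves the request timeout from three environment variables in priority order before falling back to the configured default. A free-standing sketch of the same precedence chain (variable names from the diff); unlike the original, it falls back silently on a malformed value instead of warning first:

use std::env;
use std::time::Duration;

fn request_timeout(default: Duration) -> Duration {
    // First match wins: UV_HTTP_TIMEOUT, then the legacy UV_REQUEST_TIMEOUT,
    // then the generic HTTP_TIMEOUT.
    ["UV_HTTP_TIMEOUT", "UV_REQUEST_TIMEOUT", "HTTP_TIMEOUT"]
        .iter()
        .find_map(|var| env::var(var).ok())
        .and_then(|value| value.parse::<u64>().ok().map(Duration::from_secs))
        // Unset or unparseable values fall back to the default.
        .unwrap_or(default)
}
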
@@ -351,7 +343,6 @@ impl<'a> BaseClientBuilder<'a> {
            dangerous_client,
            raw_dangerous_client,
            timeout,
-           credentials_cache: self.credentials_cache.clone(),
        }
    }

@@ -378,7 +369,6 @@ impl<'a> BaseClientBuilder<'a> {
            raw_client: existing.raw_client.clone(),
            raw_dangerous_client: existing.raw_dangerous_client.clone(),
            timeout: existing.timeout,
-           credentials_cache: existing.credentials_cache.clone(),
        }
    }

@@ -387,14 +377,14 @@ impl<'a> BaseClientBuilder<'a> {
        let mut user_agent_string = format!("uv/{}", version());

        // Add linehaul metadata.
-       let linehaul = LineHaul::new(self.markers, self.platform, self.subcommand.clone());
-       if let Ok(output) = serde_json::to_string(&linehaul) {
-           let _ = write!(user_agent_string, " {output}");
+       if let Some(markers) = self.markers {
+           let linehaul = LineHaul::new(markers, self.platform);
+           if let Ok(output) = serde_json::to_string(&linehaul) {
+               let _ = write!(user_agent_string, " {output}");
+           }
        }

-       // Checks for the presence of `SSL_CERT_FILE`.
-       // Certificate loading support is delegated to `rustls-native-certs`.
-       // See https://github.com/rustls/rustls-native-certs/blob/813790a297ad4399efe70a8e5264ca1b420acbec/src/lib.rs#L118-L125
+       // Check for the presence of an `SSL_CERT_FILE`.
        let ssl_cert_file_exists = env::var_os(EnvVars::SSL_CERT_FILE).is_some_and(|path| {
            let path_exists = Path::new(&path).exists();
            if !path_exists {

@ -406,61 +396,11 @@ impl<'a> BaseClientBuilder<'a> {
|
|||
path_exists
|
||||
});
|
||||
|
||||
// Checks for the presence of `SSL_CERT_DIR`.
|
||||
// Certificate loading support is delegated to `rustls-native-certs`.
|
||||
// See https://github.com/rustls/rustls-native-certs/blob/813790a297ad4399efe70a8e5264ca1b420acbec/src/lib.rs#L118-L125
|
||||
let ssl_cert_dir_exists = env::var_os(EnvVars::SSL_CERT_DIR)
|
||||
.filter(|v| !v.is_empty())
|
||||
.is_some_and(|dirs| {
|
||||
// Parse `SSL_CERT_DIR`, with support for multiple entries using
|
||||
// a platform-specific delimiter (`:` on Unix, `;` on Windows)
|
||||
let (existing, missing): (Vec<_>, Vec<_>) =
|
||||
env::split_paths(&dirs).partition(|p| p.exists());
|
||||
|
||||
if existing.is_empty() {
|
||||
let end_note = if missing.len() == 1 {
|
||||
"The directory does not exist."
|
||||
} else {
|
||||
"The entries do not exist."
|
||||
};
|
||||
warn_user_once!(
|
||||
"Ignoring invalid `SSL_CERT_DIR`. {end_note}: {}.",
|
||||
missing
|
||||
.iter()
|
||||
.map(Simplified::simplified_display)
|
||||
.join(", ")
|
||||
.cyan()
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Warn on any missing entries
|
||||
if !missing.is_empty() {
|
||||
let end_note = if missing.len() == 1 {
|
||||
"The following directory does not exist:"
|
||||
} else {
|
||||
"The following entries do not exist:"
|
||||
};
|
||||
warn_user_once!(
|
||||
"Invalid entries in `SSL_CERT_DIR`. {end_note}: {}.",
|
||||
missing
|
||||
.iter()
|
||||
.map(Simplified::simplified_display)
|
||||
.join(", ")
|
||||
.cyan()
|
||||
);
|
||||
}
|
||||
|
||||
// Proceed while ignoring missing entries
|
||||
true
|
||||
});
|
||||
|
||||
// Create a secure client that validates certificates.
|
||||
let raw_client = self.create_client(
|
||||
&user_agent_string,
|
||||
timeout,
|
||||
ssl_cert_file_exists,
|
||||
ssl_cert_dir_exists,
|
||||
Security::Secure,
|
||||
self.redirect_policy,
|
||||
);
|
||||
|
|
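As an aside, the `SSL_CERT_DIR` partition above reduces to a small standalone helper; only the standard library is involved:

use std::env;
use std::ffi::OsStr;
use std::path::PathBuf;

/// Split `SSL_CERT_DIR` on the platform delimiter (`:` on Unix, `;` on Windows)
/// and partition the entries into those that exist and those that are missing.
fn split_cert_dirs(dirs: &OsStr) -> (Vec<PathBuf>, Vec<PathBuf>) {
    env::split_paths(dirs).partition(|path| path.exists())
}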
@@ -470,7 +410,6 @@ impl<'a> BaseClientBuilder<'a> {
            &user_agent_string,
            timeout,
            ssl_cert_file_exists,
            ssl_cert_dir_exists,
            Security::Insecure,
            self.redirect_policy,
        );

@@ -483,7 +422,6 @@ impl<'a> BaseClientBuilder<'a> {
        user_agent: &str,
        timeout: Duration,
        ssl_cert_file_exists: bool,
        ssl_cert_dir_exists: bool,
        security: Security,
        redirect_policy: RedirectPolicy,
    ) -> Client {

@@ -502,7 +440,7 @@ impl<'a> BaseClientBuilder<'a> {
            Security::Insecure => client_builder.danger_accept_invalid_certs(true),
        };

        let client_builder = if self.native_tls || ssl_cert_file_exists || ssl_cert_dir_exists {
        let client_builder = if self.native_tls || ssl_cert_file_exists {
            client_builder.tls_built_in_native_certs(true)
        } else {
            client_builder.tls_built_in_webpki_certs(true)

@@ -583,7 +521,6 @@ impl<'a> BaseClientBuilder<'a> {
        match self.auth_integration {
            AuthIntegration::Default => {
                let mut auth_middleware = AuthMiddleware::new()
                    .with_cache_arc(self.credentials_cache.clone())
                    .with_base_client(base_client)
                    .with_indexes(self.indexes.clone())
                    .with_keyring(self.keyring.to_provider())

@@ -595,7 +532,6 @@ impl<'a> BaseClientBuilder<'a> {
            }
            AuthIntegration::OnlyAuthenticated => {
                let mut auth_middleware = AuthMiddleware::new()
                    .with_cache_arc(self.credentials_cache.clone())
                    .with_base_client(base_client)
                    .with_indexes(self.indexes.clone())
                    .with_keyring(self.keyring.to_provider())

@@ -639,8 +575,6 @@ pub struct BaseClient {
    allow_insecure_host: Vec<TrustedHost>,
    /// The number of retries to attempt on transient errors.
    retries: u32,
    /// Global authentication cache for a uv invocation to share credentials across uv clients.
    credentials_cache: Arc<CredentialsCache>,
}

#[derive(Debug, Clone, Copy)]

@@ -663,7 +597,7 @@ impl BaseClient {

    /// Executes a request, applying redirect policy.
    pub async fn execute(&self, req: Request) -> reqwest_middleware::Result<Response> {
        let client = self.for_host(&DisplaySafeUrl::from_url(req.url().clone()));
        let client = self.for_host(&DisplaySafeUrl::from(req.url().clone()));
        client.execute(req).await
    }

@@ -686,15 +620,7 @@ impl BaseClient {

    /// The [`RetryPolicy`] for the client.
    pub fn retry_policy(&self) -> ExponentialBackoff {
        let mut builder = ExponentialBackoff::builder();
        if env::var_os(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY).is_some() {
            builder = builder.retry_bounds(Duration::from_millis(0), Duration::from_millis(0));
        }
        builder.build_with_max_retries(self.retries)
    }

    pub fn credentials_cache(&self) -> &CredentialsCache {
        &self.credentials_cache
        ExponentialBackoff::builder().build_with_max_retries(self.retries)
    }
}

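A minimal sketch of the policy construction above with the `reqwest-retry` crate; collapsing the retry bounds to zero is exactly what the `UV_TEST_NO_HTTP_RETRY_DELAY` branch does:

use std::time::Duration;

use reqwest_retry::policies::ExponentialBackoff;

fn retry_policy(retries: u32, no_delay: bool) -> ExponentialBackoff {
    let mut builder = ExponentialBackoff::builder();
    if no_delay {
        // Test-only: make retries fire immediately instead of backing off.
        builder = builder.retry_bounds(Duration::from_millis(0), Duration::from_millis(0));
    }
    builder.build_with_max_retries(retries)
}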
@@ -732,7 +658,6 @@ impl RedirectClientWithMiddleware {
        match self.redirect_policy {
            RedirectPolicy::BypassMiddleware => self.client.execute(req).await,
            RedirectPolicy::RetriggerMiddleware => self.execute_with_redirect_handling(req).await,
            RedirectPolicy::NoRedirect => self.client.execute(req).await,
        }
    }

@@ -798,7 +723,7 @@ fn request_into_redirect(
    res: &Response,
    cross_origin_credentials_policy: CrossOriginCredentialsPolicy,
) -> reqwest_middleware::Result<Option<Request>> {
    let original_req_url = DisplaySafeUrl::from_url(req.url().clone());
    let original_req_url = DisplaySafeUrl::from(req.url().clone());
    let status = res.status();
    let should_redirect = match status {
        StatusCode::MOVED_PERMANENTLY

@@ -851,7 +776,7 @@ fn request_into_redirect(
    let mut redirect_url = match DisplaySafeUrl::parse(location) {
        Ok(url) => url,
        // Per RFC 7231, URLs should be resolved against the request URL.
        Err(DisplaySafeUrlError::Url(ParseError::RelativeUrlWithoutBase)) => original_req_url.join(location).map_err(|err| {
        Err(ParseError::RelativeUrlWithoutBase) => original_req_url.join(location).map_err(|err| {
            reqwest_middleware::Error::Middleware(anyhow!(
                "Invalid HTTP {status} 'Location' value `{location}` relative to `{original_req_url}`: {err}"
            ))

@@ -1104,12 +1029,12 @@ pub fn is_transient_network_error(err: &(dyn Error + 'static)) -> bool {
    }

        trace!("Cannot retry nested reqwest error");
    } else if source.downcast_ref::<h2::Error>().is_some() {
        // All h2 errors look like errors that should be retried
        // https://github.com/astral-sh/uv/issues/15916
        trace!("Retrying nested h2 error");
        return true;
    } else if let Some(io_err) = source.downcast_ref::<io::Error>() {
    } else if let Some(io_err) = source.downcast_ref::<io::Error>().or_else(|| {
        // h2 may hide an IO error inside.
        source
            .downcast_ref::<h2::Error>()
            .and_then(|err| err.get_io())
    }) {
        has_known_error = true;
        let retryable_io_err_kinds = [
            // https://github.com/astral-sh/uv/issues/12054

@@ -1176,6 +1101,19 @@ pub enum RetryParsingError {
    ParseInt(#[from] ParseIntError),
}

/// Read the retry count from [`EnvVars::UV_HTTP_RETRIES`] if set, otherwise, make no change.
///
/// Errors when [`EnvVars::UV_HTTP_RETRIES`] is not a valid u32.
pub fn retries_from_env() -> Result<u32, RetryParsingError> {
    // TODO(zanieb): We should probably parse this in another layer, but there's not a natural
    // fit for it right now
    if let Some(value) = env::var_os(EnvVars::UV_HTTP_RETRIES) {
        Ok(value.to_string_lossy().as_ref().parse::<u32>()?)
    } else {
        Ok(DEFAULT_RETRIES)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

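A self-contained sketch of the `UV_HTTP_RETRIES` parsing above; the fallback of 3 is a stand-in for the crate's `DEFAULT_RETRIES` constant:

use std::env;
use std::num::ParseIntError;

fn retries_from_env() -> Result<u32, ParseIntError> {
    match env::var_os("UV_HTTP_RETRIES") {
        // Lossily decode the OS string, then require a valid u32.
        Some(value) => value.to_string_lossy().as_ref().parse::<u32>(),
        // Unset: use the default retry count (stand-in value).
        None => Ok(3),
    }
}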
@@ -15,32 +15,12 @@ use uv_redacted::DisplaySafeUrl;

use crate::BaseClient;
use crate::base_client::is_transient_network_error;
use crate::error::ProblemDetails;
use crate::{
    Error, ErrorKind,
    httpcache::{AfterResponse, BeforeRequest, CachePolicy, CachePolicyBuilder},
    rkyvutil::OwnedArchive,
};

/// Extract problem details from an HTTP response if it has the correct content type
///
/// Note: This consumes the response body, so it should only be called when there's an error status.
async fn extract_problem_details(response: Response) -> Option<ProblemDetails> {
    match response.bytes().await {
        Ok(bytes) => match serde_json::from_slice(&bytes) {
            Ok(details) => Some(details),
            Err(err) => {
                warn!("Failed to parse problem details: {err}");
                None
            }
        },
        Err(err) => {
            warn!("Failed to read response body for problem details: {err}");
            None
        }
    }
}

/// A trait that generalizes (de)serialization at a high level.
///
/// The main purpose of this trait is to make the `CachedClient` work for

@@ -557,36 +537,16 @@ impl CachedClient {
        cached: DataWithCachePolicy,
        new_cache_policy_builder: CachePolicyBuilder,
    ) -> Result<CachedResponse, Error> {
        let url = DisplaySafeUrl::from_url(req.url().clone());
        let url = DisplaySafeUrl::from(req.url().clone());
        debug!("Sending revalidation request for: {url}");
        let mut response = self
            .0
            .execute(req)
            .instrument(info_span!("revalidation_request", url = url.as_str()))
            .await
            .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?;

        // Check for HTTP error status and extract problem details if available
        if let Err(status_error) = response.error_for_status_ref() {
            // Clone the response to extract problem details before the error consumes it
            let problem_details = if response
                .headers()
                .get("content-type")
                .and_then(|ct| ct.to_str().ok())
                .map(|ct| ct == "application/problem+json")
                .unwrap_or(false)
            {
                extract_problem_details(response).await
            } else {
                None
            };
            return Err(ErrorKind::from_reqwest_with_problem_details(
                url.clone(),
                status_error,
                problem_details,
            )
            .into());
        }
            .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?
            .error_for_status()
            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;

        // If the user set a custom `Cache-Control` header, override it.
        if let CacheControl::Override(header) = cache_control {

@@ -627,7 +587,7 @@ impl CachedClient {
        req: Request,
        cache_control: CacheControl<'_>,
    ) -> Result<(Response, Option<Box<CachePolicy>>), Error> {
        let url = DisplaySafeUrl::from_url(req.url().clone());
        let url = DisplaySafeUrl::from(req.url().clone());
        trace!("Sending fresh {} request for {}", req.method(), url);
        let cache_policy_builder = CachePolicyBuilder::new(&req);
        let mut response = self

@@ -651,25 +611,9 @@ impl CachedClient {
            .map(|retries| retries.value());

        if let Err(status_error) = response.error_for_status_ref() {
            let problem_details = if response
                .headers()
                .get("content-type")
                .and_then(|ct| ct.to_str().ok())
                .map(|ct| ct.starts_with("application/problem+json"))
                .unwrap_or(false)
            {
                extract_problem_details(response).await
            } else {
                None
            };
            return Err(CachedClientError::<Error>::Client {
                retries: retry_count,
                err: ErrorKind::from_reqwest_with_problem_details(
                    url,
                    status_error,
                    problem_details,
                )
                .into(),
                err: ErrorKind::from_reqwest(url, status_error).into(),
            }
            .into());
        }
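For illustration, the content-type gate above reduced to plain strings; the media type comes from RFC 9457 as in the diff:

/// Whether a `Content-Type` value denotes RFC 9457 problem details.
fn is_problem_json(content_type: Option<&str>) -> bool {
    content_type
        // `starts_with` tolerates parameters such as `; charset=utf-8`.
        .map(|ct| ct.starts_with("application/problem+json"))
        .unwrap_or(false)
}

fn main() {
    assert!(is_problem_json(Some("application/problem+json")));
    assert!(is_problem_json(Some("application/problem+json; charset=utf-8")));
    assert!(!is_problem_json(Some("text/html")));
    assert!(!is_problem_json(None));
}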
@@ -743,15 +687,13 @@ impl CachedClient {
        let total_retries = past_retries + middleware_retries;
        let retry_decision = retry_policy.should_retry(start_time, total_retries);
        if let reqwest_retry::RetryDecision::Retry { execute_after } = retry_decision {
            debug!(
                "Transient failure while handling response from {}; retrying...",
                req.url(),
            );
            let duration = execute_after
                .duration_since(SystemTime::now())
                .unwrap_or_else(|_| Duration::default());

            debug!(
                "Transient failure while handling response from {}; retrying after {:.1}s...",
                req.url(),
                duration.as_secs_f32(),
            );
            tokio::time::sleep(duration).await;
            past_retries += 1;
            continue;
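The `execute_after` handling above turns an absolute deadline into a sleep duration; a standalone sketch using only `std`:

use std::time::{Duration, SystemTime};

/// Time remaining until `execute_after`, clamped to zero when the deadline has passed.
fn delay_until(execute_after: SystemTime) -> Duration {
    execute_after
        .duration_since(SystemTime::now())
        // `duration_since` errors if the deadline is already in the past: no wait.
        .unwrap_or_else(|_| Duration::default())
}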
@@ -803,14 +745,13 @@ impl CachedClient {
        let total_retries = past_retries + middleware_retries;
        let retry_decision = retry_policy.should_retry(start_time, total_retries);
        if let reqwest_retry::RetryDecision::Retry { execute_after } = retry_decision {
            debug!(
                "Transient failure while handling response from {}; retrying...",
                req.url(),
            );
            let duration = execute_after
                .duration_since(SystemTime::now())
                .unwrap_or_else(|_| Duration::default());
            debug!(
                "Transient failure while handling response from {}; retrying after {}s...",
                req.url(),
                duration.as_secs(),
            );
            tokio::time::sleep(duration).await;
            past_retries += 1;
            continue;

@@ -1,11 +1,9 @@
use async_http_range_reader::AsyncHttpRangeReaderError;
use async_zip::error::ZipError;
use serde::Deserialize;
use std::fmt::{Display, Formatter};
use std::ops::Deref;
use std::path::PathBuf;

use uv_cache::Error as CacheError;
use async_http_range_reader::AsyncHttpRangeReaderError;
use async_zip::error::ZipError;

use uv_distribution_filename::{WheelFilename, WheelFilenameError};
use uv_normalize::PackageName;
use uv_redacted::DisplaySafeUrl;

@@ -13,61 +11,6 @@ use uv_redacted::DisplaySafeUrl;
use crate::middleware::OfflineError;
use crate::{FlatIndexError, html};

/// RFC 9457 Problem Details for HTTP APIs
///
/// This structure represents the standard format for machine-readable details
/// of errors in HTTP response bodies as defined in RFC 9457.
#[derive(Debug, Clone, Deserialize)]
pub struct ProblemDetails {
    /// A URI reference that identifies the problem type.
    /// When dereferenced, it SHOULD provide human-readable documentation for the problem type.
    #[serde(rename = "type", default = "default_problem_type")]
    pub problem_type: String,

    /// A short, human-readable summary of the problem type.
    pub title: Option<String>,

    /// The HTTP status code generated by the origin server for this occurrence of the problem.
    pub status: Option<u16>,

    /// A human-readable explanation specific to this occurrence of the problem.
    pub detail: Option<String>,

    /// A URI reference that identifies the specific occurrence of the problem.
    pub instance: Option<String>,
}

/// Default problem type URI as per RFC 9457
#[inline]
fn default_problem_type() -> String {
    "about:blank".to_string()
}

impl ProblemDetails {
    /// Get a human-readable description of the problem
    pub fn description(&self) -> Option<String> {
        match self {
            Self {
                title: Some(title),
                detail: Some(detail),
                ..
            } => Some(format!("Server message: {title}, {detail}")),
            Self {
                title: Some(title), ..
            } => Some(format!("Server message: {title}")),
            Self {
                detail: Some(detail),
                ..
            } => Some(format!("Server message: {detail}")),
            Self {
                status: Some(status),
                ..
            } => Some(format!("HTTP error {status}")),
            _ => None,
        }
    }
}

#[derive(Debug)]
pub struct Error {
    kind: Box<ErrorKind>,
@@ -79,9 +22,8 @@ impl Display for Error {
        if self.retries > 0 {
            write!(
                f,
                "Request failed after {retries} {subject}",
                retries = self.retries,
                subject = if self.retries > 1 { "retries" } else { "retry" }
                "Request failed after {retries} retries",
                retries = self.retries
            )
        } else {
            Display::fmt(&self.kind, f)

@@ -273,15 +215,11 @@ pub enum ErrorKind {
    /// Make sure the package name is spelled correctly and that you've
    /// configured the right registry to fetch it from.
    #[error("Package `{0}` was not found in the registry")]
    RemotePackageNotFound(PackageName),
    PackageNotFound(String),

    /// The package was not found in the local (file-based) index.
    #[error("Package `{0}` was not found in the local index")]
    LocalPackageNotFound(PackageName),

    /// The root was not found in the local (file-based) index.
    #[error("Local index not found at: `{}`", _0.display())]
    LocalIndexNotFound(PathBuf),
    FileNotFound(String),

    /// The metadata file could not be parsed.
    #[error("Couldn't parse metadata of {0} from {1}")]

@@ -291,12 +229,16 @@ pub enum ErrorKind {
        #[source] Box<uv_pypi_types::MetadataError>,
    ),

    /// The metadata file was not found in the wheel.
    #[error("Metadata file `{0}` was not found in {1}")]
    MetadataNotFound(WheelFilename, String),

    /// An error that happened while making a request or in a reqwest middleware.
    #[error("Failed to fetch: `{0}`")]
    WrappedReqwestError(DisplaySafeUrl, #[source] WrappedReqwestError),

    /// Add the number of failed retries to the error.
    #[error("Request failed after {retries} {subject}", subject = if *retries > 1 { "retries" } else { "retry" })]
    #[error("Request failed after {retries} retries")]
    RequestWithRetries {
        source: Box<ErrorKind>,
        retries: u32,

@@ -338,9 +280,6 @@ pub enum ErrorKind {
    #[error("Failed to write to the client cache")]
    CacheWrite(#[source] std::io::Error),

    #[error("Failed to acquire lock on the client cache")]
    CacheLock(#[source] CacheError),

    #[error(transparent)]
    Io(std::io::Error),

@@ -391,19 +330,7 @@ impl ErrorKind {
        }
    }

        Self::WrappedReqwestError(url, WrappedReqwestError::from(err))
    }

    /// Create an [`ErrorKind`] from a [`reqwest::Error`] with problem details.
    pub(crate) fn from_reqwest_with_problem_details(
        url: DisplaySafeUrl,
        error: reqwest::Error,
        problem_details: Option<ProblemDetails>,
    ) -> Self {
        Self::WrappedReqwestError(
            url,
            WrappedReqwestError::with_problem_details(error.into(), problem_details),
        )
        Self::WrappedReqwestError(url, WrappedReqwestError(err))
    }
}

@@ -413,26 +340,12 @@ impl ErrorKind {
/// Wraps a [`reqwest_middleware::Error`] instead of an [`reqwest::Error`] since the actual reqwest
/// error may be below some context in the [`anyhow::Error`].
#[derive(Debug)]
pub struct WrappedReqwestError {
    error: reqwest_middleware::Error,
    problem_details: Option<Box<ProblemDetails>>,
}
pub struct WrappedReqwestError(reqwest_middleware::Error);

impl WrappedReqwestError {
    /// Create a new `WrappedReqwestError` with optional problem details
    pub fn with_problem_details(
        error: reqwest_middleware::Error,
        problem_details: Option<ProblemDetails>,
    ) -> Self {
        Self {
            error,
            problem_details: problem_details.map(Box::new),
        }
    }

    /// Return the inner [`reqwest::Error`] from the error chain, if it exists.
    fn inner(&self) -> Option<&reqwest::Error> {
        match &self.error {
        match &self.0 {
            reqwest_middleware::Error::Reqwest(err) => Some(err),
            reqwest_middleware::Error::Middleware(err) => err.chain().find_map(|err| {
                if let Some(err) = err.downcast_ref::<reqwest::Error>() {

@@ -494,19 +407,13 @@ impl WrappedReqwestError {

impl From<reqwest::Error> for WrappedReqwestError {
    fn from(error: reqwest::Error) -> Self {
        Self {
            error: error.into(),
            problem_details: None,
        }
        Self(error.into())
    }
}

impl From<reqwest_middleware::Error> for WrappedReqwestError {
    fn from(error: reqwest_middleware::Error) -> Self {
        Self {
            error,
            problem_details: None,
        }
        Self(error)
    }
}

@@ -514,7 +421,7 @@ impl Deref for WrappedReqwestError {
    type Target = reqwest_middleware::Error;

    fn deref(&self) -> &Self::Target {
        &self.error
        &self.0
    }
}

@@ -523,15 +430,9 @@ impl Display for WrappedReqwestError {
        if self.is_likely_offline() {
            // Insert an extra hint, we'll show the wrapped error through `source`
            f.write_str("Could not connect, are you offline?")
        } else if let Some(problem_details) = &self.problem_details {
            // Show problem details if available
            match problem_details.description() {
                None => Display::fmt(&self.error, f),
                Some(message) => f.write_str(&message),
            }
        } else {
            // Show the wrapped error
            Display::fmt(&self.error, f)
            Display::fmt(&self.0, f)
        }
    }
}

@@ -540,117 +441,10 @@ impl std::error::Error for WrappedReqwestError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        if self.is_likely_offline() {
            // `Display` is inserting an extra message, so we need to show the wrapped error
            Some(&self.error)
        } else if self.problem_details.is_some() {
            // `Display` is showing problem details, so show the wrapped error as source
            Some(&self.error)
            Some(&self.0)
        } else {
            // `Display` is showing the wrapped error, continue with its source
            self.error.source()
            self.0.source()
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_problem_details_parsing() {
        let json = r#"{
            "type": "https://example.com/probs/out-of-credit",
            "title": "You do not have enough credit.",
            "detail": "Your current balance is 30, but that costs 50.",
            "status": 403,
            "instance": "/account/12345/msgs/abc"
        }"#;

        let problem_details: ProblemDetails = serde_json::from_slice(json.as_bytes()).unwrap();
        assert_eq!(
            problem_details.problem_type,
            "https://example.com/probs/out-of-credit"
        );
        assert_eq!(
            problem_details.title,
            Some("You do not have enough credit.".to_string())
        );
        assert_eq!(
            problem_details.detail,
            Some("Your current balance is 30, but that costs 50.".to_string())
        );
        assert_eq!(problem_details.status, Some(403));
        assert_eq!(
            problem_details.instance,
            Some("/account/12345/msgs/abc".to_string())
        );
    }

    #[test]
    fn test_problem_details_default_type() {
        let json = r#"{
            "detail": "Something went wrong",
            "status": 500
        }"#;

        let problem_details: ProblemDetails = serde_json::from_slice(json.as_bytes()).unwrap();
        assert_eq!(problem_details.problem_type, "about:blank");
        assert_eq!(
            problem_details.detail,
            Some("Something went wrong".to_string())
        );
        assert_eq!(problem_details.status, Some(500));
    }

    #[test]
    fn test_problem_details_description() {
        let json = r#"{
            "detail": "Detailed error message",
            "title": "Error Title",
            "status": 400
        }"#;

        let problem_details: ProblemDetails = serde_json::from_slice(json.as_bytes()).unwrap();
        assert_eq!(
            problem_details.description().unwrap(),
            "Server message: Error Title, Detailed error message"
        );

        let json_no_detail = r#"{
            "title": "Error Title",
            "status": 400
        }"#;

        let problem_details: ProblemDetails =
            serde_json::from_slice(json_no_detail.as_bytes()).unwrap();
        assert_eq!(
            problem_details.description().unwrap(),
            "Server message: Error Title"
        );

        let json_minimal = r#"{
            "status": 400
        }"#;

        let problem_details: ProblemDetails =
            serde_json::from_slice(json_minimal.as_bytes()).unwrap();
        assert_eq!(problem_details.description().unwrap(), "HTTP error 400");
    }

    #[test]
    fn test_problem_details_with_extensions() {
        let json = r#"{
            "type": "https://example.com/probs/out-of-credit",
            "title": "You do not have enough credit.",
            "detail": "Your current balance is 30, but that costs 50.",
            "status": 403,
            "balance": 30,
            "accounts": ["/account/12345", "/account/67890"]
        }"#;

        let problem_details: ProblemDetails = serde_json::from_slice(json.as_bytes()).unwrap();
        assert_eq!(
            problem_details.title,
            Some("You do not have enough credit.".to_string())
        );
    }
}

@@ -14,7 +14,7 @@ use uv_redacted::DisplaySafeUrl;
use uv_small_str::SmallString;

use crate::cached_client::{CacheControl, CachedClientError};
use crate::html::SimpleDetailHTML;
use crate::html::SimpleHtml;
use crate::{CachedClient, Connectivity, Error, ErrorKind, OwnedArchive};

#[derive(Debug, thiserror::Error)]

@@ -189,13 +189,13 @@ impl<'a> FlatIndexClient<'a> {
        async {
            // Use the response URL, rather than the request URL, as the base for relative URLs.
            // This ensures that we handle redirects and other URL transformations correctly.
            let url = DisplaySafeUrl::from_url(response.url().clone());
            let url = DisplaySafeUrl::from(response.url().clone());

            let text = response
                .text()
                .await
                .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
            let SimpleDetailHTML { base, files } = SimpleDetailHTML::parse(&text, &url)
            let SimpleHtml { base, files } = SimpleHtml::parse(&text, &url)
                .map_err(|err| Error::from_html_err(err, url.clone()))?;

            // Convert to a reference-counted string.

@@ -321,63 +321,6 @@ impl<'a> FlatIndexClient<'a> {
                index: flat_index.clone(),
            });
        }

        dists.sort_by(|a, b| {
            a.filename
                .cmp(&b.filename)
                .then_with(|| a.index.cmp(&b.index))
        });

        Ok(FlatIndexEntries::from_entries(dists))
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use fs_err::File;
    use std::io::Write;
    use tempfile::tempdir;

    #[test]
    fn read_from_directory_sorts_distributions() {
        let dir = tempdir().unwrap();

        let filenames = [
            "beta-2.0.0-py3-none-any.whl",
            "alpha-1.0.0.tar.gz",
            "alpha-1.0.0-py3-none-any.whl",
        ];

        for name in &filenames {
            let mut file = File::create(dir.path().join(name)).unwrap();
            file.write_all(b"").unwrap();
        }

        let entries = FlatIndexClient::read_from_directory(
            dir.path(),
            &IndexUrl::parse(&dir.path().to_string_lossy(), None).unwrap(),
        )
        .unwrap();

        let actual = entries
            .entries
            .iter()
            .map(|entry| entry.filename.to_string())
            .collect::<Vec<_>>();

        let mut expected = filenames
            .iter()
            .map(|name| DistFilename::try_from_normalized_filename(name).unwrap())
            .collect::<Vec<_>>();

        expected.sort();

        let expected = expected
            .into_iter()
            .map(|filename| filename.to_string())
            .collect::<Vec<_>>();

        assert_eq!(actual, expected);
    }
}

@@ -3,32 +3,32 @@ use std::str::FromStr;
use jiff::Timestamp;
use tl::HTMLTag;
use tracing::{debug, instrument, warn};
use url::Url;

use uv_normalize::PackageName;
use uv_pep440::VersionSpecifiers;
use uv_pypi_types::{BaseUrl, CoreMetadata, Hashes, PypiFile, Yanked};
use uv_pypi_types::{HashError, LenientVersionSpecifiers};
use uv_redacted::{DisplaySafeUrl, DisplaySafeUrlError};
use uv_redacted::DisplaySafeUrl;

/// A parsed structure from PyPI "HTML" index format for a single package.
#[derive(Debug, Clone)]
pub(crate) struct SimpleDetailHTML {
pub(crate) struct SimpleHtml {
    /// The [`BaseUrl`] to which all relative URLs should be resolved.
    pub(crate) base: BaseUrl,
    /// The list of [`PypiFile`]s available for download sorted by filename.
    pub(crate) files: Vec<PypiFile>,
}

impl SimpleDetailHTML {
impl SimpleHtml {
    /// Parse the list of [`PypiFile`]s from the simple HTML page returned by the given URL.
    #[instrument(skip_all, fields(url = % url))]
    pub(crate) fn parse(text: &str, url: &DisplaySafeUrl) -> Result<Self, Error> {
    pub(crate) fn parse(text: &str, url: &Url) -> Result<Self, Error> {
        let dom = tl::parse(text, tl::ParserOptions::default())?;

        // Parse the first `<base>` tag, if any, to determine the base URL to which all
        // relative URLs should be resolved. The HTML spec requires that the `<base>` tag
        // appear before other tags with attribute values of URLs.
        let base = BaseUrl::from(
        let base = BaseUrl::from(DisplaySafeUrl::from(
            dom.nodes()
                .iter()
                .filter_map(|node| node.as_tag())

@@ -38,7 +38,7 @@ impl SimpleDetailHTML {
                .transpose()?
                .flatten()
                .unwrap_or_else(|| url.clone()),
        );
        ));

        // Parse each `<a>` tag, to extract the filename, hash, and URL.
        let mut files: Vec<PypiFile> = dom

@@ -67,19 +67,18 @@ impl SimpleDetailHTML {
    }

    /// Parse the `href` from a `<base>` tag.
    fn parse_base(base: &HTMLTag) -> Result<Option<DisplaySafeUrl>, Error> {
    fn parse_base(base: &HTMLTag) -> Result<Option<Url>, Error> {
        let Some(Some(href)) = base.attributes().get("href") else {
            return Ok(None);
        };
        let href = std::str::from_utf8(href.as_bytes())?;
        let url =
            DisplaySafeUrl::parse(href).map_err(|err| Error::UrlParse(href.to_string(), err))?;
        let url = Url::parse(href).map_err(|err| Error::UrlParse(href.to_string(), err))?;
        Ok(Some(url))
    }

    /// Parse a [`PypiFile`] from an `<a>` tag.
    ///
    /// Returns `None` if the `<a>` doesn't have an `href` attribute.
    /// Returns `None` if the `<a>` don't doesn't have an `href` attribute.
    fn parse_anchor(link: &HTMLTag) -> Result<Option<PypiFile>, Error> {
        // Extract the href.
        let Some(href) = link

@@ -226,56 +225,6 @@ impl SimpleDetailHTML {
    }
}

/// A parsed structure from PyPI "HTML" index format listing all available packages.
#[derive(Debug, Clone)]
pub(crate) struct SimpleIndexHtml {
    /// The list of project names available in the index.
    pub(crate) projects: Vec<PackageName>,
}

impl SimpleIndexHtml {
    /// Parse the list of project names from the Simple API index HTML page.
    pub(crate) fn parse(text: &str) -> Result<Self, Error> {
        let dom = tl::parse(text, tl::ParserOptions::default())?;

        // Parse each `<a>` tag to extract the project name.
        let parser = dom.parser();
        let mut projects = dom
            .nodes()
            .iter()
            .filter_map(|node| node.as_tag())
            .filter(|link| link.name().as_bytes() == b"a")
            .filter_map(|link| Self::parse_anchor_project_name(link, parser))
            .collect::<Vec<_>>();

        // Sort for deterministic ordering.
        projects.sort_unstable();

        Ok(Self { projects })
    }

    /// Parse a project name from an `<a>` tag.
    ///
    /// Returns `None` if the `<a>` doesn't have an `href` attribute or text content.
    fn parse_anchor_project_name(link: &HTMLTag, parser: &tl::Parser) -> Option<PackageName> {
        // Extract the href.
        link.attributes()
            .get("href")
            .flatten()
            .filter(|bytes| !bytes.as_bytes().is_empty())?;

        // Extract the text content, which should be the project name.
        let inner_text = link.inner_text(parser);
        let project_name = inner_text.trim();

        if project_name.is_empty() {
            return None;
        }

        PackageName::from_str(project_name).ok()
    }
}

#[derive(Debug, thiserror::Error)]
pub enum Error {
    #[error(transparent)]

@@ -285,7 +234,7 @@ pub enum Error {
    FromUtf8(#[from] std::string::FromUtf8Error),

    #[error("Failed to parse URL: {0}")]
    UrlParse(String, #[source] DisplaySafeUrlError),
    UrlParse(String, #[source] url::ParseError),

    #[error(transparent)]
    HtmlParse(#[from] tl::ParseError),

@@ -325,10 +274,10 @@ mod tests {
        </html>
        <!--TIMESTAMP 1703347410-->
        "#;
        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleDetailHTML::parse(text, &base).unwrap();
        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleHtml::parse(text, &base).unwrap();
        insta::assert_debug_snapshot!(result, @r#"
        SimpleDetailHTML {
        SimpleHtml {
            base: BaseUrl(
                DisplaySafeUrl {
                    scheme: "https",

@@ -382,10 +331,10 @@ mod tests {
        </html>
        <!--TIMESTAMP 1703347410-->
        "#;
        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleDetailHTML::parse(text, &base).unwrap();
        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleHtml::parse(text, &base).unwrap();
        insta::assert_debug_snapshot!(result, @r#"
        SimpleDetailHTML {
        SimpleHtml {
            base: BaseUrl(
                DisplaySafeUrl {
                    scheme: "https",

@@ -442,10 +391,10 @@ mod tests {
        </html>
        <!--TIMESTAMP 1703347410-->
        "#;
        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleDetailHTML::parse(text, &base).unwrap();
        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleHtml::parse(text, &base).unwrap();
        insta::assert_debug_snapshot!(result, @r#"
        SimpleDetailHTML {
        SimpleHtml {
            base: BaseUrl(
                DisplaySafeUrl {
                    scheme: "https",

@@ -499,10 +448,10 @@ mod tests {
        </html>
        <!--TIMESTAMP 1703347410-->
        "#;
        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleDetailHTML::parse(text, &base).unwrap();
        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleHtml::parse(text, &base).unwrap();
        insta::assert_debug_snapshot!(result, @r#"
        SimpleDetailHTML {
        SimpleHtml {
            base: BaseUrl(
                DisplaySafeUrl {
                    scheme: "https",

@@ -556,10 +505,10 @@ mod tests {
        </html>
        <!--TIMESTAMP 1703347410-->
        "#;
        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleDetailHTML::parse(text, &base).unwrap();
        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleHtml::parse(text, &base).unwrap();
        insta::assert_debug_snapshot!(result, @r#"
        SimpleDetailHTML {
        SimpleHtml {
            base: BaseUrl(
                DisplaySafeUrl {
                    scheme: "https",

@@ -613,10 +562,10 @@ mod tests {
        </html>
        <!--TIMESTAMP 1703347410-->
        "#;
        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleDetailHTML::parse(text, &base).unwrap();
        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleHtml::parse(text, &base).unwrap();
        insta::assert_debug_snapshot!(result, @r#"
        SimpleDetailHTML {
        SimpleHtml {
            base: BaseUrl(
                DisplaySafeUrl {
                    scheme: "https",

@@ -668,10 +617,10 @@ mod tests {
        </html>
        <!--TIMESTAMP 1703347410-->
        "#;
        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleDetailHTML::parse(text, &base).unwrap();
        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleHtml::parse(text, &base).unwrap();
        insta::assert_debug_snapshot!(result, @r#"
        SimpleDetailHTML {
        SimpleHtml {
            base: BaseUrl(
                DisplaySafeUrl {
                    scheme: "https",

@@ -723,10 +672,10 @@ mod tests {
        </html>
        <!--TIMESTAMP 1703347410-->
        ";
        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleDetailHTML::parse(text, &base).unwrap();
        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleHtml::parse(text, &base).unwrap();
        insta::assert_debug_snapshot!(result, @r#"
        SimpleDetailHTML {
        SimpleHtml {
            base: BaseUrl(
                DisplaySafeUrl {
                    scheme: "https",

@@ -761,10 +710,10 @@ mod tests {
        </html>
        <!--TIMESTAMP 1703347410-->
        "#;
        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleDetailHTML::parse(text, &base).unwrap();
        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleHtml::parse(text, &base).unwrap();
        insta::assert_debug_snapshot!(result, @r#"
        SimpleDetailHTML {
        SimpleHtml {
            base: BaseUrl(
                DisplaySafeUrl {
                    scheme: "https",

@@ -799,10 +748,10 @@ mod tests {
        </html>
        <!--TIMESTAMP 1703347410-->
        "#;
        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleDetailHTML::parse(text, &base).unwrap();
        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleHtml::parse(text, &base).unwrap();
        insta::assert_debug_snapshot!(result, @r#"
        SimpleDetailHTML {
        SimpleHtml {
            base: BaseUrl(
                DisplaySafeUrl {
                    scheme: "https",

@@ -854,10 +803,10 @@ mod tests {
        </html>
        <!--TIMESTAMP 1703347410-->
        "#;
        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleDetailHTML::parse(text, &base).unwrap();
        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleHtml::parse(text, &base).unwrap();
        insta::assert_debug_snapshot!(result, @r#"
        SimpleDetailHTML {
        SimpleHtml {
            base: BaseUrl(
                DisplaySafeUrl {
                    scheme: "https",

@@ -909,11 +858,11 @@ mod tests {
        </html>
        <!--TIMESTAMP 1703347410-->
        "#;
        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleDetailHTML::parse(text, &base);
        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleHtml::parse(text, &base);
        insta::assert_debug_snapshot!(result, @r#"
        Ok(
            SimpleDetailHTML {
            SimpleHtml {
                base: BaseUrl(
                    DisplaySafeUrl {
                        scheme: "https",

@@ -966,11 +915,11 @@ mod tests {
        </html>
        <!--TIMESTAMP 1703347410-->
        "#;
        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleDetailHTML::parse(text, &base);
        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleHtml::parse(text, &base);
        insta::assert_debug_snapshot!(result, @r#"
        Ok(
            SimpleDetailHTML {
            SimpleHtml {
                base: BaseUrl(
                    DisplaySafeUrl {
                        scheme: "https",

@@ -1023,8 +972,8 @@ mod tests {
        </html>
        <!--TIMESTAMP 1703347410-->
        "#;
        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleDetailHTML::parse(text, &base).unwrap_err();
        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleHtml::parse(text, &base).unwrap_err();
        insta::assert_snapshot!(result, @"Unsupported hash algorithm (expected one of: `md5`, `sha256`, `sha384`, `sha512`, or `blake2b`) on: `blake2=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61`");
    }

@@ -1040,13 +989,11 @@ mod tests {
        </body>
        </html>
        "#;
        let base = DisplaySafeUrl::parse(
            "https://storage.googleapis.com/jax-releases/jax_cuda_releases.html",
        )
        .unwrap();
        let result = SimpleDetailHTML::parse(text, &base).unwrap();
        let base = Url::parse("https://storage.googleapis.com/jax-releases/jax_cuda_releases.html")
            .unwrap();
        let result = SimpleHtml::parse(text, &base).unwrap();
        insta::assert_debug_snapshot!(result, @r#"
        SimpleDetailHTML {
        SimpleHtml {
            base: BaseUrl(
                DisplaySafeUrl {
                    scheme: "https",

@@ -1124,11 +1071,11 @@ mod tests {
        </body>
        </html>
        "#;
        let base = DisplaySafeUrl::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask/")
        let base = Url::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask/")
            .unwrap();
        let result = SimpleDetailHTML::parse(text, &base).unwrap();
        let result = SimpleHtml::parse(text, &base).unwrap();
        insta::assert_debug_snapshot!(result, @r#"
        SimpleDetailHTML {
        SimpleHtml {
            base: BaseUrl(
                DisplaySafeUrl {
                    scheme: "https",

@@ -1228,10 +1175,10 @@ mod tests {
        </body>
        </html>
        "#;
        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleDetailHTML::parse(text, &base).unwrap();
        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
        let result = SimpleHtml::parse(text, &base).unwrap();
        insta::assert_debug_snapshot!(result, @r#"
        SimpleDetailHTML {
        SimpleHtml {
            base: BaseUrl(
                DisplaySafeUrl {
                    scheme: "https",

@@ -1300,11 +1247,11 @@ mod tests {
        </body>
        </html>
        "#;
        let base = DisplaySafeUrl::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask/")
        let base = Url::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask/")
            .unwrap();
        let result = SimpleDetailHTML::parse(text, &base).unwrap();
        let result = SimpleHtml::parse(text, &base).unwrap();
        insta::assert_debug_snapshot!(result, @r#"
        SimpleDetailHTML {
        SimpleHtml {
            base: BaseUrl(
                DisplaySafeUrl {
                    scheme: "https",

@@ -1427,180 +1374,4 @@ mod tests {
        }
        "#);
    }

    /// Test parsing Simple API index (root) HTML.
    #[test]
    fn parse_simple_index() {
        let text = r#"
        <!DOCTYPE html>
        <html>
        <head>
        <title>Simple Index</title>
        </head>
        <body>
        <h1>Simple Index</h1>
        <a href="/simple/flask/">flask</a><br/>
        <a href="/simple/jinja2/">jinja2</a><br/>
        <a href="/simple/requests/">requests</a><br/>
        </body>
        </html>
        "#;
        let result = SimpleIndexHtml::parse(text).unwrap();
        insta::assert_debug_snapshot!(result, @r#"
        SimpleIndexHtml {
            projects: [
                PackageName(
                    "flask",
                ),
                PackageName(
                    "jinja2",
                ),
                PackageName(
                    "requests",
                ),
            ],
        }
        "#);
    }

    /// Test that project names are sorted.
    #[test]
    fn parse_simple_index_sorted() {
        let text = r#"
        <!DOCTYPE html>
        <html>
        <body>
        <a href="/simple/zebra/">zebra</a><br/>
        <a href="/simple/apple/">apple</a><br/>
        <a href="/simple/monkey/">monkey</a><br/>
        </body>
        </html>
        "#;
        let result = SimpleIndexHtml::parse(text).unwrap();
        insta::assert_debug_snapshot!(result, @r#"
        SimpleIndexHtml {
            projects: [
                PackageName(
                    "apple",
                ),
                PackageName(
                    "monkey",
                ),
                PackageName(
                    "zebra",
                ),
            ],
        }
        "#);
    }

    /// Test that links without `href` attributes are ignored.
    #[test]
    fn parse_simple_index_missing_href() {
        let text = r#"
        <!DOCTYPE html>
        <html>
        <body>
        <h1>Simple Index</h1>
        <a href="/simple/flask/">flask</a><br/>
        <a>no-href-project</a><br/>
        <a href="/simple/requests/">requests</a><br/>
        </body>
        </html>
        "#;
        let result = SimpleIndexHtml::parse(text).unwrap();
        insta::assert_debug_snapshot!(result, @r#"
        SimpleIndexHtml {
            projects: [
                PackageName(
                    "flask",
                ),
                PackageName(
                    "requests",
                ),
            ],
        }
        "#);
    }

    /// Test that links with empty `href` attributes are ignored.
    #[test]
    fn parse_simple_index_empty_href() {
        let text = r#"
        <!DOCTYPE html>
        <html>
        <body>
        <a href="">empty-href</a><br/>
        <a href="/simple/flask/">flask</a><br/>
        </body>
        </html>
        "#;
        let result = SimpleIndexHtml::parse(text).unwrap();
        insta::assert_debug_snapshot!(result, @r#"
        SimpleIndexHtml {
            projects: [
                PackageName(
                    "flask",
                ),
            ],
        }
        "#);
    }

    /// Test that links with empty text content are ignored.
    #[test]
    fn parse_simple_index_empty_text() {
        let text = r#"
        <!DOCTYPE html>
        <html>
        <body>
        <a href="/simple/empty/"></a><br/>
        <a href="/simple/flask/">flask</a><br/>
        <a href="/simple/whitespace/"> </a><br/>
        </body>
        </html>
        "#;
        let result = SimpleIndexHtml::parse(text).unwrap();
        insta::assert_debug_snapshot!(result, @r#"
        SimpleIndexHtml {
            projects: [
                PackageName(
                    "flask",
                ),
            ],
        }
        "#);
    }

    /// Test parsing with case variations and normalization.
    #[test]
    fn parse_simple_index_case_variations() {
        let text = r#"
        <!DOCTYPE html>
        <html>
        <body>
        <a href="/simple/Flask/">Flask</a><br/>
        <a href="/simple/django/">django</a><br/>
        <a href="/simple/PyYAML/">PyYAML</a><br/>
        </body>
        </html>
        "#;
        let result = SimpleIndexHtml::parse(text).unwrap();
        // Note: We preserve the case as returned by the server
        insta::assert_debug_snapshot!(result, @r#"
        SimpleIndexHtml {
            projects: [
                PackageName(
                    "django",
                ),
                PackageName(
                    "flask",
                ),
                PackageName(
                    "pyyaml",
                ),
            ],
        }
        "#);
    }
}

@@ -1,15 +1,15 @@
pub use base_client::{
    AuthIntegration, BaseClient, BaseClientBuilder, DEFAULT_MAX_REDIRECTS, DEFAULT_RETRIES,
    ExtraMiddleware, RedirectClientWithMiddleware, RedirectPolicy, RequestBuilder,
    RetryParsingError, UvRetryableStrategy, is_transient_network_error,
    AuthIntegration, BaseClient, BaseClientBuilder, DEFAULT_RETRIES, ExtraMiddleware,
    RedirectClientWithMiddleware, RequestBuilder, RetryParsingError, UvRetryableStrategy,
    is_transient_network_error, retries_from_env,
};
pub use cached_client::{CacheControl, CachedClient, CachedClientError, DataWithCachePolicy};
pub use error::{Error, ErrorKind, WrappedReqwestError};
pub use flat_index::{FlatIndexClient, FlatIndexEntries, FlatIndexEntry, FlatIndexError};
pub use linehaul::LineHaul;
pub use registry_client::{
    Connectivity, MetadataFormat, RegistryClient, RegistryClientBuilder, SimpleDetailMetadata,
    SimpleDetailMetadatum, SimpleIndexMetadata, VersionFiles,
    Connectivity, MetadataFormat, RegistryClient, RegistryClientBuilder, SimpleMetadata,
    SimpleMetadatum, VersionFiles,
};
pub use rkyvutil::{Deserializer, OwnedArchive, Serializer, Validator};

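A brief, hypothetical usage sketch for the exported `is_transient_network_error` predicate; the retry loop and the stand-in predicate below are illustrative, not from the diff:

use std::error::Error;

// Stand-in predicate; in uv this role is played by `uv_client::is_transient_network_error`,
// which inspects nested reqwest/h2/io error chains.
fn is_transient(_err: &(dyn Error + 'static)) -> bool {
    false
}

/// Retry `op` up to `max_retries` times while failures look transient.
fn with_retries<T, E, F>(mut op: F, max_retries: u32) -> Result<T, E>
where
    E: Error + 'static,
    F: FnMut() -> Result<T, E>,
{
    let mut attempts = 0;
    loop {
        match op() {
            Ok(value) => return Ok(value),
            Err(err) if attempts < max_retries && is_transient(&err) => attempts += 1,
            Err(err) => return Err(err),
        }
    }
}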
@ -12,7 +12,6 @@ use uv_version::version;
|
|||
pub struct Installer {
|
||||
pub name: Option<String>,
|
||||
pub version: Option<String>,
|
||||
pub subcommand: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
|
||||
|
|
@ -64,11 +63,7 @@ pub struct LineHaul {
|
|||
impl LineHaul {
|
||||
/// Initializes Linehaul information based on PEP 508 markers.
|
||||
#[instrument(name = "linehaul", skip_all)]
|
||||
pub fn new(
|
||||
markers: Option<&MarkerEnvironment>,
|
||||
platform: Option<&Platform>,
|
||||
subcommand: Option<Vec<String>>,
|
||||
) -> Self {
|
||||
pub fn new(markers: &MarkerEnvironment, platform: Option<&Platform>) -> Self {
|
||||
// https://github.com/pypa/pip/blob/24.0/src/pip/_internal/network/session.py#L87
|
||||
let looks_like_ci = [
|
||||
EnvVars::BUILD_BUILDID,
|
||||
|
|
@ -128,19 +123,18 @@ impl LineHaul {
|
|||
installer: Option::from(Installer {
|
||||
name: Some("uv".to_string()),
|
||||
version: Some(version().to_string()),
|
||||
subcommand,
|
||||
}),
|
||||
python: markers.map(|markers| markers.python_full_version().version.to_string()),
|
||||
python: Some(markers.python_full_version().version.to_string()),
|
||||
implementation: Option::from(Implementation {
|
||||
name: markers.map(|markers| markers.platform_python_implementation().to_string()),
|
||||
version: markers.map(|markers| markers.python_full_version().version.to_string()),
|
||||
name: Some(markers.platform_python_implementation().to_string()),
|
||||
version: Some(markers.python_full_version().version.to_string()),
|
||||
}),
|
||||
distro,
|
||||
system: Option::from(System {
|
||||
name: markers.map(|markers| markers.platform_system().to_string()),
|
||||
release: markers.map(|markers| markers.platform_release().to_string()),
|
||||
name: Some(markers.platform_system().to_string()),
|
||||
release: Some(markers.platform_release().to_string()),
|
||||
}),
|
||||
cpu: markers.map(|markers| markers.platform_machine().to_string()),
|
||||
cpu: Some(markers.platform_machine().to_string()),
|
||||
// Should probably always be None in uv.
|
||||
openssl_version: None,
|
||||
// Should probably always be None in uv.
|
||||
|
|
|
|||
|
|
@ -43,7 +43,7 @@ impl Middleware for OfflineMiddleware {
|
|||
) -> reqwest_middleware::Result<Response> {
|
||||
Err(reqwest_middleware::Error::Middleware(
|
||||
OfflineError {
|
||||
url: DisplaySafeUrl::from_url(req.url().clone()),
|
||||
url: DisplaySafeUrl::from(req.url().clone()),
|
||||
}
|
||||
.into(),
|
||||
))
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ use tokio::sync::{Mutex, Semaphore};
|
|||
use tracing::{Instrument, debug, info_span, instrument, trace, warn};
|
||||
use url::Url;
|
||||
|
||||
use uv_auth::{CredentialsCache, Indexes, PyxTokenStore};
|
||||
use uv_auth::{Indexes, PyxTokenStore};
|
||||
use uv_cache::{Cache, CacheBucket, CacheEntry, WheelCache};
|
||||
use uv_configuration::IndexStrategy;
|
||||
use uv_configuration::KeyringProviderType;
|
||||
|
|
@ -29,9 +29,7 @@ use uv_normalize::PackageName;
|
|||
use uv_pep440::Version;
|
||||
use uv_pep508::MarkerEnvironment;
|
||||
use uv_platform_tags::Platform;
|
||||
use uv_pypi_types::{
|
||||
PypiSimpleDetail, PypiSimpleIndex, PyxSimpleDetail, PyxSimpleIndex, ResolutionMetadata,
|
||||
};
|
||||
use uv_pypi_types::{PypiSimpleDetail, PyxSimpleDetail, ResolutionMetadata};
|
||||
use uv_redacted::DisplaySafeUrl;
|
||||
use uv_small_str::SmallString;
|
||||
use uv_torch::TorchStrategy;
|
||||
|
|
@ -39,7 +37,7 @@ use uv_torch::TorchStrategy;
|
|||
use crate::base_client::{BaseClientBuilder, ExtraMiddleware, RedirectPolicy};
|
||||
use crate::cached_client::CacheControl;
|
||||
use crate::flat_index::FlatIndexEntry;
|
||||
use crate::html::SimpleDetailHTML;
|
||||
use crate::html::SimpleHtml;
|
||||
use crate::remote_metadata::wheel_metadata_from_remote_zip;
|
||||
use crate::rkyvutil::OwnedArchive;
|
||||
use crate::{
|
||||
|
|
@ -148,30 +146,8 @@ impl<'a> RegistryClientBuilder<'a> {
|
|||
self
|
||||
}
|
||||
|
||||
/// Add all authenticated sources to the cache.
|
||||
pub fn cache_index_credentials(&mut self) {
|
||||
for index in self.index_locations.known_indexes() {
|
||||
if let Some(credentials) = index.credentials() {
|
||||
trace!(
|
||||
"Read credentials for index {}",
|
||||
index
|
||||
.name
|
||||
.as_ref()
|
||||
.map(ToString::to_string)
|
||||
.unwrap_or_else(|| index.url.to_string())
|
||||
);
|
||||
if let Some(root_url) = index.root_url() {
|
||||
self.base_client_builder
|
||||
.store_credentials(&root_url, credentials.clone());
|
||||
}
|
||||
self.base_client_builder
|
||||
.store_credentials(index.raw_url(), credentials);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build(mut self) -> RegistryClient {
|
||||
self.cache_index_credentials();
|
||||
pub fn build(self) -> RegistryClient {
|
||||
self.index_locations.cache_index_credentials();
|
||||
let index_urls = self.index_locations.index_urls();
|
||||
|
||||
// Build a base client
|
||||
|
|
@ -202,8 +178,8 @@ impl<'a> RegistryClientBuilder<'a> {
|
|||
}
|
||||
|
||||
/// Share the underlying client between two different middleware configurations.
|
||||
pub fn wrap_existing(mut self, existing: &BaseClient) -> RegistryClient {
|
||||
self.cache_index_credentials();
|
||||
pub fn wrap_existing(self, existing: &BaseClient) -> RegistryClient {
|
||||
self.index_locations.cache_index_credentials();
|
||||
let index_urls = self.index_locations.index_urls();
|
||||
|
||||
// Wrap in any relevant middleware and handle connectivity.
|
||||
|
|
@@ -260,7 +236,7 @@ pub struct RegistryClient {
 #[derive(Debug)]
 pub enum MetadataFormat {
     /// The metadata adheres to the Simple Repository API format.
-    Simple(OwnedArchive<SimpleDetailMetadata>),
+    Simple(OwnedArchive<SimpleMetadata>),
     /// The metadata consists of a list of distributions from a "flat" index.
     Flat(Vec<FlatIndexEntry>),
 }
@@ -291,10 +267,6 @@ impl RegistryClient {
         self.timeout
     }

-    pub fn credentials_cache(&self) -> &CredentialsCache {
-        self.client.uncached().credentials_cache()
-    }
-
     /// Return the appropriate index URLs for the given [`PackageName`].
     fn index_urls_for(
         &self,
@@ -332,7 +304,7 @@ impl RegistryClient {
     /// and [PEP 691 – JSON-based Simple API for Python Package Indexes](https://peps.python.org/pep-0691/),
     /// which the PyPI JSON API implements.
     #[instrument(skip_all, fields(package = %package_name))]
-    pub async fn simple_detail<'index>(
+    pub async fn package_metadata<'index>(
         &'index self,
         package_name: &PackageName,
         index: Option<IndexMetadataRef<'index>>,
@@ -363,7 +335,7 @@ impl RegistryClient {
                 let status_code_strategy =
                     self.index_urls.status_code_strategy_for(index.url);
                 match self
-                    .simple_detail_single_index(
+                    .simple_single_index(
                         package_name,
                         index.url,
                         capabilities,
@@ -409,7 +381,7 @@ impl RegistryClient {
                 let status_code_strategy =
                     IndexStatusCodeStrategy::ignore_authentication_error_codes();
                 let metadata = match self
-                    .simple_detail_single_index(
+                    .simple_single_index(
                         package_name,
                         index.url,
                         capabilities,
@@ -443,7 +415,7 @@ impl RegistryClient {
         if results.is_empty() {
             return match self.connectivity {
                 Connectivity::Online => {
-                    Err(ErrorKind::RemotePackageNotFound(package_name.clone()).into())
+                    Err(ErrorKind::PackageNotFound(package_name.to_string()).into())
                 }
                 Connectivity::Offline => Err(ErrorKind::Offline(package_name.to_string()).into()),
             };
@@ -492,11 +464,11 @@ impl RegistryClient {
         Ok(package_entries)
     }

-    /// Fetch the [`SimpleDetailMetadata`] from a single index for a given package.
+    /// Fetch the [`SimpleMetadata`] from a single index for a given package.
     ///
     /// The index can either be a PEP 503-compatible remote repository, or a local directory laid
     /// out in the same format.
-    async fn simple_detail_single_index(
+    async fn simple_single_index(
         &self,
         package_name: &PackageName,
         index: &IndexUrl,
@@ -539,13 +511,13 @@ impl RegistryClient {
         #[cfg(windows)]
         let _lock = {
             let lock_entry = cache_entry.with_file(format!("{package_name}.lock"));
-            lock_entry.lock().await.map_err(ErrorKind::CacheLock)?
+            lock_entry.lock().await.map_err(ErrorKind::CacheWrite)?
         };

         let result = if matches!(index, IndexUrl::Path(_)) {
-            self.fetch_local_simple_detail(package_name, &url).await
+            self.fetch_local_index(package_name, &url).await
         } else {
-            self.fetch_remote_simple_detail(package_name, &url, index, &cache_entry, cache_control)
+            self.fetch_remote_index(package_name, &url, index, &cache_entry, cache_control)
                 .await
         };

@@ -574,22 +546,22 @@ impl RegistryClient {
                 ErrorKind::Offline(_) => Ok(SimpleMetadataSearchOutcome::NotFound),

                 // The package could not be found in the local index.
-                ErrorKind::LocalPackageNotFound(_) => Ok(SimpleMetadataSearchOutcome::NotFound),
+                ErrorKind::FileNotFound(_) => Ok(SimpleMetadataSearchOutcome::NotFound),

                 _ => Err(err),
             },
         }
     }

-    /// Fetch the [`SimpleDetailMetadata`] from a remote URL, using the PEP 503 Simple Repository API.
-    async fn fetch_remote_simple_detail(
+    /// Fetch the [`SimpleMetadata`] from a remote URL, using the PEP 503 Simple Repository API.
+    async fn fetch_remote_index(
         &self,
         package_name: &PackageName,
         url: &DisplaySafeUrl,
         index: &IndexUrl,
         cache_entry: &CacheEntry,
         cache_control: CacheControl<'_>,
-    ) -> Result<OwnedArchive<SimpleDetailMetadata>, Error> {
+    ) -> Result<OwnedArchive<SimpleMetadata>, Error> {
         // In theory, we should be able to pass `MediaType::all()` to all registries, and as
         // unsupported media types should be ignored by the server. For now, we implement this
         // defensively to avoid issues with misconfigured servers.
@@ -613,7 +585,7 @@ impl RegistryClient {
             async {
                 // Use the response URL, rather than the request URL, as the base for relative URLs.
                 // This ensures that we handle redirects and other URL transformations correctly.
-                let url = DisplaySafeUrl::from_url(response.url().clone());
+                let url = DisplaySafeUrl::from(response.url().clone());

                 let content_type = response
                     .headers()
@@ -639,7 +611,7 @@ impl RegistryClient {
                         let data: PyxSimpleDetail = rmp_serde::from_slice(bytes.as_ref())
                             .map_err(|err| Error::from_msgpack_err(err, url.clone()))?;

-                        SimpleDetailMetadata::from_pyx_files(
+                        SimpleMetadata::from_pyx_files(
                             data.files,
                             data.core_metadata,
                             package_name,
@@ -654,7 +626,7 @@ impl RegistryClient {
                         let data: PyxSimpleDetail = serde_json::from_slice(bytes.as_ref())
                             .map_err(|err| Error::from_json_err(err, url.clone()))?;

-                        SimpleDetailMetadata::from_pyx_files(
+                        SimpleMetadata::from_pyx_files(
                             data.files,
                             data.core_metadata,
                             package_name,
@@ -670,14 +642,14 @@ impl RegistryClient {
                         let data: PypiSimpleDetail = serde_json::from_slice(bytes.as_ref())
                             .map_err(|err| Error::from_json_err(err, url.clone()))?;

-                        SimpleDetailMetadata::from_pypi_files(data.files, package_name, &url)
+                        SimpleMetadata::from_pypi_files(data.files, package_name, &url)
                     }
                     MediaType::PypiV1Html | MediaType::TextHtml => {
                         let text = response
                             .text()
                             .await
                             .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-                        SimpleDetailMetadata::from_html(&text, package_name, &url)?
+                        SimpleMetadata::from_html(&text, package_name, &url)?
                     }
                 };
                 OwnedArchive::from_unarchived(&unarchived)
@@ -697,13 +669,13 @@ impl RegistryClient {
         Ok(simple)
     }

-    /// Fetch the [`SimpleDetailMetadata`] from a local file, using a PEP 503-compatible directory
+    /// Fetch the [`SimpleMetadata`] from a local file, using a PEP 503-compatible directory
     /// structure.
-    async fn fetch_local_simple_detail(
+    async fn fetch_local_index(
         &self,
         package_name: &PackageName,
         url: &DisplaySafeUrl,
-    ) -> Result<OwnedArchive<SimpleDetailMetadata>, Error> {
+    ) -> Result<OwnedArchive<SimpleMetadata>, Error> {
         let path = url
             .to_file_path()
             .map_err(|()| ErrorKind::NonFileUrl(url.clone()))?
@@ -711,185 +683,15 @@ impl RegistryClient {
         let text = match fs_err::tokio::read_to_string(&path).await {
             Ok(text) => text,
             Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
-                return Err(Error::from(ErrorKind::LocalPackageNotFound(
-                    package_name.clone(),
+                return Err(Error::from(ErrorKind::FileNotFound(
+                    package_name.to_string(),
                 )));
             }
             Err(err) => {
                 return Err(Error::from(ErrorKind::Io(err)));
             }
         };
-        let metadata = SimpleDetailMetadata::from_html(&text, package_name, url)?;
-        OwnedArchive::from_unarchived(&metadata)
-    }
-
-    /// Fetch the list of projects from a Simple API index at a remote URL.
-    ///
-    /// This fetches the root of a Simple API index (e.g., `https://pypi.org/simple/`)
-    /// which returns a list of all available projects.
-    pub async fn fetch_simple_index(
-        &self,
-        index_url: &IndexUrl,
-    ) -> Result<SimpleIndexMetadata, Error> {
-        // Format the URL for PyPI.
-        let mut url = index_url.url().clone();
-        url.path_segments_mut()
-            .map_err(|()| ErrorKind::CannotBeABase(index_url.url().clone()))?
-            .pop_if_empty()
-            // The URL *must* end in a trailing slash for proper relative path behavior
-            // ref https://github.com/servo/rust-url/issues/333
-            .push("");
-
-        if url.scheme() == "file" {
-            let archived = self.fetch_local_simple_index(&url).await?;
-            Ok(OwnedArchive::deserialize(&archived))
-        } else {
-            let archived = self.fetch_remote_simple_index(&url, index_url).await?;
-            Ok(OwnedArchive::deserialize(&archived))
-        }
-    }
-
-    /// Fetch the list of projects from a remote Simple API index.
-    async fn fetch_remote_simple_index(
-        &self,
-        url: &DisplaySafeUrl,
-        index: &IndexUrl,
-    ) -> Result<OwnedArchive<SimpleIndexMetadata>, Error> {
-        // In theory, we should be able to pass `MediaType::all()` to all registries, and as
-        // unsupported media types should be ignored by the server. For now, we implement this
-        // defensively to avoid issues with misconfigured servers.
-        let accept = if self
-            .pyx_token_store
-            .as_ref()
-            .is_some_and(|token_store| token_store.is_known_url(index.url()))
-        {
-            MediaType::all()
-        } else {
-            MediaType::pypi()
-        };
-
-        let cache_entry = self.cache.entry(
-            CacheBucket::Simple,
-            WheelCache::Index(index).root(),
-            "index.html.rkyv",
-        );
-        let cache_control = match self.connectivity {
-            Connectivity::Online => {
-                if let Some(header) = self.index_urls.simple_api_cache_control_for(index) {
-                    CacheControl::Override(header)
-                } else {
-                    CacheControl::from(
-                        self.cache
-                            .freshness(&cache_entry, None, None)
-                            .map_err(ErrorKind::Io)?,
-                    )
-                }
-            }
-            Connectivity::Offline => CacheControl::AllowStale,
-        };
-
-        let parse_simple_response = |response: Response| {
-            async {
-                // Use the response URL, rather than the request URL, as the base for relative URLs.
-                // This ensures that we handle redirects and other URL transformations correctly.
-                let url = DisplaySafeUrl::from_url(response.url().clone());
-
-                let content_type = response
-                    .headers()
-                    .get("content-type")
-                    .ok_or_else(|| Error::from(ErrorKind::MissingContentType(url.clone())))?;
-                let content_type = content_type.to_str().map_err(|err| {
-                    Error::from(ErrorKind::InvalidContentTypeHeader(url.clone(), err))
-                })?;
-                let media_type = content_type.split(';').next().unwrap_or(content_type);
-                let media_type = MediaType::from_str(media_type).ok_or_else(|| {
-                    Error::from(ErrorKind::UnsupportedMediaType(
-                        url.clone(),
-                        media_type.to_string(),
-                    ))
-                })?;
-
-                let metadata = match media_type {
-                    MediaType::PyxV1Msgpack => {
-                        let bytes = response
-                            .bytes()
-                            .await
-                            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-                        let data: PyxSimpleIndex = rmp_serde::from_slice(bytes.as_ref())
-                            .map_err(|err| Error::from_msgpack_err(err, url.clone()))?;
-                        SimpleIndexMetadata::from_pyx_index(data)
-                    }
-                    MediaType::PyxV1Json => {
-                        let bytes = response
-                            .bytes()
-                            .await
-                            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-                        let data: PyxSimpleIndex = serde_json::from_slice(bytes.as_ref())
-                            .map_err(|err| Error::from_json_err(err, url.clone()))?;
-                        SimpleIndexMetadata::from_pyx_index(data)
-                    }
-                    MediaType::PypiV1Json => {
-                        let bytes = response
-                            .bytes()
-                            .await
-                            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-                        let data: PypiSimpleIndex = serde_json::from_slice(bytes.as_ref())
-                            .map_err(|err| Error::from_json_err(err, url.clone()))?;
-                        SimpleIndexMetadata::from_pypi_index(data)
-                    }
-                    MediaType::PypiV1Html | MediaType::TextHtml => {
-                        let text = response
-                            .text()
-                            .await
-                            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-                        SimpleIndexMetadata::from_html(&text, &url)?
-                    }
-                };
-
-                OwnedArchive::from_unarchived(&metadata)
-            }
-        };
-
-        let simple_request = self
-            .uncached_client(url)
-            .get(Url::from(url.clone()))
-            .header("Accept-Encoding", "gzip, deflate, zstd")
-            .header("Accept", accept)
-            .build()
-            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-
-        let index = self
-            .cached_client()
-            .get_cacheable_with_retry(
-                simple_request,
-                &cache_entry,
-                cache_control,
-                parse_simple_response,
-            )
-            .await?;
-
-        Ok(index)
-    }
-
-    /// Fetch the list of projects from a local Simple API index.
-    async fn fetch_local_simple_index(
-        &self,
-        url: &DisplaySafeUrl,
-    ) -> Result<OwnedArchive<SimpleIndexMetadata>, Error> {
-        let path = url
-            .to_file_path()
-            .map_err(|()| ErrorKind::NonFileUrl(url.clone()))?
-            .join("index.html");
-        let text = match fs_err::tokio::read_to_string(&path).await {
-            Ok(text) => text,
-            Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
-                return Err(Error::from(ErrorKind::LocalIndexNotFound(path)));
-            }
-            Err(err) => {
-                return Err(Error::from(ErrorKind::Io(err)));
-            }
-        };
-        let metadata = SimpleIndexMetadata::from_html(&text, url)?;
+        let metadata = SimpleMetadata::from_html(&text, package_name, url)?;
         OwnedArchive::from_unarchived(&metadata)
     }

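The `fetch_remote_simple_index` removed above dispatches on the response's Content-Type header before deciding how to parse the body. A minimal, self-contained sketch of that negotiation, including the parameter-stripping step matching the `split(';')` seen in the hunk (the enum and its variants are illustrative, not uv's own; the media-type strings are the standard PEP 691 ones):

#[derive(Debug, PartialEq)]
enum MediaType {
    PypiV1Json,
    PypiV1Html,
    TextHtml,
}

impl MediaType {
    // Map a Content-Type value to a known media type, ignoring parameters
    // such as "; charset=utf-8", as the deleted code does with split(';').
    fn from_str(value: &str) -> Option<Self> {
        let media_type = value.split(';').next().unwrap_or(value).trim();
        match media_type {
            "application/vnd.pypi.simple.v1+json" => Some(Self::PypiV1Json),
            "application/vnd.pypi.simple.v1+html" => Some(Self::PypiV1Html),
            "text/html" => Some(Self::TextHtml),
            _ => None,
        }
    }
}

fn main() {
    assert_eq!(
        MediaType::from_str("application/vnd.pypi.simple.v1+json; charset=utf-8"),
        Some(MediaType::PypiV1Json)
    );
    // Unknown media types fall through to None, which the deleted code turned
    // into an UnsupportedMediaType error.
    assert_eq!(MediaType::from_str("application/octet-stream"), None);
}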
@@ -1031,7 +833,7 @@ impl RegistryClient {
         #[cfg(windows)]
         let _lock = {
             let lock_entry = cache_entry.with_file(format!("{}.lock", filename.stem()));
-            lock_entry.lock().await.map_err(ErrorKind::CacheLock)?
+            lock_entry.lock().await.map_err(ErrorKind::CacheWrite)?
         };

         let response_callback = async |response: Response| {
@@ -1115,7 +917,7 @@ impl RegistryClient {
         #[cfg(windows)]
         let _lock = {
             let lock_entry = cache_entry.with_file(format!("{}.lock", filename.stem()));
-            lock_entry.lock().await.map_err(ErrorKind::CacheLock)?
+            lock_entry.lock().await.map_err(ErrorKind::CacheWrite)?
         };

         // Attempt to fetch via a range request.
@@ -1246,7 +1048,7 @@ impl RegistryClient {
 #[derive(Debug)]
 pub(crate) enum SimpleMetadataSearchOutcome {
     /// Simple metadata was found
-    Found(OwnedArchive<SimpleDetailMetadata>),
+    Found(OwnedArchive<SimpleMetadata>),
     /// Simple metadata was not found
     NotFound,
     /// A status code failure was encountered when searching for
@@ -1327,62 +1129,20 @@ pub struct VersionSourceDist {
     pub file: File,
 }

-/// The list of projects available in a Simple API index.
-#[derive(Default, Debug, rkyv::Archive, rkyv::Deserialize, rkyv::Serialize)]
-#[rkyv(derive(Debug))]
-pub struct SimpleIndexMetadata {
-    /// The list of project names available in the index.
-    projects: Vec<PackageName>,
-}
-
-impl SimpleIndexMetadata {
-    /// Iterate over the projects in the index.
-    pub fn iter(&self) -> impl Iterator<Item = &PackageName> {
-        self.projects.iter()
-    }
-
-    /// Create a [`SimpleIndexMetadata`] from a [`PypiSimpleIndex`].
-    fn from_pypi_index(index: PypiSimpleIndex) -> Self {
-        Self {
-            projects: index.projects.into_iter().map(|entry| entry.name).collect(),
-        }
-    }
-
-    /// Create a [`SimpleIndexMetadata`] from a [`PyxSimpleIndex`].
-    fn from_pyx_index(index: PyxSimpleIndex) -> Self {
-        Self {
-            projects: index.projects.into_iter().map(|entry| entry.name).collect(),
-        }
-    }
-
-    /// Create a [`SimpleIndexMetadata`] from HTML content.
-    fn from_html(text: &str, url: &DisplaySafeUrl) -> Result<Self, Error> {
-        let html = crate::html::SimpleIndexHtml::parse(text).map_err(|err| {
-            Error::from(ErrorKind::BadHtml {
-                source: err,
-                url: url.clone(),
-            })
-        })?;
-        Ok(Self {
-            projects: html.projects,
-        })
-    }
-}
-
 #[derive(Default, Debug, rkyv::Archive, rkyv::Deserialize, rkyv::Serialize)]
 #[rkyv(derive(Debug))]
-pub struct SimpleDetailMetadata(Vec<SimpleDetailMetadatum>);
+pub struct SimpleMetadata(Vec<SimpleMetadatum>);

 #[derive(Debug, rkyv::Archive, rkyv::Deserialize, rkyv::Serialize)]
 #[rkyv(derive(Debug))]
-pub struct SimpleDetailMetadatum {
+pub struct SimpleMetadatum {
     pub version: Version,
     pub files: VersionFiles,
     pub metadata: Option<ResolutionMetadata>,
 }

-impl SimpleDetailMetadata {
-    pub fn iter(&self) -> impl DoubleEndedIterator<Item = &SimpleDetailMetadatum> {
+impl SimpleMetadata {
+    pub fn iter(&self) -> impl DoubleEndedIterator<Item = &SimpleMetadatum> {
         self.0.iter()
     }

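The removed `SimpleIndexMetadata` above is a thin project-name list built from the index root, e.g. from a PEP 691 JSON payload. A hedged, self-contained sketch of that parse, with serde stand-ins for the `PypiSimpleIndex`/`ProjectEntry` shapes (assumed Cargo deps: serde with the derive feature, and serde_json):

use serde::Deserialize;

#[derive(Deserialize)]
struct ProjectEntry {
    name: String,
}

#[derive(Deserialize)]
struct SimpleIndex {
    projects: Vec<ProjectEntry>,
}

fn main() -> Result<(), serde_json::Error> {
    // Shape of a PEP 691 index root, as consumed by the removed from_pypi_index;
    // unknown fields such as "meta" are ignored by serde's defaults.
    let body = r#"{"meta": {"api-version": "1.0"}, "projects": [{"name": "flask"}, {"name": "tqdm"}]}"#;
    let index: SimpleIndex = serde_json::from_str(body)?;
    let names: Vec<String> = index.projects.into_iter().map(|entry| entry.name).collect();
    assert_eq!(names, ["flask", "tqdm"]);
    Ok(())
}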
@@ -1426,7 +1186,7 @@ impl SimpleDetailMetadata {
         Self(
             version_map
                 .into_iter()
-                .map(|(version, files)| SimpleDetailMetadatum {
+                .map(|(version, files)| SimpleMetadatum {
                     version,
                     files,
                     metadata: None,
@@ -1488,7 +1248,7 @@ impl SimpleDetailMetadata {
                         provides_extra: metadata.provides_extra,
                         dynamic: false,
                     });
-                    SimpleDetailMetadatum {
+                    SimpleMetadatum {
                         version,
                         files,
                         metadata,
@@ -1498,34 +1258,34 @@ impl SimpleDetailMetadata {
         )
     }

-    /// Read the [`SimpleDetailMetadata`] from an HTML index.
+    /// Read the [`SimpleMetadata`] from an HTML index.
     fn from_html(
         text: &str,
         package_name: &PackageName,
         url: &DisplaySafeUrl,
     ) -> Result<Self, Error> {
-        let SimpleDetailHTML { base, files } = SimpleDetailHTML::parse(text, url)
-            .map_err(|err| Error::from_html_err(err, url.clone()))?;
+        let SimpleHtml { base, files } =
+            SimpleHtml::parse(text, url).map_err(|err| Error::from_html_err(err, url.clone()))?;

         Ok(Self::from_pypi_files(files, package_name, base.as_url()))
     }
 }

-impl IntoIterator for SimpleDetailMetadata {
-    type Item = SimpleDetailMetadatum;
-    type IntoIter = std::vec::IntoIter<SimpleDetailMetadatum>;
+impl IntoIterator for SimpleMetadata {
+    type Item = SimpleMetadatum;
+    type IntoIter = std::vec::IntoIter<SimpleMetadatum>;

     fn into_iter(self) -> Self::IntoIter {
         self.0.into_iter()
     }
 }

-impl ArchivedSimpleDetailMetadata {
-    pub fn iter(&self) -> impl DoubleEndedIterator<Item = &rkyv::Archived<SimpleDetailMetadatum>> {
+impl ArchivedSimpleMetadata {
+    pub fn iter(&self) -> impl DoubleEndedIterator<Item = &rkyv::Archived<SimpleMetadatum>> {
         self.0.iter()
     }

-    pub fn datum(&self, i: usize) -> Option<&rkyv::Archived<SimpleDetailMetadatum>> {
+    pub fn datum(&self, i: usize) -> Option<&rkyv::Archived<SimpleMetadatum>> {
         self.0.get(i)
     }
 }
@@ -1608,9 +1368,7 @@ mod tests {
     use uv_pypi_types::PypiSimpleDetail;
    use uv_redacted::DisplaySafeUrl;

-    use crate::{
-        BaseClientBuilder, SimpleDetailMetadata, SimpleDetailMetadatum, html::SimpleDetailHTML,
-    };
+    use crate::{BaseClientBuilder, SimpleMetadata, SimpleMetadatum, html::SimpleHtml};

     use crate::RegistryClientBuilder;
     use uv_cache::Cache;
@@ -1828,14 +1586,14 @@ mod tests {
         "#;
         let data: PypiSimpleDetail = serde_json::from_str(response).unwrap();
         let base = DisplaySafeUrl::parse("https://pypi.org/simple/pyflyby/").unwrap();
-        let simple_metadata = SimpleDetailMetadata::from_pypi_files(
+        let simple_metadata = SimpleMetadata::from_pypi_files(
             data.files,
             &PackageName::from_str("pyflyby").unwrap(),
             &base,
         );
         let versions: Vec<String> = simple_metadata
             .iter()
-            .map(|SimpleDetailMetadatum { version, .. }| version.to_string())
+            .map(|SimpleMetadatum { version, .. }| version.to_string())
             .collect();
         assert_eq!(versions, ["1.7.8".to_string()]);
     }
@@ -1866,7 +1624,7 @@ mod tests {
         // Note the lack of a trailing `/` here is important for coverage of url-join behavior
         let base = DisplaySafeUrl::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask")
             .unwrap();
-        let SimpleDetailHTML { base, files } = SimpleDetailHTML::parse(text, &base).unwrap();
+        let SimpleHtml { base, files } = SimpleHtml::parse(text, &base).unwrap();
         let base = SmallString::from(base.as_str());

         // Test parsing of the file urls
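The `pyflyby` test above exercises `from_pypi_files`, which folds the file list into a per-version map (visible as the `version_map` context lines in the earlier hunks). A self-contained sketch of that grouping step, with plain strings standing in for uv's `Version` and file types (the filenames here are illustrative):

use std::collections::BTreeMap;

// Group distribution files by version, mirroring the version_map fold that
// from_pypi_files performs; BTreeMap keeps versions in sorted order, which is
// what the DoubleEndedIterator over metadata relies on.
fn group_by_version(files: Vec<(String, String)>) -> BTreeMap<String, Vec<String>> {
    let mut version_map: BTreeMap<String, Vec<String>> = BTreeMap::new();
    for (version, filename) in files {
        version_map.entry(version).or_default().push(filename);
    }
    version_map
}

fn main() {
    let files = vec![
        ("1.7.8".to_string(), "pyflyby-1.7.8.tar.gz".to_string()),
        ("1.7.8".to_string(), "pyflyby-1.7.8-py3-none-any.whl".to_string()),
    ];
    let by_version = group_by_version(files);
    let versions: Vec<&String> = by_version.keys().collect();
    assert_eq!(versions, [&"1.7.8".to_string()]);
}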
@@ -1,382 +0,0 @@
-use std::net::SocketAddr;
-use std::path::PathBuf;
-use std::sync::Arc;
-
-use anyhow::{Context, Result};
-use futures::future;
-use http_body_util::combinators::BoxBody;
-use http_body_util::{BodyExt, Full};
-use hyper::body::{Bytes, Incoming};
-use hyper::header::USER_AGENT;
-use hyper::service::service_fn;
-use hyper::{Request, Response};
-use hyper_util::rt::{TokioExecutor, TokioIo};
-use hyper_util::server::conn::auto::Builder;
-use rcgen::{
-    BasicConstraints, Certificate, CertificateParams, DnType, ExtendedKeyUsagePurpose, IsCa,
-    Issuer, KeyPair, KeyUsagePurpose, SanType, date_time_ymd,
-};
-use rustls::pki_types::{CertificateDer, PrivateKeyDer};
-use rustls::server::WebPkiClientVerifier;
-use rustls::{RootCertStore, ServerConfig};
-use tokio::net::TcpListener;
-use tokio::task::JoinHandle;
-use tokio_rustls::TlsAcceptor;
-
-use uv_fs::Simplified;
-
-/// An issued certificate, together with the subject keypair.
-#[derive(Debug)]
-pub(crate) struct SelfSigned {
-    /// An issued certificate.
-    pub public: Certificate,
-    /// The certificate's subject signing key.
-    pub private: KeyPair,
-}
-
-/// Defines the base location for temporary generated certs.
-///
-/// See [`TestContext::test_bucket_dir`] for implementation rationale.
-pub(crate) fn test_cert_dir() -> PathBuf {
-    std::env::temp_dir()
-        .simple_canonicalize()
-        .expect("failed to canonicalize temp dir")
-        .join("uv")
-        .join("tests")
-        .join("certs")
-}
-
-/// Generates a self-signed server certificate for `uv-test-server`, `localhost` and `127.0.0.1`.
-/// This certificate is standalone and not issued by a self-signed Root CA.
-///
-/// Use sparingly as generation of certs is a slow operation.
-pub(crate) fn generate_self_signed_certs() -> Result<SelfSigned> {
-    let mut params = CertificateParams::default();
-    params.is_ca = IsCa::NoCa;
-    params.not_before = date_time_ymd(1975, 1, 1);
-    params.not_after = date_time_ymd(4096, 1, 1);
-    params.key_usages.push(KeyUsagePurpose::DigitalSignature);
-    params.key_usages.push(KeyUsagePurpose::KeyEncipherment);
-    params
-        .extended_key_usages
-        .push(ExtendedKeyUsagePurpose::ServerAuth);
-    params
-        .distinguished_name
-        .push(DnType::OrganizationName, "Astral Software Inc.");
-    params
-        .distinguished_name
-        .push(DnType::CommonName, "uv-test-server");
-    params
-        .subject_alt_names
-        .push(SanType::DnsName("uv-test-server".try_into()?));
-    params
-        .subject_alt_names
-        .push(SanType::DnsName("localhost".try_into()?));
-    params
-        .subject_alt_names
-        .push(SanType::IpAddress("127.0.0.1".parse()?));
-    let private = KeyPair::generate()?;
-    let public = params.self_signed(&private)?;
-
-    Ok(SelfSigned { public, private })
-}
-
-/// Generates a self-signed root CA, server certificate, and client certificate.
-/// There are no intermediate certs generated as part of this function.
-/// The server certificate is for `uv-test-server`, `localhost` and `127.0.0.1` issued by this CA.
-/// The client certificate is for `uv-test-client` issued by this CA.
-///
-/// Use sparingly as generation of these certs is a very slow operation.
-pub(crate) fn generate_self_signed_certs_with_ca() -> Result<(SelfSigned, SelfSigned, SelfSigned)> {
-    // Generate the CA
-    let mut ca_params = CertificateParams::default();
-    ca_params.is_ca = IsCa::Ca(BasicConstraints::Unconstrained); // root cert
-    ca_params.not_before = date_time_ymd(1975, 1, 1);
-    ca_params.not_after = date_time_ymd(4096, 1, 1);
-    ca_params.key_usages.push(KeyUsagePurpose::DigitalSignature);
-    ca_params.key_usages.push(KeyUsagePurpose::KeyCertSign);
-    ca_params.key_usages.push(KeyUsagePurpose::CrlSign);
-    ca_params
-        .distinguished_name
-        .push(DnType::OrganizationName, "Astral Software Inc.");
-    ca_params
-        .distinguished_name
-        .push(DnType::CommonName, "uv-test-ca");
-    ca_params
-        .subject_alt_names
-        .push(SanType::DnsName("uv-test-ca".try_into()?));
-    let ca_private_key = KeyPair::generate()?;
-    let ca_public_cert = ca_params.self_signed(&ca_private_key)?;
-    let ca_cert_issuer = Issuer::new(ca_params, &ca_private_key);
-
-    // Generate server cert issued by this CA
-    let mut server_params = CertificateParams::default();
-    server_params.is_ca = IsCa::NoCa;
-    server_params.not_before = date_time_ymd(1975, 1, 1);
-    server_params.not_after = date_time_ymd(4096, 1, 1);
-    server_params.use_authority_key_identifier_extension = true;
-    server_params
-        .key_usages
-        .push(KeyUsagePurpose::DigitalSignature);
-    server_params
-        .key_usages
-        .push(KeyUsagePurpose::KeyEncipherment);
-    server_params
-        .extended_key_usages
-        .push(ExtendedKeyUsagePurpose::ServerAuth);
-    server_params
-        .distinguished_name
-        .push(DnType::OrganizationName, "Astral Software Inc.");
-    server_params
-        .distinguished_name
-        .push(DnType::CommonName, "uv-test-server");
-    server_params
-        .subject_alt_names
-        .push(SanType::DnsName("uv-test-server".try_into()?));
-    server_params
-        .subject_alt_names
-        .push(SanType::DnsName("localhost".try_into()?));
-    server_params
-        .subject_alt_names
-        .push(SanType::IpAddress("127.0.0.1".parse()?));
-    let server_private_key = KeyPair::generate()?;
-    let server_public_cert = server_params.signed_by(&server_private_key, &ca_cert_issuer)?;
-
-    // Generate client cert issued by this CA
-    let mut client_params = CertificateParams::default();
-    client_params.is_ca = IsCa::NoCa;
-    client_params.not_before = date_time_ymd(1975, 1, 1);
-    client_params.not_after = date_time_ymd(4096, 1, 1);
-    client_params.use_authority_key_identifier_extension = true;
-    client_params
-        .key_usages
-        .push(KeyUsagePurpose::DigitalSignature);
-    client_params
-        .extended_key_usages
-        .push(ExtendedKeyUsagePurpose::ClientAuth);
-    client_params
-        .distinguished_name
-        .push(DnType::OrganizationName, "Astral Software Inc.");
-    client_params
-        .distinguished_name
-        .push(DnType::CommonName, "uv-test-client");
-    client_params
-        .subject_alt_names
-        .push(SanType::DnsName("uv-test-client".try_into()?));
-    let client_private_key = KeyPair::generate()?;
-    let client_public_cert = client_params.signed_by(&client_private_key, &ca_cert_issuer)?;
-
-    let ca_self_signed = SelfSigned {
-        public: ca_public_cert,
-        private: ca_private_key,
-    };
-    let server_self_signed = SelfSigned {
-        public: server_public_cert,
-        private: server_private_key,
-    };
-    let client_self_signed = SelfSigned {
-        public: client_public_cert,
-        private: client_private_key,
-    };
-
-    Ok((ca_self_signed, server_self_signed, client_self_signed))
-}
-
-// Plain is fine for now; Arc/Box could be used later if we need to support move.
-type ServerSvcFn =
-    fn(
-        Request<Incoming>,
-    ) -> future::Ready<Result<Response<BoxBody<Bytes, hyper::Error>>, hyper::Error>>;
-
-#[derive(Default)]
-pub(crate) struct TestServerBuilder<'a> {
-    // Custom server response function
-    svc_fn: Option<ServerSvcFn>,
-    // CA certificate
-    ca_cert: Option<&'a SelfSigned>,
-    // Server certificate
-    server_cert: Option<&'a SelfSigned>,
-    // Enable mTLS Verification
-    mutual_tls: bool,
-}
-
-impl<'a> TestServerBuilder<'a> {
-    pub(crate) fn new() -> Self {
-        Self {
-            svc_fn: None,
-            server_cert: None,
-            ca_cert: None,
-            mutual_tls: false,
-        }
-    }
-
-    #[expect(unused)]
-    /// Provide a custom server response function.
-    pub(crate) fn with_svc_fn(mut self, svc_fn: ServerSvcFn) -> Self {
-        self.svc_fn = Some(svc_fn);
-        self
-    }
-
-    /// Provide the server certificate. This will enable TLS (HTTPS).
-    pub(crate) fn with_server_cert(mut self, server_cert: &'a SelfSigned) -> Self {
-        self.server_cert = Some(server_cert);
-        self
-    }
-
-    /// CA certificate used to build the `RootCertStore` for client verification.
-    /// Requires `with_server_cert`.
-    pub(crate) fn with_ca_cert(mut self, ca_cert: &'a SelfSigned) -> Self {
-        self.ca_cert = Some(ca_cert);
-        self
-    }
-
-    /// Enforce mutual TLS (client cert auth).
-    /// Requires `with_server_cert` and `with_ca_cert`.
-    pub(crate) fn with_mutual_tls(mut self, mutual: bool) -> Self {
-        self.mutual_tls = mutual;
-        self
-    }
-
-    /// Starts the HTTP(S) server with optional mTLS enforcement.
-    pub(crate) async fn start(self) -> Result<(JoinHandle<Result<()>>, SocketAddr)> {
-        // Validate builder input combinations
-        if self.ca_cert.is_some() && self.server_cert.is_none() {
-            anyhow::bail!("server certificate is required when CA certificate is provided");
-        }
-        if self.mutual_tls && (self.ca_cert.is_none() || self.server_cert.is_none()) {
-            anyhow::bail!("ca certificate is required for mTLS");
-        }
-
-        // Set up the TCP listener on a random available port
-        let listener = TcpListener::bind("127.0.0.1:0").await?;
-        let addr = listener.local_addr()?;
-
-        // Setup TLS Config (if any)
-        let tls_acceptor = if let Some(server_cert) = self.server_cert {
-            // Prepare Server Cert and KeyPair
-            let server_key = PrivateKeyDer::try_from(server_cert.private.serialize_der()).unwrap();
-            let server_cert = vec![CertificateDer::from(server_cert.public.der().to_vec())];
-
-            // Setup CA Verifier
-            let client_verifier = if let Some(ca_cert) = self.ca_cert {
-                let mut root_store = RootCertStore::empty();
-                root_store
-                    .add(CertificateDer::from(ca_cert.public.der().to_vec()))
-                    .expect("failed to add CA cert");
-                if self.mutual_tls {
-                    // Setup mTLS CA config
-                    WebPkiClientVerifier::builder(root_store.into())
-                        .build()
-                        .expect("failed to setup client verifier")
-                } else {
-                    // Only load the CA roots
-                    WebPkiClientVerifier::builder(root_store.into())
-                        .allow_unauthenticated()
-                        .build()
-                        .expect("failed to setup client verifier")
-                }
-            } else {
-                WebPkiClientVerifier::no_client_auth()
-            };
-
-            let mut tls_config = ServerConfig::builder()
-                .with_client_cert_verifier(client_verifier)
-                .with_single_cert(server_cert, server_key)?;
-            tls_config.alpn_protocols = vec![b"http/1.1".to_vec(), b"http/1.0".to_vec()];
-
-            Some(TlsAcceptor::from(Arc::new(tls_config)))
-        } else {
-            None
-        };
-
-        // Setup Response Handler
-        let svc_fn = if let Some(custom_svc_fn) = self.svc_fn {
-            custom_svc_fn
-        } else {
-            |req: Request<Incoming>| {
-                // Get User Agent Header and send it back in the response
-                let user_agent = req
-                    .headers()
-                    .get(USER_AGENT)
-                    .and_then(|v| v.to_str().ok())
-                    .map(ToString::to_string)
-                    .unwrap_or_default(); // Empty Default
-                let response_content = Full::new(Bytes::from(user_agent))
-                    .map_err(|_| unreachable!())
-                    .boxed();
-                // If we ever want a true echo server, we can use instead
-                // let response_content = req.into_body().boxed();
-                // although uv-client doesn't expose post currently.
-                future::ok::<_, hyper::Error>(Response::new(response_content))
-            }
-        };
-
-        // Spawn the server loop in a background task
-        let server_task = tokio::spawn(async move {
-            let svc = service_fn(move |req: Request<Incoming>| svc_fn(req));
-
-            let (tcp_stream, _remote_addr) = listener
-                .accept()
-                .await
-                .context("Failed to accept TCP connection")?;
-
-            // Start Server (not wrapped in loop {} since we want a single response server)
-            // If we want server to accept multiple connections, we can wrap it in loop {}
-            // but we'll need to ensure to handle termination signals in the tests otherwise
-            // it may never stop.
-            if let Some(tls_acceptor) = tls_acceptor {
-                let tls_stream = tls_acceptor
-                    .accept(tcp_stream)
-                    .await
-                    .context("Failed to accept TLS connection")?;
-                let socket = TokioIo::new(tls_stream);
-                tokio::task::spawn(async move {
-                    Builder::new(TokioExecutor::new())
-                        .serve_connection(socket, svc)
-                        .await
-                        .expect("HTTPS Server Started");
-                });
-            } else {
-                let socket = TokioIo::new(tcp_stream);
-                tokio::task::spawn(async move {
-                    Builder::new(TokioExecutor::new())
-                        .serve_connection(socket, svc)
-                        .await
-                        .expect("HTTP Server Started");
-                });
-            }
-
-            Ok(())
-        });
-
-        Ok((server_task, addr))
-    }
-}
-
-/// Single Request HTTP server that echoes the User Agent Header.
-pub(crate) async fn start_http_user_agent_server() -> Result<(JoinHandle<Result<()>>, SocketAddr)> {
-    TestServerBuilder::new().start().await
-}
-
-/// Single Request HTTPS server that echoes the User Agent Header.
-pub(crate) async fn start_https_user_agent_server(
-    server_cert: &SelfSigned,
-) -> Result<(JoinHandle<Result<()>>, SocketAddr)> {
-    TestServerBuilder::new()
-        .with_server_cert(server_cert)
-        .start()
-        .await
-}
-
-/// Single Request HTTPS mTLS server that echoes the User Agent Header.
-pub(crate) async fn start_https_mtls_user_agent_server(
-    ca_cert: &SelfSigned,
-    server_cert: &SelfSigned,
-) -> Result<(JoinHandle<Result<()>>, SocketAddr)> {
-    TestServerBuilder::new()
-        .with_ca_cert(ca_cert)
-        .with_server_cert(server_cert)
-        .with_mutual_tls(true)
-        .start()
-        .await
-}
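For orientation: the file deleted above is the shared TLS/mTLS test-server helper (removed together with the `mod http_util;` declaration in the next hunk). uv's own tests point the client at these servers via SSL_CERT_FILE, but the same server can also be exercised by trusting the generated CA explicitly. A hedged sketch with reqwest — the PEM path and the server address are hypothetical, and the exact TLS backend behavior may differ:

use std::fs;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Trust a self-signed CA explicitly rather than via SSL_CERT_FILE.
    let pem = fs::read("ca_public.pem")?; // hypothetical path to the generated CA cert
    let ca = reqwest::Certificate::from_pem(&pem)?;
    let client = reqwest::Client::builder()
        .add_root_certificate(ca)
        .user_agent("uv-test-client/0.0.0")
        .build()?;
    let body = client
        .get("https://localhost:8443/") // hypothetical address of the test server
        .send()
        .await?
        .text()
        .await?;
    // The deleted test server echoes the User-Agent header back in the body.
    assert_eq!(body, "uv-test-client/0.0.0");
    Ok(())
}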
@@ -1,4 +1,2 @@
-mod http_util;
 mod remote_metadata;
-mod ssl_certs;
 mod user_agent_version;
@@ -11,7 +11,7 @@ use uv_redacted::DisplaySafeUrl;

 #[tokio::test]
 async fn remote_metadata_with_and_without_cache() -> Result<()> {
-    let cache = Cache::temp()?.init().await?;
+    let cache = Cache::temp()?.init()?;
     let client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache).build();

     // The first run is without cache (the tempdir is empty), the second has the cache from the
@@ -21,11 +21,11 @@ async fn remote_metadata_with_and_without_cache() -> Result<()> {
         let filename = WheelFilename::from_str(url.rsplit_once('/').unwrap().1)?;
         let dist = BuiltDist::DirectUrl(DirectUrlBuiltDist {
             filename,
-            location: Box::new(DisplaySafeUrl::parse(url)?),
-            url: VerbatimUrl::from_str(url)?,
+            location: Box::new(DisplaySafeUrl::parse(url).unwrap()),
+            url: VerbatimUrl::from_str(url).unwrap(),
         });
         let capabilities = IndexCapabilities::default();
-        let metadata = client.wheel_metadata(&dist, &capabilities).await?;
+        let metadata = client.wheel_metadata(&dist, &capabilities).await.unwrap();
         assert_eq!(metadata.version.to_string(), "4.66.1");
     }

@@ -1,333 +0,0 @@
-use std::str::FromStr;
-
-use anyhow::Result;
-use rustls::AlertDescription;
-use url::Url;
-
-use uv_cache::Cache;
-use uv_client::BaseClientBuilder;
-use uv_client::RegistryClientBuilder;
-use uv_redacted::DisplaySafeUrl;
-use uv_static::EnvVars;
-
-use crate::http_util::{
-    generate_self_signed_certs, generate_self_signed_certs_with_ca,
-    start_https_mtls_user_agent_server, start_https_user_agent_server, test_cert_dir,
-};
-
-// SAFETY: This test is meant to run with single thread configuration
-#[tokio::test]
-#[allow(unsafe_code)]
-async fn ssl_env_vars() -> Result<()> {
-    // Ensure our environment is not polluted with anything that may affect `rustls-native-certs`
-    unsafe {
-        std::env::remove_var(EnvVars::UV_NATIVE_TLS);
-        std::env::remove_var(EnvVars::SSL_CERT_FILE);
-        std::env::remove_var(EnvVars::SSL_CERT_DIR);
-        std::env::remove_var(EnvVars::SSL_CLIENT_CERT);
-    }
-
-    // Create temporary cert dirs
-    let cert_dir = test_cert_dir();
-    fs_err::create_dir_all(&cert_dir).expect("Failed to create test cert bucket");
-    let cert_dir =
-        tempfile::TempDir::new_in(cert_dir).expect("Failed to create test cert directory");
-    let does_not_exist_cert_dir = cert_dir.path().join("does_not_exist");
-
-    // Generate self-signed standalone cert
-    let standalone_server_cert = generate_self_signed_certs()?;
-    let standalone_public_pem_path = cert_dir.path().join("standalone_public.pem");
-    let standalone_private_pem_path = cert_dir.path().join("standalone_private.pem");
-
-    // Generate self-signed CA, server, and client certs
-    let (ca_cert, server_cert, client_cert) = generate_self_signed_certs_with_ca()?;
-    let ca_public_pem_path = cert_dir.path().join("ca_public.pem");
-    let ca_private_pem_path = cert_dir.path().join("ca_private.pem");
-    let server_public_pem_path = cert_dir.path().join("server_public.pem");
-    let server_private_pem_path = cert_dir.path().join("server_private.pem");
-    let client_combined_pem_path = cert_dir.path().join("client_combined.pem");
-
-    // Persist the certs in PKCS8 format as the env vars expect a path on disk
-    fs_err::write(
-        standalone_public_pem_path.as_path(),
-        standalone_server_cert.public.pem(),
-    )?;
-    fs_err::write(
-        standalone_private_pem_path.as_path(),
-        standalone_server_cert.private.serialize_pem(),
-    )?;
-    fs_err::write(ca_public_pem_path.as_path(), ca_cert.public.pem())?;
-    fs_err::write(
-        ca_private_pem_path.as_path(),
-        ca_cert.private.serialize_pem(),
-    )?;
-    fs_err::write(server_public_pem_path.as_path(), server_cert.public.pem())?;
-    fs_err::write(
-        server_private_pem_path.as_path(),
-        server_cert.private.serialize_pem(),
-    )?;
-    fs_err::write(
-        client_combined_pem_path.as_path(),
-        // SSL_CLIENT_CERT expects a "combined" cert with the public and private key.
-        format!(
-            "{}\n{}",
-            client_cert.public.pem(),
-            client_cert.private.serialize_pem()
-        ),
-    )?;
-
-    // ** Set SSL_CERT_FILE to non-existent location
-    // ** Then verify our request fails to establish a connection
-
-    unsafe {
-        std::env::set_var(EnvVars::SSL_CERT_FILE, does_not_exist_cert_dir.as_os_str());
-    }
-    let (server_task, addr) = start_https_user_agent_server(&standalone_server_cert).await?;
-    let url = DisplaySafeUrl::from_str(&format!("https://{addr}"))?;
-    let cache = Cache::temp()?.init().await?;
-    let client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache).build();
-    let res = client
-        .cached_client()
-        .uncached()
-        .for_host(&url)
-        .get(Url::from(url))
-        .send()
-        .await;
-    unsafe {
-        std::env::remove_var(EnvVars::SSL_CERT_FILE);
-    }
-
-    // Validate the client error
-    let Some(reqwest_middleware::Error::Middleware(middleware_error)) = res.err() else {
-        panic!("expected middleware error");
-    };
-    let reqwest_error = middleware_error
-        .chain()
-        .find_map(|err| {
-            err.downcast_ref::<reqwest_middleware::Error>().map(|err| {
-                if let reqwest_middleware::Error::Reqwest(inner) = err {
-                    inner
-                } else {
-                    panic!("expected reqwest error")
-                }
-            })
-        })
-        .expect("expected reqwest error");
-    assert!(reqwest_error.is_connect());
-
-    // Validate the server error
-    let server_res = server_task.await?;
-    let expected_err = if let Err(anyhow_err) = server_res
-        && let Some(io_err) = anyhow_err.downcast_ref::<std::io::Error>()
-        && let Some(wrapped_err) = io_err.get_ref()
-        && let Some(tls_err) = wrapped_err.downcast_ref::<rustls::Error>()
-        && matches!(
-            tls_err,
-            rustls::Error::AlertReceived(AlertDescription::UnknownCA)
-        ) {
-        true
-    } else {
-        false
-    };
-    assert!(expected_err);
-
-    // ** Set SSL_CERT_FILE to our public certificate
-    // ** Then verify our request successfully establishes a connection
-
-    unsafe {
-        std::env::set_var(
-            EnvVars::SSL_CERT_FILE,
-            standalone_public_pem_path.as_os_str(),
-        );
-    }
-    let (server_task, addr) = start_https_user_agent_server(&standalone_server_cert).await?;
-    let url = DisplaySafeUrl::from_str(&format!("https://{addr}"))?;
-    let cache = Cache::temp()?.init().await?;
-    let client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache).build();
-    let res = client
-        .cached_client()
-        .uncached()
-        .for_host(&url)
-        .get(Url::from(url))
-        .send()
-        .await;
-    assert!(res.is_ok());
-    let _ = server_task.await?; // wait for server shutdown
-    unsafe {
-        std::env::remove_var(EnvVars::SSL_CERT_FILE);
-    }
-
-    // ** Set SSL_CERT_DIR to our cert dir as well as some other dir that does not exist
-    // ** Then verify our request still successfully establishes a connection
-
-    unsafe {
-        std::env::set_var(
-            EnvVars::SSL_CERT_DIR,
-            std::env::join_paths(vec![
-                cert_dir.path().as_os_str(),
-                does_not_exist_cert_dir.as_os_str(),
-            ])?,
-        );
-    }
-    let (server_task, addr) = start_https_user_agent_server(&standalone_server_cert).await?;
-    let url = DisplaySafeUrl::from_str(&format!("https://{addr}"))?;
-    let cache = Cache::temp()?.init().await?;
-    let client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache).build();
-    let res = client
-        .cached_client()
-        .uncached()
-        .for_host(&url)
-        .get(Url::from(url))
-        .send()
-        .await;
-    assert!(res.is_ok());
-    let _ = server_task.await?; // wait for server shutdown
-    unsafe {
-        std::env::remove_var(EnvVars::SSL_CERT_DIR);
-    }
-
-    // ** Set SSL_CERT_DIR to only the dir that does not exist
-    // ** Then verify our request fails to establish a connection
-
-    unsafe {
-        std::env::set_var(EnvVars::SSL_CERT_DIR, does_not_exist_cert_dir.as_os_str());
-    }
-    let (server_task, addr) = start_https_user_agent_server(&standalone_server_cert).await?;
-    let url = DisplaySafeUrl::from_str(&format!("https://{addr}"))?;
-    let cache = Cache::temp()?.init().await?;
-    let client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache).build();
-    let res = client
-        .cached_client()
-        .uncached()
-        .for_host(&url)
-        .get(Url::from(url))
-        .send()
-        .await;
-    unsafe {
-        std::env::remove_var(EnvVars::SSL_CERT_DIR);
-    }
-
-    // Validate the client error
-    let Some(reqwest_middleware::Error::Middleware(middleware_error)) = res.err() else {
-        panic!("expected middleware error");
-    };
-    let reqwest_error = middleware_error
-        .chain()
-        .find_map(|err| {
-            err.downcast_ref::<reqwest_middleware::Error>().map(|err| {
-                if let reqwest_middleware::Error::Reqwest(inner) = err {
-                    inner
-                } else {
-                    panic!("expected reqwest error")
-                }
-            })
-        })
-        .expect("expected reqwest error");
-    assert!(reqwest_error.is_connect());
-
-    // Validate the server error
-    let server_res = server_task.await?;
-    let expected_err = if let Err(anyhow_err) = server_res
-        && let Some(io_err) = anyhow_err.downcast_ref::<std::io::Error>()
-        && let Some(wrapped_err) = io_err.get_ref()
-        && let Some(tls_err) = wrapped_err.downcast_ref::<rustls::Error>()
-        && matches!(
-            tls_err,
-            rustls::Error::AlertReceived(AlertDescription::UnknownCA)
-        ) {
-        true
-    } else {
-        false
-    };
-    assert!(expected_err);
-
-    // *** mTLS Tests
-
-    // ** Set SSL_CERT_FILE to our CA and SSL_CLIENT_CERT to our client cert
-    // ** Then verify our request still successfully establishes a connection
-
-    // We need to set SSL_CERT_FILE or SSL_CERT_DIR to our CA as we need to tell
-    // our HTTP client that we trust certificates issued by our self-signed CA.
-    // This inherently also tests that our server cert is also validated as part
-    // of the certificate path validation algorithm.
-    unsafe {
-        std::env::set_var(EnvVars::SSL_CERT_FILE, ca_public_pem_path.as_os_str());
-        std::env::set_var(
-            EnvVars::SSL_CLIENT_CERT,
-            client_combined_pem_path.as_os_str(),
-        );
-    }
-    let (server_task, addr) = start_https_mtls_user_agent_server(&ca_cert, &server_cert).await?;
-    let url = DisplaySafeUrl::from_str(&format!("https://{addr}"))?;
-    let cache = Cache::temp()?.init().await?;
-    let client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache).build();
-    let res = client
-        .cached_client()
-        .uncached()
-        .for_host(&url)
-        .get(Url::from(url))
-        .send()
-        .await;
-    assert!(res.is_ok());
-    let _ = server_task.await?; // wait for server shutdown
-    unsafe {
-        std::env::remove_var(EnvVars::SSL_CERT_FILE);
-        std::env::remove_var(EnvVars::SSL_CLIENT_CERT);
-    }
-
-    // ** Set SSL_CERT_FILE to our CA and unset SSL_CLIENT_CERT
-    // ** Then verify our request fails to establish a connection
-
-    unsafe {
-        std::env::set_var(EnvVars::SSL_CERT_FILE, ca_public_pem_path.as_os_str());
-    }
-    let (server_task, addr) = start_https_mtls_user_agent_server(&ca_cert, &server_cert).await?;
-    let url = DisplaySafeUrl::from_str(&format!("https://{addr}"))?;
-    let cache = Cache::temp()?.init().await?;
-    let client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache).build();
-    let res = client
-        .cached_client()
-        .uncached()
-        .for_host(&url)
-        .get(Url::from(url))
-        .send()
-        .await;
-    unsafe {
-        std::env::remove_var(EnvVars::SSL_CERT_FILE);
-    }
-
-    // Validate the client error
-    let Some(reqwest_middleware::Error::Middleware(middleware_error)) = res.err() else {
-        panic!("expected middleware error");
-    };
-    let reqwest_error = middleware_error
-        .chain()
-        .find_map(|err| {
-            err.downcast_ref::<reqwest_middleware::Error>().map(|err| {
-                if let reqwest_middleware::Error::Reqwest(inner) = err {
-                    inner
-                } else {
-                    panic!("expected reqwest error")
-                }
-            })
-        })
-        .expect("expected reqwest error");
-    assert!(reqwest_error.is_connect());
-
-    // Validate the server error
-    let server_res = server_task.await?;
-    let expected_err = if let Err(anyhow_err) = server_res
-        && let Some(io_err) = anyhow_err.downcast_ref::<std::io::Error>()
-        && let Some(wrapped_err) = io_err.get_ref()
-        && let Some(tls_err) = wrapped_err.downcast_ref::<rustls::Error>()
-        && matches!(tls_err, rustls::Error::NoCertificatesPresented)
-    {
-        true
-    } else {
-        false
-    };
-    assert!(expected_err);
-
-    // Fin.
-    Ok(())
-}
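The deleted test above mutates process-wide SSL_* variables, which is why its comment notes it must run single-threaded and why every set_var is paired with a remove_var. A small RAII guard is a common way to keep such mutations scoped even when an assertion panics mid-test; a sketch under that assumption (`EnvGuard` is not a uv type):

use std::env;

// Restore an environment variable to its previous value on drop.
struct EnvGuard {
    key: &'static str,
    previous: Option<String>,
}

impl EnvGuard {
    fn set(key: &'static str, value: &str) -> Self {
        let previous = env::var(key).ok();
        // SAFETY: only sound while no other thread touches the environment,
        // matching the single-threaded constraint noted in the deleted test.
        unsafe { env::set_var(key, value) };
        Self { key, previous }
    }
}

impl Drop for EnvGuard {
    fn drop(&mut self) {
        unsafe {
            match &self.previous {
                Some(value) => env::set_var(self.key, value),
                None => env::remove_var(self.key),
            }
        }
    }
}

fn main() {
    {
        let _guard = EnvGuard::set("SSL_CERT_FILE", "/tmp/ca_public.pem"); // hypothetical path
        assert_eq!(env::var("SSL_CERT_FILE").as_deref(), Ok("/tmp/ca_public.pem"));
    }
    // Assumes SSL_CERT_FILE was unset before this sketch started.
    assert!(env::var("SSL_CERT_FILE").is_err());
}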
@ -1,9 +1,16 @@
|
|||
use std::str::FromStr;
|
||||
|
||||
use anyhow::Result;
|
||||
use futures::future;
|
||||
use http_body_util::Full;
|
||||
use hyper::body::Bytes;
|
||||
use hyper::header::USER_AGENT;
|
||||
use hyper::server::conn::http1;
|
||||
use hyper::service::service_fn;
|
||||
use hyper::{Request, Response};
|
||||
use hyper_util::rt::TokioIo;
|
||||
use insta::{assert_json_snapshot, assert_snapshot, with_settings};
|
||||
use std::str::FromStr;
|
||||
use tokio::net::TcpListener;
|
||||
use url::Url;
|
||||
|
||||
use uv_cache::Cache;
|
||||
use uv_client::RegistryClientBuilder;
|
||||
use uv_client::{BaseClientBuilder, LineHaul};
|
||||
|
|
@ -12,15 +19,39 @@ use uv_platform_tags::{Arch, Os, Platform};
|
|||
use uv_redacted::DisplaySafeUrl;
|
||||
use uv_version::version;
|
||||
|
||||
use crate::http_util::start_http_user_agent_server;
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_user_agent_has_version() -> Result<()> {
|
||||
// Initialize dummy http server
|
||||
let (server_task, addr) = start_http_user_agent_server().await?;
|
||||
// Set up the TCP listener on a random available port
|
||||
let listener = TcpListener::bind("127.0.0.1:0").await?;
|
||||
let addr = listener.local_addr()?;
|
||||
|
||||
// Spawn the server loop in a background task
|
||||
let server_task = tokio::spawn(async move {
|
||||
let svc = service_fn(move |req: Request<hyper::body::Incoming>| {
|
||||
// Get User Agent Header and send it back in the response
|
||||
let user_agent = req
|
||||
.headers()
|
||||
.get(USER_AGENT)
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.map(ToString::to_string)
|
||||
.unwrap_or_default(); // Empty Default
|
||||
future::ok::<_, hyper::Error>(Response::new(Full::new(Bytes::from(user_agent))))
|
||||
});
|
||||
// Start Server (not wrapped in loop {} since we want a single response server)
|
||||
// If you want server to accept multiple connections, wrap it in loop {}
|
||||
let (socket, _) = listener.accept().await.unwrap();
|
||||
let socket = TokioIo::new(socket);
|
||||
tokio::task::spawn(async move {
|
||||
http1::Builder::new()
|
||||
.serve_connection(socket, svc)
|
||||
.with_upgrades()
|
||||
.await
|
||||
.expect("Server Started");
|
||||
});
|
||||
});
|
||||
|
||||
// Initialize uv-client
|
||||
let cache = Cache::temp()?.init().await?;
|
||||
let cache = Cache::temp()?.init()?;
|
||||
let client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache).build();
|
||||
|
||||
// Send request to our dummy server
|
||||
|
|
@ -39,102 +70,45 @@ async fn test_user_agent_has_version() -> Result<()> {
|
|||
// Check User Agent
|
||||
let body = res.text().await?;
|
||||
|
||||
let (uv_version, uv_linehaul) = body
|
||||
.split_once(' ')
|
||||
.expect("Failed to split User-Agent header");
|
||||
|
||||
// Deserializing Linehaul
|
||||
let linehaul: LineHaul = serde_json::from_str(uv_linehaul)?;
|
||||
|
||||
// Assert linehaul user agent
|
||||
let filters = vec![(version(), "[VERSION]")];
|
||||
with_settings!({
|
||||
filters => filters
|
||||
}, {
|
||||
// Assert uv version
|
||||
assert_snapshot!(uv_version, @"uv/[VERSION]");
|
||||
// Assert linehaul json
|
||||
assert_json_snapshot!(&linehaul.installer, @r#"
|
||||
{
|
||||
"name": "uv",
|
||||
"version": "[VERSION]",
|
||||
"subcommand": null
|
||||
}
|
||||
"#);
|
||||
});
|
||||
// Verify body matches regex
|
||||
assert_eq!(body, format!("uv/{}", version()));
|
||||
|
||||
// Wait for the server task to complete, to be a good citizen.
|
||||
let _ = server_task.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_user_agent_has_subcommand() -> Result<()> {
|
||||
// Initialize dummy http server
|
||||
let (server_task, addr) = start_http_user_agent_server().await?;
|
||||
|
||||
// Initialize uv-client
|
||||
let cache = Cache::temp()?.init().await?;
|
||||
let client = RegistryClientBuilder::new(
|
||||
BaseClientBuilder::default().subcommand(vec!["foo".to_owned(), "bar".to_owned()]),
|
||||
cache,
|
||||
)
|
||||
.build();
|
||||
|
||||
// Send request to our dummy server
|
||||
let url = DisplaySafeUrl::from_str(&format!("http://{addr}"))?;
|
||||
let res = client
|
||||
.cached_client()
|
||||
.uncached()
|
||||
.for_host(&url)
|
||||
.get(Url::from(url))
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
// Check the HTTP status
|
||||
assert!(res.status().is_success());
|
||||
|
||||
// Check User Agent
|
||||
let body = res.text().await?;
|
||||
|
||||
let (uv_version, uv_linehaul) = body
|
||||
.split_once(' ')
|
||||
.expect("Failed to split User-Agent header");
|
||||
|
||||
// Deserializing Linehaul
|
||||
let linehaul: LineHaul = serde_json::from_str(uv_linehaul)?;
|
||||
|
||||
// Assert linehaul user agent
|
||||
let filters = vec![(version(), "[VERSION]")];
|
||||
with_settings!({
|
||||
filters => filters
|
||||
}, {
|
||||
// Assert uv version
|
||||
assert_snapshot!(uv_version, @"uv/[VERSION]");
|
||||
// Assert linehaul json
|
||||
assert_json_snapshot!(&linehaul.installer, @r#"
|
||||
{
|
||||
"name": "uv",
|
||||
"version": "[VERSION]",
|
||||
"subcommand": [
|
||||
"foo",
|
||||
"bar"
|
||||
]
|
||||
}
|
||||
"#);
|
||||
});
|
||||
|
||||
// Wait for the server task to complete, to be a good citizen.
|
||||
let _ = server_task.await?;
|
||||
server_task.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_user_agent_has_linehaul() -> Result<()> {
|
||||
// Initialize dummy http server
|
||||
let (server_task, addr) = start_http_user_agent_server().await?;
|
||||
// Set up the TCP listener on a random available port
|
||||
let listener = TcpListener::bind("127.0.0.1:0").await?;
|
||||
let addr = listener.local_addr()?;
|
||||
|
||||
// Spawn the server loop in a background task
|
||||
let server_task = tokio::spawn(async move {
|
||||
let svc = service_fn(move |req: Request<hyper::body::Incoming>| {
|
||||
// Get User Agent Header and send it back in the response
|
||||
let user_agent = req
|
||||
.headers()
|
||||
.get(USER_AGENT)
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.map(ToString::to_string)
|
||||
.unwrap_or_default(); // Empty Default
|
||||
future::ok::<_, hyper::Error>(Response::new(Full::new(Bytes::from(user_agent))))
|
||||
});
|
||||
// Start Server (not wrapped in loop {} since we want a single response server)
|
||||
// If you want server to accept multiple connections, wrap it in loop {}
|
||||
let (socket, _) = listener.accept().await.unwrap();
|
||||
let socket = TokioIo::new(socket);
|
||||
tokio::task::spawn(async move {
|
||||
http1::Builder::new()
|
||||
.serve_connection(socket, svc)
|
||||
.with_upgrades()
|
||||
.await
|
||||
.expect("Server Started");
|
||||
});
|
||||
});
|
||||
|
||||
// Add some representative markers for an Ubuntu CI runner
|
||||
let markers = MarkerEnvironment::try_from(MarkerEnvironmentBuilder {
|
||||
|
|
@ -149,10 +123,11 @@ async fn test_user_agent_has_linehaul() -> Result<()> {
|
|||
python_full_version: "3.12.2",
|
||||
python_version: "3.12",
|
||||
sys_platform: "linux",
|
||||
})?;
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
// Initialize uv-client
|
||||
let cache = Cache::temp()?.init().await?;
|
||||
let cache = Cache::temp()?.init()?;
|
||||
let mut builder =
|
||||
RegistryClientBuilder::new(BaseClientBuilder::default(), cache).markers(&markers);
|
||||
|
||||
|
|
@@ -194,7 +169,7 @@ async fn test_user_agent_has_linehaul() -> Result<()> {
    let body = res.text().await?;

    // Wait for the server task to complete, to be a good citizen.
-   let _ = server_task.await?;
+   server_task.await?;

    // Unpack User-Agent with linehaul
    let (uv_version, uv_linehaul) = body
@@ -215,12 +190,11 @@ async fn test_user_agent_has_linehaul() -> Result<()> {
    assert_json_snapshot!(&linehaul, {
        ".distro" => "[distro]",
        ".ci" => "[ci]"
-   }, @r#"
+   }, @r###"
    {
      "installer": {
        "name": "uv",
-       "version": "[VERSION]",
-       "subcommand": null
+       "version": "[VERSION]"
      },
      "python": "3.12.2",
      "implementation": {
@@ -238,7 +212,7 @@ async fn test_user_agent_has_linehaul() -> Result<()> {
      "rustc_version": null,
      "ci": "[ci]"
    }
-   "#);
+   "###);
    });

    // Assert distro

@@ -1,10 +1,10 @@
[package]
name = "uv-configuration"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.1"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-configuration
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-configuration).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.

@@ -1,25 +0,0 @@
-use rustc_hash::FxHashSet;
-
-use uv_normalize::PackageName;
-
-/// A set of packages to exclude from resolution.
-#[derive(Debug, Default, Clone)]
-pub struct Excludes(FxHashSet<PackageName>);
-
-impl Excludes {
-    /// Return an iterator over all package names in the exclusion set.
-    pub fn iter(&self) -> impl Iterator<Item = &PackageName> {
-        self.0.iter()
-    }
-
-    /// Check if a package is excluded.
-    pub fn contains(&self, name: &PackageName) -> bool {
-        self.0.contains(name)
-    }
-}
-
-impl FromIterator<PackageName> for Excludes {
-    fn from_iter<I: IntoIterator<Item = PackageName>>(iter: I) -> Self {
-        Self(iter.into_iter().collect())
-    }
-}
@@ -15,30 +15,4 @@ pub enum ExportFormat {
    #[serde(rename = "pylock.toml", alias = "pylock-toml")]
    #[cfg_attr(feature = "clap", clap(name = "pylock.toml", alias = "pylock-toml"))]
    PylockToml,
-   /// Export in `CycloneDX` v1.5 JSON format.
-   #[serde(rename = "cyclonedx1.5")]
-   #[cfg_attr(
-       feature = "clap",
-       clap(name = "cyclonedx1.5", alias = "cyclonedx1.5+json")
-   )]
-   CycloneDX1_5,
}
-
-/// The output format to use in `uv pip compile`.
-#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
-#[serde(deny_unknown_fields, rename_all = "kebab-case")]
-#[cfg_attr(feature = "clap", derive(clap::ValueEnum))]
-pub enum PipCompileFormat {
-    /// Export in `requirements.txt` format.
-    #[default]
-    #[serde(rename = "requirements.txt", alias = "requirements-txt")]
-    #[cfg_attr(
-        feature = "clap",
-        clap(name = "requirements.txt", alias = "requirements-txt")
-    )]
-    RequirementsTxt,
-    /// Export in `pylock.toml` format.
-    #[serde(rename = "pylock.toml", alias = "pylock-toml")]
-    #[cfg_attr(feature = "clap", clap(name = "pylock.toml", alias = "pylock-toml"))]
-    PylockToml,
-}
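
The `rename`/`alias` pairs above are what let both spellings of each format be accepted on input while only the canonical one is emitted. A self-contained sketch with a stand-in enum (not uv's actual type), assuming `serde` and `serde_json` are available:

#[derive(Debug, PartialEq, serde::Serialize, serde::Deserialize)]
enum Format {
    // `rename` sets the canonical spelling; `alias` accepts an alternate one.
    #[serde(rename = "pylock.toml", alias = "pylock-toml")]
    PylockToml,
}

#[test]
fn rename_and_alias_round_trip() {
    // Both spellings deserialize to the same variant...
    let canonical: Format = serde_json::from_str(r#""pylock.toml""#).unwrap();
    let aliased: Format = serde_json::from_str(r#""pylock-toml""#).unwrap();
    assert_eq!(canonical, aliased);
    // ...but serialization always uses the `rename` value.
    assert_eq!(serde_json::to_string(&canonical).unwrap(), r#""pylock.toml""#);
}
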
@@ -17,43 +17,26 @@ pub struct InstallTarget<'a> {
pub struct InstallOptions {
    /// Omit the project itself from the resolution.
    pub no_install_project: bool,
-   /// Include only the project itself in the resolution.
-   pub only_install_project: bool,
    /// Omit all workspace members (including the project itself) from the resolution.
    pub no_install_workspace: bool,
-   /// Include only workspace members (including the project itself) in the resolution.
-   pub only_install_workspace: bool,
    /// Omit all local packages from the resolution.
    pub no_install_local: bool,
-   /// Include only local packages in the resolution.
-   pub only_install_local: bool,
    /// Omit the specified packages from the resolution.
    pub no_install_package: Vec<PackageName>,
-   /// Include only the specified packages in the resolution.
-   pub only_install_package: Vec<PackageName>,
}

impl InstallOptions {
-   #[allow(clippy::fn_params_excessive_bools)]
    pub fn new(
        no_install_project: bool,
-       only_install_project: bool,
        no_install_workspace: bool,
-       only_install_workspace: bool,
        no_install_local: bool,
-       only_install_local: bool,
        no_install_package: Vec<PackageName>,
-       only_install_package: Vec<PackageName>,
    ) -> Self {
        Self {
            no_install_project,
-           only_install_project,
            no_install_workspace,
-           only_install_workspace,
            no_install_local,
-           only_install_local,
            no_install_package,
-           only_install_package,
        }
    }
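
For orientation, a hedged sketch of calling the `main`-side constructor above (the longer, eight-argument form); the flag values are illustrative only:

// Skip installing the project itself, but install everything else.
let options = InstallOptions::new(
    true,       // no_install_project
    false,      // only_install_project
    false,      // no_install_workspace
    false,      // only_install_workspace
    false,      // no_install_local
    false,      // only_install_local
    Vec::new(), // no_install_package
    Vec::new(), // only_install_package
);
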
@@ -65,55 +48,6 @@ impl InstallOptions {
        members: &BTreeSet<PackageName>,
    ) -> bool {
        let package_name = target.name;

-       // If `--only-install-package` is set, only include specified packages.
-       if !self.only_install_package.is_empty() {
-           if self.only_install_package.contains(package_name) {
-               return true;
-           }
-           debug!("Omitting `{package_name}` from resolution due to `--only-install-package`");
-           return false;
-       }
-
-       // If `--only-install-local` is set, only include local packages.
-       if self.only_install_local {
-           if target.is_local {
-               return true;
-           }
-           debug!("Omitting `{package_name}` from resolution due to `--only-install-local`");
-           return false;
-       }
-
-       // If `--only-install-workspace` is set, only include the project and workspace members.
-       if self.only_install_workspace {
-           // Check if it's the project itself
-           if let Some(project_name) = project_name {
-               if package_name == project_name {
-                   return true;
-               }
-           }
-
-           // Check if it's a workspace member
-           if members.contains(package_name) {
-               return true;
-           }
-
-           // Otherwise, exclude it
-           debug!("Omitting `{package_name}` from resolution due to `--only-install-workspace`");
-           return false;
-       }
-
-       // If `--only-install-project` is set, only include the project itself.
-       if self.only_install_project {
-           if let Some(project_name) = project_name {
-               if package_name == project_name {
-                   return true;
-               }
-           }
-           debug!("Omitting `{package_name}` from resolution due to `--only-install-project`");
-           return false;
-       }
-
        // If `--no-install-project` is set, remove the project itself.
        if self.no_install_project {
            if let Some(project_name) = project_name {

@@ -6,7 +6,6 @@ pub use dependency_groups::*;
pub use dry_run::*;
pub use editable::*;
pub use env_file::*;
-pub use excludes::*;
pub use export_format::*;
pub use extras::*;
pub use hash::*;

@@ -31,7 +30,6 @@ mod dependency_groups;
mod dry_run;
mod editable;
mod env_file;
-mod excludes;
mod export_format;
mod extras;
mod hash;

@@ -94,32 +94,3 @@ wheels/
# Virtual environments
.venv
";
-
-/// Setting for Git LFS (Large File Storage) support.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
-pub enum GitLfsSetting {
-    /// Git LFS is disabled (default).
-    #[default]
-    Disabled,
-    /// Git LFS is enabled. Tracks whether it came from an environment variable.
-    Enabled { from_env: bool },
-}
-
-impl GitLfsSetting {
-    pub fn new(from_arg: Option<bool>, from_env: Option<bool>) -> Self {
-        match (from_arg, from_env) {
-            (Some(true), _) => Self::Enabled { from_env: false },
-            (_, Some(true)) => Self::Enabled { from_env: true },
-            _ => Self::Disabled,
-        }
-    }
-}
-
-impl From<GitLfsSetting> for Option<bool> {
-    fn from(setting: GitLfsSetting) -> Self {
-        match setting {
-            GitLfsSetting::Enabled { .. } => Some(true),
-            GitLfsSetting::Disabled => None,
-        }
-    }
-}
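
A quick note on the `GitLfsSetting::new` precedence being removed above: an explicit argument beats the environment variable. A sketch of the behavior the match arms encode (the assertions follow directly from the arms):

#[test]
fn git_lfs_precedence() {
    // An explicit CLI argument wins: `from_env` is recorded as `false`.
    assert_eq!(
        GitLfsSetting::new(Some(true), Some(true)),
        GitLfsSetting::Enabled { from_env: false }
    );
    // Only the environment variable is set: `from_env` is `true`.
    assert_eq!(
        GitLfsSetting::new(None, Some(true)),
        GitLfsSetting::Enabled { from_env: true }
    );
    // Neither flag is set: disabled.
    assert_eq!(GitLfsSetting::new(None, None), GitLfsSetting::Disabled);
}
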
@@ -1,10 +1,11 @@
[package]
name = "uv-console"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.1"
+description = "Utilities for interacting with the terminal"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-console
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-console).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.

@@ -1,12 +1,13 @@
[package]
name = "uv-dev"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.1"
+description = "Build wheels from source distributions"
publish = false

edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

@@ -79,4 +80,4 @@ performance-memory-allocator = ["dep:uv-performance-memory-allocator"]
render = ["poloto", "resvg", "tagu"]

[package.metadata.cargo-shear]
-ignored = ["uv-performance-memory-allocator"]
+ignored = ["flate2", "uv-extract", "uv-performance-memory-allocator"]

Some files were not shown because too many files have changed in this diff.