mirror of https://github.com/astral-sh/uv
Compare commits
No commits in common. "main" and "0.8.17" have entirely different histories.

@@ -1,81 +0,0 @@
-# /// script
-# requires-python = ">=3.12"
-# dependencies = []
-# ///
-
-"""Post-edit hook to auto-format files after Claude edits."""
-
-import json
-import subprocess
-import sys
-from pathlib import Path
-
-
-def format_rust(file_path: str, cwd: str) -> None:
-    """Format Rust files with cargo fmt."""
-    try:
-        subprocess.run(
-            ["cargo", "fmt", "--", file_path],
-            cwd=cwd,
-            capture_output=True,
-        )
-    except FileNotFoundError:
-        pass
-
-
-def format_python(file_path: str, cwd: str) -> None:
-    """Format Python files with ruff."""
-    try:
-        subprocess.run(
-            ["uvx", "ruff", "format", file_path],
-            cwd=cwd,
-            capture_output=True,
-        )
-    except FileNotFoundError:
-        pass
-
-
-def format_prettier(file_path: str, cwd: str, prose_wrap: bool = False) -> None:
-    """Format files with prettier."""
-    args = ["npx", "prettier", "--write"]
-    if prose_wrap:
-        args.extend(["--prose-wrap", "always"])
-    args.append(file_path)
-    try:
-        subprocess.run(args, cwd=cwd, capture_output=True)
-    except FileNotFoundError:
-        pass
-
-
-def main() -> None:
-    import os
-
-    input_data = json.load(sys.stdin)
-
-    tool_name = input_data.get("tool_name")
-    tool_input = input_data.get("tool_input", {})
-    file_path = tool_input.get("file_path")
-
-    # Only process Write, Edit, and MultiEdit tools
-    if tool_name not in ("Write", "Edit", "MultiEdit"):
-        return
-
-    if not file_path:
-        return
-
-    cwd = os.environ.get("CLAUDE_PROJECT_DIR", os.getcwd())
-    path = Path(file_path)
-    ext = path.suffix
-
-    if ext == ".rs":
-        format_rust(file_path, cwd)
-    elif ext in (".py", ".pyi"):
-        format_python(file_path, cwd)
-    elif ext in (".json5", ".yaml", ".yml"):
-        format_prettier(file_path, cwd)
-    elif ext == ".md":
-        format_prettier(file_path, cwd, prose_wrap=True)
-
-
-if __name__ == "__main__":
-    main()
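The deleted hook takes no arguments; it reads a JSON event from stdin, so it can be exercised outside Claude Code. A minimal smoke test, assuming only the payload shape the script itself reads (`tool_name` plus a `tool_input` object carrying `file_path`; the concrete values below are invented for illustration):

```python
# Hypothetical invocation: pipe a fabricated PostToolUse payload into the hook.
import json
import subprocess

payload = {
    "tool_name": "Write",
    "tool_input": {"file_path": "src/lib.rs"},  # example path, not from the diff
}
subprocess.run(
    ["uv", "run", ".claude/hooks/post-edit-format.py"],
    input=json.dumps(payload),
    text=True,
    check=True,
)
```

Because each formatter swallows `FileNotFoundError`, this run succeeds even on a machine without `cargo`, `uvx`, or `npx` installed.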
@@ -1,15 +0,0 @@
-{
-  "hooks": {
-    "PostToolUse": [
-      {
-        "matcher": "Edit|Write|MultiEdit",
-        "hooks": [
-          {
-            "type": "command",
-            "command": "uv run .claude/hooks/post-edit-format.py"
-          }
-        ]
-      }
-    ]
-  }
-}
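This deleted settings file is what wired the script into Claude Code: the `matcher` is a regular expression tested against the tool name, so only file-writing tools trigger the command. A sketch of that matching, assuming the matcher is applied as a full-string regex (Claude Code's exact semantics may differ):

```python
# Sketch of the assumed matcher semantics: a regex alternation over tool names.
import re

matcher = re.compile(r"Edit|Write|MultiEdit")
for tool in ("Write", "Edit", "MultiEdit", "Bash", "Read"):
    print(f"{tool}: {'runs hook' if matcher.fullmatch(tool) else 'skipped'}")
```

Note the script double-checks the tool name anyway (`if tool_name not in (...)`), so the hook stays safe even under a broader matcher.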
@ -3,19 +3,20 @@
|
||||||
dependencyDashboard: true,
|
dependencyDashboard: true,
|
||||||
suppressNotifications: ["prEditedNotification"],
|
suppressNotifications: ["prEditedNotification"],
|
||||||
extends: [
|
extends: [
|
||||||
"github>astral-sh/renovate-config",
|
"config:recommended",
|
||||||
// For tool versions defined in GitHub Actions:
|
// For tool versions defined in GitHub Actions:
|
||||||
"customManagers:githubActionsVersions",
|
"customManagers:githubActionsVersions",
|
||||||
],
|
],
|
||||||
labels: ["internal"],
|
labels: ["internal"],
|
||||||
schedule: ["* 0-3 * * 1"],
|
schedule: ["before 4am on Monday"],
|
||||||
semanticCommits: "disabled",
|
semanticCommits: "disabled",
|
||||||
separateMajorMinor: false,
|
separateMajorMinor: false,
|
||||||
|
prHourlyLimit: 10,
|
||||||
enabledManagers: ["github-actions", "pre-commit", "cargo", "custom.regex"],
|
enabledManagers: ["github-actions", "pre-commit", "cargo", "custom.regex"],
|
||||||
cargo: {
|
cargo: {
|
||||||
// See https://docs.renovatebot.com/configuration-options/#rangestrategy
|
// See https://docs.renovatebot.com/configuration-options/#rangestrategy
|
||||||
rangeStrategy: "update-lockfile",
|
rangeStrategy: "update-lockfile",
|
||||||
managerFilePatterns: ["/^Cargo\\.toml$/", "/^crates/.*Cargo\\.toml$/"],
|
managerFilePatterns: ["/^crates/.*Cargo\\.toml$/"],
|
||||||
},
|
},
|
||||||
"pre-commit": {
|
"pre-commit": {
|
||||||
enabled: true,
|
enabled: true,
|
||||||
|
|
@ -85,12 +86,6 @@
|
||||||
description: "Weekly update of pyo3 dependencies",
|
description: "Weekly update of pyo3 dependencies",
|
||||||
enabled: false,
|
enabled: false,
|
||||||
},
|
},
|
||||||
{
|
|
||||||
groupName: "pubgrub",
|
|
||||||
matchManagers: ["cargo"],
|
|
||||||
matchDepNames: ["pubgrub", "version-ranges"],
|
|
||||||
description: "version-ranges and pubgrub are in the same Git repository",
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
commitMessageTopic: "MSRV",
|
commitMessageTopic: "MSRV",
|
||||||
matchManagers: ["custom.regex"],
|
matchManagers: ["custom.regex"],
|
||||||
|
|
|
||||||
|
|
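The schedule change swaps a raw cron expression for renovate's natural-language syntax; the two spell the same window. Decoding the cron form (a sketch; standard five-field cron, day-of-week 1 = Monday):

```python
# "* 0-3 * * 1": any minute, hours 00-03, any day of month, any month, Monday --
# i.e. the same window as "before 4am on Monday".
minute, hour, day_of_month, month, day_of_week = "* 0-3 * * 1".split()
assert (hour, day_of_week) == ("0-3", "1")
```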
@@ -61,7 +61,6 @@ jobs:
       - name: "Build sdist"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           command: sdist
           args: --out dist
       - name: "Test sdist"
@@ -82,7 +81,6 @@ jobs:
       - name: "Build sdist uv-build"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           command: sdist
           args: --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
       - name: "Test sdist uv-build"
@@ -98,7 +96,7 @@ jobs:

   macos-x86_64:
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
-    runs-on: depot-macos-14
+    runs-on: macos-14
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
@@ -115,7 +113,6 @@ jobs:
       - name: "Build wheels - x86_64"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: x86_64
           args: --release --locked --out dist --features self-update
       - name: "Upload wheels"
@@ -146,7 +143,6 @@ jobs:
       - name: "Build wheels uv-build - x86_64"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: x86_64
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
       - name: "Upload wheels uv-build"
@@ -157,7 +153,7 @@ jobs:

   macos-aarch64:
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
-    runs-on: depot-macos-14
+    runs-on: macos-14
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
@@ -174,7 +170,6 @@ jobs:
       - name: "Build wheels - aarch64"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: aarch64
           args: --release --locked --out dist --features self-update
       - name: "Test wheel - aarch64"
@@ -211,7 +206,6 @@ jobs:
       - name: "Build wheels uv-build - aarch64"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: aarch64
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
       - name: "Test wheel - aarch64"
@@ -253,7 +247,6 @@ jobs:
       - name: "Build wheels"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           args: --release --locked --out dist --features self-update,windows-gui-bin
       - name: "Test wheel"
@@ -292,7 +285,6 @@ jobs:
       - name: "Build wheels uv-build"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
       - name: "Test wheel uv-build"
@@ -332,7 +324,6 @@ jobs:
       - name: "Build wheels"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.target }}
           # Generally, we try to build in a target docker container. In this case however, a
           # 32-bit compiler runs out of memory (4GB memory limit for 32-bit), so we cross compile
@@ -399,7 +390,6 @@ jobs:
       - name: "Build wheels uv-build"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.target }}
           manylinux: auto
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
@@ -417,7 +407,7 @@ jobs:

   linux-arm:
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
-    runs-on: depot-ubuntu-22.04-8
+    runs-on: ubuntu-latest
     timeout-minutes: 30
     strategy:
       matrix:
@@ -447,7 +437,6 @@ jobs:
       - name: "Build wheels"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           # On `aarch64`, use `manylinux: 2_28`; otherwise, use `manylinux: auto`.
           manylinux: ${{ matrix.platform.arch == 'aarch64' && '2_28' || 'auto' }}
@@ -501,7 +490,6 @@ jobs:
       - name: "Build wheels uv-build"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           # On `aarch64`, use `manylinux: 2_28`; otherwise, use `manylinux: auto`.
           manylinux: ${{ matrix.platform.arch == 'aarch64' && '2_28' || 'auto' }}
@@ -556,12 +544,13 @@ jobs:
       - name: "Build wheels"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}
           args: --release --locked --out dist --features self-update
-          rust-toolchain: ${{ matrix.platform.toolchain || null }}
+          # Until the llvm updates hit stable
+          # https://github.com/rust-lang/rust/issues/141287
+          rust-toolchain: nightly-2025-05-25
       - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
         if: matrix.platform.arch != 'ppc64'
         name: "Test wheel"
@@ -611,7 +600,6 @@ jobs:
       - name: "Build wheels uv-build"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}
@@ -671,7 +659,6 @@ jobs:
       - name: "Build wheels"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}
@@ -730,7 +717,6 @@ jobs:
       - name: "Build wheels uv-build"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}
@@ -775,7 +761,6 @@ jobs:
       - name: "Build wheels"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}
@@ -829,7 +814,6 @@ jobs:
       - name: "Build wheels uv-build"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}
@@ -858,6 +842,115 @@ jobs:
           name: wheels_uv_build-${{ matrix.platform.target }}
           path: crates/uv-build/dist

+  linux-loongarch64:
+    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
+    timeout-minutes: 30
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        platform:
+          - target: loongarch64-unknown-linux-gnu
+            arch: loong64
+            base_image: --platform=linux/loong64 ghcr.io/loong64/debian:trixie
+
+    steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false
+
+      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
+        with:
+          python-version: ${{ env.PYTHON_VERSION }}
+      - name: "Prep README.md"
+        run: python scripts/transform_readme.py --target pypi
+
+      # uv
+      - name: "Build wheels"
+        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        with:
+          target: ${{ matrix.platform.target }}
+          manylinux: auto
+          docker-options: ${{ matrix.platform.maturin_docker_options }}
+          args: --release --locked --out dist --features self-update
+      - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
+        name: "Test wheel"
+        with:
+          base_image: ${{ matrix.platform.base_image }}
+          githubToken: ${{ github.token }}
+          install: |
+            apt-get update
+            apt-get install -y --no-install-recommends python3 python3-pip python-is-python3 python3-venv
+          run: |
+            python3 -m venv .venv
+            .venv/bin/pip install -U pip
+            source .venv/bin/activate
+            pip install ${PACKAGE_NAME} --no-index --find-links dist/ --force-reinstall
+            ${MODULE_NAME} --help
+            # TODO(konsti): Enable this test on all platforms, currently `find_uv_bin` is failing to discover uv here.
+            # python -m ${MODULE_NAME} --help
+            uvx --help
+          env: |
+            PACKAGE_NAME: ${{ env.PACKAGE_NAME }}
+            MODULE_NAME: ${{ env.MODULE_NAME }}
+      - name: "Upload wheels"
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        with:
+          name: wheels_uv-${{ matrix.platform.target }}
+          path: dist
+      - name: "Archive binary"
+        shell: bash
+        run: |
+          ARCHIVE_NAME=uv-$TARGET
+          ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
+
+          mkdir -p $ARCHIVE_NAME
+          cp target/$TARGET/release/uv $ARCHIVE_NAME/uv
+          cp target/$TARGET/release/uvx $ARCHIVE_NAME/uvx
+          tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
+          shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
+        env:
+          TARGET: ${{ matrix.platform.target }}
+      - name: "Upload binary"
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        with:
+          name: artifacts-${{ matrix.platform.target }}
+          path: |
+            *.tar.gz
+            *.sha256
+
+      # uv-build
+      - name: "Build wheels uv-build"
+        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        with:
+          target: ${{ matrix.platform.target }}
+          manylinux: auto
+          docker-options: ${{ matrix.platform.maturin_docker_options }}
+          args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
+      - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
+        name: "Test wheel uv-build"
+        with:
+          base_image: ${{ matrix.platform.base_image }}
+          githubToken: ${{ github.token }}
+          install: |
+            apt-get update
+            apt-get install -y --no-install-recommends python3 python3-pip python-is-python3 python3-venv
+          run: |
+            python3 -m venv .venv
+            .venv/bin/pip install -U pip
+            source .venv/bin/activate
+            pip install ${PACKAGE_NAME}-build --no-index --find-links crates/uv-build/dist --force-reinstall
+            ${MODULE_NAME}-build --help
+            # TODO(konsti): Enable this test on all platforms, currently `find_uv_bin` is failing to discover uv here.
+            # python -m ${MODULE_NAME}-build --help
+          env: |
+            PACKAGE_NAME: ${{ env.PACKAGE_NAME }}
+            MODULE_NAME: ${{ env.MODULE_NAME }}
+      - name: "Upload wheels uv-build"
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        with:
+          name: wheels_uv_build-${{ matrix.platform.target }}
+          path: crates/uv-build/dist

   musllinux:
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
     runs-on: ubuntu-latest
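The "Archive binary" step above pairs each tarball with a `.sha256` file produced by `shasum -a 256`. A consumer-side verification might look like this (a sketch; the archive name follows the `uv-$TARGET` convention from the step, and the `.sha256` format is `shasum`'s usual `<digest>  <filename>`):

```python
# Recompute the digest and compare it against the recorded checksum file.
import hashlib
from pathlib import Path

archive = Path("uv-loongarch64-unknown-linux-gnu.tar.gz")
expected = Path(f"{archive}.sha256").read_text().split()[0]
actual = hashlib.sha256(archive.read_bytes()).hexdigest()
assert actual == expected, "archive does not match its recorded checksum"
```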
@@ -882,7 +975,6 @@ jobs:
       - name: "Build wheels"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.target }}
           manylinux: musllinux_1_1
           args: --release --locked --out dist --features self-update
@@ -931,7 +1023,6 @@ jobs:
       - name: "Build wheels uv-build"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.target }}
           manylinux: musllinux_1_1
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
@@ -956,7 +1047,7 @@ jobs:

   musllinux-cross:
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
-    runs-on: depot-ubuntu-22.04-8
+    runs-on: ubuntu-latest
     strategy:
       matrix:
         platform:
@@ -982,7 +1073,6 @@ jobs:
       - name: "Build wheels"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: musllinux_1_1
           args: --release --locked --out dist --features self-update ${{ matrix.platform.arch == 'aarch64' && '--compatibility 2_17' || ''}}
@@ -1055,7 +1145,6 @@ jobs:
       - name: "Build wheels"
         uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: musllinux_1_1
           args: --profile minimal-size --locked ${{ matrix.platform.arch == 'aarch64' && '--compatibility 2_17' || ''}} --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
@@ -96,13 +96,13 @@ jobs:
           persist-credentials: false

       # Login to DockerHub (when not pushing, it's to avoid rate-limiting)
-      - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         if: ${{ needs.docker-plan.outputs.login == 'true' }}
         with:
           username: ${{ needs.docker-plan.outputs.push == 'true' && 'astral' || 'astralshbot' }}
           password: ${{ needs.docker-plan.outputs.push == 'true' && secrets.DOCKERHUB_TOKEN_RW || secrets.DOCKERHUB_TOKEN_RO }}

-      - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -127,7 +127,7 @@ jobs:

       - name: Extract metadata (tags, labels) for Docker
         id: meta
-        uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
+        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
         env:
           DOCKER_METADATA_ANNOTATIONS_LEVELS: index
         with:
@@ -178,54 +178,54 @@
         # Mapping of base image followed by a comma followed by one or more base tags (comma separated)
         # Note, org.opencontainers.image.version label will use the first base tag (use the most specific tag first)
         image-mapping:
-          - alpine:3.22,alpine3.22,alpine
-          - alpine:3.21,alpine3.21
-          - debian:trixie-slim,trixie-slim,debian-slim
-          - buildpack-deps:trixie,trixie,debian
-          - debian:bookworm-slim,bookworm-slim
-          - buildpack-deps:bookworm,bookworm
-          - python:3.14-alpine3.23,python3.14-alpine3.23,python3.14-alpine
-          - python:3.13-alpine3.23,python3.13-alpine3.23,python3.13-alpine
-          - python:3.12-alpine3.23,python3.12-alpine3.23,python3.12-alpine
-          - python:3.11-alpine3.23,python3.11-alpine3.23,python3.11-alpine
-          - python:3.10-alpine3.23,python3.10-alpine3.23,python3.10-alpine
-          - python:3.9-alpine3.22,python3.9-alpine3.22,python3.9-alpine
-          - python:3.8-alpine3.20,python3.8-alpine3.20,python3.8-alpine
-          - python:3.14-trixie,python3.14-trixie
-          - python:3.13-trixie,python3.13-trixie
-          - python:3.12-trixie,python3.12-trixie
-          - python:3.11-trixie,python3.11-trixie
-          - python:3.10-trixie,python3.10-trixie
-          - python:3.9-trixie,python3.9-trixie
-          - python:3.14-slim-trixie,python3.14-trixie-slim
-          - python:3.13-slim-trixie,python3.13-trixie-slim
-          - python:3.12-slim-trixie,python3.12-trixie-slim
-          - python:3.11-slim-trixie,python3.11-trixie-slim
-          - python:3.10-slim-trixie,python3.10-trixie-slim
-          - python:3.9-slim-trixie,python3.9-trixie-slim
-          - python:3.14-bookworm,python3.14-bookworm
+          - alpine:3.21,alpine3.21,alpine
+          - alpine:3.22,alpine3.22
+          - debian:bookworm-slim,bookworm-slim,debian-slim
+          - buildpack-deps:bookworm,bookworm,debian
+          - debian:trixie-slim,trixie-slim
+          - buildpack-deps:trixie,trixie
+          - python:3.14-rc-alpine,python3.14-rc-alpine
+          - python:3.13-alpine,python3.13-alpine
+          - python:3.12-alpine,python3.12-alpine
+          - python:3.11-alpine,python3.11-alpine
+          - python:3.10-alpine,python3.10-alpine
+          - python:3.9-alpine,python3.9-alpine
+          - python:3.8-alpine,python3.8-alpine
+          - python:3.14-rc-bookworm,python3.14-rc-bookworm
           - python:3.13-bookworm,python3.13-bookworm
           - python:3.12-bookworm,python3.12-bookworm
           - python:3.11-bookworm,python3.11-bookworm
           - python:3.10-bookworm,python3.10-bookworm
           - python:3.9-bookworm,python3.9-bookworm
           - python:3.8-bookworm,python3.8-bookworm
-          - python:3.14-slim-bookworm,python3.14-bookworm-slim
+          - python:3.14-rc-slim-bookworm,python3.14-rc-bookworm-slim
           - python:3.13-slim-bookworm,python3.13-bookworm-slim
           - python:3.12-slim-bookworm,python3.12-bookworm-slim
           - python:3.11-slim-bookworm,python3.11-bookworm-slim
           - python:3.10-slim-bookworm,python3.10-bookworm-slim
           - python:3.9-slim-bookworm,python3.9-bookworm-slim
           - python:3.8-slim-bookworm,python3.8-bookworm-slim
+          - python:3.14-rc-trixie,python3.14-rc-trixie
+          - python:3.13-trixie,python3.13-trixie
+          - python:3.12-trixie,python3.12-trixie
+          - python:3.11-trixie,python3.11-trixie
+          - python:3.10-trixie,python3.10-trixie
+          - python:3.9-trixie,python3.9-trixie
+          - python:3.14-rc-slim-trixie,python3.14-rc-trixie-slim
+          - python:3.13-slim-trixie,python3.13-trixie-slim
+          - python:3.12-slim-trixie,python3.12-trixie-slim
+          - python:3.11-slim-trixie,python3.11-trixie-slim
+          - python:3.10-slim-trixie,python3.10-trixie-slim
+          - python:3.9-slim-trixie,python3.9-trixie-slim
     steps:
       # Login to DockerHub (when not pushing, it's to avoid rate-limiting)
-      - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         if: ${{ needs.docker-plan.outputs.login == 'true' }}
         with:
           username: ${{ needs.docker-plan.outputs.push == 'true' && 'astral' || 'astralshbot' }}
           password: ${{ needs.docker-plan.outputs.push == 'true' && secrets.DOCKERHUB_TOKEN_RW || secrets.DOCKERHUB_TOKEN_RO }}

-      - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
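Per the comment at the top of the hunk, each `image-mapping` entry packs a base image and its derived tags into one comma-separated string, with the most specific tag first. A sketch of how such an entry decodes (entry value taken from the list above):

```python
# "<base image>,<tag1>[,<tag2>...]" -- the first tag is the most specific one
# and feeds the org.opencontainers.image.version label.
entry = "python:3.13-bookworm,python3.13-bookworm"
base_image, *base_tags = entry.split(",")
assert base_image == "python:3.13-bookworm"
assert base_tags[0] == "python3.13-bookworm"
```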
@@ -275,7 +275,7 @@ jobs:

       - name: Extract metadata (tags, labels) for Docker
         id: meta
-        uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
+        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
         # ghcr.io prefers index level annotations
         env:
           DOCKER_METADATA_ANNOTATIONS_LEVELS: index
@@ -369,12 +369,12 @@ jobs:
       - docker-publish-extra
     if: ${{ needs.docker-plan.outputs.push == 'true' }}
     steps:
-      - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
           username: astral
           password: ${{ secrets.DOCKERHUB_TOKEN_RW }}

-      - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -27,8 +27,6 @@ jobs:
     outputs:
       # Flag that is raised when any code is changed
       code: ${{ steps.changed.outputs.code_any_changed }}
-      # Flag that is raised when uv.schema.json is changed (e.g., in a release PR)
-      schema: ${{ steps.changed.outputs.schema_changed }}
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
@@ -42,16 +40,10 @@ jobs:
           CHANGED_FILES=$(git diff --name-only ${{ github.event.pull_request.base.sha || 'origin/main' }}...HEAD)

           CODE_CHANGED=false
-          SCHEMA_CHANGED=false

           while IFS= read -r file; do
-            # Check if the schema file changed (e.g., in a release PR)
-            if [[ "${file}" == "uv.schema.json" ]]; then
-              echo "Detected schema change: ${file}"
-              SCHEMA_CHANGED=true
-            fi
-
-            if [[ "${file}" =~ ^docs/ ]]; then
+            # Generated markdown and JSON files are checked during test runs.
+            if [[ "${file}" =~ ^docs/ && ! "${file}" =~ ^docs/reference/(cli|settings).md && ! "${file}" =~ ^docs/reference/environment.md ]]; then
               echo "Skipping ${file} (matches docs/ pattern)"
               continue
             fi
@@ -78,7 +70,6 @@ jobs:

           done <<< "${CHANGED_FILES}"
           echo "code_any_changed=${CODE_CHANGED}" >> "${GITHUB_OUTPUT}"
-          echo "schema_changed=${SCHEMA_CHANGED}" >> "${GITHUB_OUTPUT}"
   lint:
     timeout-minutes: 10
     name: "lint"
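On the main side, the loop above classifies each changed path to decide which CI jobs are worth running: `uv.schema.json` raises the schema flag, `docs/`-only changes are skipped, and anything else counts as a code change. A rough Python restatement of the visible branches (illustrative only; the workflow has further patterns outside these hunks):

```python
# Illustrative re-statement of the visible classification logic.
def classify(changed_files: list[str]) -> tuple[bool, bool]:
    code_changed = schema_changed = False
    for path in changed_files:
        if path == "uv.schema.json":
            schema_changed = True  # e.g., flipped by a release PR
        if path.startswith("docs/"):
            continue  # docs-only edits don't trigger code jobs
        code_changed = True
    return code_changed, schema_changed

print(classify(["docs/guides/install.md"]))                   # (False, False)
print(classify(["uv.schema.json", "crates/uv/src/main.rs"]))  # (True, True)
```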
@@ -96,9 +87,7 @@ jobs:
       - run: rustup component add rustfmt

       - name: "Install uv"
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
-        with:
-          version: "0.9.13"
+        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3

       - name: "rustfmt"
         run: cargo fmt --all --check
@@ -127,7 +116,7 @@ jobs:
         uses: ludeeus/action-shellcheck@00cae500b08a931fb5698e11e79bfbd38e612a38 # 2.0.0
         env:
           # renovate: datasource=github-tags depName=koalaman/shellcheck
-          SHELLCHECK_VERSION: "v0.11.0"
+          SHELLCHECK_VERSION: "v0.10.0"
           SHELLCHECK_OPTS: --shell bash
         with:
           version: ${{ env.SHELLCHECK_VERSION }}
@@ -144,7 +133,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
           save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Check uv_build dependencies"
@@ -176,7 +165,7 @@ jobs:
         run: |
           Copy-Item -Path "${{ github.workspace }}" -Destination "$Env:UV_WORKSPACE" -Recurse

-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
           workspaces: ${{ env.UV_WORKSPACE }}

@@ -187,22 +176,6 @@ jobs:
         working-directory: ${{ env.UV_WORKSPACE }}
         run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings

-  cargo-publish-dry-run:
-    timeout-minutes: 20
-    needs: determine_changes
-    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
-    runs-on: depot-ubuntu-22.04-8
-    name: "cargo publish dry-run"
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
-      - name: "cargo publish dry-run"
-        run: cargo publish --workspace --dry-run
-
   cargo-dev-generate-all:
     timeout-minutes: 10
     needs: determine_changes
@@ -213,16 +186,11 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
          save-if: ${{ github.ref == 'refs/heads/main' }}
       - name: "Generate all"
-        run: cargo dev generate-all --mode dry-run
-      - name: "Check sysconfig mappings"
-        run: cargo dev generate-sysconfig-metadata --mode check
-      - name: "Check JSON schema"
-        if: ${{ needs.determine_changes.outputs.schema == 'true' }}
-        run: cargo dev generate-json-schema --mode check
+        run: cargo dev generate-all --mode check

   cargo-shear:
     timeout-minutes: 10
@@ -233,7 +201,7 @@ jobs:
         with:
           persist-credentials: false
       - name: "Install cargo shear"
-        uses: taiki-e/install-action@d850aa816998e5cf15f67a78c7b933f2a5033f8a # v2.63.3
+        uses: taiki-e/install-action@a416ddeedbd372e614cc1386e8b642692f66865e # v2.57.1
         with:
           tool: cargo-shear
       - run: cargo shear
@@ -255,15 +223,12 @@ jobs:

       - uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1

-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: "Install Rust toolchain"
         run: rustup show

-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
-        with:
-          version: "0.9.13"
+      - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3

       - name: "Install required Python versions"
         run: uv python install

@@ -289,13 +254,12 @@ jobs:
         UV_HTTP_RETRIES: 5
         run: |
           cargo nextest run \
-            --cargo-profile fast-build \
            --features python-patch,native-auth,secret-service \
             --workspace \
             --status-level skip --failure-output immediate-final --no-fail-fast -j 20 --final-status-level slow

   cargo-test-macos:
-    timeout-minutes: 20
+    timeout-minutes: 15
     needs: determine_changes
     # Only run macOS tests on main without opt-in
     if: ${{ contains(github.event.pull_request.labels.*.name, 'test:macos') || github.ref == 'refs/heads/main' }}
@@ -308,15 +272,12 @@ jobs:

       - uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1

-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: "Install Rust toolchain"
         run: rustup show

-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
-        with:
-          version: "0.9.13"
+      - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3

       - name: "Install required Python versions"
         run: uv python install

@@ -331,9 +292,8 @@ jobs:
         UV_HTTP_RETRIES: 5
         run: |
           cargo nextest run \
-            --cargo-profile fast-build \
             --no-default-features \
-            --features python,python-managed,pypi,git,git-lfs,performance,crates-io,native-auth,apple-native \
+            --features python,python-managed,pypi,git,performance,crates-io,native-auth,apple-native \
             --workspace \
             --status-level skip --failure-output immediate-final --no-fail-fast -j 12 --final-status-level slow

@@ -356,14 +316,11 @@ jobs:
         run: |
           Copy-Item -Path "${{ github.workspace }}" -Destination "$Env:UV_WORKSPACE" -Recurse

-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
-        with:
-          version: "0.9.13"
+      - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3

       - name: "Install required Python versions"
         run: uv python install

-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
           workspaces: ${{ env.UV_WORKSPACE }}

@@ -387,7 +344,6 @@ jobs:
         shell: bash
         run: |
           cargo nextest run \
-            --cargo-profile fast-build \
             --no-default-features \
             --features python,pypi,python-managed,native-auth,windows-native \
             --workspace \
@@ -417,7 +373,7 @@ jobs:
         run: |
           Copy-Item -Path "${{ github.workspace }}" -Destination "$Env:UV_WORKSPACE" -Recurse

-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
           workspaces: ${{ env.UV_WORKSPACE }}/crates/uv-trampoline

@@ -477,7 +433,7 @@ jobs:
       - name: Copy Git Repo to Dev Drive
         run: |
           Copy-Item -Path "${{ github.workspace }}" -Destination "$Env:UV_WORKSPACE" -Recurse
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
         with:
           workspaces: ${{ env.UV_WORKSPACE }}/crates/uv-trampoline
       - name: "Install Rust toolchain"
@@ -495,8 +451,8 @@ jobs:
         working-directory: ${{ env.UV_WORKSPACE }}/crates/uv-trampoline
         run: |
           cargo build --target ${{ matrix.target-arch }}-pc-windows-msvc
-          cp target/${{ matrix.target-arch }}-pc-windows-msvc/debug/uv-trampoline-console.exe ../uv-trampoline-builder/trampolines/uv-trampoline-${{ matrix.target-arch }}-console.exe
-          cp target/${{ matrix.target-arch }}-pc-windows-msvc/debug/uv-trampoline-gui.exe ../uv-trampoline-builder/trampolines/uv-trampoline-${{ matrix.target-arch }}-gui.exe
+          cp target/${{ matrix.target-arch }}-pc-windows-msvc/debug/uv-trampoline-console.exe trampolines/uv-trampoline-${{ matrix.target-arch }}-console.exe
+          cp target/${{ matrix.target-arch }}-pc-windows-msvc/debug/uv-trampoline-gui.exe trampolines/uv-trampoline-${{ matrix.target-arch }}-gui.exe
       - name: "Test new binaries"
         working-directory: ${{ env.UV_WORKSPACE }}
         run: |
@@ -509,7 +465,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: crate-ci/typos@64e4db431eb262bb5c6baa19dce280d78532830c # v1.37.3
+      - uses: crate-ci/typos@392b78fe18a52790c53f42456e46124f77346842 # v1.34.0

   docs:
     timeout-minutes: 10
@@ -522,19 +478,8 @@ jobs:
         with:
           fetch-depth: 0
           persist-credentials: false
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
-        with:
-          version: "0.9.13"
-
+      - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
       - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-        with:
-          save-if: ${{ github.ref == 'refs/heads/main' }}
-      - name: "Generate reference documentation"
-        run: |
-          cargo dev generate-options-reference
-          cargo dev generate-cli-reference
-          cargo dev generate-env-vars-reference
       - name: "Add SSH key"
         if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
         uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
@@ -561,18 +506,18 @@ jobs:

       - uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1

-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: "Build"
-        run: cargo build --profile no-debug
+        run: cargo build

       - name: "Upload binary"
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: uv-linux-libc-${{ github.sha }}
           path: |
-            ./target/no-debug/uv
-            ./target/no-debug/uvx
+            ./target/debug/uv
+            ./target/debug/uvx
           retention-days: 1

   build-binary-linux-aarch64:
@@ -588,18 +533,18 @@ jobs:

       - uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1

-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: "Build"
-        run: cargo build --profile no-debug
+        run: cargo build

       - name: "Upload binary"
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: uv-linux-aarch64-${{ github.sha }}
           path: |
-            ./target/no-debug/uv
-            ./target/no-debug/uvx
+            ./target/debug/uv
+            ./target/debug/uvx
           retention-days: 1

   build-binary-linux-musl:
@@ -620,18 +565,18 @@ jobs:
           sudo apt-get install musl-tools
           rustup target add x86_64-unknown-linux-musl

-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: "Build"
-        run: cargo build --profile no-debug --target x86_64-unknown-linux-musl --bin uv --bin uvx
+        run: cargo build --target x86_64-unknown-linux-musl --bin uv --bin uvx

       - name: "Upload binary"
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: uv-linux-musl-${{ github.sha }}
           path: |
-            ./target/x86_64-unknown-linux-musl/no-debug/uv
-            ./target/x86_64-unknown-linux-musl/no-debug/uvx
+            ./target/x86_64-unknown-linux-musl/debug/uv
+            ./target/x86_64-unknown-linux-musl/debug/uvx
           retention-days: 1

   build-binary-macos-aarch64:
@@ -647,17 +592,17 @@ jobs:

       - uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1

-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
       - name: "Build"
-        run: cargo build --profile no-debug --bin uv --bin uvx
+        run: cargo build --bin uv --bin uvx

       - name: "Upload binary"
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: uv-macos-aarch64-${{ github.sha }}
           path: |
-            ./target/no-debug/uv
-            ./target/no-debug/uvx
+            ./target/debug/uv
+            ./target/debug/uvx
           retention-days: 1

   build-binary-macos-x86_64:
@ -673,17 +618,17 @@ jobs:
|
||||||
|
|
||||||
- uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
|
- uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
|
||||||
|
|
||||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
||||||
- name: "Build"
|
- name: "Build"
|
||||||
run: cargo build --profile no-debug --bin uv --bin uvx
|
run: cargo build --bin uv --bin uvx
|
||||||
|
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||||
with:
|
with:
|
||||||
name: uv-macos-x86_64-${{ github.sha }}
|
name: uv-macos-x86_64-${{ github.sha }}
|
||||||
path: |
|
path: |
|
||||||
./target/no-debug/uv
|
./target/debug/uv
|
||||||
./target/no-debug/uvx
|
./target/debug/uvx
|
||||||
retention-days: 1
|
retention-days: 1
|
||||||
|
|
||||||
build-binary-windows-x86_64:
|
build-binary-windows-x86_64:
|
||||||
|
|
@ -705,21 +650,21 @@ jobs:
|
||||||
run: |
|
run: |
|
||||||
Copy-Item -Path "${{ github.workspace }}" -Destination "$Env:UV_WORKSPACE" -Recurse
|
Copy-Item -Path "${{ github.workspace }}" -Destination "$Env:UV_WORKSPACE" -Recurse
|
||||||
|
|
||||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
||||||
with:
|
with:
|
||||||
workspaces: ${{ env.UV_WORKSPACE }}
|
workspaces: ${{ env.UV_WORKSPACE }}
|
||||||
|
|
||||||
- name: "Build"
|
- name: "Build"
|
||||||
working-directory: ${{ env.UV_WORKSPACE }}
|
working-directory: ${{ env.UV_WORKSPACE }}
|
||||||
run: cargo build --profile no-debug --bin uv --bin uvx
|
run: cargo build --bin uv --bin uvx
|
||||||
|
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||||
with:
|
with:
|
||||||
name: uv-windows-x86_64-${{ github.sha }}
|
name: uv-windows-x86_64-${{ github.sha }}
|
||||||
path: |
|
path: |
|
||||||
${{ env.UV_WORKSPACE }}/target/no-debug/uv.exe
|
${{ env.UV_WORKSPACE }}/target/debug/uv.exe
|
||||||
${{ env.UV_WORKSPACE }}/target/no-debug/uvx.exe
|
${{ env.UV_WORKSPACE }}/target/debug/uvx.exe
|
||||||
retention-days: 1
|
retention-days: 1
|
||||||
|
|
||||||
build-binary-windows-aarch64:
|
build-binary-windows-aarch64:
|
||||||
|
|
@ -742,7 +687,7 @@ jobs:
|
||||||
run: |
|
run: |
|
||||||
Copy-Item -Path "${{ github.workspace }}" -Destination "$Env:UV_WORKSPACE" -Recurse
|
Copy-Item -Path "${{ github.workspace }}" -Destination "$Env:UV_WORKSPACE" -Recurse
|
||||||
|
|
||||||
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
|
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
||||||
with:
|
with:
|
||||||
workspaces: ${{ env.UV_WORKSPACE }}
|
workspaces: ${{ env.UV_WORKSPACE }}
|
||||||
|
|
||||||
|
|
@ -751,15 +696,15 @@ jobs:
|
||||||
|
|
||||||
- name: "Build"
|
- name: "Build"
|
||||||
working-directory: ${{ env.UV_WORKSPACE }}
|
working-directory: ${{ env.UV_WORKSPACE }}
|
||||||
run: cargo build --profile no-debug --target aarch64-pc-windows-msvc
|
run: cargo build --target aarch64-pc-windows-msvc
|
||||||
|
|
||||||
- name: "Upload binary"
|
- name: "Upload binary"
|
||||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||||
with:
|
with:
|
||||||
name: uv-windows-aarch64-${{ github.sha }}
|
name: uv-windows-aarch64-${{ github.sha }}
|
||||||
path: |
|
path: |
|
||||||
${{ env.UV_WORKSPACE }}/target/aarch64-pc-windows-msvc/no-debug/uv.exe
|
${{ env.UV_WORKSPACE }}/target/aarch64-pc-windows-msvc/debug/uv.exe
|
||||||
${{ env.UV_WORKSPACE }}/target/aarch64-pc-windows-msvc/no-debug/uvx.exe
|
${{ env.UV_WORKSPACE }}/target/aarch64-pc-windows-msvc/debug/uvx.exe
|
||||||
retention-days: 1
|
retention-days: 1
|
||||||
|
|
||||||
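The recurring change in the build jobs above is the Cargo profile: main passes `--profile no-debug` and uploads from `target/no-debug/`, while 0.8.17 builds the default debug profile and uploads from `target/debug/`. A minimal sketch of the profile mechanics, assuming a `[profile.no-debug]` section is declared in the workspace `Cargo.toml` (its exact settings are not shown in this diff):

```shell
# Sketch: build with a custom Cargo profile. Cargo writes the output to a
# directory named after the profile, which is why the upload paths above
# change from `target/debug/` to `target/no-debug/`.
cargo build --profile no-debug --bin uv --bin uvx
ls target/no-debug/uv target/no-debug/uvx
```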
   build-binary-msrv:
@@ -783,11 +728,11 @@ jobs:
           MSRV: ${{ steps.msrv.outputs.value }}
       - name: "Install mold"
         uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-      - run: cargo +${MSRV} build --profile no-debug
+      - run: cargo +${MSRV} build
         env:
           MSRV: ${{ steps.msrv.outputs.value }}
-      - run: ./target/no-debug/uv --version
+      - run: ./target/debug/uv --version

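The MSRV job above builds with the toolchain reported by its `msrv` step. A rough local equivalent, assuming the value comes from the `rust-version` field in `Cargo.toml` (the `grep`/`sed` extraction below is illustrative, not the workflow's actual mechanism):

```shell
# Sketch: build uv with the minimum supported Rust version from Cargo.toml.
MSRV="$(grep -m1 '^rust-version' Cargo.toml | sed 's/.*"\(.*\)".*/\1/')"
rustup toolchain install "${MSRV}"
cargo "+${MSRV}" build
./target/debug/uv --version
```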
   build-binary-freebsd:
     needs: determine_changes
@@ -800,7 +745,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
       - name: "Cross build"
         run: |
           # Install cross from `freebsd-firecracker`
@@ -808,10 +753,10 @@ jobs:
           chmod +x cross
           mv cross /usr/local/bin/cross

-          cross build --target x86_64-unknown-freebsd --profile no-debug
+          cross build --target x86_64-unknown-freebsd

       - name: Test in Firecracker VM
-        uses: acj/freebsd-firecracker-action@a5a3fc1709c5b5368141a5699f10259aca3cd965 # v0.6.0
+        uses: acj/freebsd-firecracker-action@e8e9155e944111ea65be7a606c69b32092f3c4c8 # v0.5.2
         with:
           verbose: false
           checkout: false
@@ -822,8 +767,8 @@ jobs:
           cat <<EOF > $include_path
           target
           target/x86_64-unknown-freebsd
-          target/x86_64-unknown-freebsd/no-debug
+          target/x86_64-unknown-freebsd/debug
-          target/x86_64-unknown-freebsd/no-debug/uv
+          target/x86_64-unknown-freebsd/debug/uv
           EOF

           rsync -r -e "ssh" \
@@ -833,7 +778,7 @@ jobs:
             --exclude "*" \
             . firecracker:
         run-in-vm: |
-          mv target/x86_64-unknown-freebsd/no-debug/uv uv
+          mv target/x86_64-unknown-freebsd/debug/uv uv
           chmod +x uv
           ./uv --version

@@ -1330,30 +1275,6 @@ jobs:
           ./uv run python -c ""
           ./uv run -p 3.13 python -c ""

-  integration-test-windows-python-install-manager:
-    timeout-minutes: 10
-    needs: build-binary-windows-x86_64
-    name: "integration test | windows python install manager"
-    runs-on: windows-latest
-
-    steps:
-      - name: "Download binary"
-        uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
-        with:
-          name: uv-windows-x86_64-${{ github.sha }}
-
-      - name: "Install Python via Python Install manager"
-        run: |
-          # https://www.python.org/downloads/release/pymanager-250/
-          winget install --accept-package-agreements --accept-source-agreements 9NQ7512CXL7T
-          # Call Python Install Manager's py.exe by full path to avoid legacy py.exe
-          & "$env:LOCALAPPDATA\Microsoft\WindowsApps\py.exe" install 3.14
-
-      # https://github.com/astral-sh/uv/issues/16204
-      - name: "Check temporary environment creation"
-        run: |
-          ./uv run -p $env:LOCALAPPDATA\Python\pythoncore-3.14-64\python.exe --with numpy python -c "import sys; print(sys.executable)"
-
   integration-test-pypy-linux:
     timeout-minutes: 10
     needs: build-binary-linux-libc
@@ -1773,7 +1694,7 @@ jobs:
           name: uv-linux-musl-${{ github.sha }}

       - name: "Setup WSL"
-        uses: Vampire/setup-wsl@6a8db447be7ed35f2f499c02c6e60ff77ef11278 # v6.0.0
+        uses: Vampire/setup-wsl@6a8db447be7ed35f2f499c02c6e60ff77ef11278 # v6
         with:
           distribution: Ubuntu-22.04

@@ -1873,7 +1794,7 @@ jobs:
         run: chmod +x ./uv

       - name: "Configure AWS credentials"
-        uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708 # v5.1.1
+        uses: aws-actions/configure-aws-credentials@8e2d02296bcf12c081e192c60f3cb80d77f4f76a
         with:
           aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -1999,7 +1920,7 @@ jobs:
           ../uv build

       - name: "Publish astral-test-pypa-gh-action"
-        uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
+        uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # release/v1
         with:
           # With this GitHub action, we can't do as rigid checks as with our custom Python script, so we publish more
           # leniently
@@ -2032,7 +1953,6 @@ jobs:
           UV_TEST_PUBLISH_GITLAB_PAT: ${{ secrets.UV_TEST_PUBLISH_GITLAB_PAT }}
           UV_TEST_PUBLISH_CODEBERG_TOKEN: ${{ secrets.UV_TEST_PUBLISH_CODEBERG_TOKEN }}
           UV_TEST_PUBLISH_CLOUDSMITH_TOKEN: ${{ secrets.UV_TEST_PUBLISH_CLOUDSMITH_TOKEN }}
-          UV_TEST_PUBLISH_PYX_TOKEN: ${{ secrets.UV_TEST_PUBLISH_PYX_TOKEN }}
           UV_TEST_PUBLISH_PYTHON_VERSION: ${{ env.PYTHON_VERSION }}

   integration-uv-build-backend:
@@ -2067,22 +1987,22 @@ jobs:

           # Test the main path (`build_wheel`) through pip
           ./uv venv -v --seed
-          ./uv run --no-project python -m pip install -v test/packages/built-by-uv --find-links crates/uv-build/dist --no-index --no-deps
+          ./uv run --no-project python -m pip install -v scripts/packages/built-by-uv --find-links crates/uv-build/dist --no-index --no-deps
           ./uv run --no-project python -c "from built_by_uv import greet; print(greet())"

           # Test both `build_wheel` and `build_sdist` through uv
           ./uv venv -c -v
-          ./uv build -v --force-pep517 test/packages/built-by-uv --find-links crates/uv-build/dist --offline
+          ./uv build -v --force-pep517 scripts/packages/built-by-uv --find-links crates/uv-build/dist --offline
-          ./uv pip install -v test/packages/built-by-uv/dist/*.tar.gz --find-links crates/uv-build/dist --offline --no-deps
+          ./uv pip install -v scripts/packages/built-by-uv/dist/*.tar.gz --find-links crates/uv-build/dist --offline --no-deps
           ./uv run --no-project python -c "from built_by_uv import greet; print(greet())"

           # Test both `build_wheel` and `build_sdist` through the official `build`
-          rm -rf test/packages/built-by-uv/dist/
+          rm -rf scripts/packages/built-by-uv/dist/
           ./uv venv -c -v
           ./uv pip install build
           # Add the uv binary to PATH for `build` to find
-          PATH="$(pwd):$PATH" UV_OFFLINE=1 UV_FIND_LINKS=crates/uv-build/dist ./uv run --no-project python -m build -v --installer uv test/packages/built-by-uv
+          PATH="$(pwd):$PATH" UV_OFFLINE=1 UV_FIND_LINKS=crates/uv-build/dist ./uv run --no-project python -m build -v --installer uv scripts/packages/built-by-uv
-          ./uv pip install -v test/packages/built-by-uv/dist/*.tar.gz --find-links crates/uv-build/dist --offline --no-deps
+          ./uv pip install -v scripts/packages/built-by-uv/dist/*.tar.gz --find-links crates/uv-build/dist --offline --no-deps
           ./uv run --no-project python -c "from built_by_uv import greet; print(greet())"

   cache-test-ubuntu:
@@ -2268,11 +2188,11 @@ jobs:
     needs: build-binary-linux-musl
     name: "check system | python on rocky linux ${{ matrix.rocky-version }}"
     runs-on: ubuntu-latest
-    container: rockylinux/rockylinux:${{ matrix.rocky-version }}
+    container: rockylinux:${{ matrix.rocky-version }}
     strategy:
       fail-fast: false
       matrix:
-        rocky-version: ["8", "9", "10"]
+        rocky-version: ["8", "9"]
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
@@ -2281,29 +2201,12 @@ jobs:
       - name: "Install Python"
         if: matrix.rocky-version == '8'
         run: |
-          for i in {1..5}; do
+          dnf install python39 python39-pip which -y
-            dnf install python39 python39-pip which -y && break || { echo "Attempt $i failed, retrying in 10 seconds..."; sleep 10; }
-            if [ $i -eq 5 ]; then
-              echo "Failed to install Python after 5 attempts"
-              exit 1
-            fi
-          done

       - name: "Install Python"
         if: matrix.rocky-version == '9'
         run: |
-          for i in {1..5}; do
+          dnf install python3.9 python3.9-pip which -y
-            dnf install python3.9 python3.9-pip which -y && break || { echo "Attempt $i failed, retrying in 10 seconds..."; sleep 10; }
-            if [ $i -eq 5 ]; then
-              echo "Failed to install Python after 5 attempts"
-              exit 1
-            fi
-          done

-      - name: "Install Python"
-        if: matrix.rocky-version == '10'
-        run: |
-          dnf install python3 python3-pip which -y
-
       - name: "Download binary"
         uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
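The Rocky Linux hunks above wrap `dnf install` in a retry loop on main, five attempts with a ten-second pause, where 0.8.17 ran a single install. The same pattern, factored into a reusable function (the `retry` helper is an illustrative sketch, not part of the workflow):

```shell
# Sketch: retry a flaky command up to five times with a fixed delay,
# mirroring the `for i in {1..5}` loop in the hunk above.
retry() {
  for i in {1..5}; do
    "$@" && return 0
    echo "Attempt $i failed, retrying in 10 seconds..."
    sleep 10
  done
  echo "Failed after 5 attempts: $*"
  return 1
}

retry dnf install python39 python39-pip which -y
```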
@@ -2516,7 +2419,7 @@ jobs:
     timeout-minutes: 10
     needs: build-binary-macos-x86_64
     name: "check system | python on macos x86-64"
-    runs-on: macos-15-intel # github-macos-15-x86_64-4
+    runs-on: macos-13 # github-macos-13-x86_64-4
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
@@ -2538,7 +2441,7 @@ jobs:
         run: echo $(which python3)

       - name: "Validate global Python install"
-        run: python3 scripts/check_system_python.py --uv ./uv --externally-managed
+        run: python3 scripts/check_system_python.py --uv ./uv

   system-test-windows-python-310:
     timeout-minutes: 10
@@ -2924,14 +2827,14 @@ jobs:
     runs-on: codspeed-macro
     needs: determine_changes
     if: ${{ github.repository == 'astral-sh/uv' && !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
-    timeout-minutes: 25
+    timeout-minutes: 20
     steps:
       - name: "Checkout Branch"
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false

-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: "Install Rust toolchain"
         run: rustup show
@@ -2946,17 +2849,16 @@ jobs:
           sudo apt-get update
           sudo apt-get install -y libsasl2-dev libldap2-dev libkrb5-dev
           cargo run --bin uv -- venv --cache-dir .cache
-          cargo run --bin uv -- pip compile test/requirements/jupyter.in --universal --exclude-newer 2024-08-08 --cache-dir .cache
+          cargo run --bin uv -- pip compile scripts/requirements/jupyter.in --universal --exclude-newer 2024-08-08 --cache-dir .cache
-          cargo run --bin uv -- pip compile test/requirements/airflow.in --universal --exclude-newer 2024-08-08 --cache-dir .cache
+          cargo run --bin uv -- pip compile scripts/requirements/airflow.in --universal --exclude-newer 2024-08-08 --cache-dir .cache

       - name: "Build benchmarks"
-        run: cargo codspeed build --profile profiling -p uv-bench
+        run: cargo codspeed build --profile profiling --features codspeed -p uv-bench

       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
+        uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
         with:
           run: cargo codspeed run
-          mode: walltime
           token: ${{ secrets.CODSPEED_TOKEN }}

   benchmarks-instrumented:
@@ -2971,7 +2873,7 @@ jobs:
         with:
           persist-credentials: false

-      - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
+      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

       - name: "Install Rust toolchain"
         run: rustup show
@@ -2986,15 +2888,14 @@ jobs:
           sudo apt-get update
           sudo apt-get install -y libsasl2-dev libldap2-dev libkrb5-dev
           cargo run --bin uv -- venv --cache-dir .cache
-          cargo run --bin uv -- pip compile test/requirements/jupyter.in --universal --exclude-newer 2024-08-08 --cache-dir .cache
+          cargo run --bin uv -- pip compile scripts/requirements/jupyter.in --universal --exclude-newer 2024-08-08 --cache-dir .cache
-          cargo run --bin uv -- pip compile test/requirements/airflow.in --universal --exclude-newer 2024-08-08 --cache-dir .cache
+          cargo run --bin uv -- pip compile scripts/requirements/airflow.in --universal --exclude-newer 2024-08-08 --cache-dir .cache

       - name: "Build benchmarks"
-        run: cargo codspeed build --profile profiling -p uv-bench
+        run: cargo codspeed build --profile profiling --features codspeed -p uv-bench

       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
+        uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
         with:
           run: cargo codspeed run
-          mode: instrumentation
           token: ${{ secrets.CODSPEED_TOKEN }}

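Both benchmark jobs follow the same two-step cargo-codspeed flow: build the `uv-bench` package under the `profiling` profile, then run the suite under the CodSpeed action. A rough local approximation, assuming `cargo-codspeed` is installed; CI additionally pre-warms a uv cache and reports results through the action with a `token`:

```shell
# Sketch: build and run the benchmark suite the way the CI jobs above do.
cargo install cargo-codspeed   # assumed prerequisite
cargo codspeed build --profile profiling -p uv-bench
cargo codspeed run
```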
@@ -1,33 +0,0 @@
-# Publish a release to crates.io.
-#
-# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish job
-# within `cargo-dist`.
-name: "Publish to crates.io"
-
-on:
-  workflow_call:
-    inputs:
-      plan:
-        required: true
-        type: string
-
-jobs:
-  crates-publish-uv:
-    name: Upload uv to crates.io
-    runs-on: ubuntu-latest
-    environment:
-      name: release
-    permissions:
-      contents: read
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
-      # TODO(zanieb): Switch to trusted publishing once published
-      # - uses: rust-lang/crates-io-auth-action@v1
-      #   id: auth
-      - name: Publish workspace crates
-        # Note `--no-verify` is safe because we do a publish dry-run elsewhere in CI
-        run: cargo publish --workspace --no-verify
-        env:
-          CARGO_REGISTRY_TOKEN: ${{ secrets.CRATES_TOKEN }}

@@ -36,14 +36,6 @@ jobs:
         with:
           python-version: 3.12

-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-
-      - name: "Generate reference documentation"
-        run: |
-          cargo dev generate-options-reference
-          cargo dev generate-cli-reference
-          cargo dev generate-env-vars-reference
-
       - name: "Set docs display name"
         run: |
           version="${VERSION}"

@@ -18,15 +18,18 @@ jobs:
     environment:
       name: release
     permissions:
-      id-token: write # For PyPI's trusted publishing
+      # For PyPI's trusted publishing.
+      id-token: write
     steps:
       - name: "Install uv"
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
       - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
         with:
           pattern: wheels_uv-*
           path: wheels_uv
           merge-multiple: true
+      - name: Remove wheels unsupported by PyPI
+        run: rm wheels_uv/*loong*
       - name: Publish to PyPI
         run: uv publish -v wheels_uv/*

@@ -36,14 +39,17 @@ jobs:
     environment:
       name: release
     permissions:
-      id-token: write # For PyPI's trusted publishing
+      # For PyPI's trusted publishing.
+      id-token: write
     steps:
       - name: "Install uv"
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
       - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
         with:
           pattern: wheels_uv_build-*
           path: wheels_uv_build
           merge-multiple: true
+      - name: Remove wheels unsupported by PyPI
+        run: rm wheels_uv_build/*loong*
       - name: Publish to PyPI
         run: uv publish -v wheels_uv_build/*

@@ -1,6 +1,7 @@
-# This file was autogenerated by dist: https://axodotdev.github.io/cargo-dist
+# This file was autogenerated by dist: https://github.com/astral-sh/cargo-dist
 #
 # Copyright 2022-2024, axodotdev
+# Copyright 2025 Astral Software Inc.
 # SPDX-License-Identifier: MIT or Apache-2.0
 #
 # CI that:
@@ -68,7 +69,7 @@ jobs:
         # we specify bash to get pipefail; it guards against the `curl` command
         # failing. otherwise `sh` won't catch that `curl` returned non-0
         shell: bash
-        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.2/cargo-dist-installer.sh | sh"
+        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.7/cargo-dist-installer.sh | sh"
       - name: Cache dist
         uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
         with:
@@ -168,8 +169,8 @@ jobs:
       - custom-build-binaries
       - custom-build-docker
       - build-global-artifacts
-    # Only run if we're "publishing", and only if plan, local and global didn't fail (skipped is fine)
+    # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
-    if: ${{ always() && needs.plan.result == 'success' && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
+    if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
     env:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     runs-on: "depot-ubuntu-latest-4"
@@ -222,36 +223,17 @@ jobs:
       "id-token": "write"
       "packages": "write"

-  custom-publish-crates:
-    needs:
-      - plan
-      - host
-      - custom-publish-pypi # DIRTY: see #16989
-    if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
-    uses: ./.github/workflows/publish-crates.yml
-    with:
-      plan: ${{ needs.plan.outputs.val }}
-    secrets: inherit
-    # publish jobs get escalated permissions
-    permissions:
-      "contents": "read"
-
   # Create a GitHub Release while uploading all files to it
   announce:
     needs:
       - plan
       - host
       - custom-publish-pypi
-      - custom-publish-crates
     # use "always() && ..." to allow us to wait for all publish jobs while
     # still allowing individual publish jobs to skip themselves (for prereleases).
     # "host" however must run to completion, no skipping allowed!
-    if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') && (needs.custom-publish-crates.result == 'skipped' || needs.custom-publish-crates.result == 'success') }}
+    if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') }}
     runs-on: "depot-ubuntu-latest-4"
-    permissions:
-      "attestations": "write"
-      "contents": "write"
-      "id-token": "write"
     env:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     steps:
@@ -270,15 +252,6 @@ jobs:
         run: |
           # Remove the granular manifests
           rm -f artifacts/*-dist-manifest.json
-      - name: Attest
-        uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2
-        with:
-          subject-path: |
-            artifacts/*.json
-            artifacts/*.sh
-            artifacts/*.ps1
-            artifacts/*.zip
-            artifacts/*.tar.gz
       - name: Create GitHub Release
         env:
           PRERELEASE_FLAG: "${{ fromJson(needs.host.outputs.val).announcement_is_prerelease && '--prerelease' || '' }}"

@@ -20,7 +20,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+      - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
         with:
           version: "latest"
           enable-cache: true
@@ -49,4 +49,3 @@ jobs:
           title: "Sync latest Python releases"
           body: "Automated update for Python releases."
           base: "main"
-          draft: true

@@ -37,11 +37,6 @@ profile.json.gz
 # MkDocs
 /site

-# Generated reference docs (use `cargo dev generate-all` to regenerate)
-/docs/reference/cli.md
-/docs/reference/environment.md
-/docs/reference/settings.md
-
 # macOS
 **/.DS_Store

@@ -12,7 +12,7 @@ repos:
     - id: validate-pyproject

 - repo: https://github.com/crate-ci/typos
-  rev: v1.37.2
+  rev: v1.36.2
   hooks:
     - id: typos

@@ -42,7 +42,7 @@ repos:
         types_or: [yaml, json5]

 - repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.13.3
+  rev: v0.12.12
   hooks:
     - id: ruff-format
     - id: ruff

@@ -4,5 +4,5 @@ PREVIEW-CHANGELOG.md
 docs/reference/cli.md
 docs/reference/settings.md
 docs/reference/environment.md
-test/ecosystem/home-assistant-core/LICENSE.md
+ecosystem/home-assistant-core/LICENSE.md
 docs/guides/integration/gitlab.md

@@ -1,13 +1,3 @@
-# These are versions of Python required for running uv's own test suite. You can add or remove
-# versions here as needed for tests; this doesn't impact uv's own functionality. They can be
-# installed through any means you like, e.g. `uv python install` if you already have a build of uv,
-# `cargo run python install`, or through some other installer.
-#
-# In uv's CI in GitHub Actions, they are bootstrapped by an existing released version of uv,
-# installed by the astral-sh/setup-uv action If you need a newer or different version, you will
-# first need to complete a uv release capable of installing that version, get it picked up by
-# astral-sh/setup-uv, and update its hash in .github/workflows.
-
 3.14.0
 3.13.2
 3.12.9

CHANGELOG.md (905 changed lines): diff suppressed because it is too large.

@@ -1,125 +0,0 @@
-# Contributor Covenant Code of Conduct
-
-- [Our Pledge](#our-pledge)
-- [Our Standards](#our-standards)
-- [Enforcement Responsibilities](#enforcement-responsibilities)
-- [Scope](#scope)
-- [Enforcement](#enforcement)
-- [Enforcement Guidelines](#enforcement-guidelines)
-  - [1. Correction](#1-correction)
-  - [2. Warning](#2-warning)
-  - [3. Temporary Ban](#3-temporary-ban)
-  - [4. Permanent Ban](#4-permanent-ban)
-- [Attribution](#attribution)
-
-## Our Pledge
-
-We as members, contributors, and leaders pledge to make participation in our community a
-harassment-free experience for everyone, regardless of age, body size, visible or invisible
-disability, ethnicity, sex characteristics, gender identity and expression, level of experience,
-education, socio-economic status, nationality, personal appearance, race, religion, or sexual
-identity and orientation.
-
-We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and
-healthy community.
-
-## Our Standards
-
-Examples of behavior that contributes to a positive environment for our community include:
-
-- Demonstrating empathy and kindness toward other people
-- Being respectful of differing opinions, viewpoints, and experiences
-- Giving and gracefully accepting constructive feedback
-- Accepting responsibility and apologizing to those affected by our mistakes, and learning from the
-  experience
-- Focusing on what is best not just for us as individuals, but for the overall community
-
-Examples of unacceptable behavior include:
-
-- The use of sexualized language or imagery, and sexual attention or advances of any kind
-- Trolling, insulting or derogatory comments, and personal or political attacks
-- Public or private harassment
-- Publishing others' private information, such as a physical or email address, without their
-  explicit permission
-- Other conduct which could reasonably be considered inappropriate in a professional setting
-
-## Enforcement Responsibilities
-
-Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior
-and will take appropriate and fair corrective action in response to any behavior that they deem
-inappropriate, threatening, offensive, or harmful.
-
-Community leaders have the right and responsibility to remove, edit, or reject comments, commits,
-code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and
-will communicate reasons for moderation decisions when appropriate.
-
-## Scope
-
-This Code of Conduct applies within all community spaces, and also applies when an individual is
-officially representing the community in public spaces. Examples of representing our community
-include using an official e-mail address, posting via an official social media account, or acting as
-an appointed representative at an online or offline event.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community
-leaders responsible for enforcement at <hey@astral.sh>. All complaints will be reviewed and
-investigated promptly and fairly.
-
-All community leaders are obligated to respect the privacy and security of the reporter of any
-incident.
-
-## Enforcement Guidelines
-
-Community leaders will follow these Community Impact Guidelines in determining the consequences for
-any action they deem in violation of this Code of Conduct:
-
-### 1. Correction
-
-**Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or
-unwelcome in the community.
-
-**Consequence**: A private, written warning from community leaders, providing clarity around the
-nature of the violation and an explanation of why the behavior was inappropriate. A public apology
-may be requested.
-
-### 2. Warning
-
-**Community Impact**: A violation through a single incident or series of actions.
-
-**Consequence**: A warning with consequences for continued behavior. No interaction with the people
-involved, including unsolicited interaction with those enforcing the Code of Conduct, for a
-specified period of time. This includes avoiding interactions in community spaces as well as
-external channels like social media. Violating these terms may lead to a temporary or permanent ban.
-
-### 3. Temporary Ban
-
-**Community Impact**: A serious violation of community standards, including sustained inappropriate
-behavior.
-
-**Consequence**: A temporary ban from any sort of interaction or public communication with the
-community for a specified period of time. No public or private interaction with the people involved,
-including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this
-period. Violating these terms may lead to a permanent ban.
-
-### 4. Permanent Ban
-
-**Community Impact**: Demonstrating a pattern of violation of community standards, including
-sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement
-of classes of individuals.
-
-**Consequence**: A permanent ban from any sort of public interaction within the community.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, available
-[here](https://www.contributor-covenant.org/version/2/0/code_of_conduct.html).
-
-Community Impact Guidelines were inspired by
-[Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity).
-
-For answers to common questions about this code of conduct, see the
-[FAQ](https://www.contributor-covenant.org/faq). Translations are available
-[here](https://www.contributor-covenant.org/translations).
-
-[homepage]: https://www.contributor-covenant.org

@@ -1,34 +1,10 @@
 # Contributing

-## Finding ways to help
+We have issues labeled as
+[Good First Issue](https://github.com/astral-sh/uv/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22)
-We label issues that would be good for a first time contributor as
+and
-[`good first issue`](https://github.com/astral-sh/uv/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22).
+[Help Wanted](https://github.com/astral-sh/uv/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
-These usually do not require significant experience with Rust or the uv code base.
+which are good opportunities for new contributors.

-We label issues that we think are a good opportunity for subsequent contributions as
-[`help wanted`](https://github.com/astral-sh/uv/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22).
-These require varying levels of experience with Rust and uv. Often, we want to accomplish these
-tasks but do not have the resources to do so ourselves.
-
-You don't need our permission to start on an issue we have labeled as appropriate for community
-contribution as described above. However, it's a good idea to indicate that you are going to work on
-an issue to avoid concurrent attempts to solve the same problem.
-
-Please check in with us before starting work on an issue that has not been labeled as appropriate
-for community contribution. We're happy to receive contributions for other issues, but it's
-important to make sure we have consensus on the solution to the problem first.
-
-Outside of issues with the labels above, issues labeled as
-[`bug`](https://github.com/astral-sh/uv/issues?q=is%3Aopen+is%3Aissue+label%3A%22bug%22) are the
-best candidates for contribution. In contrast, issues labeled with `needs-decision` or
-`needs-design` are _not_ good candidates for contribution. Please do not open pull requests for
-issues with these labels.
-
-Please do not open pull requests for new features without prior discussion. While we appreciate
-exploration of new features, we will almost always close these pull requests immediately. Adding a
-new feature to uv creates a long-term maintenance burden and requires strong consensus from the uv
-team before it is appropriate to begin work on an implementation.
-
 ## Setup

@@ -86,13 +62,6 @@ cargo test --package <package> --test <test> -- <test_name> -- --exact
 cargo insta review
 ```
-
-### Git and Git LFS
-
-A subset of uv tests require both [Git](https://git-scm.com) and [Git LFS](https://git-lfs.com/) to
-execute properly.
-
-These tests can be disabled by turning off either `git` or `git-lfs` uv features.

 ### Local testing

 You can invoke your development version of uv with `cargo run -- <args>`. For example:
@@ -102,15 +71,6 @@ cargo run -- venv
 cargo run -- pip install requests
 ```
-
-## Crate structure
-
-Rust does not allow circular dependencies between crates. To visualize the crate hierarchy, install
-[cargo-depgraph](https://github.com/jplatte/cargo-depgraph) and graphviz, then run:
-
-```shell
-cargo depgraph --dedup-transitive-deps --workspace-only | dot -Tpng > graph.png
-```
-
 ## Running inside a Docker container

 Source distributions can run arbitrary code on build and can make unwanted modifications to your
@@ -136,7 +96,7 @@ Please refer to Ruff's
 it applies to uv, too.

 We provide diverse sets of requirements for testing and benchmarking the resolver in
-`test/requirements` and for the installer in `test/requirements/compiled`.
+`scripts/requirements` and for the installer in `scripts/requirements/compiled`.

 You can use `scripts/benchmark` to benchmark predefined workloads between uv versions and with other
 tools, e.g., from the `scripts/benchmark` directory:
@@ -147,7 +107,7 @@ uv run resolver \
   --poetry \
   --benchmark \
   resolve-cold \
-  ../test/requirements/trio.in
+  ../scripts/requirements/trio.in
 ```

 ### Analyzing concurrency
@@ -157,7 +117,7 @@ visualize parallel requests and find any spots where uv is CPU-bound. Example us
 `uv-dev` respectively:

 ```shell
-RUST_LOG=uv=info TRACING_DURATIONS_FILE=target/traces/jupyter.ndjson cargo run --features tracing-durations-export --profile profiling -- pip compile test/requirements/jupyter.in
+RUST_LOG=uv=info TRACING_DURATIONS_FILE=target/traces/jupyter.ndjson cargo run --features tracing-durations-export --profile profiling -- pip compile scripts/requirements/jupyter.in
 ```

 ```shell

(file diff suppressed because it is too large)

Cargo.toml (178 changed lines):
@@ -4,88 +4,89 @@ exclude = [
   "scripts",
   # Needs nightly
   "crates/uv-trampoline",
+  # Only used to pull in features, allocators, etc. — we specifically don't want them
+  # to be part of a workspace-wide cargo check, cargo clippy, etc.
+  "crates/uv-performance-memory-allocator",
 ]
 resolver = "2"

 [workspace.package]
 edition = "2024"
-rust-version = "1.89"
+rust-version = "1.87"
 homepage = "https://pypi.org/project/uv/"
+documentation = "https://pypi.org/project/uv/"
 repository = "https://github.com/astral-sh/uv"
 authors = ["uv"]
 license = "MIT OR Apache-2.0"

 [workspace.dependencies]
-uv-auth = { version = "0.0.8", path = "crates/uv-auth" }
+uv-auth = { path = "crates/uv-auth" }
-uv-bin-install = { version = "0.0.8", path = "crates/uv-bin-install" }
+uv-bin-install = { path = "crates/uv-bin-install" }
-uv-build-backend = { version = "0.0.8", path = "crates/uv-build-backend" }
+uv-build-backend = { path = "crates/uv-build-backend" }
-uv-build-frontend = { version = "0.0.8", path = "crates/uv-build-frontend" }
+uv-build-frontend = { path = "crates/uv-build-frontend" }
-uv-cache = { version = "0.0.8", path = "crates/uv-cache" }
+uv-cache = { path = "crates/uv-cache" }
-uv-cache-info = { version = "0.0.8", path = "crates/uv-cache-info" }
+uv-cache-info = { path = "crates/uv-cache-info" }
-uv-cache-key = { version = "0.0.8", path = "crates/uv-cache-key" }
+uv-cache-key = { path = "crates/uv-cache-key" }
-uv-cli = { version = "0.0.8", path = "crates/uv-cli" }
+uv-cli = { path = "crates/uv-cli" }
-uv-client = { version = "0.0.8", path = "crates/uv-client" }
+uv-client = { path = "crates/uv-client" }
-uv-configuration = { version = "0.0.8", path = "crates/uv-configuration" }
+uv-configuration = { path = "crates/uv-configuration" }
-uv-console = { version = "0.0.8", path = "crates/uv-console" }
+uv-console = { path = "crates/uv-console" }
-uv-dirs = { version = "0.0.8", path = "crates/uv-dirs" }
+uv-dirs = { path = "crates/uv-dirs" }
-uv-dispatch = { version = "0.0.8", path = "crates/uv-dispatch" }
+uv-dispatch = { path = "crates/uv-dispatch" }
-uv-distribution = { version = "0.0.8", path = "crates/uv-distribution" }
+uv-distribution = { path = "crates/uv-distribution" }
-uv-distribution-filename = { version = "0.0.8", path = "crates/uv-distribution-filename" }
+uv-distribution-filename = { path = "crates/uv-distribution-filename" }
-uv-distribution-types = { version = "0.0.8", path = "crates/uv-distribution-types" }
+uv-distribution-types = { path = "crates/uv-distribution-types" }
-uv-extract = { version = "0.0.8", path = "crates/uv-extract" }
+uv-extract = { path = "crates/uv-extract" }
-uv-flags = { version = "0.0.8", path = "crates/uv-flags" }
+uv-fs = { path = "crates/uv-fs", features = ["serde", "tokio"] }
-uv-fs = { version = "0.0.8", path = "crates/uv-fs", features = ["serde", "tokio"] }
+uv-git = { path = "crates/uv-git" }
-uv-git = { version = "0.0.8", path = "crates/uv-git" }
+uv-git-types = { path = "crates/uv-git-types" }
-uv-git-types = { version = "0.0.8", path = "crates/uv-git-types" }
+uv-globfilter = { path = "crates/uv-globfilter" }
-uv-globfilter = { version = "0.0.8", path = "crates/uv-globfilter" }
+uv-install-wheel = { path = "crates/uv-install-wheel", default-features = false }
-uv-install-wheel = { version = "0.0.8", path = "crates/uv-install-wheel", default-features = false }
+uv-installer = { path = "crates/uv-installer" }
-uv-installer = { version = "0.0.8", path = "crates/uv-installer" }
+uv-keyring = { path = "crates/uv-keyring" }
-uv-keyring = { version = "0.0.8", path = "crates/uv-keyring" }
+uv-logging = { path = "crates/uv-logging" }
-uv-logging = { version = "0.0.8", path = "crates/uv-logging" }
+uv-macros = { path = "crates/uv-macros" }
-uv-macros = { version = "0.0.8", path = "crates/uv-macros" }
+uv-metadata = { path = "crates/uv-metadata" }
-uv-metadata = { version = "0.0.8", path = "crates/uv-metadata" }
+uv-normalize = { path = "crates/uv-normalize" }
-uv-normalize = { version = "0.0.8", path = "crates/uv-normalize" }
+uv-once-map = { path = "crates/uv-once-map" }
-uv-once-map = { version = "0.0.8", path = "crates/uv-once-map" }
+uv-options-metadata = { path = "crates/uv-options-metadata" }
-uv-options-metadata = { version = "0.0.8", path = "crates/uv-options-metadata" }
+uv-pep440 = { path = "crates/uv-pep440", features = ["tracing", "rkyv", "version-ranges"] }
-uv-performance-memory-allocator = { version = "0.0.8", path = "crates/uv-performance-memory-allocator" }
+uv-pep508 = { path = "crates/uv-pep508", features = ["non-pep508-extensions"] }
-uv-pep440 = { version = "0.0.8", path = "crates/uv-pep440", features = ["tracing", "rkyv", "version-ranges"] }
+uv-platform = { path = "crates/uv-platform" }
-uv-pep508 = { version = "0.0.8", path = "crates/uv-pep508", features = ["non-pep508-extensions"] }
+uv-platform-tags = { path = "crates/uv-platform-tags" }
-uv-platform = { version = "0.0.8", path = "crates/uv-platform" }
+uv-preview = { path = "crates/uv-preview" }
-uv-platform-tags = { version = "0.0.8", path = "crates/uv-platform-tags" }
+uv-publish = { path = "crates/uv-publish" }
-uv-preview = { version = "0.0.8", path = "crates/uv-preview" }
+uv-pypi-types = { path = "crates/uv-pypi-types" }
-uv-publish = { version = "0.0.8", path = "crates/uv-publish" }
+uv-python = { path = "crates/uv-python" }
-uv-pypi-types = { version = "0.0.8", path = "crates/uv-pypi-types" }
+uv-redacted = { path = "crates/uv-redacted" }
-uv-python = { version = "0.0.8", path = "crates/uv-python" }
+uv-requirements = { path = "crates/uv-requirements" }
-uv-redacted = { version = "0.0.8", path = "crates/uv-redacted" }
+uv-requirements-txt = { path = "crates/uv-requirements-txt" }
-uv-requirements = { version = "0.0.8", path = "crates/uv-requirements" }
+uv-resolver = { path = "crates/uv-resolver" }
-uv-requirements-txt = { version = "0.0.8", path = "crates/uv-requirements-txt" }
+uv-scripts = { path = "crates/uv-scripts" }
-uv-resolver = { version = "0.0.8", path = "crates/uv-resolver" }
+uv-settings = { path = "crates/uv-settings" }
-uv-scripts = { version = "0.0.8", path = "crates/uv-scripts" }
+uv-shell = { path = "crates/uv-shell" }
-uv-settings = { version = "0.0.8", path = "crates/uv-settings" }
+uv-small-str = { path = "crates/uv-small-str" }
-uv-shell = { version = "0.0.8", path = "crates/uv-shell" }
+uv-state = { path = "crates/uv-state" }
-uv-small-str = { version = "0.0.8", path = "crates/uv-small-str" }
+uv-static = { path = "crates/uv-static" }
-uv-state = { version = "0.0.8", path = "crates/uv-state" }
+uv-tool = { path = "crates/uv-tool" }
-uv-static = { version = "0.0.8", path = "crates/uv-static" }
+uv-torch = { path = "crates/uv-torch" }
-uv-tool = { version = "0.0.8", path = "crates/uv-tool" }
+uv-trampoline-builder = { path = "crates/uv-trampoline-builder" }
-uv-torch = { version = "0.0.8", path = "crates/uv-torch" }
+uv-types = { path = "crates/uv-types" }
-uv-trampoline-builder = { version = "0.0.8", path = "crates/uv-trampoline-builder" }
+uv-version = { path = "crates/uv-version" }
-uv-types = { version = "0.0.8", path = "crates/uv-types" }
+uv-virtualenv = { path = "crates/uv-virtualenv" }
-uv-version = { version = "0.9.18", path = "crates/uv-version" }
+uv-warnings = { path = "crates/uv-warnings" }
-uv-virtualenv = { version = "0.0.8", path = "crates/uv-virtualenv" }
+uv-workspace = { path = "crates/uv-workspace" }
-uv-warnings = { version = "0.0.8", path = "crates/uv-warnings" }
-uv-workspace = { version = "0.0.8", path = "crates/uv-workspace" }

-ambient-id = { version = "0.0.7", default-features = false, features = ["astral-reqwest-middleware"] }
 anstream = { version = "0.6.15" }
 anyhow = { version = "1.0.89" }
 arcstr = { version = "1.2.0" }
 arrayvec = { version = "0.7.6" }
-astral-tokio-tar = { version = "0.5.6" }
+astral-tokio-tar = { version = "0.5.3" }
 async-channel = { version = "2.3.1" }
 async-compression = { version = "0.4.12", features = ["bzip2", "gzip", "xz", "zstd"] }
 async-trait = { version = "0.1.82" }
-async_http_range_reader = { version = "0.9.1", package = "astral_async_http_range_reader" }
+async_http_range_reader = { version = "0.9.1" }
-async_zip = { version = "0.0.17", package = "astral_async_zip", features = ["bzip2", "deflate", "lzma", "tokio", "xz", "zstd"] }
+async_zip = { git = "https://github.com/astral-sh/rs-async-zip", rev = "285e48742b74ab109887d62e1ae79e7c15fd4878", features = ["bzip2", "deflate", "lzma", "tokio", "xz", "zstd"] }
 axoupdater = { version = "0.9.0", default-features = false }
 backon = { version = "1.3.0" }
 base64 = { version = "0.22.1" }
@@ -100,27 +101,25 @@ configparser = { version = "3.1.0" }
 console = { version = "0.16.0", default-features = false, features = ["std"] }
 csv = { version = "1.3.0" }
 ctrlc = { version = "3.4.5" }
-cyclonedx-bom = { version = "0.8.0" }
 dashmap = { version = "6.1.0" }
 data-encoding = { version = "2.6.0" }
-diskus = { version = "0.9.0", default-features = false }
 dotenvy = { version = "0.15.7" }
 dunce = { version = "1.0.5" }
 either = { version = "1.13.0" }
 encoding_rs_io = { version = "0.1.7" }
 embed-manifest = { version = "1.5.0" }
|
etcetera = { version = "0.10.0" }
|
||||||
etcetera = { version = "0.11.0" }
|
|
||||||
fastrand = { version = "2.3.0" }
|
|
||||||
flate2 = { version = "1.0.33", default-features = false, features = ["zlib-rs"] }
|
flate2 = { version = "1.0.33", default-features = false, features = ["zlib-rs"] }
|
||||||
fs-err = { version = "3.0.0", features = ["tokio"] }
|
fs-err = { version = "3.0.0", features = ["tokio"] }
|
||||||
|
fs2 = { version = "0.4.3" }
|
||||||
futures = { version = "0.3.30" }
|
futures = { version = "0.3.30" }
|
||||||
glob = { version = "0.3.1" }
|
glob = { version = "0.3.1" }
|
||||||
globset = { version = "0.4.15" }
|
globset = { version = "0.4.15" }
|
||||||
globwalk = { version = "0.9.1" }
|
globwalk = { version = "0.9.1" }
|
||||||
goblin = { version = "0.10.0", default-features = false, features = ["std", "elf32", "elf64", "endian_fd"] }
|
goblin = { version = "0.10.0", default-features = false, features = ["std", "elf32", "elf64", "endian_fd"] }
|
||||||
h2 = { version = "0.4.7" }
|
h2 = { version = "0.4.7" }
|
||||||
hashbrown = { version = "0.16.0" }
|
hashbrown = { version = "0.15.1" }
|
||||||
hex = { version = "0.4.3" }
|
hex = { version = "0.4.3" }
|
||||||
|
home = { version = "0.5.9" }
|
||||||
html-escape = { version = "0.2.13" }
|
html-escape = { version = "0.2.13" }
|
||||||
http = { version = "1.1.0" }
|
http = { version = "1.1.0" }
|
||||||
indexmap = { version = "2.5.0" }
|
indexmap = { version = "2.5.0" }
|
||||||
|
|
@ -135,6 +134,7 @@ memchr = { version = "2.7.4" }
|
||||||
miette = { version = "7.2.0", features = ["fancy-no-backtrace"] }
|
miette = { version = "7.2.0", features = ["fancy-no-backtrace"] }
|
||||||
nanoid = { version = "0.4.0" }
|
nanoid = { version = "0.4.0" }
|
||||||
nix = { version = "0.30.0", features = ["signal"] }
|
nix = { version = "0.30.0", features = ["signal"] }
|
||||||
|
once_cell = { version = "1.20.2" }
|
||||||
open = { version = "5.3.2" }
|
open = { version = "5.3.2" }
|
||||||
owo-colors = { version = "4.1.0" }
|
owo-colors = { version = "4.1.0" }
|
||||||
path-slash = { version = "0.2.1" }
|
path-slash = { version = "0.2.1" }
|
||||||
|
|
@ -143,17 +143,16 @@ percent-encoding = { version = "2.3.1" }
|
||||||
petgraph = { version = "0.8.0" }
|
petgraph = { version = "0.8.0" }
|
||||||
proc-macro2 = { version = "1.0.86" }
|
proc-macro2 = { version = "1.0.86" }
|
||||||
procfs = { version = "0.17.0", default-features = false, features = ["flate2"] }
|
procfs = { version = "0.17.0", default-features = false, features = ["flate2"] }
|
||||||
pubgrub = { version = "0.3.3" , package = "astral-pubgrub" }
|
pubgrub = { git = "https://github.com/astral-sh/pubgrub", rev = "06ec5a5f59ffaeb6cf5079c6cb184467da06c9db" }
|
||||||
quote = { version = "1.0.37" }
|
quote = { version = "1.0.37" }
|
||||||
rayon = { version = "1.10.0" }
|
rayon = { version = "1.10.0" }
|
||||||
ref-cast = { version = "1.0.24" }
|
ref-cast = { version = "1.0.24" }
|
||||||
reflink-copy = { version = "0.1.19" }
|
reflink-copy = { version = "0.1.19" }
|
||||||
regex = { version = "1.10.6" }
|
regex = { version = "1.10.6" }
|
||||||
regex-automata = { version = "0.4.8", default-features = false, features = ["dfa-build", "dfa-search", "perf", "std", "syntax"] }
|
regex-automata = { version = "0.4.8", default-features = false, features = ["dfa-build", "dfa-search", "perf", "std", "syntax"] }
|
||||||
reqsign = { version = "0.18.0", features = ["aws", "default-context"], default-features = false }
|
|
||||||
reqwest = { version = "0.12.22", default-features = false, features = ["json", "gzip", "deflate", "zstd", "stream", "system-proxy", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart", "http2", "blocking"] }
|
reqwest = { version = "0.12.22", default-features = false, features = ["json", "gzip", "deflate", "zstd", "stream", "system-proxy", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart", "http2", "blocking"] }
|
||||||
reqwest-middleware = { version = "0.4.2", package = "astral-reqwest-middleware", features = ["multipart"] }
|
reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "7650ed76215a962a96d94a79be71c27bffde7ab2", features = ["multipart"] }
|
||||||
reqwest-retry = { version = "0.7.0", package = "astral-reqwest-retry" }
|
reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "7650ed76215a962a96d94a79be71c27bffde7ab2" }
|
||||||
rkyv = { version = "0.8.8", features = ["bytecheck"] }
|
rkyv = { version = "0.8.8", features = ["bytecheck"] }
|
||||||
rmp-serde = { version = "1.3.0" }
|
rmp-serde = { version = "1.3.0" }
|
||||||
rust-netrc = { version = "0.1.2" }
|
rust-netrc = { version = "0.1.2" }
|
||||||
|
|
@ -170,7 +169,7 @@ serde-untagged = { version = "0.1.6" }
|
||||||
serde_json = { version = "1.0.128" }
|
serde_json = { version = "1.0.128" }
|
||||||
sha2 = { version = "0.10.8" }
|
sha2 = { version = "0.10.8" }
|
||||||
smallvec = { version = "1.13.2" }
|
smallvec = { version = "1.13.2" }
|
||||||
spdx = { version = "0.13.0" }
|
spdx = { version = "0.10.6" }
|
||||||
syn = { version = "2.0.77" }
|
syn = { version = "2.0.77" }
|
||||||
sys-info = { version = "0.9.1" }
|
sys-info = { version = "0.9.1" }
|
||||||
tar = { version = "0.4.43" }
|
tar = { version = "0.4.43" }
|
||||||
|
|
@ -178,8 +177,8 @@ target-lexicon = { version = "0.13.0" }
|
||||||
tempfile = { version = "3.14.0" }
|
tempfile = { version = "3.14.0" }
|
||||||
textwrap = { version = "0.16.1" }
|
textwrap = { version = "0.16.1" }
|
||||||
thiserror = { version = "2.0.0" }
|
thiserror = { version = "2.0.0" }
|
||||||
astral-tl = { version = "0.7.11" }
|
tl = { git = "https://github.com/astral-sh/tl.git", rev = "6e25b2ee2513d75385101a8ff9f591ef51f314ec" }
|
||||||
tokio = { version = "1.40.0", features = ["fs", "io-util", "macros", "process", "rt", "signal", "sync", "time"] }
|
tokio = { version = "1.40.0", features = ["fs", "io-util", "macros", "process", "rt", "signal", "sync"] }
|
||||||
tokio-stream = { version = "0.1.16" }
|
tokio-stream = { version = "0.1.16" }
|
||||||
tokio-util = { version = "0.7.12", features = ["compat", "io"] }
|
tokio-util = { version = "0.7.12", features = ["compat", "io"] }
|
||||||
toml = { version = "0.9.2", features = ["fast_hash"] }
|
toml = { version = "0.9.2", features = ["fast_hash"] }
|
||||||
|
|
@ -193,13 +192,12 @@ unicode-width = { version = "0.2.0" }
|
||||||
unscanny = { version = "0.1.0" }
|
unscanny = { version = "0.1.0" }
|
||||||
url = { version = "2.5.2", features = ["serde"] }
|
url = { version = "2.5.2", features = ["serde"] }
|
||||||
uuid = { version = "1.16.0" }
|
uuid = { version = "1.16.0" }
|
||||||
version-ranges = { version = "0.1.3", package = "astral-version-ranges" }
|
version-ranges = { git = "https://github.com/astral-sh/pubgrub", rev = "06ec5a5f59ffaeb6cf5079c6cb184467da06c9db" }
|
||||||
walkdir = { version = "2.5.0" }
|
walkdir = { version = "2.5.0" }
|
||||||
which = { version = "8.0.0", features = ["regex"] }
|
which = { version = "8.0.0", features = ["regex"] }
|
||||||
windows = { version = "0.59.0", features = ["std", "Win32_Globalization", "Win32_System_LibraryLoader", "Win32_System_Console", "Win32_System_Kernel", "Win32_System_Diagnostics_Debug", "Win32_Storage_FileSystem", "Win32_Security", "Win32_System_Registry", "Win32_System_IO", "Win32_System_Ioctl"] }
|
windows = { version = "0.59.0", features = ["Win32_Globalization", "Win32_Security", "Win32_System_Console", "Win32_System_Kernel", "Win32_System_Diagnostics_Debug", "Win32_Storage_FileSystem", "Win32_System_Registry", "Win32_System_IO", "Win32_System_Ioctl"] }
|
||||||
windows-registry = { version = "0.5.0" }
|
windows-registry = { version = "0.5.0" }
|
||||||
wiremock = { version = "0.6.4" }
|
wiremock = { version = "0.6.4" }
|
||||||
wmi = { version = "0.16.0", default-features = false }
|
|
||||||
xz2 = { version = "0.1.7" }
|
xz2 = { version = "0.1.7" }
|
||||||
zeroize = { version = "1.8.1" }
|
zeroize = { version = "1.8.1" }
|
||||||
zip = { version = "2.2.3", default-features = false, features = ["deflate", "zstd", "bzip2", "lzma", "xz"] }
|
zip = { version = "2.2.3", default-features = false, features = ["deflate", "zstd", "bzip2", "lzma", "xz"] }
|
||||||
|
|
@ -212,19 +210,19 @@ byteorder = { version = "1.5.0" }
|
||||||
filetime = { version = "0.2.25" }
|
filetime = { version = "0.2.25" }
|
||||||
http-body-util = { version = "0.1.2" }
|
http-body-util = { version = "0.1.2" }
|
||||||
hyper = { version = "1.4.1", features = ["server", "http1"] }
|
hyper = { version = "1.4.1", features = ["server", "http1"] }
|
||||||
hyper-util = { version = "0.1.8", features = ["tokio", "server", "http1"] }
|
hyper-util = { version = "0.1.8", features = ["tokio"] }
|
||||||
ignore = { version = "0.4.23" }
|
ignore = { version = "0.4.23" }
|
||||||
insta = { version = "1.40.0", features = ["json", "filters", "redactions"] }
|
insta = { version = "1.40.0", features = ["json", "filters", "redactions"] }
|
||||||
predicates = { version = "3.1.2" }
|
predicates = { version = "3.1.2" }
|
||||||
rcgen = { version = "0.14.5", features = ["crypto", "pem", "ring"], default-features = false }
|
|
||||||
rustls = { version = "0.23.29", default-features = false }
|
|
||||||
similar = { version = "2.6.0" }
|
similar = { version = "2.6.0" }
|
||||||
temp-env = { version = "0.3.6" }
|
temp-env = { version = "0.3.6" }
|
||||||
test-case = { version = "3.3.1" }
|
test-case = { version = "3.3.1" }
|
||||||
test-log = { version = "0.2.16", features = ["trace"], default-features = false }
|
test-log = { version = "0.2.16", features = ["trace"], default-features = false }
|
||||||
tokio-rustls = { version = "0.26.2", default-features = false }
|
|
||||||
whoami = { version = "1.6.0" }
|
whoami = { version = "1.6.0" }
|
||||||
|
|
||||||
|
[workspace.metadata.cargo-shear]
|
||||||
|
ignored = ["flate2", "xz2", "h2"]
|
||||||
|
|
||||||
[workspace.lints.rust]
|
[workspace.lints.rust]
|
||||||
unsafe_code = "warn"
|
unsafe_code = "warn"
|
||||||
unreachable_pub = "warn"
|
unreachable_pub = "warn"
|
||||||
|
|
@ -310,18 +308,8 @@ strip = false
|
||||||
debug = "full"
|
debug = "full"
|
||||||
lto = false
|
lto = false
|
||||||
|
|
||||||
# Profile for fast test execution: Skip debug info generation, and
|
|
||||||
# apply basic optimization, which speed up build and running tests.
|
|
||||||
[profile.fast-build]
|
[profile.fast-build]
|
||||||
inherits = "dev"
|
inherits = "dev"
|
||||||
opt-level = 1
|
|
||||||
debug = 0
|
|
||||||
strip = "debuginfo"
|
|
||||||
|
|
||||||
# Profile for faster builds: Skip debug info generation, for faster
|
|
||||||
# builds of smaller binaries.
|
|
||||||
[profile.no-debug]
|
|
||||||
inherits = "dev"
|
|
||||||
debug = 0
|
debug = 0
|
||||||
strip = "debuginfo"
|
strip = "debuginfo"
|
||||||
|
|
||||||
|
|
@ -336,3 +324,7 @@ codegen-units = 1
|
||||||
# The profile that 'cargo dist' will build with.
|
# The profile that 'cargo dist' will build with.
|
||||||
[profile.dist]
|
[profile.dist]
|
||||||
inherits = "release"
|
inherits = "release"
|
||||||
|
|
||||||
|
[patch.crates-io]
|
||||||
|
reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "7650ed76215a962a96d94a79be71c27bffde7ab2" }
|
||||||
|
reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "7650ed76215a962a96d94a79be71c27bffde7ab2" }
|
||||||
|
|
|
||||||
|
|
@@ -23,15 +23,8 @@ RUN case "$TARGETPLATFORM" in \
         *) exit 1 ;; \
     esac
 
-# Temporarily using nightly-2025-11-02 for bundled musl v1.2.5
-# Ref: https://github.com/rust-lang/rust/pull/142682
-# TODO(samypr100): Remove when toolchain updates to 1.93
-COPY <<EOF rust-toolchain.toml
-[toolchain]
-channel = "nightly-2025-11-02"
-EOF
 # Update rustup whenever we bump the rust version
-# COPY rust-toolchain.toml rust-toolchain.toml
+COPY rust-toolchain.toml rust-toolchain.toml
 RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --target $(cat rust_target.txt) --profile minimal --default-toolchain none
 ENV PATH="$HOME/.cargo/bin:$PATH"
 # Install the toolchain then the musl target
README.md (33 changed lines)

@@ -42,7 +42,7 @@ An extremely fast Python package and project manager, written in Rust.
 - 🖥️ Supports macOS, Linux, and Windows.
 
 uv is backed by [Astral](https://astral.sh), the creators of
-[Ruff](https://github.com/astral-sh/ruff) and [ty](https://github.com/astral-sh/ty).
+[Ruff](https://github.com/astral-sh/ruff).
 
 ## Installation
@@ -192,12 +192,14 @@ uv installs Python and allows quickly switching between versions.
 Install multiple Python versions:
 
 ```console
-$ uv python install 3.12 3.13 3.14
-Installed 3 versions in 972ms
- + cpython-3.12.12-macos-aarch64-none (python3.12)
- + cpython-3.13.9-macos-aarch64-none (python3.13)
- + cpython-3.14.0-macos-aarch64-none (python3.14)
+$ uv python install 3.10 3.11 3.12
+Searching for Python versions matching: Python 3.10
+Searching for Python versions matching: Python 3.11
+Searching for Python versions matching: Python 3.12
+Installed 3 versions in 3.42s
+ + cpython-3.10.14-macos-aarch64-none
+ + cpython-3.11.9-macos-aarch64-none
+ + cpython-3.12.4-macos-aarch64-none
 ```
 
 Download Python versions as needed:
@@ -268,6 +270,14 @@ Installed 43 packages in 208ms
 
 See the [pip interface documentation](https://docs.astral.sh/uv/pip/index/) to get started.
 
+## Platform support
+
+See uv's [platform support](https://docs.astral.sh/uv/reference/platforms/) document.
+
+## Versioning policy
+
+See uv's [versioning policy](https://docs.astral.sh/uv/reference/versioning/) document.
+
 ## Contributing
 
 We are passionate about supporting contributors of all levels of experience and would love to see
@@ -284,15 +294,6 @@ It's pronounced as "you - vee" ([`/juː viː/`](https://en.wikipedia.org/wiki/He
 
 Just "uv", please. See the [style guide](./STYLE.md#styling-uv) for details.
 
-#### What platforms does uv support?
-
-See uv's [platform support](https://docs.astral.sh/uv/reference/platforms/) document.
-
-#### Is uv ready for production?
-
-Yes, uv is stable and widely used in production. See uv's
-[versioning policy](https://docs.astral.sh/uv/reference/versioning/) document for details.
-
 ## Acknowledgements
 
 uv's dependency resolver uses [PubGrub](https://github.com/pubgrub-rs/pubgrub) under the hood. We're
STYLE.md (2 changed lines)

@@ -16,7 +16,7 @@ documentation_.
 1. If a message ends with a single relevant value, precede it with a colon, e.g.,
    `This is the value: value`. If the value is a literal, wrap it in backticks.
 1. Markdown files should be wrapped at 100 characters.
-1. Use a space, not an equals sign, for command-line arguments with a value, e.g.
+1. Use a space, not an equals sign, for command line arguments with a value, e.g.
    `--resolution lowest`, not `--resolution=lowest`.
 
 ## Styling uv
@@ -1,8 +1,8 @@
 [files]
 extend-exclude = [
   "**/snapshots/",
-  "test/ecosystem/**",
+  "ecosystem/**",
-  "test/requirements/**/*.in",
+  "scripts/**/*.in",
   "crates/uv-build-frontend/src/pipreqs/mapping",
 ]
 ignore-hidden = false
@@ -982,7 +982,7 @@ for more details.
   ([#9135](https://github.com/astral-sh/uv/pull/9135))
 - Tweak script `--no-project` comment ([#10331](https://github.com/astral-sh/uv/pull/10331))
 - Update copyright year ([#10297](https://github.com/astral-sh/uv/pull/10297))
-- Add instructions for installing with Scoop ([#10332](https://github.com/astral-sh/uv/pull/10332))
+- Add instructinos for installing with Scoop ([#10332](https://github.com/astral-sh/uv/pull/10332))
 
 ## 0.5.16
changelogs/0.8.x.md (1108 changed lines): diff suppressed because it is too large.
@@ -1,13 +1,7 @@
 [package]
 name = "uv-auth"
-version = "0.0.8"
+version = "0.0.1"
-description = "This is an internal component crate of uv"
 edition = { workspace = true }
-rust-version = { workspace = true }
-homepage = { workspace = true }
-repository = { workspace = true }
-authors = { workspace = true }
-license = { workspace = true }
 
 [lib]
 doctest = false
@@ -37,7 +31,6 @@ futures = { workspace = true }
 http = { workspace = true }
 jiff = { workspace = true }
 percent-encoding = { workspace = true }
-reqsign = { workspace = true }
 reqwest = { workspace = true }
 reqwest-middleware = { workspace = true }
 rust-netrc = { workspace = true }
@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-auth
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-auth).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.
@@ -29,6 +29,6 @@ impl AsRef<[u8]> for AccessToken {
 
 impl std::fmt::Display for AccessToken {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "****")
+        write!(f, "{}", self.0)
     }
 }
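Aside: the `AccessToken` hunk above is one of several in this comparison that changes a secret type from printing its contents to printing a mask. A minimal, self-contained sketch of that redact-on-format pattern, using a hypothetical `Secret` newtype rather than uv's actual types:

```rust
use std::fmt;

// Hypothetical newtype; uv's real types are `AccessToken`, `Password`, etc.
struct Secret(String);

impl fmt::Display for Secret {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Never echo the inner value: anything that formats this type
        // (logs, error messages, panics) only ever sees the mask.
        write!(f, "****")
    }
}

impl fmt::Debug for Secret {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Secret(****)")
    }
}

fn main() {
    let token = Secret("super-secret".to_string());
    assert_eq!(token.to_string(), "****");
    println!("{token:?}"); // prints: Secret(****)
}
```

The same idea recurs below in `credentials.rs`, where one side wraps bearer tokens in a `Token` newtype with a masking `Debug` impl and a test asserting the raw token never appears in debug output.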
@@ -11,8 +11,8 @@ use url::Url;
 use uv_once_map::OnceMap;
 use uv_redacted::DisplaySafeUrl;
 
-use crate::credentials::{Authentication, Username};
-use crate::{Credentials, Realm};
+use crate::Realm;
+use crate::credentials::{Credentials, Username};
 
 type FxOnceMap<K, V> = OnceMap<K, V, BuildHasherDefault<FxHasher>>;
@@ -33,14 +33,13 @@ impl Display for FetchUrl {
     }
 }
 
-#[derive(Debug)] // All internal types are redacted.
 pub struct CredentialsCache {
     /// A cache per realm and username
-    realms: RwLock<FxHashMap<(Realm, Username), Arc<Authentication>>>,
+    realms: RwLock<FxHashMap<(Realm, Username), Arc<Credentials>>>,
     /// A cache tracking the result of realm or index URL fetches from external services
-    pub(crate) fetches: FxOnceMap<(FetchUrl, Username), Option<Arc<Authentication>>>,
+    pub(crate) fetches: FxOnceMap<(FetchUrl, Username), Option<Arc<Credentials>>>,
     /// A cache per URL, uses a trie for efficient prefix queries.
-    urls: RwLock<UrlTrie<Arc<Authentication>>>,
+    urls: RwLock<UrlTrie>,
 }
 
 impl Default for CredentialsCache {
@@ -59,33 +58,8 @@ impl CredentialsCache {
         }
     }
 
-    /// Populate the global authentication store with credentials on a URL, if there are any.
-    ///
-    /// Returns `true` if the store was updated.
-    pub fn store_credentials_from_url(&self, url: &DisplaySafeUrl) -> bool {
-        if let Some(credentials) = Credentials::from_url(url) {
-            trace!("Caching credentials for {url}");
-            self.insert(url, Arc::new(Authentication::from(credentials)));
-            true
-        } else {
-            false
-        }
-    }
-
-    /// Populate the global authentication store with credentials on a URL, if there are any.
-    ///
-    /// Returns `true` if the store was updated.
-    pub fn store_credentials(&self, url: &DisplaySafeUrl, credentials: Credentials) {
-        trace!("Caching credentials for {url}");
-        self.insert(url, Arc::new(Authentication::from(credentials)));
-    }
-
     /// Return the credentials that should be used for a realm and username, if any.
-    pub(crate) fn get_realm(
-        &self,
-        realm: Realm,
-        username: Username,
-    ) -> Option<Arc<Authentication>> {
+    pub(crate) fn get_realm(&self, realm: Realm, username: Username) -> Option<Arc<Credentials>> {
         let realms = self.realms.read().unwrap();
         let given_username = username.is_some();
         let key = (realm, username);
@@ -119,7 +93,7 @@ impl CredentialsCache {
     /// Note we do not cache per username, but if a username is passed we will confirm that the
     /// cached credentials have a username equal to the provided one — otherwise `None` is returned.
     /// If multiple usernames are used per URL, the realm cache should be queried instead.
-    pub(crate) fn get_url(&self, url: &Url, username: &Username) -> Option<Arc<Authentication>> {
+    pub(crate) fn get_url(&self, url: &Url, username: &Username) -> Option<Arc<Credentials>> {
         let urls = self.urls.read().unwrap();
         let credentials = urls.get(url);
         if let Some(credentials) = credentials {
@@ -138,7 +112,7 @@ impl CredentialsCache {
     }
 
     /// Update the cache with the given credentials.
-    pub(crate) fn insert(&self, url: &Url, credentials: Arc<Authentication>) {
+    pub(crate) fn insert(&self, url: &Url, credentials: Arc<Credentials>) {
         // Do not cache empty credentials
         if credentials.is_empty() {
             return;
@@ -165,8 +139,8 @@ impl CredentialsCache {
     fn insert_realm(
         &self,
         key: (Realm, Username),
-        credentials: &Arc<Authentication>,
-    ) -> Option<Arc<Authentication>> {
+        credentials: &Arc<Credentials>,
+    ) -> Option<Arc<Credentials>> {
         // Do not cache empty credentials
         if credentials.is_empty() {
             return None;
@@ -192,33 +166,24 @@ impl CredentialsCache {
 }
 
 #[derive(Debug)]
-struct UrlTrie<T> {
-    states: Vec<TrieState<T>>,
+struct UrlTrie {
+    states: Vec<TrieState>,
 }
 
-#[derive(Debug)]
-struct TrieState<T> {
+#[derive(Debug, Default)]
+struct TrieState {
     children: Vec<(String, usize)>,
-    value: Option<T>,
+    value: Option<Arc<Credentials>>,
 }
 
-impl<T> Default for TrieState<T> {
-    fn default() -> Self {
-        Self {
-            children: vec![],
-            value: None,
-        }
-    }
-}
-
-impl<T> UrlTrie<T> {
+impl UrlTrie {
     fn new() -> Self {
         let mut trie = Self { states: vec![] };
         trie.alloc();
         trie
     }
 
-    fn get(&self, url: &Url) -> Option<&T> {
+    fn get(&self, url: &Url) -> Option<&Arc<Credentials>> {
         let mut state = 0;
         let realm = Realm::from(url).to_string();
         for component in [realm.as_str()]
@@ -233,7 +198,7 @@ impl UrlTrie {
         self.states[state].value.as_ref()
     }
 
-    fn insert(&mut self, url: &Url, value: T) {
+    fn insert(&mut self, url: &Url, value: Arc<Credentials>) {
         let mut state = 0;
         let realm = Realm::from(url).to_string();
         for component in [realm.as_str()]
@@ -261,7 +226,7 @@ impl UrlTrie {
     }
 }
 
-impl<T> TrieState<T> {
+impl TrieState {
     fn get(&self, component: &str) -> Option<usize> {
        let i = self.index(component).ok()?;
        Some(self.children[i].1)
@@ -295,21 +260,28 @@ impl From<(Realm, Username)> for RealmUsername {
 
 #[cfg(test)]
 mod tests {
-    use crate::Credentials;
     use crate::credentials::Password;
 
     use super::*;
 
     #[test]
     fn test_trie() {
-        let credentials1 =
-            Credentials::basic(Some("username1".to_string()), Some("password1".to_string()));
-        let credentials2 =
-            Credentials::basic(Some("username2".to_string()), Some("password2".to_string()));
-        let credentials3 =
-            Credentials::basic(Some("username3".to_string()), Some("password3".to_string()));
-        let credentials4 =
-            Credentials::basic(Some("username4".to_string()), Some("password4".to_string()));
+        let credentials1 = Arc::new(Credentials::basic(
+            Some("username1".to_string()),
+            Some("password1".to_string()),
+        ));
+        let credentials2 = Arc::new(Credentials::basic(
+            Some("username2".to_string()),
+            Some("password2".to_string()),
+        ));
+        let credentials3 = Arc::new(Credentials::basic(
+            Some("username3".to_string()),
+            Some("password3".to_string()),
+        ));
+        let credentials4 = Arc::new(Credentials::basic(
+            Some("username4".to_string()),
+            Some("password4".to_string()),
+        ));
 
         let mut trie = UrlTrie::new();
         trie.insert(
@@ -367,10 +339,10 @@ mod tests {
     fn test_url_with_credentials() {
         let username = Username::new(Some(String::from("username")));
         let password = Password::new(String::from("password"));
-        let credentials = Arc::new(Authentication::from(Credentials::Basic {
+        let credentials = Arc::new(Credentials::Basic {
             username: username.clone(),
             password: Some(password),
-        }));
+        });
         let cache = CredentialsCache::default();
         // Insert with URL with credentials and get with redacted URL.
         let url = Url::parse("https://username:password@example.com/foobar").unwrap();
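The `UrlTrie` hunks above show the credential cache keyed by URL prefix: entries are inserted under the URL's realm and path segments, and lookups resolve to the most specific stored entry. A self-contained sketch of that longest-prefix idea, assuming the `url` crate; `PrefixTrie` and its helpers are illustrative names, not uv's implementation (uv's trie uses an index-based state table instead of boxed children):

```rust
use std::collections::HashMap;
use url::Url;

struct Node<T> {
    children: HashMap<String, Node<T>>,
    value: Option<T>,
}

impl<T> Default for Node<T> {
    // Manual impl avoids a `T: Default` bound, just as the diff's own
    // `impl<T> Default for TrieState<T>` does.
    fn default() -> Self {
        Self { children: HashMap::new(), value: None }
    }
}

struct PrefixTrie<T> {
    root: Node<T>,
}

impl<T> PrefixTrie<T> {
    fn new() -> Self {
        Self { root: Node::default() }
    }

    /// Key a URL as its realm (scheme + authority) followed by path segments.
    fn components(url: &Url) -> impl Iterator<Item = String> + '_ {
        let realm = format!("{}://{}", url.scheme(), url.authority());
        std::iter::once(realm).chain(
            url.path_segments()
                .into_iter()
                .flatten()
                .filter(|segment| !segment.is_empty())
                .map(|segment| segment.to_string()),
        )
    }

    fn insert(&mut self, url: &Url, value: T) {
        let mut node = &mut self.root;
        for component in Self::components(url) {
            node = node.children.entry(component).or_default();
        }
        node.value = Some(value);
    }

    /// Return the value attached to the longest matching URL prefix.
    fn get(&self, url: &Url) -> Option<&T> {
        let mut node = &self.root;
        let mut best = node.value.as_ref();
        for component in Self::components(url) {
            match node.children.get(&component) {
                Some(child) => {
                    node = child;
                    best = node.value.as_ref().or(best);
                }
                None => break,
            }
        }
        best
    }
}

fn main() {
    let mut trie = PrefixTrie::new();
    trie.insert(&Url::parse("https://example.com/simple").unwrap(), "index-credentials");
    // A deeper URL under the stored prefix resolves to the stored entry...
    let url = Url::parse("https://example.com/simple/package/").unwrap();
    assert_eq!(trie.get(&url), Some(&"index-credentials"));
    // ...while a different realm does not.
    assert_eq!(trie.get(&Url::parse("https://other.com/simple").unwrap()), None);
}
```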
@@ -1,36 +1,31 @@
-use std::borrow::Cow;
-use std::fmt;
-use std::io::Read;
-use std::io::Write;
-use std::str::FromStr;
-
 use base64::prelude::BASE64_STANDARD;
 use base64::read::DecoderReader;
 use base64::write::EncoderWriter;
-use http::Uri;
+use serde::{Deserialize, Serialize};
+use std::borrow::Cow;
+use std::fmt;
+use uv_redacted::DisplaySafeUrl;
 
 use netrc::Netrc;
-use reqsign::aws::DefaultSigner;
 use reqwest::Request;
 use reqwest::header::HeaderValue;
-use serde::{Deserialize, Serialize};
+use std::io::Read;
+use std::io::Write;
 use url::Url;
 
-use uv_redacted::DisplaySafeUrl;
 use uv_static::EnvVars;
 
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq)]
 pub enum Credentials {
-    /// RFC 7617 HTTP Basic Authentication
     Basic {
         /// The username to use for authentication.
         username: Username,
         /// The password to use for authentication.
         password: Option<Password>,
     },
-    /// RFC 6750 Bearer Token Authentication
     Bearer {
         /// The token to use for authentication.
-        token: Token,
+        token: Vec<u8>,
     },
 }
@@ -102,36 +97,6 @@ impl fmt::Debug for Password {
     }
 }
 
-#[derive(Clone, PartialEq, Eq, Ord, PartialOrd, Hash, Default, Deserialize)]
-#[serde(transparent)]
-pub struct Token(Vec<u8>);
-
-impl Token {
-    pub fn new(token: Vec<u8>) -> Self {
-        Self(token)
-    }
-
-    /// Return the [`Token`] as a byte slice.
-    pub fn as_slice(&self) -> &[u8] {
-        self.0.as_slice()
-    }
-
-    /// Convert the [`Token`] into its underlying [`Vec<u8>`].
-    pub fn into_bytes(self) -> Vec<u8> {
-        self.0
-    }
-
-    /// Return whether the [`Token`] is empty.
-    pub fn is_empty(&self) -> bool {
-        self.0.is_empty()
-    }
-}
-
-impl fmt::Debug for Token {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "****")
-    }
-}
-
 impl Credentials {
     /// Create a set of HTTP Basic Authentication credentials.
     #[allow(dead_code)]
@@ -145,9 +110,7 @@ impl Credentials {
     /// Create a set of Bearer Authentication credentials.
     #[allow(dead_code)]
     pub fn bearer(token: Vec<u8>) -> Self {
-        Self::Bearer {
-            token: Token::new(token),
-        }
+        Self::Bearer { token }
     }
 
     pub fn username(&self) -> Option<&str> {
@@ -318,7 +281,7 @@ impl Credentials {
         // Parse a `Bearer` authentication header.
         if let Some(token) = header.as_bytes().strip_prefix(b"Bearer ") {
             return Some(Self::Bearer {
-                token: Token::new(token.to_vec()),
+                token: token.to_vec(),
             });
         }
@@ -382,127 +345,6 @@ impl Credentials {
     }
 }
 
-#[derive(Clone, Debug)]
-pub(crate) enum Authentication {
-    /// HTTP Basic or Bearer Authentication credentials.
-    Credentials(Credentials),
-
-    /// AWS Signature Version 4 signing.
-    Signer(DefaultSigner),
-}
-
-impl PartialEq for Authentication {
-    fn eq(&self, other: &Self) -> bool {
-        match (self, other) {
-            (Self::Credentials(a), Self::Credentials(b)) => a == b,
-            (Self::Signer(..), Self::Signer(..)) => true,
-            _ => false,
-        }
-    }
-}
-
-impl Eq for Authentication {}
-
-impl From<Credentials> for Authentication {
-    fn from(credentials: Credentials) -> Self {
-        Self::Credentials(credentials)
-    }
-}
-
-impl From<DefaultSigner> for Authentication {
-    fn from(signer: DefaultSigner) -> Self {
-        Self::Signer(signer)
-    }
-}
-
-impl Authentication {
-    /// Return the password used for authentication, if any.
-    pub(crate) fn password(&self) -> Option<&str> {
-        match self {
-            Self::Credentials(credentials) => credentials.password(),
-            Self::Signer(..) => None,
-        }
-    }
-
-    /// Return the username used for authentication, if any.
-    pub(crate) fn username(&self) -> Option<&str> {
-        match self {
-            Self::Credentials(credentials) => credentials.username(),
-            Self::Signer(..) => None,
-        }
-    }
-
-    /// Return the username used for authentication, if any.
-    pub(crate) fn as_username(&self) -> Cow<'_, Username> {
-        match self {
-            Self::Credentials(credentials) => credentials.as_username(),
-            Self::Signer(..) => Cow::Owned(Username::none()),
-        }
-    }
-
-    /// Return the username used for authentication, if any.
-    pub(crate) fn to_username(&self) -> Username {
-        match self {
-            Self::Credentials(credentials) => credentials.to_username(),
-            Self::Signer(..) => Username::none(),
-        }
-    }
-
-    /// Return `true` if the object contains a means of authenticating.
-    pub(crate) fn is_authenticated(&self) -> bool {
-        match self {
-            Self::Credentials(credentials) => credentials.is_authenticated(),
-            Self::Signer(..) => true,
-        }
-    }
-
-    /// Return `true` if the object contains no credentials.
-    pub(crate) fn is_empty(&self) -> bool {
-        match self {
-            Self::Credentials(credentials) => credentials.is_empty(),
-            Self::Signer(..) => false,
-        }
-    }
-
-    /// Apply the authentication to the given request.
-    ///
-    /// Any existing credentials will be overridden.
-    #[must_use]
-    pub(crate) async fn authenticate(&self, mut request: Request) -> Request {
-        match self {
-            Self::Credentials(credentials) => credentials.authenticate(request),
-            Self::Signer(signer) => {
-                // Build an `http::Request` from the `reqwest::Request`.
-                // SAFETY: If we have a valid `reqwest::Request`, we expect (e.g.) the URL to be valid.
-                let uri = Uri::from_str(request.url().as_str()).unwrap();
-                let mut http_req = http::Request::builder()
-                    .method(request.method().clone())
-                    .uri(uri)
-                    .body(())
-                    .unwrap();
-                *http_req.headers_mut() = request.headers().clone();
-
-                // Sign the parts.
-                let (mut parts, ()) = http_req.into_parts();
-                signer
-                    .sign(&mut parts, None)
-                    .await
-                    .expect("AWS signing should succeed");
-
-                // Copy over the signed headers.
-                request.headers_mut().extend(parts.headers);
-
-                // Copy over the signed path and query, if any.
-                if let Some(path_and_query) = parts.uri.path_and_query() {
-                    request.url_mut().set_path(path_and_query.path());
-                    request.url_mut().set_query(path_and_query.query());
-                }
-                request
-            }
-        }
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use insta::assert_debug_snapshot;
@@ -623,15 +465,4 @@ mod tests {
         "Basic { username: Username(Some(\"user\")), password: Some(****) }"
         );
     }
-
-    #[test]
-    fn test_bearer_token_obfuscation() {
-        let token = "super_secret_token";
-        let credentials = Credentials::bearer(token.into());
-        let debugged = format!("{credentials:?}");
-        assert!(
-            !debugged.contains(token),
-            "Token should be obfuscated in Debug impl: {debugged}"
-        );
-    }
 }
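For context on the `Credentials::Basic` variant and the base64 imports in the hunks above: RFC 7617 Basic authentication sends an `Authorization` header of `Basic ` followed by the base64 encoding of `username:password`. A hedged sketch using the `base64` crate, not uv's exact code (`basic_header` is an illustrative helper):

```rust
use base64::prelude::*;

/// Build an RFC 7617 `Authorization` header value from a username and an
/// optional password. An absent password encodes as an empty string.
fn basic_header(username: &str, password: Option<&str>) -> String {
    let pair = format!("{}:{}", username, password.unwrap_or(""));
    format!("Basic {}", BASE64_STANDARD.encode(pair))
}

fn main() {
    // RFC 7617's own example pair: "Aladdin" / "open sesame".
    assert_eq!(
        basic_header("Aladdin", Some("open sesame")),
        "Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ=="
    );
}
```

uv's implementation streams through `EncoderWriter` instead of allocating an intermediate string, but the wire format is the same.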
@@ -95,9 +95,9 @@ impl Indexes {
         index_urls
     }
 
-    /// Get the index for a URL if one exists.
-    pub fn index_for(&self, url: &Url) -> Option<&Index> {
-        self.find_prefix_index(url)
+    /// Get the index URL prefix for a URL if one exists.
+    pub fn index_url_for(&self, url: &Url) -> Option<&DisplaySafeUrl> {
+        self.find_prefix_index(url).map(|index| &index.url)
     }
 
     /// Get the [`AuthPolicy`] for a URL.
@@ -87,26 +87,9 @@ impl KeyringProvider {
         // Ensure we strip credentials from the URL before storing
         let url = url.without_credentials();
 
-        // If there's no path, we'll perform a host-level login
-        let target = if let Some(host) = url.host_str().filter(|_| !url.path().is_empty()) {
-            let mut target = String::new();
-            if url.scheme() != "https" {
-                target.push_str(url.scheme());
-                target.push_str("://");
-            }
-            target.push_str(host);
-            if let Some(port) = url.port() {
-                target.push(':');
-                target.push_str(&port.to_string());
-            }
-            target
-        } else {
-            url.to_string()
-        };
-
         match &self.backend {
             KeyringProviderBackend::Native => {
-                self.store_native(&target, username, password).await?;
+                self.store_native(url.as_str(), username, password).await?;
                 Ok(true)
             }
             KeyringProviderBackend::Subprocess => {
@@ -139,26 +122,9 @@ impl KeyringProvider {
         // Ensure we strip credentials from the URL before storing
         let url = url.without_credentials();
 
-        // If there's no path, we'll perform a host-level login
-        let target = if let Some(host) = url.host_str().filter(|_| !url.path().is_empty()) {
-            let mut target = String::new();
-            if url.scheme() != "https" {
-                target.push_str(url.scheme());
-                target.push_str("://");
-            }
-            target.push_str(host);
-            if let Some(port) = url.port() {
-                target.push(':');
-                target.push_str(&port.to_string());
-            }
-            target
-        } else {
-            url.to_string()
-        };
-
         match &self.backend {
             KeyringProviderBackend::Native => {
-                self.remove_native(&target, username).await?;
+                self.remove_native(url.as_str(), username).await?;
                 Ok(())
             }
             KeyringProviderBackend::Subprocess => {
@@ -404,13 +370,12 @@ mod tests {
         let url = Url::parse("file:/etc/bin/").unwrap();
         let keyring = KeyringProvider::empty();
         // Panics due to debug assertion; returns `None` in production
-        let fetch = keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some("user"));
-        if cfg!(debug_assertions) {
-            let result = std::panic::AssertUnwindSafe(fetch).catch_unwind().await;
-            assert!(result.is_err());
-        } else {
-            assert_eq!(fetch.await, None);
-        }
+        let result = std::panic::AssertUnwindSafe(
+            keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some("user")),
+        )
+        .catch_unwind()
+        .await;
+        assert!(result.is_err());
     }
 
     #[tokio::test]
@@ -418,13 +383,12 @@ mod tests {
         let url = Url::parse("https://user:password@example.com").unwrap();
         let keyring = KeyringProvider::empty();
         // Panics due to debug assertion; returns `None` in production
-        let fetch = keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some(url.username()));
-        if cfg!(debug_assertions) {
-            let result = std::panic::AssertUnwindSafe(fetch).catch_unwind().await;
-            assert!(result.is_err());
-        } else {
-            assert_eq!(fetch.await, None);
-        }
+        let result = std::panic::AssertUnwindSafe(
+            keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some(url.username())),
+        )
+        .catch_unwind()
+        .await;
+        assert!(result.is_err());
     }
 
    #[tokio::test]
@@ -432,13 +396,12 @@ mod tests {
         let url = Url::parse("https://example.com").unwrap();
         let keyring = KeyringProvider::empty();
         // Panics due to debug assertion; returns `None` in production
-        let fetch = keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some(url.username()));
-        if cfg!(debug_assertions) {
-            let result = std::panic::AssertUnwindSafe(fetch).catch_unwind().await;
-            assert!(result.is_err());
-        } else {
-            assert_eq!(fetch.await, None);
-        }
+        let result = std::panic::AssertUnwindSafe(
+            keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some(url.username())),
+        )
+        .catch_unwind()
+        .await;
+        assert!(result.is_err());
     }
 
     #[tokio::test]
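One side of the keyring hunks above derives a host-level "target" string before talking to the system keyring: https URLs collapse to `host[:port]`, while other schemes keep an explicit `scheme://` prefix, and URLs without a host fall back to the full URL string. A standalone sketch of that derivation (assuming the `url` crate; `keyring_target` is an illustrative name), with the behavior pinned by assertions:

```rust
use url::Url;

fn keyring_target(url: &Url) -> String {
    // Mirrors the logic in the deleted hunk: only build a host-level target
    // when the URL actually has a host and a non-empty path.
    match url.host_str().filter(|_| !url.path().is_empty()) {
        Some(host) => {
            let mut target = String::new();
            if url.scheme() != "https" {
                target.push_str(url.scheme());
                target.push_str("://");
            }
            target.push_str(host);
            if let Some(port) = url.port() {
                target.push(':');
                target.push_str(&port.to_string());
            }
            target
        }
        // No host: fall back to the full URL string.
        None => url.to_string(),
    }
}

fn main() {
    let https = Url::parse("https://example.com/simple").unwrap();
    assert_eq!(keyring_target(&https), "example.com");

    let custom = Url::parse("http://example.com:8080/simple").unwrap();
    assert_eq!(keyring_target(&custom), "http://example.com:8080");
}
```

The other side simply passes `url.as_str()` through, so the keyring entry is keyed by the exact (credential-stripped) URL.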
@@ -1,13 +1,17 @@
+use std::sync::{Arc, LazyLock};
+
+use tracing::trace;
+
+use uv_redacted::DisplaySafeUrl;
+
 pub use access_token::AccessToken;
-pub use cache::CredentialsCache;
+use cache::CredentialsCache;
 pub use credentials::{Credentials, Username};
 pub use index::{AuthPolicy, Index, Indexes};
 pub use keyring::KeyringProvider;
 pub use middleware::AuthMiddleware;
-pub use pyx::{
-    DEFAULT_TOLERANCE_SECS, PyxJwt, PyxOAuthTokens, PyxTokenStore, PyxTokens, TokenStoreError,
-};
-pub use realm::{Realm, RealmRef};
+pub use pyx::{DEFAULT_TOLERANCE_SECS, PyxOAuthTokens, PyxTokenStore, PyxTokens, TokenStoreError};
+pub use realm::Realm;
 pub use service::{Service, ServiceParseError};
 pub use store::{AuthBackend, AuthScheme, TextCredentialStore, TomlCredentialError};
@@ -22,3 +26,32 @@ mod pyx;
 mod realm;
 mod service;
 mod store;
+
+// TODO(zanieb): Consider passing a cache explicitly throughout
+
+/// Global authentication cache for a uv invocation
+///
+/// This is used to share credentials across uv clients.
+pub(crate) static CREDENTIALS_CACHE: LazyLock<CredentialsCache> =
+    LazyLock::new(CredentialsCache::default);
+
+/// Populate the global authentication store with credentials on a URL, if there are any.
+///
+/// Returns `true` if the store was updated.
+pub fn store_credentials_from_url(url: &DisplaySafeUrl) -> bool {
+    if let Some(credentials) = Credentials::from_url(url) {
+        trace!("Caching credentials for {url}");
+        CREDENTIALS_CACHE.insert(url, Arc::new(credentials));
+        true
+    } else {
+        false
+    }
+}
+
+/// Populate the global authentication store with credentials on a URL, if there are any.
+///
+/// Returns `true` if the store was updated.
+pub fn store_credentials(url: &DisplaySafeUrl, credentials: Arc<Credentials>) {
+    trace!("Caching credentials for {url}");
+    CREDENTIALS_CACHE.insert(url, credentials);
+}
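The `lib.rs` hunk above is the pivot of this refactor: one side keeps a process-global `CREDENTIALS_CACHE` behind `LazyLock` with free functions writing into it, while the other moves those `store_credentials*` entry points onto `CredentialsCache` itself (as seen in the `cache.rs` hunks earlier). A minimal sketch of the global-cache pattern with illustrative names, where a plain `HashMap` stands in for the real cache (requires Rust 1.80+ for `std::sync::LazyLock`):

```rust
use std::collections::HashMap;
use std::sync::{LazyLock, RwLock};

// Initialized on first touch; every client in the process shares this store.
static CACHE: LazyLock<RwLock<HashMap<String, String>>> =
    LazyLock::new(|| RwLock::new(HashMap::new()));

fn store(realm: &str, secret: &str) {
    CACHE.write().unwrap().insert(realm.to_string(), secret.to_string());
}

fn lookup(realm: &str) -> Option<String> {
    CACHE.read().unwrap().get(realm).cloned()
}

fn main() {
    store("https://example.com", "token");
    assert_eq!(lookup("https://example.com").as_deref(), Some("token"));
}
```

The trade-off the `TODO(zanieb)` comment gestures at: a global is convenient to thread through middleware, but passing the cache explicitly makes ownership and test isolation clearer, which is the direction the other side of this diff takes.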
@ -10,24 +10,19 @@ use tracing::{debug, trace, warn};
|
||||||
|
|
||||||
use uv_preview::{Preview, PreviewFeatures};
|
use uv_preview::{Preview, PreviewFeatures};
|
||||||
use uv_redacted::DisplaySafeUrl;
|
use uv_redacted::DisplaySafeUrl;
|
||||||
use uv_static::EnvVars;
|
|
||||||
use uv_warnings::owo_colors::OwoColorize;
|
use uv_warnings::owo_colors::OwoColorize;
|
||||||
|
|
||||||
use crate::credentials::Authentication;
|
use crate::providers::HuggingFaceProvider;
|
||||||
use crate::providers::{HuggingFaceProvider, S3EndpointProvider};
|
|
||||||
use crate::pyx::{DEFAULT_TOLERANCE_SECS, PyxTokenStore};
|
use crate::pyx::{DEFAULT_TOLERANCE_SECS, PyxTokenStore};
|
||||||
use crate::{
|
use crate::{
|
||||||
AccessToken, CredentialsCache, KeyringProvider,
|
AccessToken, CREDENTIALS_CACHE, CredentialsCache, KeyringProvider,
|
||||||
cache::FetchUrl,
|
cache::FetchUrl,
|
||||||
credentials::{Credentials, Username},
|
credentials::{Credentials, Username},
|
||||||
index::{AuthPolicy, Indexes},
|
index::{AuthPolicy, Indexes},
|
||||||
realm::Realm,
|
realm::Realm,
|
||||||
};
|
};
|
||||||
use crate::{Index, TextCredentialStore};
|
|
||||||
|
|
||||||
/// Cached check for whether we're running in Dependabot.
|
use crate::{TextCredentialStore, TomlCredentialError};
|
||||||
static IS_DEPENDABOT: LazyLock<bool> =
|
|
||||||
LazyLock::new(|| std::env::var(EnvVars::DEPENDABOT).is_ok_and(|value| value == "true"));
|
|
||||||
|
|
||||||
/// Strategy for loading netrc files.
|
/// Strategy for loading netrc files.
|
||||||
enum NetrcMode {
|
enum NetrcMode {
|
||||||
|
|
@@ -65,55 +60,49 @@ impl NetrcMode {

 /// Strategy for loading text-based credential files.
 enum TextStoreMode {
-    Automatic(tokio::sync::OnceCell<Option<TextCredentialStore>>),
+    Automatic(LazyLock<Option<TextCredentialStore>>),
     Enabled(TextCredentialStore),
     Disabled,
 }

 impl Default for TextStoreMode {
     fn default() -> Self {
-        Self::Automatic(tokio::sync::OnceCell::new())
+        // TODO(zanieb): Reconsider this pattern. We're just mirroring the [`NetrcMode`]
+        // implementation for now.
+        Self::Automatic(LazyLock::new(|| {
+            let path = TextCredentialStore::default_file()
+                .inspect_err(|err| {
+                    warn!("Failed to determine credentials file path: {}", err);
+                })
+                .ok()?;
+
+            match TextCredentialStore::read(&path) {
+                Ok((store, _lock)) => {
+                    debug!("Loaded credential file {}", path.display());
+                    Some(store)
+                }
+                Err(TomlCredentialError::Io(err)) if err.kind() == std::io::ErrorKind::NotFound => {
+                    debug!("No credentials file found at {}", path.display());
+                    None
+                }
+                Err(err) => {
+                    warn!(
+                        "Failed to load credentials from {}: {}",
+                        path.display(),
+                        err
+                    );
+                    None
+                }
+            }
+        }))
     }
 }

 impl TextStoreMode {
-    async fn load_default_store() -> Option<TextCredentialStore> {
-        let path = TextCredentialStore::default_file()
-            .inspect_err(|err| {
-                warn!("Failed to determine credentials file path: {}", err);
-            })
-            .ok()?;
-
-        match TextCredentialStore::read(&path).await {
-            Ok((store, _lock)) => {
-                debug!("Loaded credential file {}", path.display());
-                Some(store)
-            }
-            Err(err)
-                if err
-                    .as_io_error()
-                    .is_some_and(|err| err.kind() == std::io::ErrorKind::NotFound) =>
-            {
-                debug!("No credentials file found at {}", path.display());
-                None
-            }
-            Err(err) => {
-                warn!(
-                    "Failed to load credentials from {}: {}",
-                    path.display(),
-                    err
-                );
-                None
-            }
-        }
-    }
-
     /// Get the parsed credential store, if enabled.
-    async fn get(&self) -> Option<&TextCredentialStore> {
+    fn get(&self) -> Option<&TextCredentialStore> {
         match self {
-            // TODO(zanieb): Reconsider this pattern. We're just mirroring the [`NetrcMode`]
-            // implementation for now.
-            Self::Automatic(lock) => lock.get_or_init(Self::load_default_store).await.as_ref(),
+            Self::Automatic(lock) => lock.as_ref(),
             Self::Enabled(store) => Some(store),
             Self::Disabled => None,
        }
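Aside: the change above swaps a synchronous `LazyLock` for `tokio::sync::OnceCell`, which lets the first caller run async I/O during initialization while concurrent callers await the same result. A minimal sketch of that pattern (the `load` function and file name are illustrative):

use tokio::sync::OnceCell;

static STORE: OnceCell<Option<String>> = OnceCell::const_new();

// Hypothetical async loader; stands in for reading a credentials file.
async fn load() -> Option<String> {
    tokio::fs::read_to_string("credentials.toml").await.ok()
}

#[tokio::main]
async fn main() {
    // `get_or_init` runs `load` at most once; concurrent callers wait for it.
    let store = STORE.get_or_init(load).await;
    println!("loaded: {}", store.is_some());
}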
@@ -129,15 +118,6 @@ enum TokenState {
     Initialized(Option<AccessToken>),
 }

-#[derive(Clone)]
-enum S3CredentialState {
-    /// The S3 credential state has not yet been initialized.
-    Uninitialized,
-    /// The S3 credential state has been initialized, with either a signer or `None` if
-    /// no S3 endpoint is configured.
-    Initialized(Option<Arc<Authentication>>),
-}
-
 /// A middleware that adds basic authentication to requests.
 ///
 /// Uses a cache to propagate credentials from previously seen requests and
@@ -146,8 +126,7 @@ pub struct AuthMiddleware {
     netrc: NetrcMode,
     text_store: TextStoreMode,
     keyring: Option<KeyringProvider>,
-    /// Global authentication cache for a uv invocation to share credentials across uv clients.
-    cache: Arc<CredentialsCache>,
+    cache: Option<CredentialsCache>,
     /// Auth policies for specific URLs.
     indexes: Indexes,
     /// Set all endpoints as needing authentication. We never try to send an
@@ -159,31 +138,21 @@ pub struct AuthMiddleware {
     pyx_token_store: Option<PyxTokenStore>,
     /// Tokens to use for persistent credentials.
     pyx_token_state: Mutex<TokenState>,
-    /// Cached S3 credentials to avoid running the credential helper multiple times.
-    s3_credential_state: Mutex<S3CredentialState>,
     preview: Preview,
 }

-impl Default for AuthMiddleware {
-    fn default() -> Self {
-        Self::new()
-    }
-}
-
 impl AuthMiddleware {
     pub fn new() -> Self {
         Self {
             netrc: NetrcMode::default(),
             text_store: TextStoreMode::default(),
             keyring: None,
-            // TODO(konsti): There shouldn't be a credential cache without that in the initializer.
-            cache: Arc::new(CredentialsCache::default()),
+            cache: None,
             indexes: Indexes::new(),
             only_authenticated: false,
             base_client: None,
             pyx_token_store: None,
             pyx_token_state: Mutex::new(TokenState::Uninitialized),
-            s3_credential_state: Mutex::new(S3CredentialState::Uninitialized),
             preview: Preview::default(),
         }
     }
@@ -231,14 +200,7 @@ impl AuthMiddleware {
     /// Configure the [`CredentialsCache`] to use.
     #[must_use]
     pub fn with_cache(mut self, cache: CredentialsCache) -> Self {
-        self.cache = Arc::new(cache);
-        self
-    }
-
-    /// Configure the [`CredentialsCache`] to use from an existing [`Arc`].
-    #[must_use]
-    pub fn with_cache_arc(mut self, cache: Arc<CredentialsCache>) -> Self {
-        self.cache = cache;
+        self.cache = Some(cache);
         self
     }
@@ -271,9 +233,17 @@ impl AuthMiddleware {
         self
     }

-    /// Global authentication cache for a uv invocation to share credentials across uv clients.
+    /// Get the configured authentication store.
+    ///
+    /// If not set, the global store is used.
     fn cache(&self) -> &CredentialsCache {
-        &self.cache
+        self.cache.as_ref().unwrap_or(&CREDENTIALS_CACHE)
+    }
+}
+
+impl Default for AuthMiddleware {
+    fn default() -> Self {
+        Self::new()
     }
 }
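Aside: the 0.8.17 accessor above falls back to a process-global cache when no per-middleware cache was configured. A minimal sketch of the same instance-or-global pattern (all names here are illustrative, not from uv):

use std::collections::HashMap;
use std::sync::{LazyLock, Mutex};

// Hypothetical global cache shared by every client in the process.
static GLOBAL_CACHE: LazyLock<Mutex<HashMap<String, String>>> =
    LazyLock::new(|| Mutex::new(HashMap::new()));

struct Middleware {
    cache: Option<Mutex<HashMap<String, String>>>,
}

impl Middleware {
    // Prefer the per-instance cache; otherwise use the global one.
    fn cache(&self) -> &Mutex<HashMap<String, String>> {
        self.cache.as_ref().unwrap_or(&GLOBAL_CACHE)
    }
}

fn main() {
    let m = Middleware { cache: None };
    m.cache().lock().unwrap().insert("k".into(), "v".into());
    println!("{:?}", GLOBAL_CACHE.lock().unwrap().get("k"));
}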
@@ -322,16 +292,16 @@ impl Middleware for AuthMiddleware {
         next: Next<'_>,
     ) -> reqwest_middleware::Result<Response> {
         // Check for credentials attached to the request already
-        let request_credentials = Credentials::from_request(&request).map(Authentication::from);
+        let request_credentials = Credentials::from_request(&request);

         // In the middleware, existing credentials are already moved from the URL
         // to the headers so for display purposes we restore some information
         let url = tracing_url(&request, request_credentials.as_ref());
-        let index = self.indexes.index_for(request.url());
+        let maybe_index_url = self.indexes.index_url_for(request.url());
         let auth_policy = self.indexes.auth_policy_for(request.url());
         trace!("Handling request for {url} with authentication policy {auth_policy}");

-        let credentials: Option<Arc<Authentication>> = if matches!(auth_policy, AuthPolicy::Never) {
+        let credentials: Option<Arc<Credentials>> = if matches!(auth_policy, AuthPolicy::Never) {
             None
         } else {
             if let Some(request_credentials) = request_credentials {
@@ -342,7 +312,7 @@ impl Middleware for AuthMiddleware {
                         extensions,
                         next,
                         &url,
-                        index,
+                        maybe_index_url,
                         auth_policy,
                     )
                     .await;
@@ -355,7 +325,7 @@ impl Middleware for AuthMiddleware {
             // making a failing request
             let credentials = self.cache().get_url(request.url(), &Username::none());
             if let Some(credentials) = credentials.as_ref() {
-                request = credentials.authenticate(request).await;
+                request = credentials.authenticate(request);

                 // If it's fully authenticated, finish the request
                 if credentials.is_authenticated() {
@@ -382,15 +352,11 @@ impl Middleware for AuthMiddleware {
             .is_some_and(|token_store| token_store.is_known_url(request.url()));

         let must_authenticate = self.only_authenticated
-            || (match auth_policy {
+            || match auth_policy {
                 AuthPolicy::Auto => is_known_url,
                 AuthPolicy::Always => true,
                 AuthPolicy::Never => false,
-            }
-            // Dependabot intercepts HTTP requests and injects credentials, which means that we
-            // cannot eagerly enforce an `AuthPolicy` as we don't know whether credentials will be
-            // added outside of uv.
-            && !*IS_DEPENDABOT);
+            };

         let (mut retry_request, response) = if !must_authenticate {
             let url = tracing_url(&request, credentials.as_deref());
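Aside: `must_authenticate` is a short-circuit policy decision which, on the main side, is additionally suppressed when an external proxy like Dependabot may inject credentials later. A compact illustration of the same shape (function and parameter names are illustrative):

#[derive(Clone, Copy)]
enum AuthPolicy {
    Auto,
    Always,
    Never,
}

// Whether a request must carry credentials before it is sent.
fn must_authenticate(
    only_authenticated: bool,
    policy: AuthPolicy,
    is_known_url: bool,
    is_dependabot: bool,
) -> bool {
    only_authenticated
        || (match policy {
            AuthPolicy::Auto => is_known_url,
            AuthPolicy::Always => true,
            AuthPolicy::Never => false,
        }
        // An interception proxy may add credentials outside our control,
        // so the policy cannot be enforced eagerly.
        && !is_dependabot)
}

fn main() {
    assert!(must_authenticate(false, AuthPolicy::Always, false, false));
    assert!(!must_authenticate(false, AuthPolicy::Always, false, true));
    println!("ok");
}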
@@ -440,8 +406,8 @@ impl AuthMiddleware {
             .as_ref()
             .map(|credentials| credentials.to_username())
             .unwrap_or(Username::none());
-        let credentials = if let Some(index) = index {
-            self.cache().get_url(&index.url, &username).or_else(|| {
+        let credentials = if let Some(index_url) = maybe_index_url {
+            self.cache().get_url(index_url, &username).or_else(|| {
                 self.cache()
                     .get_realm(Realm::from(&**retry_request_url), username)
             })
@@ -456,7 +422,7 @@ impl AuthMiddleware {
         if let Some(credentials) = credentials.as_ref() {
             if credentials.is_authenticated() {
                 trace!("Retrying request for {url} with credentials from cache {credentials:?}");
-                retry_request = credentials.authenticate(retry_request).await;
+                retry_request = credentials.authenticate(retry_request);
                 return self
                     .complete_request(None, retry_request, extensions, next, auth_policy)
                     .await;
@@ -469,12 +435,12 @@ impl AuthMiddleware {
             .fetch_credentials(
                 credentials.as_deref(),
                 retry_request_url,
-                index,
+                maybe_index_url,
                 auth_policy,
             )
             .await
         {
-            retry_request = credentials.authenticate(retry_request).await;
+            retry_request = credentials.authenticate(retry_request);
             trace!("Retrying request for {url} with {credentials:?}");
             return self
                 .complete_request(
@@ -490,7 +456,7 @@ impl AuthMiddleware {
         if let Some(credentials) = credentials.as_ref() {
             if !attempt_has_username {
                 trace!("Retrying request for {url} with username from cache {credentials:?}");
-                retry_request = credentials.authenticate(retry_request).await;
+                retry_request = credentials.authenticate(retry_request);
                 return self
                     .complete_request(None, retry_request, extensions, next, auth_policy)
                     .await;
@@ -526,7 +492,7 @@ impl AuthMiddleware {
     /// If credentials are present, insert them into the cache on success.
     async fn complete_request(
         &self,
-        credentials: Option<Arc<Authentication>>,
+        credentials: Option<Arc<Credentials>>,
         request: Request,
         extensions: &mut Extensions,
         next: Next<'_>,
@@ -536,7 +502,7 @@ impl AuthMiddleware {
             // Nothing to insert into the cache if we don't have credentials
             return next.run(request, extensions).await;
         };
-        let url = DisplaySafeUrl::from_url(request.url().clone());
+        let url = DisplaySafeUrl::from(request.url().clone());
         if matches!(auth_policy, AuthPolicy::Always) && credentials.password().is_none() {
             return Err(Error::Middleware(format_err!("Missing password for {url}")));
         }
@@ -558,12 +524,12 @@ impl AuthMiddleware {
     /// Use known request credentials to complete the request.
     async fn complete_request_with_request_credentials(
         &self,
-        credentials: Authentication,
+        credentials: Credentials,
         mut request: Request,
         extensions: &mut Extensions,
         next: Next<'_>,
         url: &DisplaySafeUrl,
-        index: Option<&Index>,
+        index_url: Option<&DisplaySafeUrl>,
         auth_policy: AuthPolicy,
     ) -> reqwest_middleware::Result<Response> {
         let credentials = Arc::new(credentials);
@@ -579,21 +545,17 @@ impl AuthMiddleware {
         trace!("Request for {url} is missing a password, looking for credentials");

         // There's just a username, try to find a password.
-        // If we have an index, check the cache for that URL. Otherwise,
+        // If we have an index URL, check the cache for that URL. Otherwise,
         // check for the realm.
-        let maybe_cached_credentials = if let Some(index) = index {
+        let maybe_cached_credentials = if let Some(index_url) = index_url {
             self.cache()
-                .get_url(&index.url, credentials.as_username().as_ref())
-                .or_else(|| {
-                    self.cache()
-                        .get_url(&index.root_url, credentials.as_username().as_ref())
-                })
+                .get_url(index_url, credentials.as_username().as_ref())
         } else {
             self.cache()
                 .get_realm(Realm::from(request.url()), credentials.to_username())
         };
         if let Some(credentials) = maybe_cached_credentials {
-            request = credentials.authenticate(request).await;
+            request = credentials.authenticate(request);
             // Do not insert already-cached credentials
             let credentials = None;
             return self
@@ -605,27 +567,27 @@ impl AuthMiddleware {
             .cache()
             .get_url(request.url(), credentials.as_username().as_ref())
         {
-            request = credentials.authenticate(request).await;
+            request = credentials.authenticate(request);
             // Do not insert already-cached credentials
             None
         } else if let Some(credentials) = self
             .fetch_credentials(
                 Some(&credentials),
                 DisplaySafeUrl::ref_cast(request.url()),
-                index,
+                index_url,
                 auth_policy,
             )
             .await
         {
-            request = credentials.authenticate(request).await;
+            request = credentials.authenticate(request);
             Some(credentials)
-        } else if index.is_some() {
+        } else if index_url.is_some() {
             // If this is a known index, we fall back to checking for the realm.
             if let Some(credentials) = self
                 .cache()
                 .get_realm(Realm::from(request.url()), credentials.to_username())
             {
-                request = credentials.authenticate(request).await;
+                request = credentials.authenticate(request);
                 Some(credentials)
             } else {
                 Some(credentials)
@@ -644,19 +606,19 @@ impl AuthMiddleware {
     /// Supports netrc file and keyring lookups.
     async fn fetch_credentials(
         &self,
-        credentials: Option<&Authentication>,
+        credentials: Option<&Credentials>,
         url: &DisplaySafeUrl,
-        index: Option<&Index>,
+        maybe_index_url: Option<&DisplaySafeUrl>,
         auth_policy: AuthPolicy,
-    ) -> Option<Arc<Authentication>> {
+    ) -> Option<Arc<Credentials>> {
         let username = Username::from(
             credentials.map(|credentials| credentials.username().unwrap_or_default().to_string()),
         );

         // Fetches can be expensive, so we will only run them _once_ per realm or index URL and username combination
         // All other requests for the same realm or index URL will wait until the first one completes
-        let key = if let Some(index) = index {
-            (FetchUrl::Index(index.url.clone()), username)
+        let key = if let Some(index_url) = maybe_index_url {
+            (FetchUrl::Index(index_url.clone()), username)
         } else {
            (FetchUrl::Realm(Realm::from(&**url)), username)
        };
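Aside: the comment above describes a single-flight scheme: the first request for a given (realm-or-index, username) key performs the expensive fetch, and concurrent requests for the same key await its result. A minimal sketch of that idea with tokio (the `Key` type and the fetched value are illustrative, not uv's actual types):

use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::{Mutex, OnceCell};

type Key = (String, String); // e.g. (realm, username)

#[derive(Default)]
struct SingleFlight {
    cells: Mutex<HashMap<Key, Arc<OnceCell<Option<String>>>>>,
}

impl SingleFlight {
    async fn fetch(&self, key: Key) -> Option<String> {
        // Every caller for the same key shares one cell.
        let cell = {
            let mut cells = self.cells.lock().await;
            cells.entry(key.clone()).or_default().clone()
        };
        cell.get_or_init(|| async move {
            // The expensive lookup runs at most once per key.
            Some(format!("credentials for {key:?}"))
        })
        .await
        .clone()
    }
}

#[tokio::main]
async fn main() {
    let sf = SingleFlight::default();
    let got = sf.fetch(("https://example.com".into(), "user".into())).await;
    println!("{got:?}");
}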
@@ -680,38 +642,13 @@ impl AuthMiddleware {
            return credentials;
        }

-        // Support for known providers, like Hugging Face and S3.
-        if let Some(credentials) = HuggingFaceProvider::credentials_for(url)
-            .map(Authentication::from)
-            .map(Arc::new)
-        {
+        // Support for known providers, like Hugging Face.
+        if let Some(credentials) = HuggingFaceProvider::credentials_for(url).map(Arc::new) {
            debug!("Found Hugging Face credentials for {url}");
            self.cache().fetches.done(key, Some(credentials.clone()));
            return Some(credentials);
        }

-        if S3EndpointProvider::is_s3_endpoint(url, self.preview) {
-            let mut s3_state = self.s3_credential_state.lock().await;
-
-            // If the S3 credential state is uninitialized, initialize it.
-            let credentials = match &*s3_state {
-                S3CredentialState::Uninitialized => {
-                    trace!("Initializing S3 credentials for {url}");
-                    let signer = S3EndpointProvider::create_signer();
-                    let credentials = Arc::new(Authentication::from(signer));
-                    *s3_state = S3CredentialState::Initialized(Some(credentials.clone()));
-                    Some(credentials)
-                }
-                S3CredentialState::Initialized(credentials) => credentials.clone(),
-            };
-
-            if let Some(credentials) = credentials {
-                debug!("Found S3 credentials for {url}");
-                self.cache().fetches.done(key, Some(credentials.clone()));
-                return Some(credentials);
-            }
-        }
-
        // If this is a known URL, authenticate it via the token store.
        if let Some(base_client) = self.base_client.as_ref() {
            if let Some(token_store) = self.pyx_token_store.as_ref() {
@@ -741,7 +678,7 @@ impl AuthMiddleware {

                let credentials = token.map(|token| {
                    trace!("Using credentials from token store for {url}");
-                    Arc::new(Authentication::from(Credentials::from(token)))
+                    Arc::new(Credentials::from(token))
                });

                // Register the fetch for this key
@@ -767,16 +704,9 @@ impl AuthMiddleware {
            Some(credentials)

        // Text credential store support.
-        } else if let Some(credentials) = self.text_store.get().await.and_then(|text_store| {
+        } else if let Some(credentials) = self.text_store.get().and_then(|text_store| {
            debug!("Checking text store for credentials for {url}");
-            text_store
-                .get_credentials(
-                    url,
-                    credentials
-                        .as_ref()
-                        .and_then(|credentials| credentials.username()),
-                )
-                .cloned()
+            text_store.get_credentials(url, credentials.as_ref().and_then(|credentials| credentials.username())).cloned()
        }) {
            debug!("Found credentials in plaintext store for {url}");
            Some(credentials)
@@ -789,22 +719,13 @@ impl AuthMiddleware {
            } else {
                String::new()
            };
-            if let Some(index) = index {
-                // N.B. The native store performs an exact look up right now, so we use the root
-                // URL of the index instead of relying on prefix-matching.
-                debug!(
-                    "Checking native store for credentials for index URL {}{}",
-                    display_username, index.root_url
-                );
-                native_store.fetch(&index.root_url, username).await
+            if let Some(index_url) = maybe_index_url {
+                debug!("Checking native store for credentials for index URL {}{}", display_username, index_url);
+                native_store.fetch(DisplaySafeUrl::ref_cast(index_url), username).await
            } else {
-                debug!(
-                    "Checking native store for credentials for URL {}{}",
-                    display_username, url
-                );
+                debug!("Checking native store for credentials for URL {}{}", display_username, url);
                native_store.fetch(url, username).await
            }
-            // TODO(zanieb): We should have a realm fallback here too
        } else {
            None
        }
@@ -821,37 +742,24 @@ impl AuthMiddleware {
            // URLs; instead, we fetch if there's a username or if the user has requested to
            // always authenticate.
            if let Some(username) = credentials.and_then(|credentials| credentials.username()) {
-                if let Some(index) = index {
-                    debug!(
-                        "Checking keyring for credentials for index URL {}@{}",
-                        username, index.url
-                    );
-                    keyring
-                        .fetch(DisplaySafeUrl::ref_cast(&index.url), Some(username))
-                        .await
+                if let Some(index_url) = maybe_index_url {
+                    debug!("Checking keyring for credentials for index URL {}@{}", username, index_url);
+                    keyring.fetch(DisplaySafeUrl::ref_cast(index_url), Some(username)).await
                } else {
-                    debug!(
-                        "Checking keyring for credentials for full URL {}@{}",
-                        username, url
-                    );
+                    debug!("Checking keyring for credentials for full URL {}@{}", username, url);
                    keyring.fetch(url, Some(username)).await
                }
            } else if matches!(auth_policy, AuthPolicy::Always) {
-                if let Some(index) = index {
+                if let Some(index_url) = maybe_index_url {
                    debug!(
-                        "Checking keyring for credentials for index URL {} without username due to `authenticate = always`",
-                        index.url
+                        "Checking keyring for credentials for index URL {index_url} without username due to `authenticate = always`"
                    );
-                    keyring
-                        .fetch(DisplaySafeUrl::ref_cast(&index.url), None)
-                        .await
+                    keyring.fetch(DisplaySafeUrl::ref_cast(index_url), None).await
                } else {
                    None
                }
            } else {
-                debug!(
-                    "Skipping keyring fetch for {url} without username; use `authenticate = always` to force"
-                );
+                debug!("Skipping keyring fetch for {url} without username; use `authenticate = always` to force");
                None
            }
        }
@@ -861,9 +769,8 @@ impl AuthMiddleware {
            Some(credentials)
        } else {
            None
-        };
-
-        let credentials = credentials.map(Authentication::from).map(Arc::new);
+        }
+        .map(Arc::new);

        // Register the fetch for this key
        self.cache().fetches.done(key, credentials.clone());
@@ -872,9 +779,9 @@ impl AuthMiddleware {
        }
    }
 }

-fn tracing_url(request: &Request, credentials: Option<&Authentication>) -> DisplaySafeUrl {
-    let mut url = DisplaySafeUrl::from_url(request.url().clone());
-    if let Some(Authentication::Credentials(creds)) = credentials {
+fn tracing_url(request: &Request, credentials: Option<&Credentials>) -> DisplaySafeUrl {
+    let mut url = DisplaySafeUrl::from(request.url().clone());
+    if let Some(creds) = credentials {
        if let Some(username) = creds.username() {
            let _ = url.set_username(username);
        }
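Aside: `tracing_url` exists only for log output. Real credentials were already moved into headers, so the username is re-attached to a display copy of the URL and the password is never echoed. A standalone sketch of the same idea with the `url` crate (the function name is illustrative):

use url::Url;

// Rebuild a display URL that shows who the request authenticates as.
fn display_url(request_url: &Url, username: Option<&str>) -> Url {
    let mut url = request_url.clone();
    if let Some(username) = username {
        // Setting the username can fail for some schemes; ignore that for display.
        let _ = url.set_username(username);
    }
    url
}

fn main() {
    let url = Url::parse("https://pypi-proxy.fly.dev/basic-auth/simple").unwrap();
    assert_eq!(
        display_url(&url, Some("user")).as_str(),
        "https://user@pypi-proxy.fly.dev/basic-auth/simple"
    );
    println!("{}", display_url(&url, Some("user")));
}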
@@ -1015,10 +922,10 @@ mod tests {
        let cache = CredentialsCache::new();
        cache.insert(
            &base_url,
-            Arc::new(Authentication::from(Credentials::basic(
+            Arc::new(Credentials::basic(
                Some(username.to_string()),
                Some(password.to_string()),
-            ))),
+            )),
        );

        let client = test_client_builder()
@@ -1069,10 +976,7 @@ mod tests {
        let cache = CredentialsCache::new();
        cache.insert(
            &base_url,
-            Arc::new(Authentication::from(Credentials::basic(
-                Some(username.to_string()),
-                None,
-            ))),
+            Arc::new(Credentials::basic(Some(username.to_string()), None)),
        );

        let client = test_client_builder()
@@ -1465,10 +1369,7 @@ mod tests {
        // URL.
        cache.insert(
            &base_url,
-            Arc::new(Authentication::from(Credentials::basic(
-                Some(username.to_string()),
-                None,
-            ))),
+            Arc::new(Credentials::basic(Some(username.to_string()), None)),
        );
        let client = test_client_builder()
            .with(AuthMiddleware::new().with_cache(cache).with_keyring(Some(
@@ -1517,17 +1418,17 @@ mod tests {
        // Seed the cache with our credentials
        cache.insert(
            &base_url_1,
-            Arc::new(Authentication::from(Credentials::basic(
+            Arc::new(Credentials::basic(
                Some(username_1.to_string()),
                Some(password_1.to_string()),
-            ))),
+            )),
        );
        cache.insert(
            &base_url_2,
-            Arc::new(Authentication::from(Credentials::basic(
+            Arc::new(Credentials::basic(
                Some(username_2.to_string()),
                Some(password_2.to_string()),
-            ))),
+            )),
        );

        let client = test_client_builder()
@@ -1712,17 +1613,17 @@ mod tests {
        // Seed the cache with our credentials
        cache.insert(
            &base_url_1,
-            Arc::new(Authentication::from(Credentials::basic(
+            Arc::new(Credentials::basic(
                Some(username_1.to_string()),
                Some(password_1.to_string()),
-            ))),
+            )),
        );
        cache.insert(
            &base_url_2,
-            Arc::new(Authentication::from(Credentials::basic(
+            Arc::new(Credentials::basic(
                Some(username_2.to_string()),
                Some(password_2.to_string()),
-            ))),
+            )),
        );

        let client = test_client_builder()
@@ -2062,13 +1963,13 @@ mod tests {
        let base_url_2 = base_url.join("prefix_2")?;
        let indexes = Indexes::from_indexes(vec![
            Index {
-                url: DisplaySafeUrl::from_url(base_url_1.clone()),
-                root_url: DisplaySafeUrl::from_url(base_url_1.clone()),
+                url: DisplaySafeUrl::from(base_url_1.clone()),
+                root_url: DisplaySafeUrl::from(base_url_1.clone()),
                auth_policy: AuthPolicy::Auto,
            },
            Index {
-                url: DisplaySafeUrl::from_url(base_url_2.clone()),
-                root_url: DisplaySafeUrl::from_url(base_url_2.clone()),
+                url: DisplaySafeUrl::from(base_url_2.clone()),
+                root_url: DisplaySafeUrl::from(base_url_2.clone()),
                auth_policy: AuthPolicy::Auto,
            },
        ]);
@@ -2170,8 +2071,8 @@ mod tests {
        let base_url = Url::parse(&server.uri())?;
        let index_url = base_url.join("prefix_1")?;
        let indexes = Indexes::from_indexes(vec![Index {
-            url: DisplaySafeUrl::from_url(index_url.clone()),
-            root_url: DisplaySafeUrl::from_url(index_url.clone()),
+            url: DisplaySafeUrl::from(index_url.clone()),
+            root_url: DisplaySafeUrl::from(index_url.clone()),
            auth_policy: AuthPolicy::Auto,
        }]);

@@ -2225,7 +2126,7 @@ mod tests {
    }

    fn indexes_for(url: &Url, policy: AuthPolicy) -> Indexes {
-        let mut url = DisplaySafeUrl::from_url(url.clone());
+        let mut url = DisplaySafeUrl::from(url.clone());
        url.set_password(None).ok();
        url.set_username("").ok();
        Indexes::from_indexes(vec![Index {
@@ -2326,7 +2227,7 @@ mod tests {
        assert!(matches!(
            client.get(server.uri()).send().await,
            Err(reqwest_middleware::Error::Middleware(_))
-        ));
+        ),);

        Ok(())
    }
@@ -2425,20 +2326,20 @@ mod tests {
            DisplaySafeUrl::parse("https://pypi-proxy.fly.dev/basic-auth/simple").unwrap()
        );

-        let creds = Authentication::from(Credentials::Basic {
+        let creds = Credentials::Basic {
            username: Username::new(Some(String::from("user"))),
            password: None,
-        });
+        };
        let req = create_request("https://pypi-proxy.fly.dev/basic-auth/simple");
        assert_eq!(
            tracing_url(&req, Some(&creds)),
            DisplaySafeUrl::parse("https://user@pypi-proxy.fly.dev/basic-auth/simple").unwrap()
        );

-        let creds = Authentication::from(Credentials::Basic {
+        let creds = Credentials::Basic {
            username: Username::new(Some(String::from("user"))),
            password: Some(Password::new(String::from("password"))),
-        });
+        };
        let req = create_request("https://pypi-proxy.fly.dev/basic-auth/simple");
        assert_eq!(
            tracing_url(&req, Some(&creds)),
@@ -2459,7 +2360,7 @@ mod tests {
        let mut store = TextCredentialStore::default();
        let service = crate::Service::try_from(base_url.to_string()).unwrap();
        let credentials =
-            Credentials::basic(Some(username.to_string()), Some(password.to_string()));
+            crate::Credentials::basic(Some(username.to_string()), Some(password.to_string()));
        store.insert(service.clone(), credentials);

        let client = test_client_builder()
@@ -1,16 +1,10 @@
-use std::borrow::Cow;
 use std::sync::LazyLock;

-use reqsign::aws::DefaultSigner;
 use tracing::debug;
 use url::Url;

-use uv_preview::{Preview, PreviewFeatures};
 use uv_static::EnvVars;
-use uv_warnings::warn_user_once;

 use crate::Credentials;
-use crate::credentials::Token;
 use crate::realm::{Realm, RealmRef};

 /// The [`Realm`] for the Hugging Face platform.
@@ -46,59 +40,10 @@ impl HuggingFaceProvider {
        if RealmRef::from(url) == *HUGGING_FACE_REALM {
            if let Some(token) = HUGGING_FACE_TOKEN.as_ref() {
                return Some(Credentials::Bearer {
-                    token: Token::new(token.clone()),
+                    token: token.clone(),
                });
            }
        }
        None
    }
 }
-
-/// The [`Url`] for the S3 endpoint, if set.
-static S3_ENDPOINT_REALM: LazyLock<Option<Realm>> = LazyLock::new(|| {
-    let s3_endpoint_url = std::env::var(EnvVars::UV_S3_ENDPOINT_URL).ok()?;
-    let url = Url::parse(&s3_endpoint_url).expect("Failed to parse S3 endpoint URL");
-    Some(Realm::from(&url))
-});
-
-/// A provider for authentication credentials for S3 endpoints.
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub(crate) struct S3EndpointProvider;
-
-impl S3EndpointProvider {
-    /// Returns `true` if the URL matches the configured S3 endpoint.
-    pub(crate) fn is_s3_endpoint(url: &Url, preview: Preview) -> bool {
-        if let Some(s3_endpoint_realm) = S3_ENDPOINT_REALM.as_ref().map(RealmRef::from) {
-            if !preview.is_enabled(PreviewFeatures::S3_ENDPOINT) {
-                warn_user_once!(
-                    "The `s3-endpoint` option is experimental and may change without warning. Pass `--preview-features {}` to disable this warning.",
-                    PreviewFeatures::S3_ENDPOINT
-                );
-            }
-
-            // Treat any URL on the same domain or subdomain as available for S3 signing.
-            let realm = RealmRef::from(url);
-            if realm == s3_endpoint_realm || realm.is_subdomain_of(s3_endpoint_realm) {
-                return true;
-            }
-        }
-        false
-    }
-
-    /// Creates a new S3 signer with the configured region.
-    ///
-    /// This is potentially expensive as it may invoke credential helpers, so the result
-    /// should be cached.
-    pub(crate) fn create_signer() -> DefaultSigner {
-        // TODO(charlie): Can `reqsign` infer the region for us? Profiles, for example,
-        // often have a region set already.
-        let region = std::env::var(EnvVars::AWS_REGION)
-            .map(Cow::Owned)
-            .unwrap_or_else(|_| {
-                std::env::var(EnvVars::AWS_DEFAULT_REGION)
-                    .map(Cow::Owned)
-                    .unwrap_or_else(|_| Cow::Borrowed("us-east-1"))
-            });
-        reqsign::aws::default_signer("s3", &region)
-    }
-}
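Aside: `create_signer` above resolves the AWS region from `AWS_REGION`, then `AWS_DEFAULT_REGION`, then a hard-coded default, borrowing the fallback string to avoid an allocation. The same chain in isolation:

use std::borrow::Cow;

// First match wins: AWS_REGION, then AWS_DEFAULT_REGION, then "us-east-1".
fn resolve_region() -> Cow<'static, str> {
    std::env::var("AWS_REGION")
        .map(Cow::Owned)
        .unwrap_or_else(|_| {
            std::env::var("AWS_DEFAULT_REGION")
                .map(Cow::Owned)
                .unwrap_or_else(|_| Cow::Borrowed("us-east-1"))
        })
}

fn main() {
    println!("region: {}", resolve_region());
}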
@@ -10,12 +10,11 @@ use tracing::debug;
 use url::Url;

 use uv_cache_key::CanonicalUrl;
-use uv_redacted::{DisplaySafeUrl, DisplaySafeUrlError};
+use uv_redacted::DisplaySafeUrl;
 use uv_small_str::SmallString;
 use uv_state::{StateBucket, StateStore};
 use uv_static::EnvVars;

-use crate::credentials::Token;
 use crate::{AccessToken, Credentials, Realm};

 /// Retrieve the pyx API key from the environment variable, or return `None`.
@@ -85,7 +84,7 @@ impl From<PyxTokens> for Credentials {
 impl From<AccessToken> for Credentials {
    fn from(access_token: AccessToken) -> Self {
        Self::Bearer {
-            token: Token::new(access_token.into_bytes()),
+            token: access_token.into_bytes(),
        }
    }
 }
@@ -284,6 +283,7 @@ impl PyxTokenStore {

    /// Read the tokens from the store.
    pub async fn read(&self) -> Result<Option<PyxTokens>, TokenStoreError> {
+        // Retrieve the API URL from the environment variable, or error if unset.
        if let Some(api_key) = read_pyx_api_key() {
            // Read the API key tokens from a file based on the API key.
            let digest = uv_cache_key::cache_digest(&api_key);
@@ -368,9 +368,9 @@ impl PyxTokenStore {
        tolerance_secs: u64,
    ) -> Result<PyxTokens, TokenStoreError> {
        // Decode the access token.
-        let jwt = PyxJwt::decode(match &tokens {
-            PyxTokens::OAuth(PyxOAuthTokens { access_token, .. }) => access_token,
-            PyxTokens::ApiKey(PyxApiKeyTokens { access_token, .. }) => access_token,
+        let jwt = Jwt::decode(match &tokens {
+            PyxTokens::OAuth(PyxOAuthTokens { access_token, .. }) => access_token.as_str(),
+            PyxTokens::ApiKey(PyxApiKeyTokens { access_token, .. }) => access_token.as_str(),
        })?;

        // If the access token is expired, refresh it.
@@ -473,7 +473,7 @@ impl PyxTokenStore {
 #[derive(thiserror::Error, Debug)]
 pub enum TokenStoreError {
    #[error(transparent)]
-    Url(#[from] DisplaySafeUrlError),
+    Url(#[from] url::ParseError),
    #[error(transparent)]
    Io(#[from] io::Error),
    #[error(transparent)]
@@ -503,20 +503,14 @@ impl TokenStoreError {

 /// The payload of the JWT.
 #[derive(Debug, serde::Deserialize)]
-pub struct PyxJwt {
-    /// The expiration time of the JWT, as a Unix timestamp.
-    pub exp: Option<i64>,
-    /// The issuer of the JWT.
-    pub iss: Option<String>,
-    /// The name of the organization, if any.
-    #[serde(rename = "urn:pyx:org_name")]
-    pub name: Option<String>,
+struct Jwt {
+    exp: Option<i64>,
 }

-impl PyxJwt {
+impl Jwt {
    /// Decode the JWT from the access token.
-    pub fn decode(access_token: &AccessToken) -> Result<Self, JwtError> {
-        let mut token_segments = access_token.as_str().splitn(3, '.');
+    fn decode(access_token: &str) -> Result<Self, JwtError> {
+        let mut token_segments = access_token.splitn(3, '.');

        let _header = token_segments.next().ok_or(JwtError::MissingHeader)?;
        let payload = token_segments.next().ok_or(JwtError::MissingPayload)?;
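Aside: `decode` above parses only what it needs from the JWT: split the token into `header.payload.signature`, base64-decode the payload, and deserialize the claims. No signature verification happens because the token is merely inspected for expiry. A standalone sketch of that flow, assuming the `base64` and `serde_json` crates (the `Claims` struct is illustrative):

use base64::Engine as _;
use base64::engine::general_purpose::URL_SAFE_NO_PAD;

#[derive(Debug, serde::Deserialize)]
struct Claims {
    // Expiration as a Unix timestamp; absent for non-expiring tokens.
    exp: Option<i64>,
}

fn decode_claims(token: &str) -> Option<Claims> {
    // A JWT is `header.payload.signature`; only the payload matters here.
    let payload = token.splitn(3, '.').nth(1)?;
    // JWT segments use the URL-safe base64 alphabet without padding.
    let bytes = URL_SAFE_NO_PAD.decode(payload).ok()?;
    serde_json::from_slice(&bytes).ok()
}

fn main() {
    // `{"exp":1700000000}` encoded as a fake JWT payload.
    let token = "eyJhbGciOiJub25lIn0.eyJleHAiOjE3MDAwMDAwMDB9.sig";
    println!("{:?}", decode_claims(token));
}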
@@ -591,7 +585,7 @@ mod tests {

    #[test]
    fn test_is_known_url() {
-        let api_url = DisplaySafeUrl::parse("https://api.pyx.dev").unwrap();
+        let api_url = DisplaySafeUrl::from(Url::parse("https://api.pyx.dev").unwrap());
        let cdn_domain = "astralhosted.com";

        // Same realm as API.
@@ -646,7 +640,7 @@ mod tests {

    #[test]
    fn test_is_known_domain() {
-        let api_url = DisplaySafeUrl::parse("https://api.pyx.dev").unwrap();
+        let api_url = DisplaySafeUrl::from(Url::parse("https://api.pyx.dev").unwrap());
        let cdn_domain = "astralhosted.com";

        // Same realm as API.
@@ -1,7 +1,6 @@
 use std::hash::{Hash, Hasher};
 use std::{fmt::Display, fmt::Formatter};
 use url::Url;
-use uv_redacted::DisplaySafeUrl;
 use uv_small_str::SmallString;

 /// Used to determine if authentication information should be retained on a new URL.
@@ -30,12 +29,6 @@ pub struct Realm {
    port: Option<u16>,
 }

-impl From<&DisplaySafeUrl> for Realm {
-    fn from(url: &DisplaySafeUrl) -> Self {
-        Self::from(&**url)
-    }
-}
-
 impl From<&Url> for Realm {
    fn from(url: &Url) -> Self {
        Self {
@@ -82,27 +75,12 @@ impl Hash for Realm {

 /// A reference to a [`Realm`] that can be used for zero-allocation comparisons.
 #[derive(Debug, Copy, Clone)]
-pub struct RealmRef<'a> {
+pub(crate) struct RealmRef<'a> {
    scheme: &'a str,
    host: Option<&'a str>,
    port: Option<u16>,
 }

-impl RealmRef<'_> {
-    /// Returns true if this realm is a subdomain of the other realm.
-    pub(crate) fn is_subdomain_of(&self, other: Self) -> bool {
-        other.scheme == self.scheme
-            && other.port == self.port
-            && other.host.is_some_and(|other_host| {
-                self.host.is_some_and(|self_host| {
-                    self_host
-                        .strip_suffix(other_host)
-                        .is_some_and(|prefix| prefix.ends_with('.'))
-                })
-            })
-    }
-}
-
 impl<'a> From<&'a Url> for RealmRef<'a> {
    fn from(url: &'a Url) -> Self {
        Self {
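Aside: the main-side `is_subdomain_of` relies on a string trick: `self_host.strip_suffix(other_host)` leaves the candidate prefix, and requiring that prefix to end in `.` rejects lookalikes such as `notexample.com` vs `example.com`. The core host check in isolation (the function name is illustrative):

// `host` is a subdomain of `parent` iff stripping `parent` from the end
// leaves a prefix that ends with a dot ("sub." for "sub.example.com").
fn is_subdomain(host: &str, parent: &str) -> bool {
    host.strip_suffix(parent)
        .is_some_and(|prefix| prefix.ends_with('.'))
}

fn main() {
    assert!(is_subdomain("sub.example.com", "example.com"));
    assert!(is_subdomain("foo.bar.example.com", "bar.example.com"));
    assert!(!is_subdomain("notexample.com", "example.com"));
    assert!(!is_subdomain("example.com", "example.com"));
    println!("ok");
}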
@@ -237,87 +215,4 @@ mod tests {

        Ok(())
    }
-
-    #[test]
-    fn test_is_subdomain_of() -> Result<(), ParseError> {
-        use crate::realm::RealmRef;
-
-        // Subdomain relationship: sub.example.com is a subdomain of example.com
-        let subdomain_url = Url::parse("https://sub.example.com")?;
-        let domain_url = Url::parse("https://example.com")?;
-        let subdomain = RealmRef::from(&subdomain_url);
-        let domain = RealmRef::from(&domain_url);
-        assert!(subdomain.is_subdomain_of(domain));
-
-        // Deeper subdomain: foo.bar.example.com is a subdomain of example.com
-        let deep_subdomain_url = Url::parse("https://foo.bar.example.com")?;
-        let deep_subdomain = RealmRef::from(&deep_subdomain_url);
-        assert!(deep_subdomain.is_subdomain_of(domain));
-
-        // Deeper subdomain: foo.bar.example.com is also a subdomain of bar.example.com
-        let parent_subdomain_url = Url::parse("https://bar.example.com")?;
-        let parent_subdomain = RealmRef::from(&parent_subdomain_url);
-        assert!(deep_subdomain.is_subdomain_of(parent_subdomain));
-
-        // Not a subdomain: example.com is not a subdomain of sub.example.com
-        assert!(!domain.is_subdomain_of(subdomain));
-
-        // Same domain is not a subdomain of itself
-        assert!(!domain.is_subdomain_of(domain));
-
-        // Different TLD: example.org is not a subdomain of example.com
-        let different_tld_url = Url::parse("https://example.org")?;
-        let different_tld = RealmRef::from(&different_tld_url);
-        assert!(!different_tld.is_subdomain_of(domain));
-
-        // Partial match but not a subdomain: notexample.com is not a subdomain of example.com
-        let partial_match_url = Url::parse("https://notexample.com")?;
-        let partial_match = RealmRef::from(&partial_match_url);
-        assert!(!partial_match.is_subdomain_of(domain));
-
-        // Different scheme: http subdomain is not a subdomain of https domain
-        let http_subdomain_url = Url::parse("http://sub.example.com")?;
-        let https_domain_url = Url::parse("https://example.com")?;
-        let http_subdomain = RealmRef::from(&http_subdomain_url);
-        let https_domain = RealmRef::from(&https_domain_url);
-        assert!(!http_subdomain.is_subdomain_of(https_domain));
-
-        // Different port: same subdomain with different port is not a subdomain
-        let subdomain_port_8080_url = Url::parse("https://sub.example.com:8080")?;
-        let domain_port_9090_url = Url::parse("https://example.com:9090")?;
-        let subdomain_port_8080 = RealmRef::from(&subdomain_port_8080_url);
-        let domain_port_9090 = RealmRef::from(&domain_port_9090_url);
-        assert!(!subdomain_port_8080.is_subdomain_of(domain_port_9090));
-
-        // Same port: subdomain with same explicit port is a subdomain
-        let subdomain_with_port_url = Url::parse("https://sub.example.com:8080")?;
-        let domain_with_port_url = Url::parse("https://example.com:8080")?;
-        let subdomain_with_port = RealmRef::from(&subdomain_with_port_url);
-        let domain_with_port = RealmRef::from(&domain_with_port_url);
-        assert!(subdomain_with_port.is_subdomain_of(domain_with_port));
-
-        // Default port handling: subdomain with implicit port is a subdomain
-        let subdomain_default_url = Url::parse("https://sub.example.com")?;
-        let domain_explicit_443_url = Url::parse("https://example.com:443")?;
-        let subdomain_default = RealmRef::from(&subdomain_default_url);
-        let domain_explicit_443 = RealmRef::from(&domain_explicit_443_url);
-        assert!(subdomain_default.is_subdomain_of(domain_explicit_443));
-
-        // Edge case: empty host (shouldn't happen with valid URLs but testing defensive code)
-        let file_url = Url::parse("file:///path/to/file")?;
-        let https_url = Url::parse("https://example.com")?;
-        let file_realm = RealmRef::from(&file_url);
-        let https_realm = RealmRef::from(&https_url);
-        assert!(!file_realm.is_subdomain_of(https_realm));
-        assert!(!https_realm.is_subdomain_of(file_realm));
-
-        // Subdomain with path (path should be ignored)
-        let subdomain_with_path_url = Url::parse("https://sub.example.com/path")?;
-        let domain_with_path_url = Url::parse("https://example.com/other")?;
-        let subdomain_with_path = RealmRef::from(&subdomain_with_path_url);
-        let domain_with_path = RealmRef::from(&domain_with_path_url);
-        assert!(subdomain_with_path.is_subdomain_of(domain_with_path));
-
-        Ok(())
-    }
 }
@@ -2,12 +2,12 @@ use serde::{Deserialize, Serialize};
 use std::str::FromStr;
 use thiserror::Error;
 use url::Url;
-use uv_redacted::{DisplaySafeUrl, DisplaySafeUrlError};
+use uv_redacted::DisplaySafeUrl;

 #[derive(Error, Debug)]
 pub enum ServiceParseError {
    #[error(transparent)]
-    InvalidUrl(#[from] DisplaySafeUrlError),
+    InvalidUrl(#[from] url::ParseError),
    #[error("Unsupported scheme: {0}")]
    UnsupportedScheme(String),
    #[error("HTTPS is required for non-local hosts")]
@@ -51,7 +51,7 @@ impl FromStr for Service {
        // First try parsing as-is
        let url = match DisplaySafeUrl::parse(s) {
            Ok(url) => url,
-            Err(DisplaySafeUrlError::Url(url::ParseError::RelativeUrlWithoutBase)) => {
+            Err(url::ParseError::RelativeUrlWithoutBase) => {
                // If it's a relative URL, try prepending https://
                let with_https = format!("https://{s}");
                DisplaySafeUrl::parse(&with_https)?
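Aside: `Service::from_str` accepts bare hosts like `pypi.example.com` by retrying the parse with an `https://` prefix when the `url` crate reports a relative URL. The same fallback in isolation (the function name is illustrative):

use url::{ParseError, Url};

// Parse a service string, treating a bare host as HTTPS.
fn parse_service(s: &str) -> Result<Url, ParseError> {
    match Url::parse(s) {
        Ok(url) => Ok(url),
        Err(ParseError::RelativeUrlWithoutBase) => Url::parse(&format!("https://{s}")),
        Err(err) => Err(err),
    }
}

fn main() {
    assert_eq!(
        parse_service("pypi.example.com").unwrap().as_str(),
        "https://pypi.example.com/"
    );
    println!("{}", parse_service("pypi.example.com").unwrap());
}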
@@ -5,14 +5,15 @@ use fs_err as fs;
 use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};
 use thiserror::Error;
-use uv_fs::{LockedFile, LockedFileError, LockedFileMode, with_added_extension};
+use url::Url;
+use uv_fs::{LockedFile, with_added_extension};
 use uv_preview::{Preview, PreviewFeatures};
 use uv_redacted::DisplaySafeUrl;

 use uv_state::{StateBucket, StateStore};
 use uv_static::EnvVars;

-use crate::credentials::{Password, Token, Username};
+use crate::credentials::{Password, Username};
 use crate::realm::Realm;
 use crate::service::Service;
 use crate::{Credentials, KeyringProvider};

@@ -28,7 +29,7 @@ pub enum AuthBackend {
 }

 impl AuthBackend {
-    pub async fn from_settings(preview: Preview) -> Result<Self, TomlCredentialError> {
+    pub fn from_settings(preview: Preview) -> Result<Self, TomlCredentialError> {
         // If preview is enabled, we'll use the system-native store
         if preview.is_enabled(PreviewFeatures::NATIVE_AUTH) {
             return Ok(Self::System(KeyringProvider::native()));

@@ -36,16 +37,12 @@ impl AuthBackend {

         // Otherwise, we'll use the plaintext credential store
         let path = TextCredentialStore::default_file()?;
-        match TextCredentialStore::read(&path).await {
+        match TextCredentialStore::read(&path) {
             Ok((store, lock)) => Ok(Self::TextStore(store, lock)),
-            Err(err)
-                if err
-                    .as_io_error()
-                    .is_some_and(|err| err.kind() == std::io::ErrorKind::NotFound) =>
-            {
+            Err(TomlCredentialError::Io(err)) if err.kind() == std::io::ErrorKind::NotFound => {
                 Ok(Self::TextStore(
                     TextCredentialStore::default(),
-                    TextCredentialStore::lock(&path).await?,
+                    TextCredentialStore::lock(&path)?,
                 ))
             }
             Err(err) => Err(err),
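The `NotFound` arm above encodes a common idiom: a missing credentials file is not a failure, it just means the store starts out empty. A standalone sketch of the idiom with plain std types (the file name is illustrative):

```rust
use std::io;

// A missing file is expected on first use; fall back to an empty store and
// propagate every other I/O error unchanged.
fn read_store(path: &str) -> io::Result<String> {
    match std::fs::read_to_string(path) {
        Ok(contents) => Ok(contents),
        Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(String::new()),
        Err(err) => Err(err),
    }
}

fn main() -> io::Result<()> {
    let store = read_store("credentials-that-do-not-exist.toml")?;
    assert!(store.is_empty());
    Ok(())
}
```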
@@ -73,8 +70,6 @@ pub enum AuthScheme {
 pub enum TomlCredentialError {
     #[error(transparent)]
     Io(#[from] std::io::Error),
-    #[error(transparent)]
-    LockedFile(#[from] LockedFileError),
     #[error("Failed to parse TOML credential file: {0}")]
     ParseError(#[from] toml::de::Error),
     #[error("Failed to serialize credentials to TOML")]

@@ -89,21 +84,6 @@ pub enum TomlCredentialError {
     TokenNotUnicode(#[from] std::string::FromUtf8Error),
 }

-impl TomlCredentialError {
-    pub fn as_io_error(&self) -> Option<&std::io::Error> {
-        match self {
-            Self::Io(err) => Some(err),
-            Self::LockedFile(err) => err.as_io_error(),
-            Self::ParseError(_)
-            | Self::SerializeError(_)
-            | Self::BasicAuthError(_)
-            | Self::BearerAuthError(_)
-            | Self::CredentialsDirError
-            | Self::TokenNotUnicode(_) => None,
-        }
-    }
-}
-
 #[derive(Debug, Error)]
 pub enum BasicAuthError {
     #[error("`username` is required with `scheme = basic`")]

@@ -162,7 +142,7 @@ impl From<TomlCredential> for TomlCredentialWire {
             username: Username::new(None),
             scheme: AuthScheme::Bearer,
             password: None,
-            token: Some(String::from_utf8(token.into_bytes()).expect("Token is valid UTF-8")),
+            token: Some(String::from_utf8(token).expect("Token is valid UTF-8")),
         },
     }
 }

@@ -210,7 +190,7 @@ impl TryFrom<TomlCredentialWire> for TomlCredential {
             ));
         }
         let credentials = Credentials::Bearer {
-            token: Token::new(value.token.unwrap().into_bytes()),
+            token: value.token.unwrap().into_bytes(),
         };
         Ok(Self {
             service: value.service,

@@ -254,12 +234,12 @@ impl TextCredentialStore {
     }

     /// Acquire a lock on the credentials file at the given path.
-    pub async fn lock(path: &Path) -> Result<LockedFile, TomlCredentialError> {
+    pub fn lock(path: &Path) -> Result<LockedFile, TomlCredentialError> {
         if let Some(parent) = path.parent() {
             fs::create_dir_all(parent)?;
         }
         let lock = with_added_extension(path, ".lock");
-        Ok(LockedFile::acquire(lock, LockedFileMode::Exclusive, "credentials store").await?)
+        Ok(LockedFile::acquire_blocking(lock, "credentials store")?)
     }

     /// Read credentials from a file.

@@ -290,8 +270,8 @@ impl TextCredentialStore {
     /// Returns [`TextCredentialStore`] and a [`LockedFile`] to hold if mutating the store.
     ///
     /// If the store will not be written to following the read, the lock can be dropped.
-    pub async fn read<P: AsRef<Path>>(path: P) -> Result<(Self, LockedFile), TomlCredentialError> {
-        let lock = Self::lock(path.as_ref()).await?;
+    pub fn read<P: AsRef<Path>>(path: P) -> Result<(Self, LockedFile), TomlCredentialError> {
+        let lock = Self::lock(path.as_ref())?;
         let store = Self::from_file(path)?;
         Ok((store, lock))
     }
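The lock acquired in the hunk above lives next to the credentials file, with `.lock` appended to the full file name via `with_added_extension`. A stand-in sketch of that naming, std only; the helper body here is an assumption inferred from the call site:

```rust
use std::path::{Path, PathBuf};

// Append ".lock" to the complete file name, so "credentials.toml" locks as
// "credentials.toml.lock" rather than replacing the existing extension.
fn lock_path(path: &Path) -> PathBuf {
    let mut name = path.file_name().unwrap_or_default().to_os_string();
    name.push(".lock");
    path.with_file_name(name)
}

fn main() {
    let lock = lock_path(Path::new("credentials.toml"));
    assert_eq!(lock, Path::new("credentials.toml.lock"));
}
```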
@@ -330,17 +310,13 @@ impl TextCredentialStore {
     /// Get credentials for a given URL and username.
     ///
     /// The most specific URL prefix match in the same [`Realm`] is returned, if any.
-    pub fn get_credentials(
-        &self,
-        url: &DisplaySafeUrl,
-        username: Option<&str>,
-    ) -> Option<&Credentials> {
+    pub fn get_credentials(&self, url: &Url, username: Option<&str>) -> Option<&Credentials> {
         let request_realm = Realm::from(url);

         // Perform an exact lookup first
         // TODO(zanieb): Consider adding `DisplaySafeUrlRef` so we can avoid this clone
         // TODO(zanieb): We could also return early here if we can't normalize to a `Service`
-        if let Ok(url_service) = Service::try_from(url.clone()) {
+        if let Ok(url_service) = Service::try_from(DisplaySafeUrl::from(url.clone())) {
             if let Some(credential) = self.credentials.get(&(
                 url_service.clone(),
                 Username::from(username.map(str::to_string)),
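As the doc comment above says, lookup prefers the most specific URL prefix within the request's realm. A standalone sketch of that selection rule over plain path strings (the data is illustrative, not uv's types):

```rust
// Among stored prefixes that match the request path, pick the longest one.
fn best_match<'a>(request_path: &str, prefixes: &[&'a str]) -> Option<&'a str> {
    prefixes
        .iter()
        .filter(|prefix| request_path.starts_with(*prefix))
        .max_by_key(|prefix| prefix.len())
        .copied()
}

fn main() {
    let prefixes = ["/api", "/api/v1"];
    // Both prefixes match; the more specific credential wins.
    assert_eq!(best_match("/api/v1/users", &prefixes), Some("/api/v1"));
    // Only the general prefix matches here.
    assert_eq!(best_match("/api/v2", &prefixes), Some("/api"));
}
```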
@@ -454,10 +430,10 @@ mod tests {

         let service = Service::from_str("https://example.com").unwrap();
         store.insert(service.clone(), credentials.clone());
-        let url = DisplaySafeUrl::parse("https://example.com/").unwrap();
+        let url = Url::parse("https://example.com/").unwrap();
         assert!(store.get_credentials(&url, None).is_some());

-        let url = DisplaySafeUrl::parse("https://example.com/path").unwrap();
+        let url = Url::parse("https://example.com/path").unwrap();
         let retrieved = store.get_credentials(&url, None).unwrap();
         assert_eq!(retrieved.username(), Some("user"));
         assert_eq!(retrieved.password(), Some("pass"));

@@ -467,12 +443,12 @@ mod tests {
             .remove(&service, Username::from(Some("user".to_string())))
             .is_some()
         );
-        let url = DisplaySafeUrl::parse("https://example.com/").unwrap();
+        let url = Url::parse("https://example.com/").unwrap();
         assert!(store.get_credentials(&url, None).is_none());
     }

-    #[tokio::test]
-    async fn test_file_operations() {
+    #[test]
+    fn test_file_operations() {
         let mut temp_file = NamedTempFile::new().unwrap();
         writeln!(
             temp_file,

@@ -493,12 +469,12 @@ password = "pass2"

         let store = TextCredentialStore::from_file(temp_file.path()).unwrap();

-        let url = DisplaySafeUrl::parse("https://example.com/").unwrap();
+        let url = Url::parse("https://example.com/").unwrap();
         assert!(store.get_credentials(&url, None).is_some());
-        let url = DisplaySafeUrl::parse("https://test.org/").unwrap();
+        let url = Url::parse("https://test.org/").unwrap();
         assert!(store.get_credentials(&url, None).is_some());

-        let url = DisplaySafeUrl::parse("https://example.com").unwrap();
+        let url = Url::parse("https://example.com").unwrap();
         let cred = store.get_credentials(&url, None).unwrap();
         assert_eq!(cred.username(), Some("testuser"));
         assert_eq!(cred.password(), Some("testpass"));

@@ -508,7 +484,7 @@ password = "pass2"
         store
             .write(
                 temp_output.path(),
-                TextCredentialStore::lock(temp_file.path()).await.unwrap(),
+                TextCredentialStore::lock(temp_file.path()).unwrap(),
             )
             .unwrap();

@@ -534,7 +510,7 @@ password = "pass2"
         ];

         for url_str in matching_urls {
-            let url = DisplaySafeUrl::parse(url_str).unwrap();
+            let url = Url::parse(url_str).unwrap();
             let cred = store.get_credentials(&url, None);
             assert!(cred.is_some(), "Failed to match URL with prefix: {url_str}");
         }

@@ -547,7 +523,7 @@ password = "pass2"
         ];

         for url_str in non_matching_urls {
-            let url = DisplaySafeUrl::parse(url_str).unwrap();
+            let url = Url::parse(url_str).unwrap();
             let cred = store.get_credentials(&url, None);
             assert!(cred.is_none(), "Should not match non-prefix URL: {url_str}");
         }

@@ -571,7 +547,7 @@ password = "pass2"
         ];

         for url_str in matching_urls {
-            let url = DisplaySafeUrl::parse(url_str).unwrap();
+            let url = Url::parse(url_str).unwrap();
             let cred = store.get_credentials(&url, None);
             assert!(
                 cred.is_some(),

@@ -587,7 +563,7 @@ password = "pass2"
         ];

         for url_str in non_matching_urls {
-            let url = DisplaySafeUrl::parse(url_str).unwrap();
+            let url = Url::parse(url_str).unwrap();
             let cred = store.get_credentials(&url, None);
             assert!(
                 cred.is_none(),

@@ -611,12 +587,12 @@ password = "pass2"
         store.insert(specific_service.clone(), specific_cred);

         // Should match the most specific prefix
-        let url = DisplaySafeUrl::parse("https://example.com/api/v1/users").unwrap();
+        let url = Url::parse("https://example.com/api/v1/users").unwrap();
         let cred = store.get_credentials(&url, None).unwrap();
         assert_eq!(cred.username(), Some("specific"));

         // Should match the general prefix for non-specific paths
-        let url = DisplaySafeUrl::parse("https://example.com/api/v2").unwrap();
+        let url = Url::parse("https://example.com/api/v2").unwrap();
         let cred = store.get_credentials(&url, None).unwrap();
         assert_eq!(cred.username(), Some("general"));
     }

@@ -624,7 +600,7 @@ password = "pass2"
     #[test]
     fn test_username_exact_url_match() {
         let mut store = TextCredentialStore::default();
-        let url = DisplaySafeUrl::parse("https://example.com").unwrap();
+        let url = Url::parse("https://example.com").unwrap();
         let service = Service::from_str("https://example.com").unwrap();
         let user1_creds = Credentials::basic(Some("user1".to_string()), Some("pass1".to_string()));
         store.insert(service.clone(), user1_creds.clone());

@@ -665,7 +641,7 @@ password = "pass2"
         store.insert(general_service, general_creds);
         store.insert(specific_service, specific_creds);

-        let url = DisplaySafeUrl::parse("https://example.com/api/v1/users").unwrap();
+        let url = Url::parse("https://example.com/api/v1/users").unwrap();

         // Should match specific credentials when username matches
         let result = store.get_credentials(&url, Some("specific_user"));
@@ -1,12 +1,13 @@
 [package]
 name = "uv-bench"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.0"
+description = "uv Micro-benchmarks"
 publish = false
 authors = { workspace = true }
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
+documentation = { workspace = true }
 repository = { workspace = true }
 license = { workspace = true }

@@ -22,14 +23,14 @@ name = "uv"
 path = "benches/uv.rs"
 harness = false

-[dev-dependencies]
+[dependencies]
 uv-cache = { workspace = true }
 uv-client = { workspace = true }
 uv-configuration = { workspace = true }
 uv-dispatch = { workspace = true }
 uv-distribution = { workspace = true }
 uv-distribution-types = { workspace = true }
-uv-extract = { workspace = true }
+uv-extract = { workspace = true, optional = true }
 uv-install-wheel = { workspace = true }
 uv-pep440 = { workspace = true }
 uv-pep508 = { workspace = true }

@@ -42,7 +43,10 @@ uv-types = { workspace = true }
 uv-workspace = { workspace = true }

 anyhow = { workspace = true }
-criterion = { version = "4.0.3", default-features = false, package = "codspeed-criterion-compat", features = ["async_tokio"] }
+codspeed-criterion-compat = { version = "3.0.2", default-features = false, optional = true }
+criterion = { version = "0.7.0", default-features = false, features = [
+    "async_tokio",
+] }
 jiff = { workspace = true }
 tokio = { workspace = true }

@@ -50,4 +54,5 @@ tokio = { workspace = true }
 ignored = ["uv-extract"]

 [features]
+codspeed = ["codspeed-criterion-compat"]
 static = ["uv-extract/static"]
@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-bench
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-bench).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.
@@ -1,7 +1,7 @@
 use std::hint::black_box;
 use std::str::FromStr;

-use criterion::{Criterion, criterion_group, criterion_main, measurement::WallTime};
+use uv_bench::criterion::{Criterion, criterion_group, criterion_main, measurement::WallTime};
 use uv_cache::Cache;
 use uv_client::{BaseClientBuilder, RegistryClientBuilder};
 use uv_distribution_types::Requirement;

@@ -59,10 +59,7 @@ fn setup(manifest: Manifest) -> impl Fn(bool) {
         .build()
         .unwrap();

-    let cache = Cache::from_path("../../.cache")
-        .init_no_wait()
-        .expect("No cache contention when running benchmarks")
-        .unwrap();
+    let cache = Cache::from_path("../../.cache").init().unwrap();
     let interpreter = PythonEnvironment::from_root("../../.venv", &cache)
         .unwrap()
         .into_interpreter();

@@ -134,7 +131,7 @@ mod resolver {
     );

     static TAGS: LazyLock<Tags> = LazyLock::new(|| {
-        Tags::from_env(&PLATFORM, (3, 11), "cpython", (3, 11), false, false, false).unwrap()
+        Tags::from_env(&PLATFORM, (3, 11), "cpython", (3, 11), false, false).unwrap()
     });

     pub(crate) async fn resolve(
@@ -1 +1,10 @@
+pub mod criterion {
+    //! This module re-exports the criterion API but picks the right backend depending on whether
+    //! the benchmarks are built to run locally or with codspeed
+
+    #[cfg(not(feature = "codspeed"))]
+    pub use criterion::*;
+
+    #[cfg(feature = "codspeed")]
+    pub use codspeed_criterion_compat::*;
+}
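The module added above is a facade: benches import `uv_bench::criterion`, and a Cargo feature decides which crate actually backs it. A minimal standalone sketch of the same pattern, using std types and a made-up `alt` feature so it compiles on its own:

```rust
// A facade module gives callers one stable import path while a feature
// flag swaps the implementation at compile time.
pub mod backend {
    #[cfg(not(feature = "alt"))]
    pub use std::collections::HashMap as Map;

    #[cfg(feature = "alt")]
    pub use std::collections::BTreeMap as Map;
}

fn main() {
    // Callers only ever name `backend::Map`.
    let mut timings = backend::Map::new();
    timings.insert("resolve", 42u64);
    assert_eq!(timings["resolve"], 42);
}
```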
@@ -1,13 +1,14 @@
 [package]
 name = "uv-bin-install"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.1"
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
+documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }
+description = "Binary download and installation utilities for uv"

 [lib]
 doctest = false

@@ -22,8 +23,6 @@ uv-distribution-filename = { workspace = true }
 uv-extract = { workspace = true }
 uv-pep440 = { workspace = true }
 uv-platform = { workspace = true }
-uv-redacted = { workspace = true }
-
 fs-err = { workspace = true, features = ["tokio"] }
 futures = { workspace = true }
 reqwest = { workspace = true }

@@ -35,3 +34,4 @@ tokio = { workspace = true }
 tokio-util = { workspace = true }
 tracing = { workspace = true }
 url = { workspace = true }

@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-bin-install
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-bin-install).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.
@@ -19,12 +19,11 @@ use tracing::debug;
 use url::Url;
 use uv_distribution_filename::SourceDistExtension;

-use uv_cache::{Cache, CacheBucket, CacheEntry, Error as CacheError};
+use uv_cache::{Cache, CacheBucket, CacheEntry};
 use uv_client::{BaseClient, is_transient_network_error};
 use uv_extract::{Error as ExtractError, stream};
 use uv_pep440::Version;
 use uv_platform::Platform;
-use uv_redacted::DisplaySafeUrl;

 /// Binary tools that can be installed.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]

@@ -135,13 +134,10 @@ pub enum Error {
     #[error(transparent)]
     Io(#[from] std::io::Error),

-    #[error(transparent)]
-    Cache(#[from] CacheError),
-
     #[error("Failed to detect platform")]
     Platform(#[from] uv_platform::Error),

-    #[error("Attempt failed after {retries} {subject}", subject = if *retries > 1 { "retries" } else { "retry" })]
+    #[error("Attempt failed after {retries} retries")]
     RetriedError {
         #[source]
         err: Box<Error>,
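The `-` side of the last change above pluralizes the retry message with a computed format argument, a `thiserror` feature worth calling out: trailing `name = expression` arguments can derive display values from the error's own fields. A self-contained sketch of the same message, with the struct trimmed to the one field the message needs:

```rust
use thiserror::Error;

// `subject` is computed from `retries` when the message is formatted.
#[derive(Debug, Error)]
#[error("Attempt failed after {retries} {subject}", subject = if *retries > 1 { "retries" } else { "retry" })]
struct RetriedError {
    retries: u32,
}

fn main() {
    assert_eq!(RetriedError { retries: 1 }.to_string(), "Attempt failed after 1 retry");
    assert_eq!(RetriedError { retries: 3 }.to_string(), "Attempt failed after 3 retries");
}
```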
@@ -315,7 +311,7 @@ async fn download_and_unpack(
     let temp_dir = tempfile::tempdir_in(cache.bucket(CacheBucket::Binaries))?;

     let response = client
-        .for_host(&DisplaySafeUrl::from_url(download_url.clone()))
+        .for_host(&download_url.clone().into())
         .get(download_url.clone())
         .send()
         .await

@@ -1,10 +1,10 @@
 [package]
 name = "uv-build-backend"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.1.0"
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
+documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }

@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-build-backend
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-build-backend).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.
@@ -1,4 +1,3 @@
-use itertools::Itertools;
 mod metadata;
 mod serde_verbatim;
 mod settings;

@@ -8,10 +7,8 @@ mod wheel;
 pub use metadata::{PyProjectToml, check_direct_build};
 pub use settings::{BuildBackendSettings, WheelDataIncludes};
 pub use source_dist::{build_source_dist, list_source_dist};
-use uv_warnings::warn_user_once;
 pub use wheel::{build_editable, build_wheel, list_wheel, metadata};

-use std::collections::HashSet;
 use std::ffi::OsStr;
 use std::io;
 use std::path::{Path, PathBuf};

@@ -32,9 +29,9 @@ use crate::settings::ModuleName;
 pub enum Error {
     #[error(transparent)]
     Io(#[from] io::Error),
-    #[error("Invalid metadata format in: {}", _0.user_display())]
-    Toml(PathBuf, #[source] toml::de::Error),
-    #[error("Invalid project metadata")]
+    #[error("Invalid pyproject.toml")]
+    Toml(#[from] toml::de::Error),
+    #[error("Invalid pyproject.toml")]
     Validation(#[from] ValidationError),
     #[error("Invalid module name: {0}")]
     InvalidModuleName(String, #[source] IdentifierParseError),
@@ -194,60 +191,6 @@ fn check_metadata_directory(
     Ok(())
 }

-/// Returns the list of module names without names which would be included twice
-///
-/// In normal cases it should do nothing:
-///
-/// * `["aaa"] -> ["aaa"]`
-/// * `["aaa", "bbb"] -> ["aaa", "bbb"]`
-///
-/// Duplicate elements are removed:
-///
-/// * `["aaa", "aaa"] -> ["aaa"]`
-/// * `["bbb", "aaa", "bbb"] -> ["aaa", "bbb"]`
-///
-/// Names with more specific paths are removed in favour of more general paths:
-///
-/// * `["aaa.foo", "aaa"] -> ["aaa"]`
-/// * `["bbb", "aaa", "bbb.foo", "ccc.foo", "ccc.foo.bar", "aaa"] -> ["aaa", "bbb.foo", "ccc.foo"]`
-///
-/// This does not preserve the order of the elements.
-fn prune_redundant_modules(mut names: Vec<String>) -> Vec<String> {
-    names.sort();
-    let mut pruned = Vec::with_capacity(names.len());
-    for name in names {
-        if let Some(last) = pruned.last() {
-            if name == *last {
-                continue;
-            }
-            // This is a more specific (narrow) module name than what came before
-            if name
-                .strip_prefix(last)
-                .is_some_and(|suffix| suffix.starts_with('.'))
-            {
-                continue;
-            }
-        }
-        pruned.push(name);
-    }
-    pruned
-}
-
-/// Wraps [`prune_redundant_modules`] with a conditional warning when modules are ignored
-fn prune_redundant_modules_warn(names: &[String], show_warnings: bool) -> Vec<String> {
-    let pruned = prune_redundant_modules(names.to_vec());
-    if show_warnings && names.len() != pruned.len() {
-        let mut pruned: HashSet<_> = pruned.iter().collect();
-        let ignored: Vec<_> = names.iter().filter(|name| !pruned.remove(name)).collect();
-        let s = if ignored.len() == 1 { "" } else { "s" };
-        warn_user_once!(
-            "Ignoring redundant module name{s} in `tool.uv.build-backend.module-name`: `{}`",
-            ignored.into_iter().join("`, `")
-        );
-    }
-    pruned
-}
-
 /// Returns the source root and the module path(s) with the `__init__.py[i]` below to it while
 /// checking the project layout and names.
 ///
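The removed `prune_redundant_modules` above leans on lexicographic sorting: after `sort()`, a dotted descendant such as `foo.bar` always follows its ancestor `foo` (or another kept name), so a single comparison against the last kept entry suffices. A runnable distillation of that scan, same algorithm, std only:

```rust
// Sort, then keep a name only if it is not a duplicate of, or a dotted
// descendant of, the previously kept name.
fn prune_redundant_modules(mut names: Vec<String>) -> Vec<String> {
    names.sort();
    let mut pruned: Vec<String> = Vec::with_capacity(names.len());
    for name in names {
        if let Some(last) = pruned.last() {
            let is_descendant = name
                .strip_prefix(last.as_str())
                .is_some_and(|suffix| suffix.starts_with('.'));
            if name == *last || is_descendant {
                continue;
            }
        }
        pruned.push(name);
    }
    pruned
}

fn main() {
    let names = ["foo.bar", "foo", "foobar"].map(String::from).to_vec();
    // "foo.bar" is shadowed by "foo"; "foobar" survives (no `.` boundary).
    assert_eq!(prune_redundant_modules(names), ["foo", "foobar"]);
}
```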
@@ -270,7 +213,6 @@ fn find_roots(
     relative_module_root: &Path,
     module_name: Option<&ModuleName>,
     namespace: bool,
-    show_warnings: bool,
 ) -> Result<(PathBuf, Vec<PathBuf>), Error> {
     let relative_module_root = uv_fs::normalize_path(relative_module_root);
     // Check that even if a path contains `..`, we only include files below the module root.

@@ -289,8 +231,8 @@ fn find_roots(
             ModuleName::Name(name) => {
                 vec![name.split('.').collect::<PathBuf>()]
             }
-            ModuleName::Names(names) => prune_redundant_modules_warn(names, show_warnings)
-                .into_iter()
+            ModuleName::Names(names) => names
+                .iter()
                 .map(|name| name.split('.').collect::<PathBuf>())
                 .collect(),
         }

@@ -308,9 +250,9 @@ fn find_roots(
     let modules_relative = if let Some(module_name) = module_name {
         match module_name {
             ModuleName::Name(name) => vec![module_path_from_module_name(&src_root, name)?],
-            ModuleName::Names(names) => prune_redundant_modules_warn(names, show_warnings)
-                .into_iter()
-                .map(|name| module_path_from_module_name(&src_root, &name))
+            ModuleName::Names(names) => names
+                .iter()
+                .map(|name| module_path_from_module_name(&src_root, name))
                 .collect::<Result<_, _>>()?,
         }
     } else {

@@ -478,20 +420,19 @@ mod tests {
     fn build(source_root: &Path, dist: &Path) -> Result<BuildResults, Error> {
         // Build a direct wheel, capture all its properties to compare it with the indirect wheel
         // latest and remove it since it has the same filename as the indirect wheel.
-        let (_name, direct_wheel_list_files) = list_wheel(source_root, MOCK_UV_VERSION, false)?;
-        let direct_wheel_filename = build_wheel(source_root, dist, None, MOCK_UV_VERSION, false)?;
+        let (_name, direct_wheel_list_files) = list_wheel(source_root, MOCK_UV_VERSION)?;
+        let direct_wheel_filename = build_wheel(source_root, dist, None, MOCK_UV_VERSION)?;
         let direct_wheel_path = dist.join(direct_wheel_filename.to_string());
         let direct_wheel_contents = wheel_contents(&direct_wheel_path);
         let direct_wheel_hash = sha2::Sha256::digest(fs_err::read(&direct_wheel_path)?);
         fs_err::remove_file(&direct_wheel_path)?;

         // Build a source distribution.
-        let (_name, source_dist_list_files) =
-            list_source_dist(source_root, MOCK_UV_VERSION, false)?;
+        let (_name, source_dist_list_files) = list_source_dist(source_root, MOCK_UV_VERSION)?;
         // TODO(konsti): This should run in the unpacked source dist tempdir, but we need to
         // normalize the path.
-        let (_name, wheel_list_files) = list_wheel(source_root, MOCK_UV_VERSION, false)?;
-        let source_dist_filename = build_source_dist(source_root, dist, MOCK_UV_VERSION, false)?;
+        let (_name, wheel_list_files) = list_wheel(source_root, MOCK_UV_VERSION)?;
+        let source_dist_filename = build_source_dist(source_root, dist, MOCK_UV_VERSION)?;
         let source_dist_path = dist.join(source_dist_filename.to_string());
         let source_dist_contents = sdist_contents(&source_dist_path);

@@ -505,13 +446,7 @@ mod tests {
             source_dist_filename.name.as_dist_info_name(),
             source_dist_filename.version
         ));
-        let wheel_filename = build_wheel(
-            &sdist_top_level_directory,
-            dist,
-            None,
-            MOCK_UV_VERSION,
-            false,
-        )?;
+        let wheel_filename = build_wheel(&sdist_top_level_directory, dist, None, MOCK_UV_VERSION)?;
         let wheel_contents = wheel_contents(&dist.join(wheel_filename.to_string()));

         // Check that direct and indirect wheels are identical.

@@ -599,7 +534,7 @@ mod tests {
     /// platform-independent deterministic builds.
     #[test]
     fn built_by_uv_building() {
-        let built_by_uv = Path::new("../../test/packages/built-by-uv");
+        let built_by_uv = Path::new("../../scripts/packages/built-by-uv");
         let src = TempDir::new().unwrap();
         for dir in [
             "src",

@@ -662,7 +597,7 @@ mod tests {
         // Check that the source dist is reproducible across platforms.
         assert_snapshot!(
             format!("{:x}", sha2::Sha256::digest(fs_err::read(&source_dist_path).unwrap())),
-            @"bb74bff575b135bb39e5c9bce56349441fb0923bb8857e32a5eaf34ec1843967"
+            @"871d1f859140721b67cbeaca074e7a2740c88c38028d0509eba87d1285f1da9e"
         );
         // Check both the files we report and the actual files
         assert_snapshot!(format_file_list(build.source_dist_list_files, src.path()), @r"

@@ -821,7 +756,7 @@ mod tests {

         // Build a wheel from a source distribution
         let output_dir = TempDir::new().unwrap();
-        build_source_dist(src.path(), output_dir.path(), "0.5.15", false).unwrap();
+        build_source_dist(src.path(), output_dir.path(), "0.5.15").unwrap();
         let sdist_tree = TempDir::new().unwrap();
         let source_dist_path = output_dir.path().join("pep_pep639_license-1.0.0.tar.gz");
         let sdist_reader = BufReader::new(File::open(&source_dist_path).unwrap());

@@ -832,7 +767,6 @@ mod tests {
             output_dir.path(),
             None,
             "0.5.15",
-            false,
         )
         .unwrap();
         let wheel = output_dir

@@ -897,7 +831,6 @@ mod tests {
             output_dir.path(),
             Some(&metadata_dir.path().join(&dist_info_dir)),
             "0.5.15",
-            false,
         )
         .unwrap();
         let wheel = output_dir

@@ -1481,114 +1414,4 @@ mod tests {
         simple_namespace_part-1.0.0.dist-info/WHEEL
         ");
     }
-
-    /// `prune_redundant_modules` should remove modules which are already
-    /// included (either directly or via their parent)
-    #[test]
-    fn test_prune_redundant_modules() {
-        fn check(input: &[&str], expect: &[&str]) {
-            let input = input.iter().map(|s| (*s).to_string()).collect();
-            let expect: Vec<_> = expect.iter().map(|s| (*s).to_string()).collect();
-            assert_eq!(prune_redundant_modules(input), expect);
-        }
-
-        // Basic cases
-        check(&[], &[]);
-        check(&["foo"], &["foo"]);
-        check(&["foo", "bar"], &["bar", "foo"]);
-
-        // Deshadowing
-        check(&["foo", "foo.bar"], &["foo"]);
-        check(&["foo.bar", "foo"], &["foo"]);
-        check(
-            &["foo.bar.a", "foo.bar.b", "foo.bar", "foo", "foo.bar.a.c"],
-            &["foo"],
-        );
-        check(
-            &["bar.one", "bar.two", "baz", "bar", "baz.one"],
-            &["bar", "baz"],
-        );
-
-        // Potential false positives
-        check(&["foo", "foobar"], &["foo", "foobar"]);
-        check(
-            &["foo", "foobar", "foo.bar", "foobar.baz"],
-            &["foo", "foobar"],
-        );
-        check(&["foo.bar", "foo.baz"], &["foo.bar", "foo.baz"]);
-        check(&["foo", "foo", "foo.bar", "foo.bar"], &["foo"]);
-
-        // Everything
-        check(
-            &[
-                "foo.inner",
-                "foo.inner.deeper",
-                "foo",
-                "bar",
-                "bar.sub",
-                "bar.sub.deep",
-                "foobar",
-                "baz.baz.bar",
-                "baz.baz",
-                "qux",
-            ],
-            &["bar", "baz.baz", "foo", "foobar", "qux"],
-        );
-    }
-
-    /// A package with duplicate module names.
-    #[test]
-    fn duplicate_module_names() {
-        let src = TempDir::new().unwrap();
-        let pyproject_toml = indoc! {r#"
-            [project]
-            name = "duplicate"
-            version = "1.0.0"
-
-            [tool.uv.build-backend]
-            module-name = ["foo", "foo", "bar.baz", "bar.baz.submodule"]
-
-            [build-system]
-            requires = ["uv_build>=0.5.15,<0.6.0"]
-            build-backend = "uv_build"
-        "#
-        };
-        fs_err::write(src.path().join("pyproject.toml"), pyproject_toml).unwrap();
-        fs_err::create_dir_all(src.path().join("src").join("foo")).unwrap();
-        File::create(src.path().join("src").join("foo").join("__init__.py")).unwrap();
-        fs_err::create_dir_all(src.path().join("src").join("bar").join("baz")).unwrap();
-        File::create(
-            src.path()
-                .join("src")
-                .join("bar")
-                .join("baz")
-                .join("__init__.py"),
-        )
-        .unwrap();
-
-        let dist = TempDir::new().unwrap();
-        let build = build(src.path(), dist.path()).unwrap();
-        assert_snapshot!(build.source_dist_contents.join("\n"), @r"
-        duplicate-1.0.0/
-        duplicate-1.0.0/PKG-INFO
-        duplicate-1.0.0/pyproject.toml
-        duplicate-1.0.0/src
-        duplicate-1.0.0/src/bar
-        duplicate-1.0.0/src/bar/baz
-        duplicate-1.0.0/src/bar/baz/__init__.py
-        duplicate-1.0.0/src/foo
-        duplicate-1.0.0/src/foo/__init__.py
-        ");
-        assert_snapshot!(build.wheel_contents.join("\n"), @r"
-        bar/
-        bar/baz/
-        bar/baz/__init__.py
-        duplicate-1.0.0.dist-info/
-        duplicate-1.0.0.dist-info/METADATA
-        duplicate-1.0.0.dist-info/RECORD
-        duplicate-1.0.0.dist-info/WHEEL
-        foo/
-        foo/__init__.py
-        ");
-    }
 }
@@ -3,10 +3,10 @@ use std::ffi::OsStr;
 use std::fmt::Display;
 use std::fmt::Write;
 use std::path::{Path, PathBuf};
-use std::str::{self, FromStr};
+use std::str::FromStr;

 use itertools::Itertools;
-use serde::{Deserialize, Deserializer};
+use serde::Deserialize;
 use tracing::{debug, trace, warn};
 use version_ranges::Ranges;
 use walkdir::WalkDir;

@@ -60,20 +60,10 @@ pub enum ValidationError {
     ReservedGuiScripts,
     #[error("`project.license` is not a valid SPDX expression: {0}")]
     InvalidSpdx(String, #[source] spdx::error::ParseError),
-    #[error("`{field}` glob `{glob}` did not match any files")]
-    LicenseGlobNoMatches { field: String, glob: String },
-    #[error("License file `{}` must be UTF-8 encoded", _0)]
-    LicenseFileNotUtf8(String),
 }

 /// Check if the build backend is matching the currently running uv version.
 pub fn check_direct_build(source_tree: &Path, name: impl Display) -> bool {
-    #[derive(Deserialize)]
-    #[serde(rename_all = "kebab-case")]
-    struct PyProjectToml {
-        build_system: BuildSystem,
-    }
-
     let pyproject_toml: PyProjectToml =
         match fs_err::read_to_string(source_tree.join("pyproject.toml"))
             .map_err(|err| err.to_string())

@@ -83,14 +73,12 @@ pub fn check_direct_build(source_tree: &Path, name: impl Display) -> bool {
             Ok(pyproject_toml) => pyproject_toml,
             Err(err) => {
                 debug!(
-                    "Not using uv build backend direct build for source tree `{name}`, \
-                    failed to parse pyproject.toml: {err}"
+                    "Not using uv build backend direct build of {name}, no pyproject.toml: {err}"
                 );
                 return false;
             }
         };
     match pyproject_toml
-        .build_system
         .check_build_system(uv_version::version())
         .as_slice()
     {
@@ -99,36 +87,16 @@ pub fn check_direct_build(source_tree: &Path, name: impl Display) -> bool {
         // Any warning -> no match
         [first, others @ ..] => {
             debug!(
-                "Not using uv build backend direct build of `{name}`, pyproject.toml does not match: {first}"
+                "Not using uv build backend direct build of {name}, pyproject.toml does not match: {first}"
             );
             for other in others {
-                trace!("Further uv build backend direct build of `{name}` mismatch: {other}");
+                trace!("Further uv build backend direct build of {name} mismatch: {other}");
             }
             false
         }
     }
 }

-/// A package name as provided in a `pyproject.toml`.
-#[derive(Debug, Clone)]
-struct VerbatimPackageName {
-    /// The package name as given in the `pyproject.toml`.
-    given: String,
-    /// The normalized package name.
-    normalized: PackageName,
-}
-
-impl<'de> Deserialize<'de> for VerbatimPackageName {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
-        let given = String::deserialize(deserializer)?;
-        let normalized = PackageName::from_str(&given).map_err(serde::de::Error::custom)?;
-        Ok(Self { given, normalized })
-    }
-}
-
 /// A `pyproject.toml` as specified in PEP 517.
 #[derive(Deserialize, Debug, Clone)]
 #[serde(
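The removed `VerbatimPackageName` above is a small but useful serde pattern: deserialize a string once, keep the author's spelling for display, and derive a normalized form for comparisons. A standalone sketch under simplified rules (real normalization follows PEP 503 via uv's `PackageName`; here it is approximated by lowercasing and collapsing `_`/`.` to `-`, and `serde_json` is used only to drive the example):

```rust
use serde::{Deserialize, Deserializer};

#[derive(Debug)]
struct VerbatimName {
    /// The name exactly as written in the source document.
    given: String,
    /// A canonical form used for lookups and equality.
    normalized: String,
}

impl<'de> Deserialize<'de> for VerbatimName {
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        let given = String::deserialize(deserializer)?;
        let normalized = given.to_ascii_lowercase().replace(['_', '.'], "-");
        Ok(Self { given, normalized })
    }
}

fn main() {
    let name: VerbatimName = serde_json::from_str("\"My_Package\"").unwrap();
    assert_eq!(name.given, "My_Package");
    assert_eq!(name.normalized, "my-package");
}
```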
@@ -147,18 +115,15 @@ pub struct PyProjectToml {

 impl PyProjectToml {
     pub(crate) fn name(&self) -> &PackageName {
-        &self.project.name.normalized
+        &self.project.name
     }

     pub(crate) fn version(&self) -> &Version {
         &self.project.version
     }

-    pub(crate) fn parse(path: &Path) -> Result<Self, Error> {
-        let contents = fs_err::read_to_string(path)?;
-        let pyproject_toml =
-            toml::from_str(&contents).map_err(|err| Error::Toml(path.to_path_buf(), err))?;
-        Ok(pyproject_toml)
+    pub(crate) fn parse(contents: &str) -> Result<Self, Error> {
+        Ok(toml::from_str(contents)?)
     }

     pub(crate) fn readme(&self) -> Option<&Readme> {
@@ -196,9 +161,83 @@ impl PyProjectToml {
         self.tool.as_ref()?.uv.as_ref()?.build_backend.as_ref()
     }

-    /// See [`BuildSystem::check_build_system`].
+    /// Returns user-facing warnings if the `[build-system]` table looks suspicious.
+    ///
+    /// Example of a valid table:
+    ///
+    /// ```toml
+    /// [build-system]
+    /// requires = ["uv_build>=0.4.15,<0.5.0"]
+    /// build-backend = "uv_build"
+    /// ```
     pub fn check_build_system(&self, uv_version: &str) -> Vec<String> {
-        self.build_system.check_build_system(uv_version)
+        let mut warnings = Vec::new();
+        if self.build_system.build_backend.as_deref() != Some("uv_build") {
+            warnings.push(format!(
+                r#"The value for `build_system.build-backend` should be `"uv_build"`, not `"{}"`"#,
+                self.build_system.build_backend.clone().unwrap_or_default()
+            ));
+        }
+
+        let uv_version =
+            Version::from_str(uv_version).expect("uv's own version is not PEP 440 compliant");
+        let next_minor = uv_version.release().get(1).copied().unwrap_or_default() + 1;
+        let next_breaking = Version::new([0, next_minor]);
+
+        let expected = || {
+            format!(
+                "Expected a single uv requirement in `build-system.requires`, found `{}`",
+                toml::to_string(&self.build_system.requires).unwrap_or_default()
+            )
+        };
+
+        let [uv_requirement] = &self.build_system.requires.as_slice() else {
+            warnings.push(expected());
+            return warnings;
+        };
+        if uv_requirement.name.as_str() != "uv-build" {
+            warnings.push(expected());
+            return warnings;
+        }
+        let bounded = match &uv_requirement.version_or_url {
+            None => false,
+            Some(VersionOrUrl::Url(_)) => {
+                // We can't validate the url
+                true
+            }
+            Some(VersionOrUrl::VersionSpecifier(specifier)) => {
+                // We don't check how wide the range is (that's up to the user), we just
+                // check that the current version is compliant, to avoid accidentally using a
+                // too new or too old uv, and we check that an upper bound exists. The latter
+                // is very important to allow making breaking changes in uv without breaking
+                // the existing immutable source distributions on pypi.
+                if !specifier.contains(&uv_version) {
+                    // This is allowed to happen when testing prereleases, but we should still warn.
+                    warnings.push(format!(
+                        r#"`build_system.requires = ["{uv_requirement}"]` does not contain the
+                        current uv version {uv_version}"#,
+                    ));
+                }
+                Ranges::from(specifier.clone())
+                    .bounding_range()
+                    .map(|bounding_range| bounding_range.1 != Bound::Unbounded)
+                    .unwrap_or(false)
+            }
+        };
+
+        if !bounded {
+            warnings.push(format!(
+                "`build_system.requires = [\"{}\"]` is missing an \
+                upper bound on the `uv_build` version such as `<{next_breaking}`. \
+                Without bounding the `uv_build` version, the source distribution will break \
+                when a future, breaking version of `uv_build` is released.",
+                // Use an underscore consistently, to avoid confusing users between a package name with dash and a
+                // module name with underscore
+                uv_requirement.verbatim()
+            ));
+        }
+
+        warnings
     }

     /// Validate and convert a `pyproject.toml` to core metadata.
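One detail in the added check above: the suggested upper bound is computed from the running uv version by bumping the minor release, since uv is pre-1.0 and minor bumps are the breaking ones. A std-only sketch of that computation, parsing the version as plain dotted integers rather than full PEP 440:

```rust
// For a 0.x scheme, the next breaking release after 0.8.17 is 0.9.
fn next_breaking(version: &str) -> Option<String> {
    let mut parts = version.split('.');
    let major: u64 = parts.next()?.parse().ok()?;
    let minor: u64 = parts.next()?.parse().ok()?;
    (major == 0).then(|| format!("0.{}", minor + 1))
}

fn main() {
    assert_eq!(next_breaking("0.8.17").as_deref(), Some("0.9"));
    assert_eq!(next_breaking("1.0.0"), None);
}
```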
@@ -346,7 +385,99 @@ impl PyProjectToml {
             "2.3"
         };

-        let (license, license_expression, license_files) = self.license_metadata(root)?;
+        // TODO(konsti): Issue a warning on old license metadata once PEP 639 is universal.
+        let (license, license_expression, license_files) =
+            if let Some(license_globs) = &self.project.license_files {
+                let license_expression = match &self.project.license {
+                    None => None,
+                    Some(License::Spdx(license_expression)) => Some(license_expression.clone()),
+                    Some(License::Text { .. } | License::File { .. }) => {
+                        return Err(ValidationError::MixedLicenseGenerations.into());
+                    }
+                };
+
+                let mut license_files = Vec::new();
+                let mut license_globs_parsed = Vec::new();
+                for license_glob in license_globs {
+                    let pep639_glob =
+                        PortableGlobParser::Pep639
+                            .parse(license_glob)
+                            .map_err(|err| Error::PortableGlob {
+                                field: license_glob.to_string(),
+                                source: err,
+                            })?;
+                    license_globs_parsed.push(pep639_glob);
+                }
+                let license_globs =
+                    GlobDirFilter::from_globs(&license_globs_parsed).map_err(|err| {
+                        Error::GlobSetTooLarge {
+                            field: "tool.uv.build-backend.source-include".to_string(),
+                            source: err,
+                        }
+                    })?;
+
+                for entry in WalkDir::new(root)
+                    .sort_by_file_name()
+                    .into_iter()
+                    .filter_entry(|entry| {
+                        license_globs.match_directory(
+                            entry
+                                .path()
+                                .strip_prefix(root)
+                                .expect("walkdir starts with root"),
+                        )
+                    })
+                {
+                    let entry = entry.map_err(|err| Error::WalkDir {
+                        root: root.to_path_buf(),
+                        err,
+                    })?;
+                    let relative = entry
+                        .path()
+                        .strip_prefix(root)
+                        .expect("walkdir starts with root");
+                    if !license_globs.match_path(relative) {
+                        trace!("Not a license files match: {}", relative.user_display());
+                        continue;
+                    }
+                    if !entry.file_type().is_file() {
+                        trace!(
+                            "Not a file in license files match: {}",
+                            relative.user_display()
+                        );
+                        continue;
+                    }
+
+                    error_on_venv(entry.file_name(), entry.path())?;
+
+                    debug!("License files match: {}", relative.user_display());
+                    license_files.push(relative.portable_display().to_string());
+                }
+
+                // The glob order may be unstable
+                license_files.sort();
+
+                (None, license_expression, license_files)
+            } else {
+                match &self.project.license {
+                    None => (None, None, Vec::new()),
+                    Some(License::Spdx(license_expression)) => {
+                        (None, Some(license_expression.clone()), Vec::new())
+                    }
+                    Some(License::Text { text }) => (Some(text.clone()), None, Vec::new()),
+                    Some(License::File { file }) => {
+                        let text = fs_err::read_to_string(root.join(file))?;
+                        (Some(text), None, Vec::new())
+                    }
+                }
+            };
+
+        // Check that the license expression is a valid SPDX identifier.
+        if let Some(license_expression) = &license_expression {
+            if let Err(err) = spdx::Expression::parse(license_expression) {
+                return Err(ValidationError::InvalidSpdx(license_expression.clone(), err).into());
+            }
+        }

         // TODO(konsti): https://peps.python.org/pep-0753/#label-normalization (Draft)
         let project_urls = self
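The inlined block above implements the PEP 639 `license-files` flow: parse each configured glob, walk the project root, keep only matching regular files, and sort for a deterministic order. A simplified standalone sketch using the `glob` crate as a stand-in for uv's portable-glob and walkdir machinery (the patterns are illustrative):

```rust
// Expand license globs, keep regular files only, and sort for stable output.
fn collect_license_files(patterns: &[&str]) -> Vec<String> {
    let mut files: Vec<String> = patterns
        .iter()
        .flat_map(|pattern| glob::glob(pattern).into_iter().flatten())
        .filter_map(Result::ok)
        .filter(|path| path.is_file())
        .map(|path| path.display().to_string())
        .collect();
    // The glob order may be unstable, so sort before writing metadata.
    files.sort();
    files
}

fn main() {
    println!("{:?}", collect_license_files(&["LICEN[CS]E*", "licenses/**/*"]));
}
```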
@ -391,7 +522,7 @@ impl PyProjectToml {
|
||||||
|
|
||||||
Ok(Metadata23 {
|
Ok(Metadata23 {
|
||||||
metadata_version: metadata_version.to_string(),
|
metadata_version: metadata_version.to_string(),
|
||||||
name: self.project.name.given.clone(),
|
name: self.project.name.to_string(),
|
||||||
version: self.project.version.to_string(),
|
version: self.project.version.to_string(),
|
||||||
// Not supported.
|
// Not supported.
|
||||||
platforms: vec![],
|
platforms: vec![],
|
||||||
|
|
@ -416,7 +547,7 @@ impl PyProjectToml {
|
||||||
license_files,
|
license_files,
|
||||||
classifiers: self.project.classifiers.clone().unwrap_or_default(),
|
classifiers: self.project.classifiers.clone().unwrap_or_default(),
|
||||||
requires_dist: requires_dist.iter().map(ToString::to_string).collect(),
|
requires_dist: requires_dist.iter().map(ToString::to_string).collect(),
|
||||||
provides_extra: extras.iter().map(ToString::to_string).collect(),
|
provides_extras: extras.iter().map(ToString::to_string).collect(),
|
||||||
// Not commonly set.
|
// Not commonly set.
|
||||||
provides_dist: vec![],
|
provides_dist: vec![],
|
||||||
// Not supported.
|
// Not supported.
|
||||||
|
|
@ -433,156 +564,6 @@ impl PyProjectToml {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||

-    /// Parse and validate the old (PEP 621) and new (PEP 639) license files.
-    #[allow(clippy::type_complexity)]
-    fn license_metadata(
-        &self,
-        root: &Path,
-    ) -> Result<(Option<String>, Option<String>, Vec<String>), Error> {
-        // TODO(konsti): Issue a warning on old license metadata once PEP 639 is universal.
-        let (license, license_expression, license_files) = if let Some(license_globs) =
-            &self.project.license_files
-        {
-            let license_expression = match &self.project.license {
-                None => None,
-                Some(License::Spdx(license_expression)) => Some(license_expression.clone()),
-                Some(License::Text { .. } | License::File { .. }) => {
-                    return Err(ValidationError::MixedLicenseGenerations.into());
-                }
-            };
-
-            let mut license_files = Vec::new();
-            let mut license_globs_parsed = Vec::with_capacity(license_globs.len());
-            let mut license_glob_matchers = Vec::with_capacity(license_globs.len());
-
-            for license_glob in license_globs {
-                let pep639_glob =
-                    PortableGlobParser::Pep639
-                        .parse(license_glob)
-                        .map_err(|err| Error::PortableGlob {
-                            field: license_glob.to_owned(),
-                            source: err,
-                        })?;
-                license_glob_matchers.push(pep639_glob.compile_matcher());
-                license_globs_parsed.push(pep639_glob);
-            }
-
-            // Track whether each user-specified glob matched so we can flag the unmatched ones.
-            let mut license_globs_matched = vec![false; license_globs_parsed.len()];
-
-            let license_globs =
-                GlobDirFilter::from_globs(&license_globs_parsed).map_err(|err| {
-                    Error::GlobSetTooLarge {
-                        field: "project.license-files".to_string(),
-                        source: err,
-                    }
-                })?;
-
-            for entry in WalkDir::new(root)
-                .sort_by_file_name()
-                .into_iter()
-                .filter_entry(|entry| {
-                    license_globs.match_directory(
-                        entry
-                            .path()
-                            .strip_prefix(root)
-                            .expect("walkdir starts with root"),
-                    )
-                })
-            {
-                let entry = entry.map_err(|err| Error::WalkDir {
-                    root: root.to_path_buf(),
-                    err,
-                })?;
-
-                let relative = entry
-                    .path()
-                    .strip_prefix(root)
-                    .expect("walkdir starts with root");
-
-                if !license_globs.match_path(relative) {
-                    trace!("Not a license files match: {}", relative.user_display());
-                    continue;
-                }
-
-                let file_type = entry.file_type();
-
-                if !(file_type.is_file() || file_type.is_symlink()) {
-                    trace!(
-                        "Not a file or symlink in license files match: {}",
-                        relative.user_display()
-                    );
-                    continue;
-                }
-
-                error_on_venv(entry.file_name(), entry.path())?;
-
-                debug!("License files match: {}", relative.user_display());
-
-                for (matched, matcher) in license_globs_matched
-                    .iter_mut()
-                    .zip(license_glob_matchers.iter())
-                {
-                    if *matched {
-                        continue;
-                    }
-
-                    if matcher.is_match(relative) {
-                        *matched = true;
-                    }
-                }
-
-                license_files.push(relative.portable_display().to_string());
-            }
-
-            if let Some((pattern, _)) = license_globs_parsed
-                .into_iter()
-                .zip(license_globs_matched)
-                .find(|(_, matched)| !matched)
-            {
-                return Err(ValidationError::LicenseGlobNoMatches {
-                    field: "project.license-files".to_string(),
-                    glob: pattern.to_string(),
-                }
-                .into());
-            }
-
-            for license_file in &license_files {
-                let file_path = root.join(license_file);
-                let bytes = fs_err::read(&file_path)?;
-                if str::from_utf8(&bytes).is_err() {
-                    return Err(ValidationError::LicenseFileNotUtf8(license_file.clone()).into());
-                }
-            }
-
-            // The glob order may be unstable
-            license_files.sort();
-
-            (None, license_expression, license_files)
-        } else {
-            match &self.project.license {
-                None => (None, None, Vec::new()),
-                Some(License::Spdx(license_expression)) => {
-                    (None, Some(license_expression.clone()), Vec::new())
-                }
-                Some(License::Text { text }) => (Some(text.clone()), None, Vec::new()),
-                Some(License::File { file }) => {
-                    let text = fs_err::read_to_string(root.join(file))?;
-                    (Some(text), None, Vec::new())
-                }
-            }
-        };
-
-        // Check that the license expression is a valid SPDX identifier.
-        if let Some(license_expression) = &license_expression {
-            if let Err(err) = spdx::Expression::parse(license_expression) {
-                return Err(ValidationError::InvalidSpdx(license_expression.clone(), err).into());
-            }
-        }
-
-        Ok((license, license_expression, license_files))
-    }

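The main-branch version removed above pairs each parsed glob with a compiled matcher and a `license_globs_matched` flag, so it can reject the first `project.license-files` pattern that never matched anything. A minimal standalone sketch of that bookkeeping, assuming the `globset` crate's `Glob`/`GlobMatcher` API (the `compile_matcher()`/`is_match()` calls visible above); `first_unmatched_glob` is a hypothetical helper name:

    use globset::Glob;
    use std::path::Path;

    /// Return the first pattern that matched none of `files` (hypothetical helper).
    fn first_unmatched_glob(patterns: &[&str], files: &[&Path]) -> Option<String> {
        let matchers: Vec<_> = patterns
            .iter()
            .map(|pattern| Glob::new(pattern).expect("valid glob").compile_matcher())
            .collect();
        // One flag per user-specified glob, flipped the first time it matches.
        let mut matched = vec![false; matchers.len()];
        for file in files {
            for (flag, matcher) in matched.iter_mut().zip(&matchers) {
                if !*flag && matcher.is_match(file) {
                    *flag = true;
                }
            }
        }
        patterns
            .iter()
            .zip(matched)
            .find(|(_, matched)| !matched)
            .map(|(pattern, _)| (*pattern).to_string())
    }

Flipping a flag at the first hit keeps the walk single-pass: the directory tree is traversed once, and the unmatched check is a cheap scan afterwards.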
     /// Validate and convert the entrypoints in `pyproject.toml`, including console and GUI scripts,
     /// to an `entry_points.txt`.
     ///

@@ -664,7 +645,7 @@ impl PyProjectToml
 #[serde(rename_all = "kebab-case")]
 struct Project {
     /// The name of the project.
-    name: VerbatimPackageName,
+    name: PackageName,
     /// The version of the project.
     version: Version,
     /// The summary description of the project in one line.

@@ -801,6 +782,18 @@ pub(crate) enum Contact {
     Email { email: String },
 }

+/// The `[build-system]` section of a pyproject.toml as specified in PEP 517.
+#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
+#[serde(rename_all = "kebab-case")]
+struct BuildSystem {
+    /// PEP 508 dependencies required to execute the build system.
+    requires: Vec<SerdeVerbatim<Requirement<VerbatimParsedUrl>>>,
+    /// A string naming a Python object that will be used to perform the build.
+    build_backend: Option<String>,
+    /// <https://peps.python.org/pep-0517/#in-tree-build-backends>
+    backend_path: Option<Vec<String>>,
+}
+
 /// The `tool` section as specified in PEP 517.
 #[derive(Deserialize, Debug, Clone)]
 #[serde(rename_all = "kebab-case")]

@@ -817,100 +810,6 @@ pub(crate) struct ToolUv {
     build_backend: Option<BuildBackendSettings>,
 }
-/// The `[build-system]` section of a pyproject.toml as specified in PEP 517.
-#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
-#[serde(rename_all = "kebab-case")]
-struct BuildSystem {
-    /// PEP 508 dependencies required to execute the build system.
-    requires: Vec<SerdeVerbatim<Requirement<VerbatimParsedUrl>>>,
-    /// A string naming a Python object that will be used to perform the build.
-    build_backend: Option<String>,
-    /// <https://peps.python.org/pep-0517/#in-tree-build-backends>
-    backend_path: Option<Vec<String>>,
-}
-
-impl BuildSystem {
-    /// Check if the `[build-system]` table matches the uv build backend expectations and return
-    /// a list of warnings if it looks suspicious.
-    ///
-    /// Example of a valid table:
-    ///
-    /// ```toml
-    /// [build-system]
-    /// requires = ["uv_build>=0.4.15,<0.5.0"]
-    /// build-backend = "uv_build"
-    /// ```
-    pub(crate) fn check_build_system(&self, uv_version: &str) -> Vec<String> {
-        let mut warnings = Vec::new();
-        if self.build_backend.as_deref() != Some("uv_build") {
-            warnings.push(format!(
-                r#"The value for `build_system.build-backend` should be `"uv_build"`, not `"{}"`"#,
-                self.build_backend.clone().unwrap_or_default()
-            ));
-        }
-
-        let uv_version =
-            Version::from_str(uv_version).expect("uv's own version is not PEP 440 compliant");
-        let next_minor = uv_version.release().get(1).copied().unwrap_or_default() + 1;
-        let next_breaking = Version::new([0, next_minor]);
-
-        let expected = || {
-            format!(
-                "Expected a single uv requirement in `build-system.requires`, found `{}`",
-                toml::to_string(&self.requires).unwrap_or_default()
-            )
-        };
-
-        let [uv_requirement] = &self.requires.as_slice() else {
-            warnings.push(expected());
-            return warnings;
-        };
-        if uv_requirement.name.as_str() != "uv-build" {
-            warnings.push(expected());
-            return warnings;
-        }
-        let bounded = match &uv_requirement.version_or_url {
-            None => false,
-            Some(VersionOrUrl::Url(_)) => {
-                // We can't validate the url
-                true
-            }
-            Some(VersionOrUrl::VersionSpecifier(specifier)) => {
-                // We don't check how wide the range is (that's up to the user), we just
-                // check that the current version is compliant, to avoid accidentally using a
-                // too new or too old uv, and we check that an upper bound exists. The latter
-                // is very important to allow making breaking changes in uv without breaking
-                // the existing immutable source distributions on pypi.
-                if !specifier.contains(&uv_version) {
-                    // This is allowed to happen when testing prereleases, but we should still warn.
-                    warnings.push(format!(
-                        r#"`build_system.requires = ["{uv_requirement}"]` does not contain the
-                        current uv version {uv_version}"#,
-                    ));
-                }
-                Ranges::from(specifier.clone())
-                    .bounding_range()
-                    .map(|bounding_range| bounding_range.1 != Bound::Unbounded)
-                    .unwrap_or(false)
-            }
-        };
-
-        if !bounded {
-            warnings.push(format!(
-                "`build_system.requires = [\"{}\"]` is missing an \
-                upper bound on the `uv_build` version such as `<{next_breaking}`. \
-                Without bounding the `uv_build` version, the source distribution will break \
-                when a future, breaking version of `uv_build` is released.",
-                // Use an underscore consistently, to avoid confusing users between a package name with dash and a
-                // module name with underscore
-                uv_requirement.verbatim()
-            ));
-        }
-
-        warnings
-    }
-}

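The `next_breaking` arithmetic above is what produces the `<0.5`-style suggestion in the missing-upper-bound warning. A standalone sketch of the same computation, assuming the `uv_pep440::Version` API used in the removed code:

    use std::str::FromStr;
    use uv_pep440::Version;

    /// For a 0.x version, the next minor release is the next breaking release.
    fn next_breaking(uv_version: &str) -> Version {
        let version = Version::from_str(uv_version).expect("PEP 440 compliant version");
        let next_minor = version.release().get(1).copied().unwrap_or_default() + 1;
        Version::new([0, next_minor])
    }

    // next_breaking("0.4.15") yields `0.5`, hence the suggested bound `<0.5`.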
 #[cfg(test)]
 mod tests {
     use super::*;

@@ -941,28 +840,6 @@ mod tests {
         formatted
     }
-    #[test]
-    fn uppercase_package_name() {
-        let contents = r#"
-        [project]
-        name = "Hello-World"
-        version = "0.1.0"
-
-        [build-system]
-        requires = ["uv_build>=0.4.15,<0.5.0"]
-        build-backend = "uv_build"
-        "#;
-        let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
-        let temp_dir = TempDir::new().unwrap();
-
-        let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();
-        assert_snapshot!(metadata.core_metadata_format(), @r"
-        Metadata-Version: 2.3
-        Name: Hello-World
-        Version: 0.1.0
-        ");
-    }

     #[test]
     fn valid() {
         let temp_dir = TempDir::new().unwrap();

@@ -1037,7 +914,7 @@ mod tests {
         "#
         };

-        let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
+        let pyproject_toml = PyProjectToml::parse(contents).unwrap();
         let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();

         assert_snapshot!(metadata.core_metadata_format(), @r###"

@@ -1131,7 +1008,7 @@ mod tests {
         "#
         };

-        let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
+        let pyproject_toml = PyProjectToml::parse(contents).unwrap();
         let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();

         assert_snapshot!(metadata.core_metadata_format(), @r"

@@ -1223,7 +1100,7 @@ mod tests {
         "#
         };

-        let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
+        let pyproject_toml = PyProjectToml::parse(contents).unwrap();
         let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();

         assert_snapshot!(metadata.core_metadata_format(), @r###"

@@ -1284,7 +1161,7 @@ mod tests {
     #[test]
     fn build_system_valid() {
         let contents = extend_project("");
-        let pyproject_toml: PyProjectToml = toml::from_str(&contents).unwrap();
+        let pyproject_toml = PyProjectToml::parse(&contents).unwrap();
         assert_snapshot!(
             pyproject_toml.check_build_system("0.4.15+test").join("\n"),
             @""

@@ -1302,7 +1179,7 @@ mod tests {
             requires = ["uv_build"]
             build-backend = "uv_build"
         "#};
-        let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
+        let pyproject_toml = PyProjectToml::parse(contents).unwrap();
         assert_snapshot!(
             pyproject_toml.check_build_system("0.4.15+test").join("\n"),
             @r###"`build_system.requires = ["uv_build"]` is missing an upper bound on the `uv_build` version such as `<0.5`. Without bounding the `uv_build` version, the source distribution will break when a future, breaking version of `uv_build` is released."###

@@ -1320,7 +1197,7 @@ mod tests {
             requires = ["uv_build>=0.4.15,<0.5.0", "wheel"]
             build-backend = "uv_build"
         "#};
-        let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
+        let pyproject_toml = PyProjectToml::parse(contents).unwrap();
         assert_snapshot!(
             pyproject_toml.check_build_system("0.4.15+test").join("\n"),
             @"Expected a single uv requirement in `build-system.requires`, found ``"

@@ -1338,7 +1215,7 @@ mod tests {
             requires = ["setuptools"]
             build-backend = "uv_build"
         "#};
-        let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
+        let pyproject_toml = PyProjectToml::parse(contents).unwrap();
         assert_snapshot!(
             pyproject_toml.check_build_system("0.4.15+test").join("\n"),
             @"Expected a single uv requirement in `build-system.requires`, found ``"

@@ -1356,7 +1233,7 @@ mod tests {
             requires = ["uv_build>=0.4.15,<0.5.0"]
             build-backend = "setuptools"
         "#};
-        let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
+        let pyproject_toml = PyProjectToml::parse(contents).unwrap();
         assert_snapshot!(
             pyproject_toml.check_build_system("0.4.15+test").join("\n"),
             @r###"The value for `build_system.build-backend` should be `"uv_build"`, not `"setuptools"`"###

@@ -1367,7 +1244,7 @@ mod tests {
     fn minimal() {
         let contents = extend_project("");

-        let metadata = toml::from_str::<PyProjectToml>(&contents)
+        let metadata = PyProjectToml::parse(&contents)
             .unwrap()
             .to_metadata(Path::new("/do/not/read"))
             .unwrap();

@@ -1386,14 +1263,15 @@ mod tests {
         "#
         });

-        let err = toml::from_str::<PyProjectToml>(&contents).unwrap_err();
+        let err = PyProjectToml::parse(&contents).unwrap_err();
-        assert_snapshot!(format_err(err), @r#"
-        TOML parse error at line 4, column 10
+        assert_snapshot!(format_err(err), @r###"
+        Invalid pyproject.toml
+        Caused by: TOML parse error at line 4, column 10
           |
         4 | readme = { path = "Readme.md" }
           |          ^^^^^^^^^^^^^^^^^^^^^^
         data did not match any variant of untagged enum Readme
-        "#);
+        "###);
     }

     #[test]

@@ -1403,7 +1281,7 @@ mod tests {
         "#
         });

-        let err = toml::from_str::<PyProjectToml>(&contents)
+        let err = PyProjectToml::parse(&contents)
             .unwrap()
             .to_metadata(Path::new("/do/not/read"))
             .unwrap_err();

@@ -1425,14 +1303,14 @@ mod tests {
         "#
         });

-        let err = toml::from_str::<PyProjectToml>(&contents)
+        let err = PyProjectToml::parse(&contents)
             .unwrap()
             .to_metadata(Path::new("/do/not/read"))
             .unwrap_err();
-        assert_snapshot!(format_err(err), @r"
-        Invalid project metadata
+        assert_snapshot!(format_err(err), @r###"
+        Invalid pyproject.toml
         Caused by: `project.description` must be a single line
-        ");
+        "###);
     }

     #[test]

@@ -1443,14 +1321,14 @@ mod tests {
         "#
         });

-        let err = toml::from_str::<PyProjectToml>(&contents)
+        let err = PyProjectToml::parse(&contents)
             .unwrap()
             .to_metadata(Path::new("/do/not/read"))
             .unwrap_err();
-        assert_snapshot!(format_err(err), @r"
-        Invalid project metadata
+        assert_snapshot!(format_err(err), @r###"
+        Invalid pyproject.toml
         Caused by: When `project.license-files` is defined, `project.license` must be an SPDX expression string
-        ");
+        "###);
     }

     #[test]

@@ -1459,7 +1337,7 @@ mod tests {
             license = "MIT OR Apache-2.0"
         "#
         });
-        let metadata = toml::from_str::<PyProjectToml>(&contents)
+        let metadata = PyProjectToml::parse(&contents)
             .unwrap()
             .to_metadata(Path::new("/do/not/read"))
             .unwrap();

@@ -1477,13 +1355,13 @@ mod tests {
             license = "MIT XOR Apache-2"
         "#
         });
-        let err = toml::from_str::<PyProjectToml>(&contents)
+        let err = PyProjectToml::parse(&contents)
             .unwrap()
             .to_metadata(Path::new("/do/not/read"))
             .unwrap_err();
         // TODO(konsti): We mess up the indentation in the error.
         assert_snapshot!(format_err(err), @r"
-        Invalid project metadata
+        Invalid pyproject.toml
         Caused by: `project.license` is not a valid SPDX expression: MIT XOR Apache-2
         Caused by: MIT XOR Apache-2
              ^^^ unknown term

@@ -1497,18 +1375,18 @@ mod tests {
         "#
         });

-        let err = toml::from_str::<PyProjectToml>(&contents)
+        let err = PyProjectToml::parse(&contents)
             .unwrap()
             .to_metadata(Path::new("/do/not/read"))
             .unwrap_err();
-        assert_snapshot!(format_err(err), @r"
-        Invalid project metadata
+        assert_snapshot!(format_err(err), @r###"
+        Invalid pyproject.toml
         Caused by: Dynamic metadata is not supported
-        ");
+        "###);
     }

     fn script_error(contents: &str) -> String {
-        let err = toml::from_str::<PyProjectToml>(contents)
+        let err = PyProjectToml::parse(contents)
             .unwrap()
             .to_entry_points()
             .unwrap_err();
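An aside on the snapshot churn above: `@r"…"` and `@r###"…"###` are just different Rust raw-string delimiters around the same insta inline snapshot, so most of these hunks only change the expected error text (`Invalid project metadata` vs `Invalid pyproject.toml`), not the assertion mechanics. A minimal sketch, assuming insta's inline-snapshot form:

    use insta::assert_snapshot;

    #[test]
    fn raw_string_delimiters_are_interchangeable() {
        // The extra hashes only permit embedded double quotes inside the
        // snapshot literal; the compared text is identical either way.
        assert_snapshot!("contains \"quotes\"", @r###"contains "quotes""###);
    }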
@@ -70,9 +70,6 @@ pub struct BuildBackendSettings {
     pub default_excludes: bool,

     /// Glob expressions which files and directories to exclude from the source distribution.
-    ///
-    /// These exclusions are also applied to wheels to ensure that a wheel built from a source tree
-    /// is consistent with a wheel built from a source distribution.
     #[option(
         default = r#"[]"#,
         value_type = "list[str]",
@@ -24,9 +24,9 @@ pub fn build_source_dist(
     source_tree: &Path,
     source_dist_directory: &Path,
     uv_version: &str,
-    show_warnings: bool,
 ) -> Result<SourceDistFilename, Error> {
-    let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
+    let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
+    let pyproject_toml = PyProjectToml::parse(&contents)?;
     let filename = SourceDistFilename {
         name: pyproject_toml.name().clone(),
         version: pyproject_toml.version().clone(),

@@ -34,7 +34,7 @@ pub fn build_source_dist(
     };
     let source_dist_path = source_dist_directory.join(filename.to_string());
     let writer = TarGzWriter::new(&source_dist_path)?;
-    write_source_dist(source_tree, writer, uv_version, show_warnings)?;
+    write_source_dist(source_tree, writer, uv_version)?;
     Ok(filename)
 }

@@ -42,9 +42,9 @@ pub fn build_source_dist(
 pub fn list_source_dist(
     source_tree: &Path,
     uv_version: &str,
-    show_warnings: bool,
 ) -> Result<(SourceDistFilename, FileList), Error> {
-    let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
+    let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
+    let pyproject_toml = PyProjectToml::parse(&contents)?;
     let filename = SourceDistFilename {
         name: pyproject_toml.name().clone(),
         version: pyproject_toml.version().clone(),

@@ -52,7 +52,7 @@ pub fn list_source_dist(
     };
     let mut files = FileList::new();
     let writer = ListWriter::new(&mut files);
-    write_source_dist(source_tree, writer, uv_version, show_warnings)?;
+    write_source_dist(source_tree, writer, uv_version)?;
     Ok((filename, files))
 }

@@ -61,7 +61,6 @@ fn source_dist_matcher(
     source_tree: &Path,
     pyproject_toml: &PyProjectToml,
     settings: BuildBackendSettings,
-    show_warnings: bool,
 ) -> Result<(GlobDirFilter, GlobSet), Error> {
     // File and directories to include in the source directory
     let mut include_globs = Vec::new();

@@ -76,7 +75,6 @@ fn source_dist_matcher(
         &settings.module_root,
         settings.module_name.as_ref(),
         settings.namespace,
-        show_warnings,
     )?;
     for module_relative in modules_relative {
         // The wheel must not include any files included by the source distribution (at least until we

@@ -184,9 +182,9 @@ fn write_source_dist(
     source_tree: &Path,
     mut writer: impl DirectoryWriter,
     uv_version: &str,
-    show_warnings: bool,
 ) -> Result<SourceDistFilename, Error> {
-    let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
+    let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
+    let pyproject_toml = PyProjectToml::parse(&contents)?;
     for warning in pyproject_toml.check_build_system(uv_version) {
         warn_user_once!("{warning}");
     }

@@ -220,7 +218,7 @@ fn write_source_dist(
     )?;

     let (include_matcher, exclude_matcher) =
-        source_dist_matcher(source_tree, &pyproject_toml, settings, show_warnings)?;
+        source_dist_matcher(source_tree, &pyproject_toml, settings)?;

     let mut files_visited = 0;
     for entry in WalkDir::new(source_tree)

@@ -299,10 +297,6 @@ impl TarGzWriter {
 impl DirectoryWriter for TarGzWriter {
     fn write_bytes(&mut self, path: &str, bytes: &[u8]) -> Result<(), Error> {
         let mut header = Header::new_gnu();
-        // Work around bug in Python's std tar module
-        // https://github.com/python/cpython/issues/141707
-        // https://github.com/astral-sh/uv/pull/17043#issuecomment-3636841022
-        header.set_entry_type(EntryType::Regular);
         header.set_size(bytes.len() as u64);
         // Reasonable default to avoid 0o000 permissions, the user's umask will be applied on
         // unpacking.

@@ -316,10 +310,6 @@ impl DirectoryWriter for TarGzWriter {
     fn write_file(&mut self, path: &str, file: &Path) -> Result<(), Error> {
         let metadata = fs_err::metadata(file)?;
         let mut header = Header::new_gnu();
-        // Work around bug in Python's std tar module
-        // https://github.com/python/cpython/issues/141707
-        // https://github.com/astral-sh/uv/pull/17043#issuecomment-3636841022
-        header.set_entry_type(EntryType::Regular);
         // Preserve the executable bit, especially for scripts
         #[cfg(unix)]
         let executable_bit = {
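The comments removed above record why main pins the entry type explicitly: Python's tarfile mishandles the header default for these entries (cpython#141707). A minimal sketch of that workaround with the `tar` crate, reusing the calls visible in the diff plus `set_mode` and `append_data`, which are standard `tar` crate methods; `append_regular` is a hypothetical helper:

    use std::io::Write;
    use tar::{Builder, EntryType, Header};

    fn append_regular<W: Write>(
        builder: &mut Builder<W>,
        path: &str,
        bytes: &[u8],
    ) -> std::io::Result<()> {
        let mut header = Header::new_gnu();
        // Explicitly mark the entry as a regular file instead of relying on
        // the header default, so Python's tarfile reads it back correctly.
        header.set_entry_type(EntryType::Regular);
        header.set_size(bytes.len() as u64);
        // Reasonable default mode; the user's umask applies on unpacking.
        header.set_mode(0o644);
        builder.append_data(&mut header, path, bytes)
    }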
@@ -29,9 +29,9 @@ pub fn build_wheel(
     wheel_dir: &Path,
     metadata_directory: Option<&Path>,
     uv_version: &str,
-    show_warnings: bool,
 ) -> Result<WheelFilename, Error> {
-    let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
+    let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
+    let pyproject_toml = PyProjectToml::parse(&contents)?;
     for warning in pyproject_toml.check_build_system(uv_version) {
         warn_user_once!("{warning}");
     }

@@ -58,7 +58,6 @@ pub fn build_wheel(
         &filename,
         uv_version,
         wheel_writer,
-        show_warnings,
     )?;

     Ok(filename)

@@ -68,9 +67,9 @@ pub fn build_wheel(
 pub fn list_wheel(
     source_tree: &Path,
     uv_version: &str,
-    show_warnings: bool,
 ) -> Result<(WheelFilename, FileList), Error> {
-    let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
+    let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
+    let pyproject_toml = PyProjectToml::parse(&contents)?;
     for warning in pyproject_toml.check_build_system(uv_version) {
         warn_user_once!("{warning}");
     }

@@ -88,14 +87,7 @@ pub fn list_wheel(

     let mut files = FileList::new();
     let writer = ListWriter::new(&mut files);
-    write_wheel(
-        source_tree,
-        &pyproject_toml,
-        &filename,
-        uv_version,
-        writer,
-        show_warnings,
-    )?;
+    write_wheel(source_tree, &pyproject_toml, &filename, uv_version, writer)?;
     Ok((filename, files))
 }

@@ -105,7 +97,6 @@ fn write_wheel(
     filename: &WheelFilename,
     uv_version: &str,
     mut wheel_writer: impl DirectoryWriter,
-    show_warnings: bool,
 ) -> Result<(), Error> {
     let settings = pyproject_toml
         .settings()

@@ -141,7 +132,6 @@ fn write_wheel(
         &settings.module_root,
         settings.module_name.as_ref(),
         settings.namespace,
-        show_warnings,
     )?;

     let mut files_visited = 0;

@@ -269,9 +259,9 @@ pub fn build_editable(
     wheel_dir: &Path,
     metadata_directory: Option<&Path>,
     uv_version: &str,
-    show_warnings: bool,
 ) -> Result<WheelFilename, Error> {
-    let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
+    let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
+    let pyproject_toml = PyProjectToml::parse(&contents)?;
     for warning in pyproject_toml.check_build_system(uv_version) {
         warn_user_once!("{warning}");
     }

@@ -305,7 +295,6 @@ pub fn build_editable(
         &settings.module_root,
         settings.module_name.as_ref(),
         settings.namespace,
-        show_warnings,
     )?;

     wheel_writer.write_bytes(

@@ -332,7 +321,8 @@ pub fn metadata(
     metadata_directory: &Path,
     uv_version: &str,
 ) -> Result<String, Error> {
-    let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
+    let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
+    let pyproject_toml = PyProjectToml::parse(&contents)?;
     for warning in pyproject_toml.check_build_system(uv_version) {
         warn_user_once!("{warning}");
     }

@@ -840,7 +830,7 @@ mod test {
     #[test]
     fn test_prepare_metadata() {
         let metadata_dir = TempDir::new().unwrap();
-        let built_by_uv = Path::new("../../test/packages/built-by-uv");
+        let built_by_uv = Path::new("../../scripts/packages/built-by-uv");
         metadata(built_by_uv, metadata_dir.path(), "1.0.0+test").unwrap();

         let mut files: Vec<_> = WalkDir::new(metadata_dir.path())
@@ -1,10 +1,11 @@
 [package]
 name = "uv-build-frontend"
-version = "0.0.8"
+version = "0.0.1"
-description = "This is an internal component crate of uv"
+description = "Build wheels from source distributions"
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
+documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }

@@ -16,7 +17,6 @@ doctest = false
 workspace = true

 [dependencies]
-uv-auth = { workspace = true }
 uv-cache-key = { workspace = true }
 uv-configuration = { workspace = true }
 uv-distribution = { workspace = true }
@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-build-frontend
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-build-frontend).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.
@@ -28,7 +28,7 @@ use tokio::io::AsyncBufReadExt;
 use tokio::process::Command;
 use tokio::sync::{Mutex, Semaphore};
 use tracing::{Instrument, debug, info_span, instrument, warn};
-use uv_auth::CredentialsCache;
 use uv_cache_key::cache_digest;
 use uv_configuration::{BuildKind, BuildOutput, SourceStrategy};
 use uv_distribution::BuildRequires;

@@ -36,7 +36,7 @@ use uv_distribution_types::{
     ConfigSettings, ExtraBuildRequirement, ExtraBuildRequires, IndexLocations, Requirement,
     Resolution,
 };
-use uv_fs::{LockedFile, LockedFileMode};
+use uv_fs::LockedFile;
 use uv_fs::{PythonExt, Simplified};
 use uv_normalize::PackageName;
 use uv_pep440::Version;

@@ -292,7 +292,6 @@ impl SourceBuild {
         mut environment_variables: FxHashMap<OsString, OsString>,
         level: BuildOutput,
         concurrent_builds: usize,
-        credentials_cache: &CredentialsCache,
         preview: Preview,
     ) -> Result<Self, Error> {
         let temp_dir = build_context.cache().venv_dir()?;

@@ -303,6 +302,7 @@ impl SourceBuild {
             source.to_path_buf()
         };

+        let default_backend: Pep517Backend = DEFAULT_BACKEND.clone();
         // Check if we have a PEP 517 build backend.
         let (pep517_backend, project) = Self::extract_pep517_backend(
             &source_tree,

@@ -311,7 +311,7 @@ impl SourceBuild {
             locations,
             source_strategy,
             workspace_cache,
-            credentials_cache,
+            &default_backend,
         )
         .await
         .map_err(|err| *err)?;

@@ -359,9 +359,7 @@ impl SourceBuild {
             interpreter.clone(),
             uv_virtualenv::Prompt::None,
             false,
-            uv_virtualenv::OnExisting::Remove(
-                uv_virtualenv::RemovalReason::TemporaryEnvironment,
-            ),
+            uv_virtualenv::OnExisting::Remove,
             false,
             false,
             false,

@@ -383,6 +381,7 @@ impl SourceBuild {
         let resolved_requirements = Self::get_resolved_requirements(
             build_context,
             source_build_context,
+            &default_backend,
             &pep517_backend,
             extra_build_dependencies,
             build_stack,

@@ -454,7 +453,6 @@ impl SourceBuild {
             &environment_variables,
             &modified_path,
             &temp_dir,
-            credentials_cache,
         )
         .await?;
     }

@@ -493,16 +491,12 @@ impl SourceBuild {
             "uv-setuptools-{}.lock",
             cache_digest(&canonical_source_path)
         ));
-        source_tree_lock = LockedFile::acquire(
-            lock_path,
-            LockedFileMode::Exclusive,
-            self.source_tree.to_string_lossy(),
-        )
-        .await
-        .inspect_err(|err| {
-            warn!("Failed to acquire build lock: {err}");
-        })
-        .ok();
+        source_tree_lock = LockedFile::acquire(lock_path, self.source_tree.to_string_lossy())
+            .await
+            .inspect_err(|err| {
+                warn!("Failed to acquire build lock: {err}");
+            })
+            .ok();
     }
     Ok(source_tree_lock)
 }

@@ -510,12 +504,13 @@ impl SourceBuild {
 async fn get_resolved_requirements(
     build_context: &impl BuildContext,
     source_build_context: SourceBuildContext,
+    default_backend: &Pep517Backend,
     pep517_backend: &Pep517Backend,
     extra_build_dependencies: Vec<Requirement>,
     build_stack: &BuildStack,
 ) -> Result<Resolution, Error> {
     Ok(
-        if pep517_backend.requirements == DEFAULT_BACKEND.requirements
+        if pep517_backend.requirements == default_backend.requirements
             && extra_build_dependencies.is_empty()
         {
             let mut resolution = source_build_context.default_resolution.lock().await;

@@ -523,7 +518,7 @@ impl SourceBuild {
             resolved_requirements.clone()
         } else {
             let resolved_requirements = build_context
-                .resolve(&DEFAULT_BACKEND.requirements, build_stack)
+                .resolve(&default_backend.requirements, build_stack)
                 .await
                 .map_err(|err| {
                     Error::RequirementsResolve("`setup.py` build", err.into())

@@ -563,7 +558,7 @@ impl SourceBuild {
     locations: &IndexLocations,
     source_strategy: SourceStrategy,
     workspace_cache: &WorkspaceCache,
-    credentials_cache: &CredentialsCache,
+    default_backend: &Pep517Backend,
 ) -> Result<(Pep517Backend, Option<Project>), Box<Error>> {
     match fs::read_to_string(source_tree.join("pyproject.toml")) {
         Ok(toml) => {

@@ -592,7 +587,6 @@ impl SourceBuild {
             locations,
             source_strategy,
             workspace_cache,
-            credentials_cache,
         )
         .await
         .map_err(Error::Lowering)?;

@@ -662,7 +656,7 @@ impl SourceBuild {
             }
         }

-        DEFAULT_BACKEND.clone()
+        default_backend.clone()
     };
     Ok((backend, pyproject_toml.project))
 }

@@ -678,7 +672,7 @@ impl SourceBuild {
     // the default backend, to match `build`. `pip` uses `setup.py` directly in this
     // case, but plans to make PEP 517 builds the default in the future.
     // See: https://github.com/pypa/pip/issues/9175.
-    Ok((DEFAULT_BACKEND.clone(), None))
+    Ok((default_backend.clone(), None))
 }
 Err(err) => Err(Box::new(err.into())),
 }

@@ -965,7 +959,6 @@ async fn create_pep517_build_environment(
     environment_variables: &FxHashMap<OsString, OsString>,
     modified_path: &OsString,
     temp_dir: &TempDir,
-    credentials_cache: &CredentialsCache,
 ) -> Result<(), Error> {
     // Write the hook output to a file so that we can read it back reliably.
     let outfile = temp_dir

@@ -1060,7 +1053,6 @@ async fn create_pep517_build_environment(
     locations,
     source_strategy,
     workspace_cache,
-    credentials_cache,
 )
 .await
 .map_err(Error::Lowering)?;
@@ -1,10 +1,10 @@
 [package]
 name = "uv-build"
-version = "0.9.18"
+version = "0.8.17"
-description = "A Python build backend"
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
+documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }

@@ -1,6 +1,6 @@
 [project]
 name = "uv-build"
-version = "0.9.18"
+version = "0.8.17"
 description = "The uv build backend"
 authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
 requires-python = ">=3.8"

@@ -7,7 +7,7 @@ def main():
         "Use `uv build` or another build frontend instead.",
         file=sys.stderr,
     )
-    if "--help" in sys.argv or "-h" in sys.argv:
+    if "--help" in sys.argv:
         sys.exit(0)
     else:
         sys.exit(1)

@@ -44,7 +44,6 @@ fn main() -> Result<()> {
     &env::current_dir()?,
     &sdist_directory,
     uv_version::version(),
-    false,
 )?;
 // Tell the build frontend about the name of the artifact we built
 writeln!(&mut std::io::stdout(), "{filename}").context("stdout is closed")?;

@@ -57,7 +56,6 @@ fn main() -> Result<()> {
     &wheel_directory,
     metadata_directory.as_deref(),
     uv_version::version(),
-    false,
 )?;
 // Tell the build frontend about the name of the artifact we built
 writeln!(&mut std::io::stdout(), "{filename}").context("stdout is closed")?;

@@ -70,7 +68,6 @@ fn main() -> Result<()> {
     &wheel_directory,
     metadata_directory.as_deref(),
     uv_version::version(),
-    false,
 )?;
 // Tell the build frontend about the name of the artifact we built
 writeln!(&mut std::io::stdout(), "{filename}").context("stdout is closed")?;
@@ -1,10 +1,10 @@
 [package]
 name = "uv-cache-info"
-version = "0.0.8"
+version = "0.0.1"
-description = "This is an internal component crate of uv"
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
+documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }

@@ -16,8 +16,6 @@ doctest = false
 workspace = true

 [dependencies]
-uv-fs = { workspace = true }
-
 fs-err = { workspace = true }
 globwalk = { workspace = true }
 schemars = { workspace = true, optional = true }

@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-cache-info
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-cache-info).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.
@@ -1,12 +1,11 @@
 use std::borrow::Cow;
+use std::cmp::max;
 use std::collections::BTreeMap;
 use std::path::{Path, PathBuf};

 use serde::Deserialize;
 use tracing::{debug, warn};

-use uv_fs::Simplified;
-
 use crate::git_info::{Commit, Tags};
 use crate::glob::cluster_globs;
 use crate::timestamp::Timestamp;

@@ -64,7 +63,7 @@ impl CacheInfo {
     pub fn from_directory(directory: &Path) -> Result<Self, CacheInfoError> {
         let mut commit = None;
         let mut tags = None;
-        let mut last_changed: Option<(PathBuf, Timestamp)> = None;
+        let mut timestamp = None;
         let mut directories = BTreeMap::new();
         let mut env = BTreeMap::new();

@@ -129,12 +128,7 @@ impl CacheInfo {
             );
             continue;
         }
-        let timestamp = Timestamp::from_metadata(&metadata);
-        if last_changed.as_ref().is_none_or(|(_, prev_timestamp)| {
-            *prev_timestamp < Timestamp::from_metadata(&metadata)
-        }) {
-            last_changed = Some((path, timestamp));
-        }
+        timestamp = max(timestamp, Some(Timestamp::from_metadata(&metadata)));
     }
     CacheKey::Directory { dir } => {
         // Treat the path as a directory.

@@ -264,25 +258,14 @@ impl CacheInfo {
             }
             continue;
         }
-        let timestamp = Timestamp::from_metadata(&metadata);
-        if last_changed.as_ref().is_none_or(|(_, prev_timestamp)| {
-            *prev_timestamp < Timestamp::from_metadata(&metadata)
-        }) {
-            last_changed = Some((entry.into_path(), timestamp));
-        }
+        timestamp = max(timestamp, Some(Timestamp::from_metadata(&metadata)));
     }
 }
 }

-let timestamp = if let Some((path, timestamp)) = last_changed {
-    debug!(
-        "Computed cache info: {timestamp:?}, {commit:?}, {tags:?}, {env:?}, {directories:?}. Most recently modified: {}",
-        path.user_display()
-    );
-    Some(timestamp)
-} else {
-    None
-};
+debug!(
+    "Computed cache info: {timestamp:?}, {commit:?}, {tags:?}, {env:?}, {directories:?}"
+);

 Ok(Self {
     timestamp,
@@ -1,10 +1,11 @@
 [package]
 name = "uv-cache-key"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.1"
+description = "Generic functionality for caching paths, URLs, and other resources across platforms."
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
+documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }

@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-cache-key
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-cache-key).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.

@@ -4,7 +4,7 @@ use std::hash::{Hash, Hasher};
 use std::ops::Deref;
 
 use url::Url;
-use uv_redacted::{DisplaySafeUrl, DisplaySafeUrlError};
+use uv_redacted::DisplaySafeUrl;
 
 use crate::cache_key::{CacheKey, CacheKeyHasher};
 
@@ -98,7 +98,7 @@ impl CanonicalUrl {
 Self(url)
 }
 
-pub fn parse(url: &str) -> Result<Self, DisplaySafeUrlError> {
+pub fn parse(url: &str) -> Result<Self, url::ParseError> {
 Ok(Self::new(&DisplaySafeUrl::parse(url)?))
 }
 }
@@ -139,18 +139,8 @@ impl std::fmt::Display for CanonicalUrl {
 /// `https://github.com/pypa/package.git#subdirectory=pkg_b` would map to different
 /// [`CanonicalUrl`] values, but the same [`RepositoryUrl`], since they map to the same
 /// resource.
-///
-/// The additional information it holds should only be used to discriminate between
-/// sources that hold the exact same commit in their canonical representation,
-/// but may differ in the contents such as when Git LFS is enabled.
-///
-/// A different cache key will be computed when Git LFS is enabled.
-/// When Git LFS is `false` or `None`, the cache key remains unchanged.
 #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
-pub struct RepositoryUrl {
-repo_url: DisplaySafeUrl,
-with_lfs: Option<bool>,
-}
+pub struct RepositoryUrl(DisplaySafeUrl);
 
 impl RepositoryUrl {
 pub fn new(url: &DisplaySafeUrl) -> Self {
@@ -171,31 +161,19 @@ impl RepositoryUrl {
 url.set_fragment(None);
 url.set_query(None);
 
-Self {
-repo_url: url,
-with_lfs: None,
-}
+Self(url)
 }
 
-pub fn parse(url: &str) -> Result<Self, DisplaySafeUrlError> {
+pub fn parse(url: &str) -> Result<Self, url::ParseError> {
 Ok(Self::new(&DisplaySafeUrl::parse(url)?))
 }
-
-#[must_use]
-pub fn with_lfs(mut self, lfs: Option<bool>) -> Self {
-self.with_lfs = lfs;
-self
-}
 }
 
 impl CacheKey for RepositoryUrl {
 fn cache_key(&self, state: &mut CacheKeyHasher) {
 // `as_str` gives the serialisation of a url (which has a spec) and so insulates against
 // possible changes in how the URL crate does hashing.
-self.repo_url.as_str().cache_key(state);
-if let Some(true) = self.with_lfs {
-1u8.cache_key(state);
-}
+self.0.as_str().cache_key(state);
 }
 }
 
@@ -203,10 +181,7 @@ impl Hash for RepositoryUrl {
 fn hash<H: Hasher>(&self, state: &mut H) {
 // `as_str` gives the serialisation of a url (which has a spec) and so insulates against
 // possible changes in how the URL crate does hashing.
-self.repo_url.as_str().hash(state);
-if let Some(true) = self.with_lfs {
-1u8.hash(state);
-}
+self.0.as_str().hash(state);
 }
 }
 
@@ -214,13 +189,13 @@ impl Deref for RepositoryUrl {
 type Target = Url;
 
 fn deref(&self) -> &Self::Target {
-&self.repo_url
+&self.0
 }
 }
 
 impl std::fmt::Display for RepositoryUrl {
 fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-std::fmt::Display::fmt(&self.repo_url, f)
+std::fmt::Display::fmt(&self.0, f)
 }
 }
 
@@ -229,7 +204,7 @@ mod tests {
 use super::*;
 
 #[test]
-fn user_credential_does_not_affect_cache_key() -> Result<(), DisplaySafeUrlError> {
+fn user_credential_does_not_affect_cache_key() -> Result<(), url::ParseError> {
 let mut hasher = CacheKeyHasher::new();
 CanonicalUrl::parse("https://example.com/pypa/sample-namespace-packages.git@2.0.0")?
 .cache_key(&mut hasher);
@@ -279,7 +254,7 @@ mod tests {
 }
 
 #[test]
-fn canonical_url() -> Result<(), DisplaySafeUrlError> {
+fn canonical_url() -> Result<(), url::ParseError> {
 // Two URLs should be considered equal regardless of the `.git` suffix.
 assert_eq!(
 CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
@@ -308,14 +283,6 @@ mod tests {
 )?,
 );
 
-// Two URLs should _not_ be considered equal if they differ in Git LFS enablement.
-assert_ne!(
-CanonicalUrl::parse(
-"git+https://github.com/pypa/sample-namespace-packages.git#lfs=true"
-)?,
-CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
-);
-
 // Two URLs should _not_ be considered equal if they request different commit tags.
 assert_ne!(
 CanonicalUrl::parse(
@@ -368,7 +335,7 @@ mod tests {
 }
 
 #[test]
-fn repository_url() -> Result<(), DisplaySafeUrlError> {
+fn repository_url() -> Result<(), url::ParseError> {
 // Two URLs should be considered equal regardless of the `.git` suffix.
 assert_eq!(
 RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
@@ -411,76 +378,6 @@ mod tests {
 )?,
 );
 
-// Two URLs should be considered equal if they map to the same repository, even if they
-// differ in Git LFS enablement.
-assert_eq!(
-RepositoryUrl::parse(
-"git+https://github.com/pypa/sample-namespace-packages.git#lfs=true"
-)?,
-RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
-);
-
-Ok(())
-}
-
-#[test]
-fn repository_url_with_lfs() -> Result<(), DisplaySafeUrlError> {
-let mut hasher = CacheKeyHasher::new();
-RepositoryUrl::parse("https://example.com/pypa/sample-namespace-packages.git@2.0.0")?
-.cache_key(&mut hasher);
-let repo_url_basic = hasher.finish();
-
-let mut hasher = CacheKeyHasher::new();
-RepositoryUrl::parse(
-"https://user:foo@example.com/pypa/sample-namespace-packages.git@2.0.0#foo=bar",
-)?
-.cache_key(&mut hasher);
-let repo_url_with_fragments = hasher.finish();
-
-assert_eq!(
-repo_url_basic, repo_url_with_fragments,
-"repository urls should have the exact cache keys as fragments are removed",
-);
-
-let mut hasher = CacheKeyHasher::new();
-RepositoryUrl::parse(
-"https://user:foo@example.com/pypa/sample-namespace-packages.git@2.0.0#foo=bar",
-)?
-.with_lfs(None)
-.cache_key(&mut hasher);
-let git_url_with_fragments = hasher.finish();
-
-assert_eq!(
-repo_url_with_fragments, git_url_with_fragments,
-"both structs should have the exact cache keys as fragments are still removed",
-);
-
-let mut hasher = CacheKeyHasher::new();
-RepositoryUrl::parse(
-"https://user:foo@example.com/pypa/sample-namespace-packages.git@2.0.0#foo=bar",
-)?
-.with_lfs(Some(false))
-.cache_key(&mut hasher);
-let git_url_with_fragments_and_lfs_false = hasher.finish();
-
-assert_eq!(
-git_url_with_fragments, git_url_with_fragments_and_lfs_false,
-"both structs should have the exact cache keys as lfs false should not influence them",
-);
-
-let mut hasher = CacheKeyHasher::new();
-RepositoryUrl::parse(
-"https://user:foo@example.com/pypa/sample-namespace-packages.git@2.0.0#foo=bar",
-)?
-.with_lfs(Some(true))
-.cache_key(&mut hasher);
-let git_url_with_fragments_and_lfs_true = hasher.finish();
-
-assert_ne!(
-git_url_with_fragments, git_url_with_fragments_and_lfs_true,
-"both structs should have different cache keys as one has Git LFS enabled",
-);
-
 Ok(())
 }
 }

@@ -1,10 +1,11 @@
 [package]
 name = "uv-cache"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.1"
+description = "Generate stable hash digests across versions and platforms."
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
+documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }
@@ -34,6 +35,5 @@ rustc-hash = { workspace = true }
 same-file = { workspace = true }
 serde = { workspace = true, features = ["derive"] }
 tempfile = { workspace = true }
-thiserror = { workspace = true }
 tracing = { workspace = true }
 walkdir = { workspace = true }

@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-cache
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-cache).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.

@@ -3,7 +3,7 @@ use std::path::{Path, PathBuf};
 use uv_static::EnvVars;
 
 use crate::Cache;
-use clap::{Parser, ValueHint};
+use clap::Parser;
 use tracing::{debug, warn};
 
 #[derive(Parser, Debug, Clone)]
@@ -27,7 +27,7 @@ pub struct CacheArgs {
 /// `%LOCALAPPDATA%\uv\cache` on Windows.
 ///
 /// To view the location of the cache directory, run `uv cache dir`.
-#[arg(global = true, long, env = EnvVars::UV_CACHE_DIR, value_hint = ValueHint::DirPath)]
+#[arg(global = true, long, env = EnvVars::UV_CACHE_DIR)]
 pub cache_dir: Option<PathBuf>,
 }
 

@@ -7,10 +7,11 @@ use std::str::FromStr;
 use std::sync::Arc;
 
 use rustc_hash::FxHashMap;
-use tracing::{debug, trace, warn};
+use tracing::debug;
 
+pub use archive::ArchiveId;
 use uv_cache_info::Timestamp;
-use uv_fs::{LockedFile, LockedFileError, LockedFileMode, Simplified, cachedir, directories};
+use uv_fs::{LockedFile, cachedir, directories};
 use uv_normalize::PackageName;
 use uv_pypi_types::ResolutionMetadata;
 
@@ -21,7 +22,6 @@ use crate::removal::Remover;
 pub use crate::removal::{Removal, rm_rf};
 pub use crate::wheel::WheelCache;
 use crate::wheel::WheelCacheKind;
-pub use archive::ArchiveId;
 
 mod archive;
 mod by_timestamp;
@@ -35,17 +35,6 @@ mod wheel;
 /// Must be kept in-sync with the version in [`CacheBucket::to_str`].
 pub const ARCHIVE_VERSION: u8 = 0;
 
-/// Error locking a cache entry or shard
-#[derive(Debug, thiserror::Error)]
-pub enum Error {
-#[error(transparent)]
-Io(#[from] io::Error),
-#[error("Could not make the path absolute")]
-Absolute(#[source] io::Error),
-#[error("Could not acquire lock")]
-Acquire(#[from] LockedFileError),
-}
-
 /// A [`CacheEntry`] which may or may not exist yet.
 #[derive(Debug, Clone)]
 pub struct CacheEntry(PathBuf);
@@ -91,14 +80,9 @@ impl CacheEntry {
 }
 
 /// Acquire the [`CacheEntry`] as an exclusive lock.
-pub async fn lock(&self) -> Result<LockedFile, Error> {
+pub async fn lock(&self) -> Result<LockedFile, io::Error> {
 fs_err::create_dir_all(self.dir())?;
-Ok(LockedFile::acquire(
-self.path(),
-LockedFileMode::Exclusive,
-self.path().display(),
-)
-.await?)
+LockedFile::acquire(self.path(), self.path().display()).await
 }
 }
 
@@ -125,14 +109,9 @@ impl CacheShard {
 }
 
 /// Acquire the cache entry as an exclusive lock.
-pub async fn lock(&self) -> Result<LockedFile, Error> {
+pub async fn lock(&self) -> Result<LockedFile, io::Error> {
 fs_err::create_dir_all(self.as_ref())?;
-Ok(LockedFile::acquire(
-self.join(".lock"),
-LockedFileMode::Exclusive,
-self.display(),
-)
-.await?)
+LockedFile::acquire(self.join(".lock"), self.display()).await
 }
 
 /// Return the [`CacheShard`] as a [`PathBuf`].
@@ -156,8 +135,6 @@ impl Deref for CacheShard {
 }
 
 /// The main cache abstraction.
-///
-/// While the cache is active, it holds a read (shared) lock that prevents cache cleaning
 #[derive(Debug, Clone)]
 pub struct Cache {
 /// The cache directory.
@@ -169,9 +146,6 @@ pub struct Cache {
 /// Included to ensure that the temporary directory exists for the length of the operation, but
 /// is dropped at the end as appropriate.
 temp_dir: Option<Arc<tempfile::TempDir>>,
-/// Ensure that `uv cache` operations don't remove items from the cache that are used by another
-/// uv process.
-lock_file: Option<Arc<LockedFile>>,
 }
 
 impl Cache {
@@ -181,7 +155,6 @@ impl Cache {
 root: root.into(),
 refresh: Refresh::None(Timestamp::now()),
 temp_dir: None,
-lock_file: None,
 }
 }
 
@@ -192,7 +165,6 @@ impl Cache {
 root: temp_dir.path().to_path_buf(),
 refresh: Refresh::None(Timestamp::now()),
 temp_dir: Some(Arc::new(temp_dir)),
-lock_file: None,
 })
 }
 
@@ -202,69 +174,6 @@ impl Cache {
 Self { refresh, ..self }
 }
 
-/// Acquire a lock that allows removing entries from the cache.
-pub async fn with_exclusive_lock(self) -> Result<Self, LockedFileError> {
-let Self {
-root,
-refresh,
-temp_dir,
-lock_file,
-} = self;
-
-// Release the existing lock, avoid deadlocks from a cloned cache.
-if let Some(lock_file) = lock_file {
-drop(
-Arc::try_unwrap(lock_file).expect(
-"cloning the cache before acquiring an exclusive lock causes a deadlock",
-),
-);
-}
-let lock_file = LockedFile::acquire(
-root.join(".lock"),
-LockedFileMode::Exclusive,
-root.simplified_display(),
-)
-.await?;
-
-Ok(Self {
-root,
-refresh,
-temp_dir,
-lock_file: Some(Arc::new(lock_file)),
-})
-}
-
-/// Acquire a lock that allows removing entries from the cache, if available.
-///
-/// If the lock is not immediately available, returns [`Err`] with self.
-pub fn with_exclusive_lock_no_wait(self) -> Result<Self, Self> {
-let Self {
-root,
-refresh,
-temp_dir,
-lock_file,
-} = self;
-
-match LockedFile::acquire_no_wait(
-root.join(".lock"),
-LockedFileMode::Exclusive,
-root.simplified_display(),
-) {
-Some(lock_file) => Ok(Self {
-root,
-refresh,
-temp_dir,
-lock_file: Some(Arc::new(lock_file)),
-}),
-None => Err(Self {
-root,
-refresh,
-temp_dir,
-lock_file,
-}),
-}
-}
-
 /// Return the root of the cache.
 pub fn root(&self) -> &Path {
 &self.root
@@ -401,8 +310,10 @@ impl Cache {
 self.temp_dir.is_some()
 }
 
-/// Populate the cache scaffold.
-fn create_base_files(root: &PathBuf) -> io::Result<()> {
+/// Initialize the [`Cache`].
+pub fn init(self) -> Result<Self, io::Error> {
+let root = &self.root;
+
 // Create the cache directory, if it doesn't exist.
 fs_err::create_dir_all(root)?;
 
@@ -448,101 +359,21 @@ impl Cache {
 .join(".git"),
 )?;
 
-Ok(())
-}
-
-/// Initialize the [`Cache`].
-pub async fn init(self) -> Result<Self, Error> {
-let root = &self.root;
-
-Self::create_base_files(root)?;
-
-// Block cache removal operations from interfering.
-let lock_file = match LockedFile::acquire(
-root.join(".lock"),
-LockedFileMode::Shared,
-root.simplified_display(),
-)
-.await
-{
-Ok(lock_file) => Some(Arc::new(lock_file)),
-Err(err)
-if err
-.as_io_error()
-.is_some_and(|err| err.kind() == io::ErrorKind::Unsupported) =>
-{
-warn!(
-"Shared locking is not supported by the current platform or filesystem, \
-reduced parallel process safety with `uv cache clean` and `uv cache prune`."
-);
-None
-}
-Err(err) => return Err(err.into()),
-};
-
 Ok(Self {
-root: std::path::absolute(root).map_err(Error::Absolute)?,
-lock_file,
+root: std::path::absolute(root)?,
 ..self
 })
 }
 
-/// Initialize the [`Cache`], assuming that there are no other uv processes running.
-pub fn init_no_wait(self) -> Result<Option<Self>, Error> {
-let root = &self.root;
-
-Self::create_base_files(root)?;
-
-// Block cache removal operations from interfering.
-let Some(lock_file) = LockedFile::acquire_no_wait(
-root.join(".lock"),
-LockedFileMode::Shared,
-root.simplified_display(),
-) else {
-return Ok(None);
-};
-Ok(Some(Self {
-root: std::path::absolute(root).map_err(Error::Absolute)?,
-lock_file: Some(Arc::new(lock_file)),
-..self
-}))
-}
-
 /// Clear the cache, removing all entries.
-pub fn clear(self, reporter: Box<dyn CleanReporter>) -> Result<Removal, io::Error> {
-// Remove everything but `.lock`, Windows does not allow removal of a locked file
-let mut removal = Remover::new(reporter).rm_rf(&self.root, true)?;
-let Self {
-root, lock_file, ..
-} = self;
-
-// Remove the `.lock` file, unlocking it first
-if let Some(lock) = lock_file {
-drop(lock);
-fs_err::remove_file(root.join(".lock"))?;
-}
-removal.num_files += 1;
-
-// Remove the root directory
-match fs_err::remove_dir(root) {
-Ok(()) => {
-removal.num_dirs += 1;
-}
-// On Windows, when `--force` is used, the `.lock` file can exist and be unremovable,
-// so we make this non-fatal
-Err(err) if err.kind() == io::ErrorKind::DirectoryNotEmpty => {
-trace!("Failed to remove root cache directory: not empty");
-}
-Err(err) => return Err(err),
-}
-
-Ok(removal)
+pub fn clear(&self, reporter: Box<dyn CleanReporter>) -> Result<Removal, io::Error> {
+Remover::new(reporter).rm_rf(&self.root)
 }
 
 /// Remove a package from the cache.
 ///
 /// Returns the number of entries removed from the cache.
-pub fn remove(&self, name: &PackageName) -> io::Result<Removal> {
+pub fn remove(&self, name: &PackageName) -> Result<Removal, io::Error> {
 // Collect the set of referenced archives.
 let references = self.find_archive_references()?;
 
@@ -576,7 +407,6 @@ impl Cache {
 if entry.file_name() == "CACHEDIR.TAG"
 || entry.file_name() == ".gitignore"
 || entry.file_name() == ".git"
-|| entry.file_name() == ".lock"
 {
 continue;
 }
@@ -1172,7 +1002,7 @@ impl CacheBucket {
 Self::Interpreter => "interpreter-v4",
 // Note that when bumping this, you'll also need to bump it
 // in `crates/uv/tests/it/cache_clean.rs`.
-Self::Simple => "simple-v18",
+Self::Simple => "simple-v17",
 // Note that when bumping this, you'll also need to bump it
 // in `crates/uv/tests/it/cache_prune.rs`.
 Self::Wheels => "wheels-v5",

@@ -10,7 +10,7 @@ use crate::CleanReporter;
 /// Remove a file or directory and all its contents, returning a [`Removal`] with
 /// the number of files and directories removed, along with a total byte count.
 pub fn rm_rf(path: impl AsRef<Path>) -> io::Result<Removal> {
-Remover::default().rm_rf(path, false)
+Remover::default().rm_rf(path)
 }
 
 /// A builder for a [`Remover`] that can remove files and directories.
@@ -29,13 +29,9 @@ impl Remover {
 
 /// Remove a file or directory and all its contents, returning a [`Removal`] with
 /// the number of files and directories removed, along with a total byte count.
-pub(crate) fn rm_rf(
-&self,
-path: impl AsRef<Path>,
-skip_locked_file: bool,
-) -> io::Result<Removal> {
+pub(crate) fn rm_rf(&self, path: impl AsRef<Path>) -> io::Result<Removal> {
 let mut removal = Removal::default();
-removal.rm_rf(path.as_ref(), self.reporter.as_deref(), skip_locked_file)?;
+removal.rm_rf(path.as_ref(), self.reporter.as_deref())?;
 Ok(removal)
 }
 }
 
@@ -56,12 +52,7 @@ pub struct Removal {
 
 impl Removal {
 /// Recursively remove a file or directory and all its contents.
-fn rm_rf(
-&mut self,
-path: &Path,
-reporter: Option<&dyn CleanReporter>,
-skip_locked_file: bool,
-) -> io::Result<()> {
+fn rm_rf(&mut self, path: &Path, reporter: Option<&dyn CleanReporter>) -> io::Result<()> {
 let metadata = match fs_err::symlink_metadata(path) {
 Ok(metadata) => metadata,
 Err(err) if err.kind() == io::ErrorKind::NotFound => return Ok(()),
@@ -109,25 +100,13 @@ impl Removal {
 if set_readable(dir).unwrap_or(false) {
 // Retry the operation; if we _just_ `self.rm_rf(dir)` and continue,
 // `walkdir` may give us duplicate entries for the directory.
-return self.rm_rf(path, reporter, skip_locked_file);
+return self.rm_rf(path, reporter);
 }
 }
 }
 }
 
 let entry = entry?;
 
-// Remove the exclusive lock last.
-if skip_locked_file
-&& entry.file_name() == ".lock"
-&& entry
-.path()
-.strip_prefix(path)
-.is_ok_and(|suffix| suffix == Path::new(".lock"))
-{
-continue;
-}
-
 if entry.file_type().is_symlink() && {
 #[cfg(windows)]
 {
@@ -142,11 +121,6 @@ impl Removal {
 self.num_files += 1;
 remove_dir(entry.path())?;
 } else if entry.file_type().is_dir() {
-// Remove the directory with the exclusive lock last.
-if skip_locked_file && entry.path() == path {
-continue;
-}
-
 self.num_dirs += 1;
 
 // The contents should have been removed by now, but sometimes a race condition is

@@ -15,7 +15,7 @@ pub enum WheelCache<'a> {
 Path(&'a DisplaySafeUrl),
 /// An editable dependency, which we key by URL.
 Editable(&'a DisplaySafeUrl),
-/// A Git dependency, which we key by URL (including LFS state), SHA.
+/// A Git dependency, which we key by URL and SHA.
 ///
 /// Note that this variant only exists for source distributions; wheels can't be delivered
 /// through Git.

@@ -1,10 +1,11 @@
 [package]
 name = "uv-cli"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.1"
+description = "The command line interface for the uv binary."
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
+documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }

@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-cli
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-cli).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.

File diff suppressed because it is too large.

@@ -366,7 +366,6 @@ pub fn resolver_options(
 exclude_newer_package.unwrap_or_default(),
 ),
 link_mode,
-torch_backend: None,
 no_build: flag(no_build, build, "build"),
 no_build_package: Some(no_build_package),
 no_binary: flag(no_binary, binary, "binary"),
@@ -496,6 +495,5 @@ pub fn resolver_installer_options(
 Some(no_binary_package)
 },
 no_sources: if no_sources { Some(true) } else { None },
-torch_backend: None,
 }
 }

@@ -1,13 +1,7 @@
 [package]
 name = "uv-client"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.1"
 edition = { workspace = true }
-rust-version = { workspace = true }
-homepage = { workspace = true }
-repository = { workspace = true }
-authors = { workspace = true }
-license = { workspace = true }
 
 [lib]
 doctest = false
@@ -38,7 +32,6 @@ uv-version = { workspace = true }
 uv-warnings = { workspace = true }
 
 anyhow = { workspace = true }
-astral-tl = { workspace = true }
 async-trait = { workspace = true }
 async_http_range_reader = { workspace = true }
 async_zip = { workspace = true }
@@ -61,6 +54,7 @@ serde = { workspace = true }
 serde_json = { workspace = true }
 sys-info = { workspace = true }
 thiserror = { workspace = true }
+tl = { workspace = true }
 tokio = { workspace = true }
 tokio-util = { workspace = true }
 tracing = { workspace = true }
@@ -72,9 +66,5 @@ http-body-util = { workspace = true }
 hyper = { workspace = true }
 hyper-util = { workspace = true }
 insta = { workspace = true }
-rcgen = { workspace = true }
-rustls = { workspace = true }
 tokio = { workspace = true }
-tokio-rustls = { workspace = true }
 wiremock = { workspace = true }
-tempfile = { workspace = true }

@@ -1,13 +1,5 @@
-<!-- This file is generated. DO NOT EDIT -->
+# `pypi-client`
 
-# uv-client
+A general-use client for interacting with PyPI.
 
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-client).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.
+Loosely modeled after Orogene's `oro-client`.

@@ -28,14 +28,13 @@ use tracing::{debug, trace};
 use url::ParseError;
 use url::Url;
 
-use uv_auth::{AuthMiddleware, Credentials, CredentialsCache, Indexes, PyxTokenStore};
+use uv_auth::{AuthMiddleware, Credentials, Indexes, PyxTokenStore};
 use uv_configuration::{KeyringProviderType, TrustedHost};
 use uv_fs::Simplified;
 use uv_pep508::MarkerEnvironment;
 use uv_platform_tags::Platform;
 use uv_preview::Preview;
 use uv_redacted::DisplaySafeUrl;
-use uv_redacted::DisplaySafeUrlError;
 use uv_static::EnvVars;
 use uv_version::version;
 use uv_warnings::warn_user_once;
@@ -45,12 +44,13 @@ use crate::middleware::OfflineMiddleware;
 use crate::tls::read_identity;
 use crate::{Connectivity, WrappedReqwestError};
 
+/// Do not use this value directly outside tests, use [`retries_from_env`] instead.
 pub const DEFAULT_RETRIES: u32 = 3;
 
 /// Maximum number of redirects to follow before giving up.
 ///
 /// This is the default used by [`reqwest`].
-pub const DEFAULT_MAX_REDIRECTS: u32 = 10;
+const DEFAULT_MAX_REDIRECTS: u32 = 10;
 
 /// Selectively skip parts or the entire auth middleware.
 #[derive(Debug, Clone, Copy, Default)]
@@ -78,10 +78,8 @@ pub struct BaseClientBuilder<'a> {
 markers: Option<&'a MarkerEnvironment>,
 platform: Option<&'a Platform>,
 auth_integration: AuthIntegration,
-/// Global authentication cache for a uv invocation to share credentials across uv clients.
-credentials_cache: Arc<CredentialsCache>,
 indexes: Indexes,
-timeout: Duration,
+default_timeout: Duration,
 extra_middleware: Option<ExtraMiddleware>,
 proxies: Vec<Proxy>,
 redirect_policy: RedirectPolicy,
@@ -91,8 +89,6 @@ pub struct BaseClientBuilder<'a> {
 cross_origin_credential_policy: CrossOriginCredentialsPolicy,
 /// Optional custom reqwest client to use instead of creating a new one.
 custom_client: Option<Client>,
-/// uv subcommand in which this client is being used
-subcommand: Option<Vec<String>>,
 }
 
 /// The policy for handling HTTP redirects.
@@ -104,8 +100,6 @@ pub enum RedirectPolicy {
 BypassMiddleware,
 /// Handle redirects manually, re-triggering our custom middleware for each request.
 RetriggerMiddleware,
-/// No redirect for non-cloneable (e.g., streaming) requests with custom redirect logic.
-NoRedirect,
 }
 
 impl RedirectPolicy {
@@ -113,7 +107,6 @@ impl RedirectPolicy {
 match self {
 Self::BypassMiddleware => reqwest::redirect::Policy::default(),
 Self::RetriggerMiddleware => reqwest::redirect::Policy::none(),
-Self::NoRedirect => reqwest::redirect::Policy::none(),
 }
 }
 }
@@ -143,39 +136,33 @@ impl Default for BaseClientBuilder<'_> {
 markers: None,
 platform: None,
 auth_integration: AuthIntegration::default(),
-credentials_cache: Arc::new(CredentialsCache::default()),
 indexes: Indexes::new(),
-timeout: Duration::from_secs(30),
+default_timeout: Duration::from_secs(30),
 extra_middleware: None,
 proxies: vec![],
 redirect_policy: RedirectPolicy::default(),
 cross_origin_credential_policy: CrossOriginCredentialsPolicy::Secure,
 custom_client: None,
-subcommand: None,
+}
+}
+}
+
+impl BaseClientBuilder<'_> {
+pub fn new(
+connectivity: Connectivity,
+native_tls: bool,
+allow_insecure_host: Vec<TrustedHost>,
+) -> Self {
+Self {
+allow_insecure_host,
+native_tls,
+connectivity,
+..Self::default()
 }
 }
 }
 
 impl<'a> BaseClientBuilder<'a> {
-pub fn new(
-connectivity: Connectivity,
-native_tls: bool,
-allow_insecure_host: Vec<TrustedHost>,
-preview: Preview,
-timeout: Duration,
-retries: u32,
-) -> Self {
-Self {
-preview,
-allow_insecure_host,
-native_tls,
-retries,
-connectivity,
-timeout,
-..Self::default()
-}
-}
-
 /// Use a custom reqwest client instead of creating a new one.
 ///
 /// This allows you to provide your own reqwest client with custom configuration.
@@ -211,6 +198,15 @@ impl<'a> BaseClientBuilder<'a> {
 self
 }
 
+/// Read the retry count from [`EnvVars::UV_HTTP_RETRIES`] if set, otherwise use the default
+/// retries.
+///
+/// Errors when [`EnvVars::UV_HTTP_RETRIES`] is not a valid u32.
+pub fn retries_from_env(mut self) -> Result<Self, RetryParsingError> {
+self.retries = retries_from_env()?;
+Ok(self)
+}
+
 #[must_use]
 pub fn native_tls(mut self, native_tls: bool) -> Self {
 self.native_tls = native_tls;
@@ -248,8 +244,8 @@ impl<'a> BaseClientBuilder<'a> {
 }
 
 #[must_use]
-pub fn timeout(mut self, timeout: Duration) -> Self {
-self.timeout = timeout;
+pub fn default_timeout(mut self, default_timeout: Duration) -> Self {
+self.default_timeout = default_timeout;
 self
 }
 
@@ -283,26 +279,6 @@ impl<'a> BaseClientBuilder<'a> {
 self
 }
 
-#[must_use]
-pub fn subcommand(mut self, subcommand: Vec<String>) -> Self {
-self.subcommand = Some(subcommand);
-self
-}
-
-pub fn credentials_cache(&self) -> &CredentialsCache {
-&self.credentials_cache
-}
-
-/// See [`CredentialsCache::store_credentials_from_url`].
-pub fn store_credentials_from_url(&self, url: &DisplaySafeUrl) -> bool {
-self.credentials_cache.store_credentials_from_url(url)
-}
-
-/// See [`CredentialsCache::store_credentials`].
-pub fn store_credentials(&self, url: &DisplaySafeUrl, credentials: Credentials) {
-self.credentials_cache.store_credentials(url, credentials);
-}
-
 pub fn is_native_tls(&self) -> bool {
 self.native_tls
 }
@@ -312,7 +288,7 @@ impl<'a> BaseClientBuilder<'a> {
 }
 
 /// Create a [`RetryPolicy`] for the client.
-pub fn retry_policy(&self) -> ExponentialBackoff {
+fn retry_policy(&self) -> ExponentialBackoff {
 let mut builder = ExponentialBackoff::builder();
 if env::var_os(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY).is_some() {
 builder = builder.retry_bounds(Duration::from_millis(0), Duration::from_millis(0));
@@ -321,7 +297,21 @@ impl<'a> BaseClientBuilder<'a> {
 }
 
 pub fn build(&self) -> BaseClient {
-let timeout = self.timeout;
+// Timeout options, matching https://doc.rust-lang.org/nightly/cargo/reference/config.html#httptimeout
+// `UV_REQUEST_TIMEOUT` is provided for backwards compatibility with v0.1.6
+let timeout = env::var(EnvVars::UV_HTTP_TIMEOUT)
+.or_else(|_| env::var(EnvVars::UV_REQUEST_TIMEOUT))
+.or_else(|_| env::var(EnvVars::HTTP_TIMEOUT))
+.and_then(|value| {
+value.parse::<u64>()
+.map(Duration::from_secs)
+.or_else(|_| {
+// On parse error, warn and use the default timeout
+warn_user_once!("Ignoring invalid value from environment for `UV_HTTP_TIMEOUT`. Expected an integer number of seconds, got \"{value}\".");
+Ok(self.default_timeout)
+})
+})
+.unwrap_or(self.default_timeout);
 debug!("Using request timeout of {}s", timeout.as_secs());
 
 // Use the custom client if provided, otherwise create a new one
@@ -351,7 +341,6 @@ impl<'a> BaseClientBuilder<'a> {
 dangerous_client,
 raw_dangerous_client,
 timeout,
-credentials_cache: self.credentials_cache.clone(),
 }
 }
 
@@ -378,7 +367,6 @@ impl<'a> BaseClientBuilder<'a> {
 raw_client: existing.raw_client.clone(),
 raw_dangerous_client: existing.raw_dangerous_client.clone(),
 timeout: existing.timeout,
-credentials_cache: existing.credentials_cache.clone(),
 }
 }
 
@@ -387,14 +375,14 @@ impl<'a> BaseClientBuilder<'a> {
 let mut user_agent_string = format!("uv/{}", version());
 
 // Add linehaul metadata.
-let linehaul = LineHaul::new(self.markers, self.platform, self.subcommand.clone());
-if let Ok(output) = serde_json::to_string(&linehaul) {
-let _ = write!(user_agent_string, " {output}");
+if let Some(markers) = self.markers {
+let linehaul = LineHaul::new(markers, self.platform);
+if let Ok(output) = serde_json::to_string(&linehaul) {
+let _ = write!(user_agent_string, " {output}");
+}
 }
 
-// Checks for the presence of `SSL_CERT_FILE`.
-// Certificate loading support is delegated to `rustls-native-certs`.
-// See https://github.com/rustls/rustls-native-certs/blob/813790a297ad4399efe70a8e5264ca1b420acbec/src/lib.rs#L118-L125
+// Check for the presence of an `SSL_CERT_FILE`.
 let ssl_cert_file_exists = env::var_os(EnvVars::SSL_CERT_FILE).is_some_and(|path| {
 let path_exists = Path::new(&path).exists();
 if !path_exists {
@@ -406,61 +394,11 @@ impl<'a> BaseClientBuilder<'a> {
 path_exists
 });
 
-// Checks for the presence of `SSL_CERT_DIR`.
-// Certificate loading support is delegated to `rustls-native-certs`.
-// See https://github.com/rustls/rustls-native-certs/blob/813790a297ad4399efe70a8e5264ca1b420acbec/src/lib.rs#L118-L125
-let ssl_cert_dir_exists = env::var_os(EnvVars::SSL_CERT_DIR)
-.filter(|v| !v.is_empty())
-.is_some_and(|dirs| {
-// Parse `SSL_CERT_DIR`, with support for multiple entries using
-// a platform-specific delimiter (`:` on Unix, `;` on Windows)
-let (existing, missing): (Vec<_>, Vec<_>) =
-env::split_paths(&dirs).partition(|p| p.exists());
-
-if existing.is_empty() {
-let end_note = if missing.len() == 1 {
-"The directory does not exist."
-} else {
-"The entries do not exist."
-};
-warn_user_once!(
-"Ignoring invalid `SSL_CERT_DIR`. {end_note}: {}.",
-missing
-.iter()
-.map(Simplified::simplified_display)
-.join(", ")
-.cyan()
-);
-return false;
-}
-
-// Warn on any missing entries
-if !missing.is_empty() {
-let end_note = if missing.len() == 1 {
-"The following directory does not exist:"
-} else {
-"The following entries do not exist:"
-};
-warn_user_once!(
-"Invalid entries in `SSL_CERT_DIR`. {end_note}: {}.",
-missing
-.iter()
-.map(Simplified::simplified_display)
-.join(", ")
-.cyan()
-);
-}
-
-// Proceed while ignoring missing entries
-true
-});
-
 // Create a secure client that validates certificates.
 let raw_client = self.create_client(
 &user_agent_string,
 timeout,
 ssl_cert_file_exists,
-ssl_cert_dir_exists,
 Security::Secure,
 self.redirect_policy,
 );
@@ -470,7 +408,6 @@ impl<'a> BaseClientBuilder<'a> {
 &user_agent_string,
 timeout,
 ssl_cert_file_exists,
-ssl_cert_dir_exists,
 Security::Insecure,
 self.redirect_policy,
 );
@@ -483,7 +420,6 @@ impl<'a> BaseClientBuilder<'a> {
 user_agent: &str,
 timeout: Duration,
 ssl_cert_file_exists: bool,
-ssl_cert_dir_exists: bool,
 security: Security,
 redirect_policy: RedirectPolicy,
 ) -> Client {
@@ -502,7 +438,7 @@ impl<'a> BaseClientBuilder<'a> {
 Security::Insecure => client_builder.danger_accept_invalid_certs(true),
 };
 
-let client_builder = if self.native_tls || ssl_cert_file_exists || ssl_cert_dir_exists {
+let client_builder = if self.native_tls || ssl_cert_file_exists {
 client_builder.tls_built_in_native_certs(true)
 } else {
 client_builder.tls_built_in_webpki_certs(true)
@@ -583,7 +519,6 @@ impl<'a> BaseClientBuilder<'a> {
 match self.auth_integration {
 AuthIntegration::Default => {
 let mut auth_middleware = AuthMiddleware::new()
-.with_cache_arc(self.credentials_cache.clone())
 .with_base_client(base_client)
 .with_indexes(self.indexes.clone())
 .with_keyring(self.keyring.to_provider())
@@ -595,7 +530,6 @@ impl<'a> BaseClientBuilder<'a> {
 }
 AuthIntegration::OnlyAuthenticated => {
 let mut auth_middleware = AuthMiddleware::new()
-.with_cache_arc(self.credentials_cache.clone())
 .with_base_client(base_client)
 .with_indexes(self.indexes.clone())
 .with_keyring(self.keyring.to_provider())
@@ -639,8 +573,6 @@ pub struct BaseClient {
 allow_insecure_host: Vec<TrustedHost>,
 /// The number of retries to attempt on transient errors.
 retries: u32,
-/// Global authentication cache for a uv invocation to share credentials across uv clients.
-credentials_cache: Arc<CredentialsCache>,
 }
 
 #[derive(Debug, Clone, Copy)]
@@ -663,7 +595,7 @@ impl BaseClient {
 
 /// Executes a request, applying redirect policy.
 pub async fn execute(&self, req: Request) -> reqwest_middleware::Result<Response> {
-let client = self.for_host(&DisplaySafeUrl::from_url(req.url().clone()));
+let client = self.for_host(&DisplaySafeUrl::from(req.url().clone()));
 client.execute(req).await
 }
 
@@ -686,15 +618,7 @@ impl BaseClient {
 
 /// The [`RetryPolicy`] for the client.
 pub fn retry_policy(&self) -> ExponentialBackoff {
-let mut builder = ExponentialBackoff::builder();
-if env::var_os(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY).is_some() {
-builder = builder.retry_bounds(Duration::from_millis(0), Duration::from_millis(0));
-}
-builder.build_with_max_retries(self.retries)
-}
-
-pub fn credentials_cache(&self) -> &CredentialsCache {
-&self.credentials_cache
+ExponentialBackoff::builder().build_with_max_retries(self.retries)
 }
 }
 
@@ -732,7 +656,6 @@ impl RedirectClientWithMiddleware {
 match self.redirect_policy {
 RedirectPolicy::BypassMiddleware => self.client.execute(req).await,
 RedirectPolicy::RetriggerMiddleware => self.execute_with_redirect_handling(req).await,
-RedirectPolicy::NoRedirect => self.client.execute(req).await,
 }
 }
 
@@ -798,7 +721,7 @@ fn request_into_redirect(
 res: &Response,
 cross_origin_credentials_policy: CrossOriginCredentialsPolicy,
|
cross_origin_credentials_policy: CrossOriginCredentialsPolicy,
|
||||||
) -> reqwest_middleware::Result<Option<Request>> {
|
) -> reqwest_middleware::Result<Option<Request>> {
|
||||||
let original_req_url = DisplaySafeUrl::from_url(req.url().clone());
|
let original_req_url = DisplaySafeUrl::from(req.url().clone());
|
||||||
let status = res.status();
|
let status = res.status();
|
||||||
let should_redirect = match status {
|
let should_redirect = match status {
|
||||||
StatusCode::MOVED_PERMANENTLY
|
StatusCode::MOVED_PERMANENTLY
|
||||||
|
|
@ -851,7 +774,7 @@ fn request_into_redirect(
|
||||||
let mut redirect_url = match DisplaySafeUrl::parse(location) {
|
let mut redirect_url = match DisplaySafeUrl::parse(location) {
|
||||||
Ok(url) => url,
|
Ok(url) => url,
|
||||||
// Per RFC 7231, URLs should be resolved against the request URL.
|
// Per RFC 7231, URLs should be resolved against the request URL.
|
||||||
Err(DisplaySafeUrlError::Url(ParseError::RelativeUrlWithoutBase)) => original_req_url.join(location).map_err(|err| {
|
Err(ParseError::RelativeUrlWithoutBase) => original_req_url.join(location).map_err(|err| {
|
||||||
reqwest_middleware::Error::Middleware(anyhow!(
|
reqwest_middleware::Error::Middleware(anyhow!(
|
||||||
"Invalid HTTP {status} 'Location' value `{location}` relative to `{original_req_url}`: {err}"
|
"Invalid HTTP {status} 'Location' value `{location}` relative to `{original_req_url}`: {err}"
|
||||||
))
|
))
|
||||||
|
|
@ -1104,12 +1027,12 @@ pub fn is_transient_network_error(err: &(dyn Error + 'static)) -> bool {
|
||||||
}
|
}
|
||||||
|
|
||||||
trace!("Cannot retry nested reqwest error");
|
trace!("Cannot retry nested reqwest error");
|
||||||
} else if source.downcast_ref::<h2::Error>().is_some() {
|
} else if let Some(io_err) = source.downcast_ref::<io::Error>().or_else(|| {
|
||||||
// All h2 errors look like errors that should be retried
|
// h2 may hide an IO error inside.
|
||||||
// https://github.com/astral-sh/uv/issues/15916
|
source
|
||||||
trace!("Retrying nested h2 error");
|
.downcast_ref::<h2::Error>()
|
||||||
return true;
|
.and_then(|err| err.get_io())
|
||||||
} else if let Some(io_err) = source.downcast_ref::<io::Error>() {
|
}) {
|
||||||
has_known_error = true;
|
has_known_error = true;
|
||||||
let retryable_io_err_kinds = [
|
let retryable_io_err_kinds = [
|
||||||
// https://github.com/astral-sh/uv/issues/12054
|
// https://github.com/astral-sh/uv/issues/12054
|
||||||
|
|
@ -1176,6 +1099,19 @@ pub enum RetryParsingError {
|
||||||
ParseInt(#[from] ParseIntError),
|
ParseInt(#[from] ParseIntError),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Read the retry count from [`EnvVars::UV_HTTP_RETRIES`] if set, otherwise, make no change.
|
||||||
|
///
|
||||||
|
/// Errors when [`EnvVars::UV_HTTP_RETRIES`] is not a valid u32.
|
||||||
|
pub fn retries_from_env() -> Result<u32, RetryParsingError> {
|
||||||
|
// TODO(zanieb): We should probably parse this in another layer, but there's not a natural
|
||||||
|
// fit for it right now
|
||||||
|
if let Some(value) = env::var_os(EnvVars::UV_HTTP_RETRIES) {
|
||||||
|
Ok(value.to_string_lossy().as_ref().parse::<u32>()?)
|
||||||
|
} else {
|
||||||
|
Ok(DEFAULT_RETRIES)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
|
|
|
||||||
|
|
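The `retry_policy` hunk above is the one behavioral change in this stretch: main consults a test-only environment variable to zero out the backoff window, while 0.8.17 builds the policy directly. A minimal sketch of main's pattern, assuming the `reqwest-retry` crate; the free function and the direct `std::env` lookup are illustrative rather than uv's actual API surface:

```rust
use std::time::Duration;

use reqwest_retry::policies::ExponentialBackoff;

/// Build an exponential-backoff retry policy, collapsing the delay window to
/// zero when the test-only environment variable is set, so retry tests don't sleep.
fn retry_policy(retries: u32) -> ExponentialBackoff {
    let mut builder = ExponentialBackoff::builder();
    if std::env::var_os("UV_TEST_NO_HTTP_RETRY_DELAY").is_some() {
        // Both retry bounds at zero means every retry fires immediately.
        builder = builder.retry_bounds(Duration::from_millis(0), Duration::from_millis(0));
    }
    builder.build_with_max_retries(retries)
}
```

The hunks that follow appear to come from the same crate's cached-client module, judging from the `use crate::base_client::is_transient_network_error;` import.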
@@ -15,32 +15,12 @@ use uv_redacted::DisplaySafeUrl;

 use crate::BaseClient;
 use crate::base_client::is_transient_network_error;
-use crate::error::ProblemDetails;
 use crate::{
     Error, ErrorKind,
     httpcache::{AfterResponse, BeforeRequest, CachePolicy, CachePolicyBuilder},
     rkyvutil::OwnedArchive,
 };

-/// Extract problem details from an HTTP response if it has the correct content type
-///
-/// Note: This consumes the response body, so it should only be called when there's an error status.
-async fn extract_problem_details(response: Response) -> Option<ProblemDetails> {
-    match response.bytes().await {
-        Ok(bytes) => match serde_json::from_slice(&bytes) {
-            Ok(details) => Some(details),
-            Err(err) => {
-                warn!("Failed to parse problem details: {err}");
-                None
-            }
-        },
-        Err(err) => {
-            warn!("Failed to read response body for problem details: {err}");
-            None
-        }
-    }
-}
-
 /// A trait the generalizes (de)serialization at a high level.
 ///
 /// The main purpose of this trait is to make the `CachedClient` work for
@@ -557,36 +537,16 @@ impl CachedClient {
         cached: DataWithCachePolicy,
         new_cache_policy_builder: CachePolicyBuilder,
     ) -> Result<CachedResponse, Error> {
-        let url = DisplaySafeUrl::from_url(req.url().clone());
+        let url = DisplaySafeUrl::from(req.url().clone());
         debug!("Sending revalidation request for: {url}");
         let mut response = self
             .0
             .execute(req)
             .instrument(info_span!("revalidation_request", url = url.as_str()))
             .await
-            .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?;
-
-        // Check for HTTP error status and extract problem details if available
-        if let Err(status_error) = response.error_for_status_ref() {
-            // Clone the response to extract problem details before the error consumes it
-            let problem_details = if response
-                .headers()
-                .get("content-type")
-                .and_then(|ct| ct.to_str().ok())
-                .map(|ct| ct == "application/problem+json")
-                .unwrap_or(false)
-            {
-                extract_problem_details(response).await
-            } else {
-                None
-            };
-            return Err(ErrorKind::from_reqwest_with_problem_details(
-                url.clone(),
-                status_error,
-                problem_details,
-            )
-            .into());
-        }
-
+            .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?
+            .error_for_status()
+            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
         // If the user set a custom `Cache-Control` header, override it.
         if let CacheControl::Override(header) = cache_control {
@@ -627,7 +587,7 @@ impl CachedClient {
         req: Request,
         cache_control: CacheControl<'_>,
     ) -> Result<(Response, Option<Box<CachePolicy>>), Error> {
-        let url = DisplaySafeUrl::from_url(req.url().clone());
+        let url = DisplaySafeUrl::from(req.url().clone());
         trace!("Sending fresh {} request for {}", req.method(), url);
         let cache_policy_builder = CachePolicyBuilder::new(&req);
         let mut response = self
@@ -651,25 +611,9 @@ impl CachedClient {
             .map(|retries| retries.value());

         if let Err(status_error) = response.error_for_status_ref() {
-            let problem_details = if response
-                .headers()
-                .get("content-type")
-                .and_then(|ct| ct.to_str().ok())
-                .map(|ct| ct.starts_with("application/problem+json"))
-                .unwrap_or(false)
-            {
-                extract_problem_details(response).await
-            } else {
-                None
-            };
             return Err(CachedClientError::<Error>::Client {
                 retries: retry_count,
-                err: ErrorKind::from_reqwest_with_problem_details(
-                    url,
-                    status_error,
-                    problem_details,
-                )
-                .into(),
+                err: ErrorKind::from_reqwest(url, status_error).into(),
             }
             .into());
         }
@@ -743,15 +687,13 @@ impl CachedClient {
         let total_retries = past_retries + middleware_retries;
         let retry_decision = retry_policy.should_retry(start_time, total_retries);
         if let reqwest_retry::RetryDecision::Retry { execute_after } = retry_decision {
+            debug!(
+                "Transient failure while handling response from {}; retrying...",
+                req.url(),
+            );
             let duration = execute_after
                 .duration_since(SystemTime::now())
                 .unwrap_or_else(|_| Duration::default());
-
-            debug!(
-                "Transient failure while handling response from {}; retrying after {:.1}s...",
-                req.url(),
-                duration.as_secs_f32(),
-            );
             tokio::time::sleep(duration).await;
             past_retries += 1;
             continue;
@@ -803,14 +745,13 @@ impl CachedClient {
         let total_retries = past_retries + middleware_retries;
         let retry_decision = retry_policy.should_retry(start_time, total_retries);
         if let reqwest_retry::RetryDecision::Retry { execute_after } = retry_decision {
+            debug!(
+                "Transient failure while handling response from {}; retrying...",
+                req.url(),
+            );
             let duration = execute_after
                 .duration_since(SystemTime::now())
                 .unwrap_or_else(|_| Duration::default());
-            debug!(
-                "Transient failure while handling response from {}; retrying after {}s...",
-                req.url(),
-                duration.as_secs(),
-            );
             tokio::time::sleep(duration).await;
             past_retries += 1;
             continue;
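The revalidation change above swaps main's RFC 9457 handling for a plain `error_for_status` chain. The guard main used before touching the response body is a reusable pattern on its own: never parse a problem document unless the server declared it. A sketch assuming `reqwest`; the function name is ours:

```rust
use reqwest::Response;

/// Mirror of the guard in the removed code: only treat the body as an
/// RFC 9457 problem document when the `Content-Type` header says so.
fn is_problem_json(response: &Response) -> bool {
    response
        .headers()
        .get("content-type")
        .and_then(|ct| ct.to_str().ok())
        .map(|ct| ct.starts_with("application/problem+json"))
        .unwrap_or(false)
}
```

Note that the two removed call sites differed slightly: one compared the content type with `==`, the other with `starts_with`, which also tolerates a trailing charset parameter. The error-module hunks follow.

@@ -1,11 +1,9 @@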
-use async_http_range_reader::AsyncHttpRangeReaderError;
-use async_zip::error::ZipError;
-use serde::Deserialize;
 use std::fmt::{Display, Formatter};
 use std::ops::Deref;
-use std::path::PathBuf;

-use uv_cache::Error as CacheError;
+use async_http_range_reader::AsyncHttpRangeReaderError;
+use async_zip::error::ZipError;

 use uv_distribution_filename::{WheelFilename, WheelFilenameError};
 use uv_normalize::PackageName;
 use uv_redacted::DisplaySafeUrl;
@@ -13,61 +11,6 @@ use uv_redacted::DisplaySafeUrl;
 use crate::middleware::OfflineError;
 use crate::{FlatIndexError, html};
-
-/// RFC 9457 Problem Details for HTTP APIs
-///
-/// This structure represents the standard format for machine-readable details
-/// of errors in HTTP response bodies as defined in RFC 9457.
-#[derive(Debug, Clone, Deserialize)]
-pub struct ProblemDetails {
-    /// A URI reference that identifies the problem type.
-    /// When dereferenced, it SHOULD provide human-readable documentation for the problem type.
-    #[serde(rename = "type", default = "default_problem_type")]
-    pub problem_type: String,
-
-    /// A short, human-readable summary of the problem type.
-    pub title: Option<String>,
-
-    /// The HTTP status code generated by the origin server for this occurrence of the problem.
-    pub status: Option<u16>,
-
-    /// A human-readable explanation specific to this occurrence of the problem.
-    pub detail: Option<String>,
-
-    /// A URI reference that identifies the specific occurrence of the problem.
-    pub instance: Option<String>,
-}
-
-/// Default problem type URI as per RFC 9457
-#[inline]
-fn default_problem_type() -> String {
-    "about:blank".to_string()
-}
-
-impl ProblemDetails {
-    /// Get a human-readable description of the problem
-    pub fn description(&self) -> Option<String> {
-        match self {
-            Self {
-                title: Some(title),
-                detail: Some(detail),
-                ..
-            } => Some(format!("Server message: {title}, {detail}")),
-            Self {
-                title: Some(title), ..
-            } => Some(format!("Server message: {title}")),
-            Self {
-                detail: Some(detail),
-                ..
-            } => Some(format!("Server message: {detail}")),
-            Self {
-                status: Some(status),
-                ..
-            } => Some(format!("HTTP error {status}")),
-            _ => None,
-        }
-    }
-}
-
 #[derive(Debug)]
 pub struct Error {
     kind: Box<ErrorKind>,
@@ -79,9 +22,8 @@ impl Display for Error {
         if self.retries > 0 {
             write!(
                 f,
-                "Request failed after {retries} {subject}",
-                retries = self.retries,
-                subject = if self.retries > 1 { "retries" } else { "retry" }
+                "Request failed after {retries} retries",
+                retries = self.retries
             )
         } else {
             Display::fmt(&self.kind, f)
@@ -273,15 +215,11 @@ pub enum ErrorKind {
     /// Make sure the package name is spelled correctly and that you've
     /// configured the right registry to fetch it from.
     #[error("Package `{0}` was not found in the registry")]
-    RemotePackageNotFound(PackageName),
+    PackageNotFound(String),

     /// The package was not found in the local (file-based) index.
     #[error("Package `{0}` was not found in the local index")]
-    LocalPackageNotFound(PackageName),
-
-    /// The root was not found in the local (file-based) index.
-    #[error("Local index not found at: `{}`", _0.display())]
-    LocalIndexNotFound(PathBuf),
+    FileNotFound(String),

     /// The metadata file could not be parsed.
     #[error("Couldn't parse metadata of {0} from {1}")]
@@ -291,12 +229,16 @@ pub enum ErrorKind {
         #[source] Box<uv_pypi_types::MetadataError>,
     ),

+    /// The metadata file was not found in the wheel.
+    #[error("Metadata file `{0}` was not found in {1}")]
+    MetadataNotFound(WheelFilename, String),
+
     /// An error that happened while making a request or in a reqwest middleware.
     #[error("Failed to fetch: `{0}`")]
     WrappedReqwestError(DisplaySafeUrl, #[source] WrappedReqwestError),

     /// Add the number of failed retries to the error.
-    #[error("Request failed after {retries} {subject}", subject = if *retries > 1 { "retries" } else { "retry" })]
+    #[error("Request failed after {retries} retries")]
     RequestWithRetries {
         source: Box<ErrorKind>,
         retries: u32,
@@ -338,9 +280,6 @@ pub enum ErrorKind {
     #[error("Failed to write to the client cache")]
     CacheWrite(#[source] std::io::Error),

-    #[error("Failed to acquire lock on the client cache")]
-    CacheLock(#[source] CacheError),
-
     #[error(transparent)]
     Io(std::io::Error),

@@ -391,19 +330,7 @@ impl ErrorKind {
         }
     }

-        Self::WrappedReqwestError(url, WrappedReqwestError::from(err))
-    }
-
-    /// Create an [`ErrorKind`] from a [`reqwest::Error`] with problem details.
-    pub(crate) fn from_reqwest_with_problem_details(
-        url: DisplaySafeUrl,
-        error: reqwest::Error,
-        problem_details: Option<ProblemDetails>,
-    ) -> Self {
-        Self::WrappedReqwestError(
-            url,
-            WrappedReqwestError::with_problem_details(error.into(), problem_details),
-        )
+        Self::WrappedReqwestError(url, WrappedReqwestError(err))
     }
 }

@@ -413,26 +340,12 @@ impl ErrorKind {
 /// Wraps a [`reqwest_middleware::Error`] instead of an [`reqwest::Error`] since the actual reqwest
 /// error may be below some context in the [`anyhow::Error`].
 #[derive(Debug)]
-pub struct WrappedReqwestError {
-    error: reqwest_middleware::Error,
-    problem_details: Option<Box<ProblemDetails>>,
-}
+pub struct WrappedReqwestError(reqwest_middleware::Error);

 impl WrappedReqwestError {
-    /// Create a new `WrappedReqwestError` with optional problem details
-    pub fn with_problem_details(
-        error: reqwest_middleware::Error,
-        problem_details: Option<ProblemDetails>,
-    ) -> Self {
-        Self {
-            error,
-            problem_details: problem_details.map(Box::new),
-        }
-    }
-
     /// Return the inner [`reqwest::Error`] from the error chain, if it exists.
     fn inner(&self) -> Option<&reqwest::Error> {
-        match &self.error {
+        match &self.0 {
             reqwest_middleware::Error::Reqwest(err) => Some(err),
             reqwest_middleware::Error::Middleware(err) => err.chain().find_map(|err| {
                 if let Some(err) = err.downcast_ref::<reqwest::Error>() {
@@ -494,19 +407,13 @@ impl WrappedReqwestError {

 impl From<reqwest::Error> for WrappedReqwestError {
     fn from(error: reqwest::Error) -> Self {
-        Self {
-            error: error.into(),
-            problem_details: None,
-        }
+        Self(error.into())
     }
 }

 impl From<reqwest_middleware::Error> for WrappedReqwestError {
     fn from(error: reqwest_middleware::Error) -> Self {
-        Self {
-            error,
-            problem_details: None,
-        }
+        Self(error)
     }
 }

@@ -514,7 +421,7 @@ impl Deref for WrappedReqwestError {
     type Target = reqwest_middleware::Error;

     fn deref(&self) -> &Self::Target {
-        &self.error
+        &self.0
     }
 }

@@ -523,15 +430,9 @@ impl Display for WrappedReqwestError {
         if self.is_likely_offline() {
             // Insert an extra hint, we'll show the wrapped error through `source`
             f.write_str("Could not connect, are you offline?")
-        } else if let Some(problem_details) = &self.problem_details {
-            // Show problem details if available
-            match problem_details.description() {
-                None => Display::fmt(&self.error, f),
-                Some(message) => f.write_str(&message),
-            }
         } else {
             // Show the wrapped error
-            Display::fmt(&self.error, f)
+            Display::fmt(&self.0, f)
         }
     }
 }

@@ -540,117 +441,10 @@ impl std::error::Error for WrappedReqwestError {
     fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
         if self.is_likely_offline() {
             // `Display` is inserting an extra message, so we need to show the wrapped error
-            Some(&self.error)
-        } else if self.problem_details.is_some() {
-            // `Display` is showing problem details, so show the wrapped error as source
-            Some(&self.error)
+            Some(&self.0)
         } else {
             // `Display` is showing the wrapped error, continue with its source
-            self.error.source()
+            self.0.source()
         }
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn test_problem_details_parsing() {
-        let json = r#"{
-            "type": "https://example.com/probs/out-of-credit",
-            "title": "You do not have enough credit.",
-            "detail": "Your current balance is 30, but that costs 50.",
-            "status": 403,
-            "instance": "/account/12345/msgs/abc"
-        }"#;
-
-        let problem_details: ProblemDetails = serde_json::from_slice(json.as_bytes()).unwrap();
-        assert_eq!(
-            problem_details.problem_type,
-            "https://example.com/probs/out-of-credit"
-        );
-        assert_eq!(
-            problem_details.title,
-            Some("You do not have enough credit.".to_string())
-        );
-        assert_eq!(
-            problem_details.detail,
-            Some("Your current balance is 30, but that costs 50.".to_string())
-        );
-        assert_eq!(problem_details.status, Some(403));
-        assert_eq!(
-            problem_details.instance,
-            Some("/account/12345/msgs/abc".to_string())
-        );
-    }
-
-    #[test]
-    fn test_problem_details_default_type() {
-        let json = r#"{
-            "detail": "Something went wrong",
-            "status": 500
-        }"#;
-
-        let problem_details: ProblemDetails = serde_json::from_slice(json.as_bytes()).unwrap();
-        assert_eq!(problem_details.problem_type, "about:blank");
-        assert_eq!(
-            problem_details.detail,
-            Some("Something went wrong".to_string())
-        );
-        assert_eq!(problem_details.status, Some(500));
-    }
-
-    #[test]
-    fn test_problem_details_description() {
-        let json = r#"{
-            "detail": "Detailed error message",
-            "title": "Error Title",
-            "status": 400
-        }"#;
-
-        let problem_details: ProblemDetails = serde_json::from_slice(json.as_bytes()).unwrap();
-        assert_eq!(
-            problem_details.description().unwrap(),
-            "Server message: Error Title, Detailed error message"
-        );
-
-        let json_no_detail = r#"{
-            "title": "Error Title",
-            "status": 400
-        }"#;
-
-        let problem_details: ProblemDetails =
-            serde_json::from_slice(json_no_detail.as_bytes()).unwrap();
-        assert_eq!(
-            problem_details.description().unwrap(),
-            "Server message: Error Title"
-        );
-
-        let json_minimal = r#"{
-            "status": 400
-        }"#;
-
-        let problem_details: ProblemDetails =
-            serde_json::from_slice(json_minimal.as_bytes()).unwrap();
-        assert_eq!(problem_details.description().unwrap(), "HTTP error 400");
-    }
-
-    #[test]
-    fn test_problem_details_with_extensions() {
-        let json = r#"{
-            "type": "https://example.com/probs/out-of-credit",
-            "title": "You do not have enough credit.",
-            "detail": "Your current balance is 30, but that costs 50.",
-            "status": 403,
-            "balance": 30,
-            "accounts": ["/account/12345", "/account/67890"]
-        }"#;
-
-        let problem_details: ProblemDetails = serde_json::from_slice(json.as_bytes()).unwrap();
-        assert_eq!(
-            problem_details.title,
-            Some("You do not have enough credit.".to_string())
-        );
-    }
-}
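Before the flat-index hunks: the `ProblemDetails` struct removed above is a direct serde mapping of RFC 9457, with `type` defaulting to `about:blank` as the RFC prescribes. A self-contained sketch of the same deserialization, trimmed to a subset of the fields (`serde` and `serde_json` assumed as dependencies; the struct name here is ours):

```rust
use serde::Deserialize;

/// Trimmed stand-in for the removed `ProblemDetails`; `instance` is omitted.
#[derive(Debug, Deserialize)]
struct Problem {
    #[serde(rename = "type", default = "default_type")]
    problem_type: String,
    title: Option<String>,
    detail: Option<String>,
    status: Option<u16>,
}

/// RFC 9457 treats an absent `type` member as equivalent to `about:blank`.
fn default_type() -> String {
    "about:blank".to_string()
}

fn main() {
    let body = r#"{"title": "You do not have enough credit.", "status": 403}"#;
    let problem: Problem = serde_json::from_str(body).unwrap();
    assert_eq!(problem.problem_type, "about:blank");
    assert_eq!(problem.status, Some(403));
}
```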
@@ -14,7 +14,7 @@ use uv_redacted::DisplaySafeUrl;
 use uv_small_str::SmallString;

 use crate::cached_client::{CacheControl, CachedClientError};
-use crate::html::SimpleDetailHTML;
+use crate::html::SimpleHtml;
 use crate::{CachedClient, Connectivity, Error, ErrorKind, OwnedArchive};

 #[derive(Debug, thiserror::Error)]
@@ -189,13 +189,13 @@ impl<'a> FlatIndexClient<'a> {
         async {
             // Use the response URL, rather than the request URL, as the base for relative URLs.
             // This ensures that we handle redirects and other URL transformations correctly.
-            let url = DisplaySafeUrl::from_url(response.url().clone());
+            let url = DisplaySafeUrl::from(response.url().clone());

             let text = response
                 .text()
                 .await
                 .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-            let SimpleDetailHTML { base, files } = SimpleDetailHTML::parse(&text, &url)
+            let SimpleHtml { base, files } = SimpleHtml::parse(&text, &url)
                 .map_err(|err| Error::from_html_err(err, url.clone()))?;

             // Convert to a reference-counted string.
@@ -321,63 +321,6 @@ impl<'a> FlatIndexClient<'a> {
                 index: flat_index.clone(),
             });
         }

-        dists.sort_by(|a, b| {
-            a.filename
-                .cmp(&b.filename)
-                .then_with(|| a.index.cmp(&b.index))
-        });
-
         Ok(FlatIndexEntries::from_entries(dists))
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use fs_err::File;
-    use std::io::Write;
-    use tempfile::tempdir;
-
-    #[test]
-    fn read_from_directory_sorts_distributions() {
-        let dir = tempdir().unwrap();
-
-        let filenames = [
-            "beta-2.0.0-py3-none-any.whl",
-            "alpha-1.0.0.tar.gz",
-            "alpha-1.0.0-py3-none-any.whl",
-        ];
-
-        for name in &filenames {
-            let mut file = File::create(dir.path().join(name)).unwrap();
-            file.write_all(b"").unwrap();
-        }
-
-        let entries = FlatIndexClient::read_from_directory(
-            dir.path(),
-            &IndexUrl::parse(&dir.path().to_string_lossy(), None).unwrap(),
-        )
-        .unwrap();
-
-        let actual = entries
-            .entries
-            .iter()
-            .map(|entry| entry.filename.to_string())
-            .collect::<Vec<_>>();
-
-        let mut expected = filenames
-            .iter()
-            .map(|name| DistFilename::try_from_normalized_filename(name).unwrap())
-            .collect::<Vec<_>>();
-
-        expected.sort();
-
-        let expected = expected
-            .into_iter()
-            .map(|filename| filename.to_string())
-            .collect::<Vec<_>>();
-
-        assert_eq!(actual, expected);
-    }
-}
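The `sort_by` block removed above is what makes the flat-index entries deterministic on main: compare by filename first, then break ties by index, which is exactly what the deleted `read_from_directory_sorts_distributions` test asserts. A minimal sketch with the entry type reduced to string pairs (the real fields are a `DistFilename` and an `IndexUrl`):

```rust
/// Entries reduced to (filename, index) pairs for illustration.
fn sort_entries(dists: &mut [(String, String)]) {
    // Filename is the primary key; the index URL only breaks ties.
    dists.sort_by(|a, b| a.0.cmp(&b.0).then_with(|| a.1.cmp(&b.1)));
}

fn main() {
    let mut dists = vec![
        ("beta-2.0.0-py3-none-any.whl".to_string(), "a".to_string()),
        ("alpha-1.0.0.tar.gz".to_string(), "a".to_string()),
    ];
    sort_entries(&mut dists);
    assert_eq!(dists[0].0, "alpha-1.0.0.tar.gz");
}
```

The HTML-parsing hunks follow.

@@ -3,32 +3,32 @@ use std::str::FromStr;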
 use jiff::Timestamp;
 use tl::HTMLTag;
 use tracing::{debug, instrument, warn};
+use url::Url;

-use uv_normalize::PackageName;
 use uv_pep440::VersionSpecifiers;
 use uv_pypi_types::{BaseUrl, CoreMetadata, Hashes, PypiFile, Yanked};
 use uv_pypi_types::{HashError, LenientVersionSpecifiers};
-use uv_redacted::{DisplaySafeUrl, DisplaySafeUrlError};
+use uv_redacted::DisplaySafeUrl;

 /// A parsed structure from PyPI "HTML" index format for a single package.
 #[derive(Debug, Clone)]
-pub(crate) struct SimpleDetailHTML {
+pub(crate) struct SimpleHtml {
     /// The [`BaseUrl`] to which all relative URLs should be resolved.
     pub(crate) base: BaseUrl,
     /// The list of [`PypiFile`]s available for download sorted by filename.
     pub(crate) files: Vec<PypiFile>,
 }

-impl SimpleDetailHTML {
+impl SimpleHtml {
     /// Parse the list of [`PypiFile`]s from the simple HTML page returned by the given URL.
     #[instrument(skip_all, fields(url = % url))]
-    pub(crate) fn parse(text: &str, url: &DisplaySafeUrl) -> Result<Self, Error> {
+    pub(crate) fn parse(text: &str, url: &Url) -> Result<Self, Error> {
         let dom = tl::parse(text, tl::ParserOptions::default())?;

         // Parse the first `<base>` tag, if any, to determine the base URL to which all
         // relative URLs should be resolved. The HTML spec requires that the `<base>` tag
         // appear before other tags with attribute values of URLs.
-        let base = BaseUrl::from(
+        let base = BaseUrl::from(DisplaySafeUrl::from(
             dom.nodes()
                 .iter()
                 .filter_map(|node| node.as_tag())
@@ -38,7 +38,7 @@ impl SimpleDetailHTML {
                 .transpose()?
                 .flatten()
                 .unwrap_or_else(|| url.clone()),
-        );
+        ));

         // Parse each `<a>` tag, to extract the filename, hash, and URL.
         let mut files: Vec<PypiFile> = dom
@@ -67,19 +67,18 @@ impl SimpleDetailHTML {
     }

     /// Parse the `href` from a `<base>` tag.
-    fn parse_base(base: &HTMLTag) -> Result<Option<DisplaySafeUrl>, Error> {
+    fn parse_base(base: &HTMLTag) -> Result<Option<Url>, Error> {
         let Some(Some(href)) = base.attributes().get("href") else {
             return Ok(None);
         };
         let href = std::str::from_utf8(href.as_bytes())?;
-        let url =
-            DisplaySafeUrl::parse(href).map_err(|err| Error::UrlParse(href.to_string(), err))?;
+        let url = Url::parse(href).map_err(|err| Error::UrlParse(href.to_string(), err))?;
         Ok(Some(url))
     }

     /// Parse a [`PypiFile`] from an `<a>` tag.
     ///
-    /// Returns `None` if the `<a>` doesn't have an `href` attribute.
+    /// Returns `None` if the `<a>` don't doesn't have an `href` attribute.
     fn parse_anchor(link: &HTMLTag) -> Result<Option<PypiFile>, Error> {
         // Extract the href.
         let Some(href) = link
@@ -226,56 +225,6 @@ impl SimpleDetailHTML {
     }
 }

-/// A parsed structure from PyPI "HTML" index format listing all available packages.
-#[derive(Debug, Clone)]
-pub(crate) struct SimpleIndexHtml {
-    /// The list of project names available in the index.
-    pub(crate) projects: Vec<PackageName>,
-}
-
-impl SimpleIndexHtml {
-    /// Parse the list of project names from the Simple API index HTML page.
-    pub(crate) fn parse(text: &str) -> Result<Self, Error> {
-        let dom = tl::parse(text, tl::ParserOptions::default())?;
-
-        // Parse each `<a>` tag to extract the project name.
-        let parser = dom.parser();
-        let mut projects = dom
-            .nodes()
-            .iter()
-            .filter_map(|node| node.as_tag())
-            .filter(|link| link.name().as_bytes() == b"a")
-            .filter_map(|link| Self::parse_anchor_project_name(link, parser))
-            .collect::<Vec<_>>();
-
-        // Sort for deterministic ordering.
-        projects.sort_unstable();
-
-        Ok(Self { projects })
-    }
-
-    /// Parse a project name from an `<a>` tag.
-    ///
-    /// Returns `None` if the `<a>` doesn't have an `href` attribute or text content.
-    fn parse_anchor_project_name(link: &HTMLTag, parser: &tl::Parser) -> Option<PackageName> {
-        // Extract the href.
-        link.attributes()
-            .get("href")
-            .flatten()
-            .filter(|bytes| !bytes.as_bytes().is_empty())?;
-
-        // Extract the text content, which should be the project name.
-        let inner_text = link.inner_text(parser);
-        let project_name = inner_text.trim();
-
-        if project_name.is_empty() {
-            return None;
-        }
-
-        PackageName::from_str(project_name).ok()
-    }
-}
-
 #[derive(Debug, thiserror::Error)]
 pub enum Error {
     #[error(transparent)]
@@ -285,7 +234,7 @@ pub enum Error {
     FromUtf8(#[from] std::string::FromUtf8Error),

     #[error("Failed to parse URL: {0}")]
-    UrlParse(String, #[source] DisplaySafeUrlError),
+    UrlParse(String, #[source] url::ParseError),

     #[error(transparent)]
     HtmlParse(#[from] tl::ParseError),
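Everything below is snapshot-test churn from the `SimpleDetailHTML`/`SimpleHtml` rename and the `DisplaySafeUrl`/`Url` swap. One behavior the parser above relies on is worth restating: relative `href`s resolve against the page URL unless an earlier `<base>` tag overrides the resolution root. Sketched with the `url` crate; the URLs are illustrative:

```rust
use url::Url;

fn main() {
    // Without a `<base>` tag, relative hrefs resolve against the page URL itself.
    let page = Url::parse("https://example.org/simple/jinja2/").unwrap();
    let file = page.join("Jinja2-3.1.2-py3-none-any.whl").unwrap();
    assert_eq!(
        file.as_str(),
        "https://example.org/simple/jinja2/Jinja2-3.1.2-py3-none-any.whl"
    );

    // A `<base href="../files/">` shifts the resolution root for every link.
    let base = page.join("../files/").unwrap();
    let file = base.join("Jinja2-3.1.2-py3-none-any.whl").unwrap();
    assert_eq!(
        file.as_str(),
        "https://example.org/simple/files/Jinja2-3.1.2-py3-none-any.whl"
    );
}
```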
@@ -325,10 +274,10 @@ mod tests {
         </html>
         <!--TIMESTAMP 1703347410-->
         "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -382,10 +331,10 @@ mod tests {
         </html>
         <!--TIMESTAMP 1703347410-->
         "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -442,10 +391,10 @@ mod tests {
         </html>
         <!--TIMESTAMP 1703347410-->
         "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -499,10 +448,10 @@ mod tests {
         </html>
         <!--TIMESTAMP 1703347410-->
         "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -556,10 +505,10 @@ mod tests {
         </html>
         <!--TIMESTAMP 1703347410-->
         "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -613,10 +562,10 @@ mod tests {
         </html>
         <!--TIMESTAMP 1703347410-->
         "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -668,10 +617,10 @@ mod tests {
         </html>
         <!--TIMESTAMP 1703347410-->
         "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -723,10 +672,10 @@ mod tests {
         </html>
         <!--TIMESTAMP 1703347410-->
         ";
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -761,10 +710,10 @@ mod tests {
         </html>
         <!--TIMESTAMP 1703347410-->
         "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -799,10 +748,10 @@ mod tests {
         </html>
         <!--TIMESTAMP 1703347410-->
         "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -854,10 +803,10 @@ mod tests {
         </html>
         <!--TIMESTAMP 1703347410-->
         "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -909,11 +858,11 @@ mod tests {
         </html>
         <!--TIMESTAMP 1703347410-->
         "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base);
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base);
         insta::assert_debug_snapshot!(result, @r#"
         Ok(
-            SimpleDetailHTML {
+            SimpleHtml {
                 base: BaseUrl(
                     DisplaySafeUrl {
                         scheme: "https",
@@ -966,11 +915,11 @@ mod tests {
         </html>
         <!--TIMESTAMP 1703347410-->
         "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base);
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base);
         insta::assert_debug_snapshot!(result, @r#"
         Ok(
-            SimpleDetailHTML {
+            SimpleHtml {
                 base: BaseUrl(
                     DisplaySafeUrl {
                         scheme: "https",
@@ -1023,8 +972,8 @@ mod tests {
         </html>
         <!--TIMESTAMP 1703347410-->
         "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap_err();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap_err();
         insta::assert_snapshot!(result, @"Unsupported hash algorithm (expected one of: `md5`, `sha256`, `sha384`, `sha512`, or `blake2b`) on: `blake2=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61`");
     }

@@ -1040,13 +989,11 @@ mod tests {
         </body>
         </html>
         "#;
-        let base = DisplaySafeUrl::parse(
-            "https://storage.googleapis.com/jax-releases/jax_cuda_releases.html",
-        )
-        .unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://storage.googleapis.com/jax-releases/jax_cuda_releases.html")
+            .unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -1124,11 +1071,11 @@ mod tests {
         </body>
         </html>
         "#;
-        let base = DisplaySafeUrl::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask/")
+        let base = Url::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask/")
             .unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -1228,10 +1175,10 @@ mod tests {
         </body>
         </html>
         "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -1300,11 +1247,11 @@ mod tests {
         </body>
         </html>
         "#;
-        let base = DisplaySafeUrl::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask/")
+        let base = Url::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask/")
             .unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -1427,180 +1374,4 @@ mod tests {
             }
         "#);
     }
-
-    /// Test parsing Simple API index (root) HTML.
-    #[test]
-    fn parse_simple_index() {
-        let text = r#"
-        <!DOCTYPE html>
-        <html>
-          <head>
-            <title>Simple Index</title>
-          </head>
-          <body>
-            <h1>Simple Index</h1>
-            <a href="/simple/flask/">flask</a><br/>
-            <a href="/simple/jinja2/">jinja2</a><br/>
-            <a href="/simple/requests/">requests</a><br/>
-          </body>
-        </html>
-        "#;
-        let result = SimpleIndexHtml::parse(text).unwrap();
-        insta::assert_debug_snapshot!(result, @r#"
-        SimpleIndexHtml {
-            projects: [
-                PackageName(
-                    "flask",
-                ),
-                PackageName(
-                    "jinja2",
-                ),
-                PackageName(
-                    "requests",
-                ),
-            ],
-        }
-        "#);
-    }
-
-    /// Test that project names are sorted.
-    #[test]
-    fn parse_simple_index_sorted() {
-        let text = r#"
-        <!DOCTYPE html>
-        <html>
-          <body>
-            <a href="/simple/zebra/">zebra</a><br/>
-            <a href="/simple/apple/">apple</a><br/>
-            <a href="/simple/monkey/">monkey</a><br/>
-          </body>
-        </html>
-        "#;
-        let result = SimpleIndexHtml::parse(text).unwrap();
-        insta::assert_debug_snapshot!(result, @r#"
-        SimpleIndexHtml {
-            projects: [
-                PackageName(
-                    "apple",
-                ),
-                PackageName(
-                    "monkey",
-                ),
-                PackageName(
-                    "zebra",
-                ),
-            ],
-        }
-        "#);
-    }
-
-    /// Test that links without `href` attributes are ignored.
-    #[test]
-    fn parse_simple_index_missing_href() {
-        let text = r#"
-        <!DOCTYPE html>
-        <html>
-          <body>
-            <h1>Simple Index</h1>
-            <a href="/simple/flask/">flask</a><br/>
-            <a>no-href-project</a><br/>
-            <a href="/simple/requests/">requests</a><br/>
-          </body>
-        </html>
-        "#;
-        let result = SimpleIndexHtml::parse(text).unwrap();
-        insta::assert_debug_snapshot!(result, @r#"
-        SimpleIndexHtml {
-            projects: [
-                PackageName(
-                    "flask",
-                ),
-                PackageName(
-                    "requests",
-                ),
-            ],
-        }
-        "#);
-    }
-
-    /// Test that links with empty `href` attributes are ignored.
-    #[test]
-    fn parse_simple_index_empty_href() {
-        let text = r#"
-        <!DOCTYPE html>
-        <html>
-          <body>
-            <a href="">empty-href</a><br/>
-            <a href="/simple/flask/">flask</a><br/>
-          </body>
-        </html>
-        "#;
-        let result = SimpleIndexHtml::parse(text).unwrap();
-        insta::assert_debug_snapshot!(result, @r#"
-        SimpleIndexHtml {
-            projects: [
-                PackageName(
-                    "flask",
-                ),
-            ],
-        }
-        "#);
-    }
-
-    /// Test that links with empty text content are ignored.
-    #[test]
-    fn parse_simple_index_empty_text() {
-        let text = r#"
-        <!DOCTYPE html>
-        <html>
-          <body>
-            <a href="/simple/empty/"></a><br/>
-            <a href="/simple/flask/">flask</a><br/>
-            <a href="/simple/whitespace/"> </a><br/>
-          </body>
-        </html>
-        "#;
-        let result = SimpleIndexHtml::parse(text).unwrap();
-        insta::assert_debug_snapshot!(result, @r#"
-        SimpleIndexHtml {
-            projects: [
-                PackageName(
-                    "flask",
-                ),
-            ],
-        }
-        "#);
-    }
-
-    /// Test parsing with case variations and normalization.
-    #[test]
-    fn parse_simple_index_case_variations() {
-        let text = r#"
-        <!DOCTYPE html>
-        <html>
-          <body>
-            <a href="/simple/Flask/">Flask</a><br/>
-            <a href="/simple/django/">django</a><br/>
-            <a href="/simple/PyYAML/">PyYAML</a><br/>
-          </body>
-        </html>
-        "#;
-        let result = SimpleIndexHtml::parse(text).unwrap();
-        // Note: We preserve the case as returned by the server
insta::assert_debug_snapshot!(result, @r#"
|
|
||||||
SimpleIndexHtml {
|
|
||||||
projects: [
|
|
||||||
PackageName(
|
|
||||||
"django",
|
|
||||||
),
|
|
||||||
PackageName(
|
|
||||||
"flask",
|
|
||||||
),
|
|
||||||
PackageName(
|
|
||||||
"pyyaml",
|
|
||||||
),
|
|
||||||
],
|
|
||||||
}
|
|
||||||
"#);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
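The removed tests above pin down the contract of the main-branch index parser: anchors without an `href` or without text are skipped, and the surviving names come back normalized and sorted (the `parse_simple_index_case_variations` snapshot shows lowercased output despite the inline comment). A minimal within-crate sketch of that contract, assuming the `SimpleIndexHtml` API shown in the removed code; it is not part of the published 0.8.17 surface:

#[test]
fn parse_simple_index_contract() {
    // Mixed case, a missing href, and unsorted input order...
    let text = r#"
<html>
  <body>
    <a href="/simple/PyYAML/">PyYAML</a><br/>
    <a>no-href-project</a><br/>
    <a href="/simple/Flask/">Flask</a><br/>
  </body>
</html>
    "#;
    let index = SimpleIndexHtml::parse(text).unwrap();
    // ...come back lowercased and sorted, with the bad anchor dropped.
    let names: Vec<String> = index.projects.iter().map(ToString::to_string).collect();
    assert_eq!(names, ["flask", "pyyaml"]);
}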
@@ -1,15 +1,15 @@
 pub use base_client::{
-    AuthIntegration, BaseClient, BaseClientBuilder, DEFAULT_MAX_REDIRECTS, DEFAULT_RETRIES,
-    ExtraMiddleware, RedirectClientWithMiddleware, RedirectPolicy, RequestBuilder,
-    RetryParsingError, UvRetryableStrategy, is_transient_network_error,
+    AuthIntegration, BaseClient, BaseClientBuilder, DEFAULT_RETRIES, ExtraMiddleware,
+    RedirectClientWithMiddleware, RequestBuilder, RetryParsingError, UvRetryableStrategy,
+    is_transient_network_error, retries_from_env,
 };
 pub use cached_client::{CacheControl, CachedClient, CachedClientError, DataWithCachePolicy};
 pub use error::{Error, ErrorKind, WrappedReqwestError};
 pub use flat_index::{FlatIndexClient, FlatIndexEntries, FlatIndexEntry, FlatIndexError};
 pub use linehaul::LineHaul;
 pub use registry_client::{
-    Connectivity, MetadataFormat, RegistryClient, RegistryClientBuilder, SimpleDetailMetadata,
-    SimpleDetailMetadatum, SimpleIndexMetadata, VersionFiles,
+    Connectivity, MetadataFormat, RegistryClient, RegistryClientBuilder, SimpleMetadata,
+    SimpleMetadatum, VersionFiles,
 };
 pub use rkyvutil::{Deserializer, OwnedArchive, Serializer, Validator};
@@ -5,14 +5,12 @@ use tracing::instrument;
 
 use uv_pep508::MarkerEnvironment;
 use uv_platform_tags::{Os, Platform};
-use uv_static::EnvVars;
 use uv_version::version;
 
 #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
 pub struct Installer {
     pub name: Option<String>,
     pub version: Option<String>,
-    pub subcommand: Option<Vec<String>>,
 }
 
 #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]

@@ -64,20 +62,11 @@ pub struct LineHaul {
 impl LineHaul {
     /// Initializes Linehaul information based on PEP 508 markers.
     #[instrument(name = "linehaul", skip_all)]
-    pub fn new(
-        markers: Option<&MarkerEnvironment>,
-        platform: Option<&Platform>,
-        subcommand: Option<Vec<String>>,
-    ) -> Self {
+    pub fn new(markers: &MarkerEnvironment, platform: Option<&Platform>) -> Self {
         // https://github.com/pypa/pip/blob/24.0/src/pip/_internal/network/session.py#L87
-        let looks_like_ci = [
-            EnvVars::BUILD_BUILDID,
-            EnvVars::BUILD_ID,
-            EnvVars::CI,
-            EnvVars::PIP_IS_CI,
-        ]
-        .iter()
-        .find_map(|&var_name| env::var(var_name).ok().map(|_| true));
+        let looks_like_ci = ["BUILD_BUILDID", "BUILD_ID", "CI", "PIP_IS_CI"]
+            .iter()
+            .find_map(|&var_name| env::var(var_name).ok().map(|_| true));
 
         let libc = match platform.map(Platform::os) {
             Some(Os::Manylinux { major, minor }) => Some(Libc {

@@ -128,19 +117,18 @@ impl LineHaul {
             installer: Option::from(Installer {
                 name: Some("uv".to_string()),
                 version: Some(version().to_string()),
-                subcommand,
             }),
-            python: markers.map(|markers| markers.python_full_version().version.to_string()),
+            python: Some(markers.python_full_version().version.to_string()),
             implementation: Option::from(Implementation {
-                name: markers.map(|markers| markers.platform_python_implementation().to_string()),
-                version: markers.map(|markers| markers.python_full_version().version.to_string()),
+                name: Some(markers.platform_python_implementation().to_string()),
+                version: Some(markers.python_full_version().version.to_string()),
             }),
             distro,
             system: Option::from(System {
-                name: markers.map(|markers| markers.platform_system().to_string()),
-                release: markers.map(|markers| markers.platform_release().to_string()),
+                name: Some(markers.platform_system().to_string()),
+                release: Some(markers.platform_release().to_string()),
             }),
-            cpu: markers.map(|markers| markers.platform_machine().to_string()),
+            cpu: Some(markers.platform_machine().to_string()),
             // Should probably always be None in uv.
             openssl_version: None,
             // Should probably always be None in uv.
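Both sides of this hunk detect CI the same way; main merely routes the variable names through `EnvVars` constants where 0.8.17 inlines the strings. The probe itself is plain std and runs as-is; a self-contained sketch of the inlined form:

use std::env;

/// Returns Some(true) if any of the conventional CI environment variables is
/// set, mirroring the `looks_like_ci` probe in the hunk above.
fn looks_like_ci() -> Option<bool> {
    ["BUILD_BUILDID", "BUILD_ID", "CI", "PIP_IS_CI"]
        .iter()
        .find_map(|&var_name| env::var(var_name).ok().map(|_| true))
}

fn main() {
    println!("CI detected: {:?}", looks_like_ci());
}

Note that the probe yields `Some(true)` or `None`, never `Some(false)`: `find_map` stops at the first variable that is set and otherwise produces nothing.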
@@ -43,7 +43,7 @@ impl Middleware for OfflineMiddleware {
     ) -> reqwest_middleware::Result<Response> {
         Err(reqwest_middleware::Error::Middleware(
             OfflineError {
-                url: DisplaySafeUrl::from_url(req.url().clone()),
+                url: DisplaySafeUrl::from(req.url().clone()),
             }
             .into(),
         ))
@@ -15,7 +15,7 @@ use tokio::sync::{Mutex, Semaphore};
 use tracing::{Instrument, debug, info_span, instrument, trace, warn};
 use url::Url;
 
-use uv_auth::{CredentialsCache, Indexes, PyxTokenStore};
+use uv_auth::{Indexes, PyxTokenStore};
 use uv_cache::{Cache, CacheBucket, CacheEntry, WheelCache};
 use uv_configuration::IndexStrategy;
 use uv_configuration::KeyringProviderType;

@@ -29,9 +29,7 @@ use uv_normalize::PackageName;
 use uv_pep440::Version;
 use uv_pep508::MarkerEnvironment;
 use uv_platform_tags::Platform;
-use uv_pypi_types::{
-    PypiSimpleDetail, PypiSimpleIndex, PyxSimpleDetail, PyxSimpleIndex, ResolutionMetadata,
-};
+use uv_pypi_types::{PypiSimpleDetail, PyxSimpleDetail, ResolutionMetadata};
 use uv_redacted::DisplaySafeUrl;
 use uv_small_str::SmallString;
 use uv_torch::TorchStrategy;

@@ -39,7 +37,7 @@ use uv_torch::TorchStrategy;
 use crate::base_client::{BaseClientBuilder, ExtraMiddleware, RedirectPolicy};
 use crate::cached_client::CacheControl;
 use crate::flat_index::FlatIndexEntry;
-use crate::html::SimpleDetailHTML;
+use crate::html::SimpleHtml;
 use crate::remote_metadata::wheel_metadata_from_remote_zip;
 use crate::rkyvutil::OwnedArchive;
 use crate::{

@@ -148,30 +146,8 @@ impl<'a> RegistryClientBuilder<'a> {
         self
     }
 
-    /// Add all authenticated sources to the cache.
-    pub fn cache_index_credentials(&mut self) {
-        for index in self.index_locations.known_indexes() {
-            if let Some(credentials) = index.credentials() {
-                trace!(
-                    "Read credentials for index {}",
-                    index
-                        .name
-                        .as_ref()
-                        .map(ToString::to_string)
-                        .unwrap_or_else(|| index.url.to_string())
-                );
-                if let Some(root_url) = index.root_url() {
-                    self.base_client_builder
-                        .store_credentials(&root_url, credentials.clone());
-                }
-                self.base_client_builder
-                    .store_credentials(index.raw_url(), credentials);
-            }
-        }
-    }
-
-    pub fn build(mut self) -> RegistryClient {
-        self.cache_index_credentials();
+    pub fn build(self) -> RegistryClient {
+        self.index_locations.cache_index_credentials();
         let index_urls = self.index_locations.index_urls();
 
         // Build a base client

@@ -202,8 +178,8 @@ impl<'a> RegistryClientBuilder<'a> {
     }
 
     /// Share the underlying client between two different middleware configurations.
-    pub fn wrap_existing(mut self, existing: &BaseClient) -> RegistryClient {
-        self.cache_index_credentials();
+    pub fn wrap_existing(self, existing: &BaseClient) -> RegistryClient {
+        self.index_locations.cache_index_credentials();
         let index_urls = self.index_locations.index_urls();
 
         // Wrap in any relevant middleware and handle connectivity.
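The builder change above hoists credential seeding out of `IndexLocations` into a named `cache_index_credentials` step on the builder itself, shared by `build` and `wrap_existing`, which also logs each index it reads. A runnable reduction of that shape with stand-in types (`Index`, `Credentials`, and the store are placeholders, not uv's real definitions):

use std::collections::HashMap;

#[derive(Clone)]
struct Credentials(#[allow(dead_code)] String);

struct Index {
    url: String,
    credentials: Option<Credentials>,
}

#[derive(Default)]
struct Builder {
    indexes: Vec<Index>,
    store: HashMap<String, Credentials>,
}

impl Builder {
    /// Seed the credential store from every configured index, exactly once.
    fn cache_index_credentials(&mut self) {
        for index in &self.indexes {
            if let Some(credentials) = &index.credentials {
                self.store.insert(index.url.clone(), credentials.clone());
            }
        }
    }

    /// Both entry points (`build` and `wrap_existing` in the real code) run
    /// the shared seeding step before doing anything else.
    fn build(mut self) -> HashMap<String, Credentials> {
        self.cache_index_credentials();
        self.store
    }
}

fn main() {
    let builder = Builder {
        indexes: vec![Index {
            url: "https://example.invalid/simple/".into(),
            credentials: Some(Credentials("token".into())),
        }],
        ..Builder::default()
    };
    assert_eq!(builder.build().len(), 1);
}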
@@ -260,7 +236,7 @@ pub struct RegistryClient {
 #[derive(Debug)]
 pub enum MetadataFormat {
     /// The metadata adheres to the Simple Repository API format.
-    Simple(OwnedArchive<SimpleDetailMetadata>),
+    Simple(OwnedArchive<SimpleMetadata>),
     /// The metadata consists of a list of distributions from a "flat" index.
     Flat(Vec<FlatIndexEntry>),
 }

@@ -291,10 +267,6 @@ impl RegistryClient {
         self.timeout
     }
 
-    pub fn credentials_cache(&self) -> &CredentialsCache {
-        self.client.uncached().credentials_cache()
-    }
-
     /// Return the appropriate index URLs for the given [`PackageName`].
     fn index_urls_for(
         &self,

@@ -332,7 +304,7 @@ impl RegistryClient {
     /// and [PEP 691 – JSON-based Simple API for Python Package Indexes](https://peps.python.org/pep-0691/),
     /// which the PyPI JSON API implements.
     #[instrument(skip_all, fields(package = % package_name))]
-    pub async fn simple_detail<'index>(
+    pub async fn package_metadata<'index>(
         &'index self,
         package_name: &PackageName,
         index: Option<IndexMetadataRef<'index>>,

@@ -363,7 +335,7 @@
                     let status_code_strategy =
                         self.index_urls.status_code_strategy_for(index.url);
                     match self
-                        .simple_detail_single_index(
+                        .simple_single_index(
                             package_name,
                             index.url,
                             capabilities,

@@ -409,7 +381,7 @@
                 let status_code_strategy =
                     IndexStatusCodeStrategy::ignore_authentication_error_codes();
                 let metadata = match self
-                    .simple_detail_single_index(
+                    .simple_single_index(
                         package_name,
                         index.url,
                         capabilities,

@@ -443,7 +415,7 @@
         if results.is_empty() {
             return match self.connectivity {
                 Connectivity::Online => {
-                    Err(ErrorKind::RemotePackageNotFound(package_name.clone()).into())
+                    Err(ErrorKind::PackageNotFound(package_name.to_string()).into())
                 }
                 Connectivity::Offline => Err(ErrorKind::Offline(package_name.to_string()).into()),
             };

@@ -492,11 +464,11 @@
         Ok(package_entries)
     }
 
-    /// Fetch the [`SimpleDetailMetadata`] from a single index for a given package.
+    /// Fetch the [`SimpleMetadata`] from a single index for a given package.
     ///
     /// The index can either be a PEP 503-compatible remote repository, or a local directory laid
     /// out in the same format.
-    async fn simple_detail_single_index(
+    async fn simple_single_index(
         &self,
         package_name: &PackageName,
         index: &IndexUrl,
@@ -539,13 +511,13 @@
         #[cfg(windows)]
         let _lock = {
             let lock_entry = cache_entry.with_file(format!("{package_name}.lock"));
-            lock_entry.lock().await.map_err(ErrorKind::CacheLock)?
+            lock_entry.lock().await.map_err(ErrorKind::CacheWrite)?
         };
 
         let result = if matches!(index, IndexUrl::Path(_)) {
-            self.fetch_local_simple_detail(package_name, &url).await
+            self.fetch_local_index(package_name, &url).await
         } else {
-            self.fetch_remote_simple_detail(package_name, &url, index, &cache_entry, cache_control)
+            self.fetch_remote_index(package_name, &url, index, &cache_entry, cache_control)
                 .await
         };
 

@@ -574,22 +546,22 @@
                 ErrorKind::Offline(_) => Ok(SimpleMetadataSearchOutcome::NotFound),
 
                 // The package could not be found in the local index.
-                ErrorKind::LocalPackageNotFound(_) => Ok(SimpleMetadataSearchOutcome::NotFound),
+                ErrorKind::FileNotFound(_) => Ok(SimpleMetadataSearchOutcome::NotFound),
 
                 _ => Err(err),
             },
         }
     }
 
-    /// Fetch the [`SimpleDetailMetadata`] from a remote URL, using the PEP 503 Simple Repository API.
-    async fn fetch_remote_simple_detail(
+    /// Fetch the [`SimpleMetadata`] from a remote URL, using the PEP 503 Simple Repository API.
+    async fn fetch_remote_index(
         &self,
         package_name: &PackageName,
         url: &DisplaySafeUrl,
         index: &IndexUrl,
         cache_entry: &CacheEntry,
         cache_control: CacheControl<'_>,
-    ) -> Result<OwnedArchive<SimpleDetailMetadata>, Error> {
+    ) -> Result<OwnedArchive<SimpleMetadata>, Error> {
         // In theory, we should be able to pass `MediaType::all()` to all registries, and as
         // unsupported media types should be ignored by the server. For now, we implement this
         // defensively to avoid issues with misconfigured servers.

@@ -613,7 +585,7 @@
         async {
             // Use the response URL, rather than the request URL, as the base for relative URLs.
             // This ensures that we handle redirects and other URL transformations correctly.
-            let url = DisplaySafeUrl::from_url(response.url().clone());
+            let url = DisplaySafeUrl::from(response.url().clone());
 
             let content_type = response
                 .headers()

@@ -639,7 +611,7 @@
                     let data: PyxSimpleDetail = rmp_serde::from_slice(bytes.as_ref())
                         .map_err(|err| Error::from_msgpack_err(err, url.clone()))?;
 
-                    SimpleDetailMetadata::from_pyx_files(
+                    SimpleMetadata::from_pyx_files(
                         data.files,
                         data.core_metadata,
                         package_name,

@@ -654,7 +626,7 @@
                     let data: PyxSimpleDetail = serde_json::from_slice(bytes.as_ref())
                         .map_err(|err| Error::from_json_err(err, url.clone()))?;
 
-                    SimpleDetailMetadata::from_pyx_files(
+                    SimpleMetadata::from_pyx_files(
                         data.files,
                         data.core_metadata,
                         package_name,

@@ -670,14 +642,14 @@
                     let data: PypiSimpleDetail = serde_json::from_slice(bytes.as_ref())
                         .map_err(|err| Error::from_json_err(err, url.clone()))?;
 
-                    SimpleDetailMetadata::from_pypi_files(data.files, package_name, &url)
+                    SimpleMetadata::from_pypi_files(data.files, package_name, &url)
                 }
                 MediaType::PypiV1Html | MediaType::TextHtml => {
                     let text = response
                         .text()
                         .await
                         .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-                    SimpleDetailMetadata::from_html(&text, package_name, &url)?
+                    SimpleMetadata::from_html(&text, package_name, &url)?
                 }
             };
             OwnedArchive::from_unarchived(&unarchived)

@@ -697,13 +669,13 @@
         Ok(simple)
     }
 
-    /// Fetch the [`SimpleDetailMetadata`] from a local file, using a PEP 503-compatible directory
+    /// Fetch the [`SimpleMetadata`] from a local file, using a PEP 503-compatible directory
     /// structure.
-    async fn fetch_local_simple_detail(
+    async fn fetch_local_index(
        &self,
         package_name: &PackageName,
         url: &DisplaySafeUrl,
-    ) -> Result<OwnedArchive<SimpleDetailMetadata>, Error> {
+    ) -> Result<OwnedArchive<SimpleMetadata>, Error> {
         let path = url
             .to_file_path()
             .map_err(|()| ErrorKind::NonFileUrl(url.clone()))?
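Every fetch path in this file negotiates formats the same way: read the response `Content-Type`, strip any parameters, and match the bare media type against the supported `MediaType`s. The header-splitting step is self-contained and worth seeing in isolation:

/// Reduce a Content-Type header value to its media type, dropping parameters
/// such as `; charset=utf-8`, as the hunks above do before matching.
fn media_type(content_type: &str) -> &str {
    content_type.split(';').next().unwrap_or(content_type)
}

fn main() {
    assert_eq!(
        media_type("application/vnd.pypi.simple.v1+json; charset=utf-8"),
        "application/vnd.pypi.simple.v1+json"
    );
    assert_eq!(media_type("text/html"), "text/html");
}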
@@ -711,185 +683,15 @@
         let text = match fs_err::tokio::read_to_string(&path).await {
             Ok(text) => text,
             Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
-                return Err(Error::from(ErrorKind::LocalPackageNotFound(
-                    package_name.clone(),
+                return Err(Error::from(ErrorKind::FileNotFound(
+                    package_name.to_string(),
                 )));
             }
             Err(err) => {
                 return Err(Error::from(ErrorKind::Io(err)));
             }
         };
-        let metadata = SimpleDetailMetadata::from_html(&text, package_name, url)?;
+        let metadata = SimpleMetadata::from_html(&text, package_name, url)?;
-        OwnedArchive::from_unarchived(&metadata)
-    }
-
-    /// Fetch the list of projects from a Simple API index at a remote URL.
-    ///
-    /// This fetches the root of a Simple API index (e.g., `https://pypi.org/simple/`)
-    /// which returns a list of all available projects.
-    pub async fn fetch_simple_index(
-        &self,
-        index_url: &IndexUrl,
-    ) -> Result<SimpleIndexMetadata, Error> {
-        // Format the URL for PyPI.
-        let mut url = index_url.url().clone();
-        url.path_segments_mut()
-            .map_err(|()| ErrorKind::CannotBeABase(index_url.url().clone()))?
-            .pop_if_empty()
-            // The URL *must* end in a trailing slash for proper relative path behavior
-            // ref https://github.com/servo/rust-url/issues/333
-            .push("");
-
-        if url.scheme() == "file" {
-            let archived = self.fetch_local_simple_index(&url).await?;
-            Ok(OwnedArchive::deserialize(&archived))
-        } else {
-            let archived = self.fetch_remote_simple_index(&url, index_url).await?;
-            Ok(OwnedArchive::deserialize(&archived))
-        }
-    }
-
-    /// Fetch the list of projects from a remote Simple API index.
-    async fn fetch_remote_simple_index(
-        &self,
-        url: &DisplaySafeUrl,
-        index: &IndexUrl,
-    ) -> Result<OwnedArchive<SimpleIndexMetadata>, Error> {
-        // In theory, we should be able to pass `MediaType::all()` to all registries, and as
-        // unsupported media types should be ignored by the server. For now, we implement this
-        // defensively to avoid issues with misconfigured servers.
-        let accept = if self
-            .pyx_token_store
-            .as_ref()
-            .is_some_and(|token_store| token_store.is_known_url(index.url()))
-        {
-            MediaType::all()
-        } else {
-            MediaType::pypi()
-        };
-
-        let cache_entry = self.cache.entry(
-            CacheBucket::Simple,
-            WheelCache::Index(index).root(),
-            "index.html.rkyv",
-        );
-        let cache_control = match self.connectivity {
-            Connectivity::Online => {
-                if let Some(header) = self.index_urls.simple_api_cache_control_for(index) {
-                    CacheControl::Override(header)
-                } else {
-                    CacheControl::from(
-                        self.cache
-                            .freshness(&cache_entry, None, None)
-                            .map_err(ErrorKind::Io)?,
-                    )
-                }
-            }
-            Connectivity::Offline => CacheControl::AllowStale,
-        };
-
-        let parse_simple_response = |response: Response| {
-            async {
-                // Use the response URL, rather than the request URL, as the base for relative URLs.
-                // This ensures that we handle redirects and other URL transformations correctly.
-                let url = DisplaySafeUrl::from_url(response.url().clone());
-
-                let content_type = response
-                    .headers()
-                    .get("content-type")
-                    .ok_or_else(|| Error::from(ErrorKind::MissingContentType(url.clone())))?;
-                let content_type = content_type.to_str().map_err(|err| {
-                    Error::from(ErrorKind::InvalidContentTypeHeader(url.clone(), err))
-                })?;
-                let media_type = content_type.split(';').next().unwrap_or(content_type);
-                let media_type = MediaType::from_str(media_type).ok_or_else(|| {
-                    Error::from(ErrorKind::UnsupportedMediaType(
-                        url.clone(),
-                        media_type.to_string(),
-                    ))
-                })?;
-
-                let metadata = match media_type {
-                    MediaType::PyxV1Msgpack => {
-                        let bytes = response
-                            .bytes()
-                            .await
-                            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-                        let data: PyxSimpleIndex = rmp_serde::from_slice(bytes.as_ref())
-                            .map_err(|err| Error::from_msgpack_err(err, url.clone()))?;
-                        SimpleIndexMetadata::from_pyx_index(data)
-                    }
-                    MediaType::PyxV1Json => {
-                        let bytes = response
-                            .bytes()
-                            .await
-                            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-                        let data: PyxSimpleIndex = serde_json::from_slice(bytes.as_ref())
-                            .map_err(|err| Error::from_json_err(err, url.clone()))?;
-                        SimpleIndexMetadata::from_pyx_index(data)
-                    }
-                    MediaType::PypiV1Json => {
-                        let bytes = response
-                            .bytes()
-                            .await
-                            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-                        let data: PypiSimpleIndex = serde_json::from_slice(bytes.as_ref())
-                            .map_err(|err| Error::from_json_err(err, url.clone()))?;
-                        SimpleIndexMetadata::from_pypi_index(data)
-                    }
-                    MediaType::PypiV1Html | MediaType::TextHtml => {
-                        let text = response
-                            .text()
-                            .await
-                            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-                        SimpleIndexMetadata::from_html(&text, &url)?
-                    }
-                };
-
-                OwnedArchive::from_unarchived(&metadata)
-            }
-        };
-
-        let simple_request = self
-            .uncached_client(url)
-            .get(Url::from(url.clone()))
-            .header("Accept-Encoding", "gzip, deflate, zstd")
-            .header("Accept", accept)
-            .build()
-            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-
-        let index = self
-            .cached_client()
-            .get_cacheable_with_retry(
-                simple_request,
-                &cache_entry,
-                cache_control,
-                parse_simple_response,
-            )
-            .await?;
-
-        Ok(index)
-    }
-
-    /// Fetch the list of projects from a local Simple API index.
-    async fn fetch_local_simple_index(
-        &self,
-        url: &DisplaySafeUrl,
-    ) -> Result<OwnedArchive<SimpleIndexMetadata>, Error> {
-        let path = url
-            .to_file_path()
-            .map_err(|()| ErrorKind::NonFileUrl(url.clone()))?
-            .join("index.html");
-        let text = match fs_err::tokio::read_to_string(&path).await {
-            Ok(text) => text,
-            Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
-                return Err(Error::from(ErrorKind::LocalIndexNotFound(path)));
-            }
-            Err(err) => {
-                return Err(Error::from(ErrorKind::Io(err)));
-            }
-        };
-        let metadata = SimpleIndexMetadata::from_html(&text, url)?;
         OwnedArchive::from_unarchived(&metadata)
     }
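One detail worth keeping from the removed `fetch_simple_index`: an index root must end in a trailing slash, or relative links in the response resolve against the parent path. A standalone reproduction of that normalization, using the same `url` crate calls as the removed code:

use url::Url;

fn main() {
    let mut url = Url::parse("https://pypi.org/simple").unwrap();
    // Ensure a trailing slash so relative links resolve under the index root;
    // `pop_if_empty` makes the operation idempotent for URLs that already end
    // in a slash (see https://github.com/servo/rust-url/issues/333).
    url.path_segments_mut()
        .expect("cannot-be-a-base URL")
        .pop_if_empty()
        .push("");
    assert_eq!(url.as_str(), "https://pypi.org/simple/");
}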
@@ -1031,7 +833,7 @@ impl RegistryClient {
         #[cfg(windows)]
         let _lock = {
             let lock_entry = cache_entry.with_file(format!("{}.lock", filename.stem()));
-            lock_entry.lock().await.map_err(ErrorKind::CacheLock)?
+            lock_entry.lock().await.map_err(ErrorKind::CacheWrite)?
         };
 
         let response_callback = async |response: Response| {

@@ -1115,7 +917,7 @@ impl RegistryClient {
         #[cfg(windows)]
         let _lock = {
             let lock_entry = cache_entry.with_file(format!("{}.lock", filename.stem()));
-            lock_entry.lock().await.map_err(ErrorKind::CacheLock)?
+            lock_entry.lock().await.map_err(ErrorKind::CacheWrite)?
         };
 
         // Attempt to fetch via a range request.

@@ -1246,7 +1048,7 @@ impl RegistryClient {
 #[derive(Debug)]
 pub(crate) enum SimpleMetadataSearchOutcome {
     /// Simple metadata was found
-    Found(OwnedArchive<SimpleDetailMetadata>),
+    Found(OwnedArchive<SimpleMetadata>),
     /// Simple metadata was not found
     NotFound,
     /// A status code failure was encountered when searching for

@@ -1327,62 +1129,20 @@ pub struct VersionSourceDist {
     pub file: File,
 }
 
-/// The list of projects available in a Simple API index.
 #[derive(Default, Debug, rkyv::Archive, rkyv::Deserialize, rkyv::Serialize)]
 #[rkyv(derive(Debug))]
-pub struct SimpleIndexMetadata {
-    /// The list of project names available in the index.
-    projects: Vec<PackageName>,
-}
-
-impl SimpleIndexMetadata {
-    /// Iterate over the projects in the index.
-    pub fn iter(&self) -> impl Iterator<Item = &PackageName> {
-        self.projects.iter()
-    }
-
-    /// Create a [`SimpleIndexMetadata`] from a [`PypiSimpleIndex`].
-    fn from_pypi_index(index: PypiSimpleIndex) -> Self {
-        Self {
-            projects: index.projects.into_iter().map(|entry| entry.name).collect(),
-        }
-    }
-
-    /// Create a [`SimpleIndexMetadata`] from a [`PyxSimpleIndex`].
-    fn from_pyx_index(index: PyxSimpleIndex) -> Self {
-        Self {
-            projects: index.projects.into_iter().map(|entry| entry.name).collect(),
-        }
-    }
-
-    /// Create a [`SimpleIndexMetadata`] from HTML content.
-    fn from_html(text: &str, url: &DisplaySafeUrl) -> Result<Self, Error> {
-        let html = crate::html::SimpleIndexHtml::parse(text).map_err(|err| {
-            Error::from(ErrorKind::BadHtml {
-                source: err,
-                url: url.clone(),
-            })
-        })?;
-        Ok(Self {
-            projects: html.projects,
-        })
-    }
-}
-
-#[derive(Default, Debug, rkyv::Archive, rkyv::Deserialize, rkyv::Serialize)]
-#[rkyv(derive(Debug))]
-pub struct SimpleDetailMetadata(Vec<SimpleDetailMetadatum>);
+pub struct SimpleMetadata(Vec<SimpleMetadatum>);
 
 #[derive(Debug, rkyv::Archive, rkyv::Deserialize, rkyv::Serialize)]
 #[rkyv(derive(Debug))]
-pub struct SimpleDetailMetadatum {
+pub struct SimpleMetadatum {
     pub version: Version,
     pub files: VersionFiles,
     pub metadata: Option<ResolutionMetadata>,
 }
 
-impl SimpleDetailMetadata {
-    pub fn iter(&self) -> impl DoubleEndedIterator<Item = &SimpleDetailMetadatum> {
+impl SimpleMetadata {
+    pub fn iter(&self) -> impl DoubleEndedIterator<Item = &SimpleMetadatum> {
         self.0.iter()
     }
 
@@ -1426,7 +1186,7 @@ impl SimpleDetailMetadata {
         Self(
             version_map
                 .into_iter()
-                .map(|(version, files)| SimpleDetailMetadatum {
+                .map(|(version, files)| SimpleMetadatum {
                     version,
                     files,
                     metadata: None,

@@ -1485,10 +1245,10 @@ impl SimpleDetailMetadata {
                         version: version.clone(),
                         requires_dist: metadata.requires_dist,
                         requires_python: metadata.requires_python,
-                        provides_extra: metadata.provides_extra,
+                        provides_extras: metadata.provides_extras,
                         dynamic: false,
                     });
-                    SimpleDetailMetadatum {
+                    SimpleMetadatum {
                         version,
                         files,
                         metadata,

@@ -1498,34 +1258,34 @@ impl SimpleDetailMetadata {
         )
     }
 
-    /// Read the [`SimpleDetailMetadata`] from an HTML index.
+    /// Read the [`SimpleMetadata`] from an HTML index.
     fn from_html(
         text: &str,
         package_name: &PackageName,
         url: &DisplaySafeUrl,
     ) -> Result<Self, Error> {
-        let SimpleDetailHTML { base, files } = SimpleDetailHTML::parse(text, url)
-            .map_err(|err| Error::from_html_err(err, url.clone()))?;
+        let SimpleHtml { base, files } =
+            SimpleHtml::parse(text, url).map_err(|err| Error::from_html_err(err, url.clone()))?;
 
         Ok(Self::from_pypi_files(files, package_name, base.as_url()))
     }
 }
 
-impl IntoIterator for SimpleDetailMetadata {
-    type Item = SimpleDetailMetadatum;
-    type IntoIter = std::vec::IntoIter<SimpleDetailMetadatum>;
+impl IntoIterator for SimpleMetadata {
+    type Item = SimpleMetadatum;
+    type IntoIter = std::vec::IntoIter<SimpleMetadatum>;
 
     fn into_iter(self) -> Self::IntoIter {
         self.0.into_iter()
     }
 }
 
-impl ArchivedSimpleDetailMetadata {
-    pub fn iter(&self) -> impl DoubleEndedIterator<Item = &rkyv::Archived<SimpleDetailMetadatum>> {
+impl ArchivedSimpleMetadata {
+    pub fn iter(&self) -> impl DoubleEndedIterator<Item = &rkyv::Archived<SimpleMetadatum>> {
         self.0.iter()
     }
 
-    pub fn datum(&self, i: usize) -> Option<&rkyv::Archived<SimpleDetailMetadatum>> {
+    pub fn datum(&self, i: usize) -> Option<&rkyv::Archived<SimpleMetadatum>> {
         self.0.get(i)
     }
 }
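Rename aside, the container design is identical on both sides: a newtype over a version-sorted `Vec` that offers borrowed iteration plus `IntoIterator` for consumption. A generic, runnable reduction of the pattern, with a stand-in element type:

struct Metadatum {
    version: String,
}

struct Metadata(Vec<Metadatum>);

impl Metadata {
    /// Borrowing iteration, mirroring `SimpleDetailMetadata::iter` above.
    fn iter(&self) -> impl DoubleEndedIterator<Item = &Metadatum> {
        self.0.iter()
    }
}

impl IntoIterator for Metadata {
    type Item = Metadatum;
    type IntoIter = std::vec::IntoIter<Metadatum>;

    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}

fn main() {
    let metadata = Metadata(vec![Metadatum { version: "1.7.8".into() }]);
    let versions: Vec<&str> = metadata.iter().map(|m| m.version.as_str()).collect();
    assert_eq!(versions, ["1.7.8"]);
}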
@@ -1608,9 +1368,7 @@ mod tests {
     use uv_pypi_types::PypiSimpleDetail;
     use uv_redacted::DisplaySafeUrl;
 
-    use crate::{
-        BaseClientBuilder, SimpleDetailMetadata, SimpleDetailMetadatum, html::SimpleDetailHTML,
-    };
+    use crate::{BaseClientBuilder, SimpleMetadata, SimpleMetadatum, html::SimpleHtml};
 
     use crate::RegistryClientBuilder;
     use uv_cache::Cache;

@@ -1828,14 +1586,14 @@ mod tests {
         "#;
         let data: PypiSimpleDetail = serde_json::from_str(response).unwrap();
         let base = DisplaySafeUrl::parse("https://pypi.org/simple/pyflyby/").unwrap();
-        let simple_metadata = SimpleDetailMetadata::from_pypi_files(
+        let simple_metadata = SimpleMetadata::from_pypi_files(
             data.files,
             &PackageName::from_str("pyflyby").unwrap(),
             &base,
         );
         let versions: Vec<String> = simple_metadata
             .iter()
-            .map(|SimpleDetailMetadatum { version, .. }| version.to_string())
+            .map(|SimpleMetadatum { version, .. }| version.to_string())
             .collect();
         assert_eq!(versions, ["1.7.8".to_string()]);
     }

@@ -1866,7 +1624,7 @@ mod tests {
         // Note the lack of a trailing `/` here is important for coverage of url-join behavior
         let base = DisplaySafeUrl::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask")
             .unwrap();
-        let SimpleDetailHTML { base, files } = SimpleDetailHTML::parse(text, &base).unwrap();
+        let SimpleHtml { base, files } = SimpleHtml::parse(text, &base).unwrap();
         let base = SmallString::from(base.as_str());
 
         // Test parsing of the file urls
@ -1,382 +0,0 @@
|
||||||
use std::net::SocketAddr;
|
|
||||||
use std::path::PathBuf;
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
use anyhow::{Context, Result};
|
|
||||||
use futures::future;
|
|
||||||
use http_body_util::combinators::BoxBody;
|
|
||||||
use http_body_util::{BodyExt, Full};
|
|
||||||
use hyper::body::{Bytes, Incoming};
|
|
||||||
use hyper::header::USER_AGENT;
|
|
||||||
use hyper::service::service_fn;
|
|
||||||
use hyper::{Request, Response};
|
|
||||||
use hyper_util::rt::{TokioExecutor, TokioIo};
|
|
||||||
use hyper_util::server::conn::auto::Builder;
|
|
||||||
use rcgen::{
|
|
||||||
BasicConstraints, Certificate, CertificateParams, DnType, ExtendedKeyUsagePurpose, IsCa,
|
|
||||||
Issuer, KeyPair, KeyUsagePurpose, SanType, date_time_ymd,
|
|
||||||
};
|
|
||||||
use rustls::pki_types::{CertificateDer, PrivateKeyDer};
|
|
||||||
use rustls::server::WebPkiClientVerifier;
|
|
||||||
use rustls::{RootCertStore, ServerConfig};
|
|
||||||
use tokio::net::TcpListener;
|
|
||||||
use tokio::task::JoinHandle;
|
|
||||||
use tokio_rustls::TlsAcceptor;
|
|
||||||
|
|
||||||
use uv_fs::Simplified;
|
|
||||||
|
|
||||||
/// An issued certificate, together with the subject keypair.
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub(crate) struct SelfSigned {
|
|
||||||
/// An issued certificate.
|
|
||||||
pub public: Certificate,
|
|
||||||
/// The certificate's subject signing key.
|
|
||||||
pub private: KeyPair,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Defines the base location for temporary generated certs.
|
|
||||||
///
|
|
||||||
/// See [`TestContext::test_bucket_dir`] for implementation rationale.
|
|
||||||
pub(crate) fn test_cert_dir() -> PathBuf {
|
|
||||||
std::env::temp_dir()
|
|
||||||
.simple_canonicalize()
|
|
||||||
.expect("failed to canonicalize temp dir")
|
|
||||||
.join("uv")
|
|
||||||
.join("tests")
|
|
||||||
.join("certs")
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Generates a self-signed server certificate for `uv-test-server`, `localhost` and `127.0.0.1`.
|
|
||||||
/// This certificate is standalone and not issued by a self-signed Root CA.
|
|
||||||
///
|
|
||||||
/// Use sparingly as generation of certs is a slow operation.
|
|
||||||
pub(crate) fn generate_self_signed_certs() -> Result<SelfSigned> {
|
|
||||||
let mut params = CertificateParams::default();
|
|
||||||
params.is_ca = IsCa::NoCa;
|
|
||||||
params.not_before = date_time_ymd(1975, 1, 1);
|
|
||||||
params.not_after = date_time_ymd(4096, 1, 1);
|
|
||||||
params.key_usages.push(KeyUsagePurpose::DigitalSignature);
|
|
||||||
params.key_usages.push(KeyUsagePurpose::KeyEncipherment);
|
|
||||||
params
|
|
||||||
.extended_key_usages
|
|
||||||
.push(ExtendedKeyUsagePurpose::ServerAuth);
|
|
||||||
params
|
|
||||||
.distinguished_name
|
|
||||||
.push(DnType::OrganizationName, "Astral Software Inc.");
|
|
||||||
params
|
|
||||||
.distinguished_name
|
|
||||||
.push(DnType::CommonName, "uv-test-server");
|
|
||||||
params
|
|
||||||
.subject_alt_names
|
|
||||||
.push(SanType::DnsName("uv-test-server".try_into()?));
|
|
||||||
params
|
|
||||||
.subject_alt_names
|
|
||||||
.push(SanType::DnsName("localhost".try_into()?));
|
|
||||||
params
|
|
||||||
.subject_alt_names
|
|
||||||
.push(SanType::IpAddress("127.0.0.1".parse()?));
|
|
||||||
let private = KeyPair::generate()?;
|
|
||||||
let public = params.self_signed(&private)?;
|
|
||||||
|
|
||||||
Ok(SelfSigned { public, private })
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Generates a self-signed root CA, server certificate, and client certificate.
|
|
||||||
/// There are no intermediate certs generated as part of this function.
|
|
||||||
/// The server certificate is for `uv-test-server`, `localhost` and `127.0.0.1` issued by this CA.
|
|
||||||
/// The client certificate is for `uv-test-client` issued by this CA.
|
|
||||||
///
|
|
||||||
/// Use sparingly as generation of these certs is a very slow operation.
|
|
||||||
pub(crate) fn generate_self_signed_certs_with_ca() -> Result<(SelfSigned, SelfSigned, SelfSigned)> {
|
|
||||||
// Generate the CA
|
|
||||||
let mut ca_params = CertificateParams::default();
|
|
||||||
ca_params.is_ca = IsCa::Ca(BasicConstraints::Unconstrained); // root cert
|
|
||||||
ca_params.not_before = date_time_ymd(1975, 1, 1);
|
|
||||||
ca_params.not_after = date_time_ymd(4096, 1, 1);
|
|
||||||
ca_params.key_usages.push(KeyUsagePurpose::DigitalSignature);
|
|
||||||
ca_params.key_usages.push(KeyUsagePurpose::KeyCertSign);
|
|
||||||
ca_params.key_usages.push(KeyUsagePurpose::CrlSign);
|
|
||||||
ca_params
|
|
||||||
.distinguished_name
|
|
||||||
.push(DnType::OrganizationName, "Astral Software Inc.");
|
|
||||||
ca_params
|
|
||||||
.distinguished_name
|
|
||||||
.push(DnType::CommonName, "uv-test-ca");
|
|
||||||
ca_params
|
|
||||||
.subject_alt_names
|
|
||||||
.push(SanType::DnsName("uv-test-ca".try_into()?));
|
|
||||||
let ca_private_key = KeyPair::generate()?;
|
|
||||||
let ca_public_cert = ca_params.self_signed(&ca_private_key)?;
|
|
||||||
let ca_cert_issuer = Issuer::new(ca_params, &ca_private_key);
|
|
||||||
|
|
||||||
// Generate server cert issued by this CA
|
|
||||||
let mut server_params = CertificateParams::default();
|
|
||||||
server_params.is_ca = IsCa::NoCa;
|
|
||||||
server_params.not_before = date_time_ymd(1975, 1, 1);
|
|
||||||
server_params.not_after = date_time_ymd(4096, 1, 1);
|
|
||||||
server_params.use_authority_key_identifier_extension = true;
|
|
||||||
server_params
|
|
||||||
.key_usages
|
|
||||||
.push(KeyUsagePurpose::DigitalSignature);
|
|
||||||
server_params
|
|
||||||
.key_usages
|
|
||||||
.push(KeyUsagePurpose::KeyEncipherment);
|
|
||||||
server_params
|
|
||||||
.extended_key_usages
|
|
||||||
.push(ExtendedKeyUsagePurpose::ServerAuth);
|
|
||||||
server_params
|
|
||||||
.distinguished_name
|
|
||||||
.push(DnType::OrganizationName, "Astral Software Inc.");
|
|
||||||
server_params
|
|
||||||
.distinguished_name
|
|
||||||
.push(DnType::CommonName, "uv-test-server");
|
|
||||||
server_params
|
|
||||||
.subject_alt_names
|
|
||||||
.push(SanType::DnsName("uv-test-server".try_into()?));
|
|
||||||
server_params
|
|
||||||
.subject_alt_names
|
|
||||||
.push(SanType::DnsName("localhost".try_into()?));
|
|
||||||
server_params
|
|
||||||
.subject_alt_names
|
|
||||||
.push(SanType::IpAddress("127.0.0.1".parse()?));
|
|
||||||
let server_private_key = KeyPair::generate()?;
|
|
||||||
let server_public_cert = server_params.signed_by(&server_private_key, &ca_cert_issuer)?;
|
|
||||||
|
|
||||||
// Generate client cert issued by this CA
|
|
||||||
let mut client_params = CertificateParams::default();
|
|
||||||
client_params.is_ca = IsCa::NoCa;
|
|
||||||
client_params.not_before = date_time_ymd(1975, 1, 1);
|
|
||||||
client_params.not_after = date_time_ymd(4096, 1, 1);
|
|
||||||
client_params.use_authority_key_identifier_extension = true;
|
|
||||||
client_params
|
|
||||||
.key_usages
|
|
||||||
.push(KeyUsagePurpose::DigitalSignature);
|
|
||||||
client_params
|
|
||||||
.extended_key_usages
|
|
||||||
.push(ExtendedKeyUsagePurpose::ClientAuth);
|
|
||||||
client_params
|
|
||||||
.distinguished_name
|
|
||||||
.push(DnType::OrganizationName, "Astral Software Inc.");
|
|
||||||
client_params
|
|
||||||
.distinguished_name
|
|
||||||
.push(DnType::CommonName, "uv-test-client");
|
|
||||||
client_params
|
|
||||||
.subject_alt_names
|
|
||||||
.push(SanType::DnsName("uv-test-client".try_into()?));
|
|
||||||
let client_private_key = KeyPair::generate()?;
|
|
||||||
let client_public_cert = client_params.signed_by(&client_private_key, &ca_cert_issuer)?;
|
|
||||||
|
|
||||||
let ca_self_signed = SelfSigned {
|
|
||||||
public: ca_public_cert,
|
|
||||||
private: ca_private_key,
|
|
||||||
};
|
|
||||||
let server_self_signed = SelfSigned {
|
|
||||||
public: server_public_cert,
|
|
||||||
private: server_private_key,
|
|
||||||
};
|
|
||||||
let client_self_signed = SelfSigned {
|
|
||||||
public: client_public_cert,
|
|
||||||
private: client_private_key,
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok((ca_self_signed, server_self_signed, client_self_signed))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Plain is fine for now; Arc/Box could be used later if we need to support move.
|
|
||||||
type ServerSvcFn =
|
|
||||||
fn(
|
|
||||||
Request<Incoming>,
|
|
||||||
) -> future::Ready<Result<Response<BoxBody<Bytes, hyper::Error>>, hyper::Error>>;
|
|
||||||
|
|
||||||
#[derive(Default)]
|
|
||||||
pub(crate) struct TestServerBuilder<'a> {
|
|
||||||
// Custom server response function
|
|
||||||
svc_fn: Option<ServerSvcFn>,
|
|
||||||
// CA certificate
|
|
||||||
ca_cert: Option<&'a SelfSigned>,
|
|
||||||
// Server certificate
|
|
||||||
server_cert: Option<&'a SelfSigned>,
|
|
||||||
// Enable mTLS Verification
|
|
||||||
mutual_tls: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> TestServerBuilder<'a> {
|
|
||||||
pub(crate) fn new() -> Self {
|
|
||||||
Self {
|
|
||||||
svc_fn: None,
|
|
||||||
server_cert: None,
|
|
||||||
ca_cert: None,
|
|
||||||
mutual_tls: false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[expect(unused)]
|
|
||||||
/// Provide a custom server response function.
|
|
||||||
pub(crate) fn with_svc_fn(mut self, svc_fn: ServerSvcFn) -> Self {
|
|
||||||
self.svc_fn = Some(svc_fn);
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Provide the server certificate. This will enable TLS (HTTPS).
|
|
||||||
pub(crate) fn with_server_cert(mut self, server_cert: &'a SelfSigned) -> Self {
|
|
||||||
self.server_cert = Some(server_cert);
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
/// CA certificate used to build the `RootCertStore` for client verification.
|
|
||||||
/// Requires `with_server_cert`.
|
|
||||||
pub(crate) fn with_ca_cert(mut self, ca_cert: &'a SelfSigned) -> Self {
|
|
||||||
self.ca_cert = Some(ca_cert);
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Enforce mutual TLS (client cert auth).
|
|
||||||
/// Requires `with_server_cert` and `with_ca_cert`.
|
|
||||||
pub(crate) fn with_mutual_tls(mut self, mutual: bool) -> Self {
|
|
||||||
self.mutual_tls = mutual;
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Starts the HTTP(S) server with optional mTLS enforcement.
|
|
||||||
pub(crate) async fn start(self) -> Result<(JoinHandle<Result<()>>, SocketAddr)> {
|
|
||||||
// Validate builder input combinations
|
|
||||||
        if self.ca_cert.is_some() && self.server_cert.is_none() {
            anyhow::bail!("server certificate is required when CA certificate is provided");
        }
        if self.mutual_tls && (self.ca_cert.is_none() || self.server_cert.is_none()) {
            anyhow::bail!("CA and server certificates are required for mTLS");
        }

        // Set up the TCP listener on a random available port
        let listener = TcpListener::bind("127.0.0.1:0").await?;
        let addr = listener.local_addr()?;

        // Set up the TLS config (if any)
        let tls_acceptor = if let Some(server_cert) = self.server_cert {
            // Prepare the server certificate and key pair
            let server_key = PrivateKeyDer::try_from(server_cert.private.serialize_der()).unwrap();
            let server_cert = vec![CertificateDer::from(server_cert.public.der().to_vec())];

            // Set up the CA verifier
            let client_verifier = if let Some(ca_cert) = self.ca_cert {
                let mut root_store = RootCertStore::empty();
                root_store
                    .add(CertificateDer::from(ca_cert.public.der().to_vec()))
                    .expect("failed to add CA cert");
                if self.mutual_tls {
                    // Set up the mTLS CA config: require a client certificate signed by the CA
                    WebPkiClientVerifier::builder(root_store.into())
                        .build()
                        .expect("failed to set up client verifier")
                } else {
                    // Only load the CA roots; client certificates remain optional
                    WebPkiClientVerifier::builder(root_store.into())
                        .allow_unauthenticated()
                        .build()
                        .expect("failed to set up client verifier")
                }
            } else {
                WebPkiClientVerifier::no_client_auth()
            };

            let mut tls_config = ServerConfig::builder()
                .with_client_cert_verifier(client_verifier)
                .with_single_cert(server_cert, server_key)?;
            tls_config.alpn_protocols = vec![b"http/1.1".to_vec(), b"http/1.0".to_vec()];

            Some(TlsAcceptor::from(Arc::new(tls_config)))
        } else {
            None
        };

        // Set up the response handler
        let svc_fn = if let Some(custom_svc_fn) = self.svc_fn {
            custom_svc_fn
        } else {
            |req: Request<Incoming>| {
                // Read the User-Agent header and send it back in the response
                let user_agent = req
                    .headers()
                    .get(USER_AGENT)
                    .and_then(|v| v.to_str().ok())
                    .map(ToString::to_string)
                    .unwrap_or_default(); // Empty default
                let response_content = Full::new(Bytes::from(user_agent))
                    .map_err(|_| unreachable!())
                    .boxed();
                // If we ever want a true echo server, we can use instead:
                //     let response_content = req.into_body().boxed();
                // although uv-client doesn't expose POST currently.
                future::ok::<_, hyper::Error>(Response::new(response_content))
            }
        };

        // Spawn the server loop in a background task
        let server_task = tokio::spawn(async move {
            let svc = service_fn(move |req: Request<Incoming>| svc_fn(req));

            let (tcp_stream, _remote_addr) = listener
                .accept()
                .await
                .context("Failed to accept TCP connection")?;

            // Start the server (not wrapped in `loop {}` since we want a single-response server).
            // If we want the server to accept multiple connections, we can wrap it in `loop {}`,
            // but we'll need to ensure the tests handle termination signals; otherwise it may
            // never stop.
            if let Some(tls_acceptor) = tls_acceptor {
                let tls_stream = tls_acceptor
                    .accept(tcp_stream)
                    .await
                    .context("Failed to accept TLS connection")?;
                let socket = TokioIo::new(tls_stream);
                tokio::task::spawn(async move {
                    Builder::new(TokioExecutor::new())
                        .serve_connection(socket, svc)
                        .await
                        .expect("HTTPS server failed");
                });
            } else {
                let socket = TokioIo::new(tcp_stream);
                tokio::task::spawn(async move {
                    Builder::new(TokioExecutor::new())
                        .serve_connection(socket, svc)
                        .await
                        .expect("HTTP server failed");
                });
            }

            Ok(())
        });

        Ok((server_task, addr))
    }
}

/// Single-request HTTP server that echoes the User-Agent header.
pub(crate) async fn start_http_user_agent_server() -> Result<(JoinHandle<Result<()>>, SocketAddr)> {
    TestServerBuilder::new().start().await
}

/// Single-request HTTPS server that echoes the User-Agent header.
pub(crate) async fn start_https_user_agent_server(
    server_cert: &SelfSigned,
) -> Result<(JoinHandle<Result<()>>, SocketAddr)> {
    TestServerBuilder::new()
        .with_server_cert(server_cert)
        .start()
        .await
}

/// Single-request HTTPS mTLS server that echoes the User-Agent header.
pub(crate) async fn start_https_mtls_user_agent_server(
    ca_cert: &SelfSigned,
    server_cert: &SelfSigned,
) -> Result<(JoinHandle<Result<()>>, SocketAddr)> {
    TestServerBuilder::new()
        .with_ca_cert(ca_cert)
        .with_server_cert(server_cert)
        .with_mutual_tls(true)
        .start()
        .await
}
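Not part of the diff itself, but for orientation: a test exercises these helpers roughly as follows. This is a minimal sketch; the `reqwest` client and the `example-agent/1.0` string are illustrative assumptions, not code from the repository.

#[tokio::test]
async fn user_agent_echo_sketch() -> Result<()> {
    // Spawn the single-request echo server on an ephemeral port.
    let (server_task, addr) = start_http_user_agent_server().await?;

    // Any HTTP client works; the response body is the request's User-Agent.
    let client = reqwest::Client::builder()
        .user_agent("example-agent/1.0")
        .build()?;
    let body = client
        .get(format!("http://{addr}"))
        .send()
        .await?
        .text()
        .await?;
    assert_eq!(body, "example-agent/1.0");

    // Join the server task so nothing leaks past the test.
    let _ = server_task.await?;
    Ok(())
}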
@@ -1,4 +1,2 @@
-mod http_util;
 mod remote_metadata;
-mod ssl_certs;
 mod user_agent_version;
@@ -11,7 +11,7 @@ use uv_redacted::DisplaySafeUrl;

 #[tokio::test]
 async fn remote_metadata_with_and_without_cache() -> Result<()> {
-    let cache = Cache::temp()?.init().await?;
+    let cache = Cache::temp()?.init()?;
     let client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache).build();

     // The first run is without cache (the tempdir is empty), the second has the cache from the
@@ -21,11 +21,11 @@ async fn remote_metadata_with_and_without_cache() -> Result<()> {
         let filename = WheelFilename::from_str(url.rsplit_once('/').unwrap().1)?;
         let dist = BuiltDist::DirectUrl(DirectUrlBuiltDist {
             filename,
-            location: Box::new(DisplaySafeUrl::parse(url)?),
-            url: VerbatimUrl::from_str(url)?,
+            location: Box::new(DisplaySafeUrl::parse(url).unwrap()),
+            url: VerbatimUrl::from_str(url).unwrap(),
         });
         let capabilities = IndexCapabilities::default();
-        let metadata = client.wheel_metadata(&dist, &capabilities).await?;
+        let metadata = client.wheel_metadata(&dist, &capabilities).await.unwrap();
         assert_eq!(metadata.version.to_string(), "4.66.1");
     }
@@ -1,333 +0,0 @@
use std::str::FromStr;

use anyhow::Result;
use rustls::AlertDescription;
use url::Url;

use uv_cache::Cache;
use uv_client::BaseClientBuilder;
use uv_client::RegistryClientBuilder;
use uv_redacted::DisplaySafeUrl;
use uv_static::EnvVars;

use crate::http_util::{
    generate_self_signed_certs, generate_self_signed_certs_with_ca,
    start_https_mtls_user_agent_server, start_https_user_agent_server, test_cert_dir,
};

// SAFETY: This test is meant to run with a single-threaded configuration
#[tokio::test]
#[allow(unsafe_code)]
async fn ssl_env_vars() -> Result<()> {
    // Ensure our environment is not polluted with anything that may affect `rustls-native-certs`
    unsafe {
        std::env::remove_var(EnvVars::UV_NATIVE_TLS);
        std::env::remove_var(EnvVars::SSL_CERT_FILE);
        std::env::remove_var(EnvVars::SSL_CERT_DIR);
        std::env::remove_var(EnvVars::SSL_CLIENT_CERT);
    }

    // Create temporary cert dirs
    let cert_dir = test_cert_dir();
    fs_err::create_dir_all(&cert_dir).expect("Failed to create test cert bucket");
    let cert_dir =
        tempfile::TempDir::new_in(cert_dir).expect("Failed to create test cert directory");
    let does_not_exist_cert_dir = cert_dir.path().join("does_not_exist");

    // Generate a self-signed standalone cert
    let standalone_server_cert = generate_self_signed_certs()?;
    let standalone_public_pem_path = cert_dir.path().join("standalone_public.pem");
    let standalone_private_pem_path = cert_dir.path().join("standalone_private.pem");

    // Generate self-signed CA, server, and client certs
    let (ca_cert, server_cert, client_cert) = generate_self_signed_certs_with_ca()?;
    let ca_public_pem_path = cert_dir.path().join("ca_public.pem");
    let ca_private_pem_path = cert_dir.path().join("ca_private.pem");
    let server_public_pem_path = cert_dir.path().join("server_public.pem");
    let server_private_pem_path = cert_dir.path().join("server_private.pem");
    let client_combined_pem_path = cert_dir.path().join("client_combined.pem");

    // Persist the certs in PKCS8 format, as the env vars expect a path on disk
    fs_err::write(
        standalone_public_pem_path.as_path(),
        standalone_server_cert.public.pem(),
    )?;
    fs_err::write(
        standalone_private_pem_path.as_path(),
        standalone_server_cert.private.serialize_pem(),
    )?;
    fs_err::write(ca_public_pem_path.as_path(), ca_cert.public.pem())?;
    fs_err::write(
        ca_private_pem_path.as_path(),
        ca_cert.private.serialize_pem(),
    )?;
    fs_err::write(server_public_pem_path.as_path(), server_cert.public.pem())?;
    fs_err::write(
        server_private_pem_path.as_path(),
        server_cert.private.serialize_pem(),
    )?;
    fs_err::write(
        client_combined_pem_path.as_path(),
        // SSL_CLIENT_CERT expects a "combined" cert with the public and private key.
        format!(
            "{}\n{}",
            client_cert.public.pem(),
            client_cert.private.serialize_pem()
        ),
    )?;

    // ** Set SSL_CERT_FILE to a non-existent location
    // ** Then verify our request fails to establish a connection

    unsafe {
        std::env::set_var(EnvVars::SSL_CERT_FILE, does_not_exist_cert_dir.as_os_str());
    }
    let (server_task, addr) = start_https_user_agent_server(&standalone_server_cert).await?;
    let url = DisplaySafeUrl::from_str(&format!("https://{addr}"))?;
    let cache = Cache::temp()?.init().await?;
    let client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache).build();
    let res = client
        .cached_client()
        .uncached()
        .for_host(&url)
        .get(Url::from(url))
        .send()
        .await;
    unsafe {
        std::env::remove_var(EnvVars::SSL_CERT_FILE);
    }

    // Validate the client error
    let Some(reqwest_middleware::Error::Middleware(middleware_error)) = res.err() else {
        panic!("expected middleware error");
    };
    let reqwest_error = middleware_error
        .chain()
        .find_map(|err| {
            err.downcast_ref::<reqwest_middleware::Error>().map(|err| {
                if let reqwest_middleware::Error::Reqwest(inner) = err {
                    inner
                } else {
                    panic!("expected reqwest error")
                }
            })
        })
        .expect("expected reqwest error");
    assert!(reqwest_error.is_connect());

    // Validate the server error
    let server_res = server_task.await?;
    let expected_err = if let Err(anyhow_err) = server_res
        && let Some(io_err) = anyhow_err.downcast_ref::<std::io::Error>()
        && let Some(wrapped_err) = io_err.get_ref()
        && let Some(tls_err) = wrapped_err.downcast_ref::<rustls::Error>()
        && matches!(
            tls_err,
            rustls::Error::AlertReceived(AlertDescription::UnknownCA)
        ) {
        true
    } else {
        false
    };
    assert!(expected_err);

    // ** Set SSL_CERT_FILE to our public certificate
    // ** Then verify our request successfully establishes a connection

    unsafe {
        std::env::set_var(
            EnvVars::SSL_CERT_FILE,
            standalone_public_pem_path.as_os_str(),
        );
    }
    let (server_task, addr) = start_https_user_agent_server(&standalone_server_cert).await?;
    let url = DisplaySafeUrl::from_str(&format!("https://{addr}"))?;
    let cache = Cache::temp()?.init().await?;
    let client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache).build();
    let res = client
        .cached_client()
        .uncached()
        .for_host(&url)
        .get(Url::from(url))
        .send()
        .await;
    assert!(res.is_ok());
    let _ = server_task.await?; // wait for server shutdown
    unsafe {
        std::env::remove_var(EnvVars::SSL_CERT_FILE);
    }

    // ** Set SSL_CERT_DIR to our cert dir, as well as some other dir that does not exist
    // ** Then verify our request still successfully establishes a connection

    unsafe {
        std::env::set_var(
            EnvVars::SSL_CERT_DIR,
            std::env::join_paths(vec![
                cert_dir.path().as_os_str(),
                does_not_exist_cert_dir.as_os_str(),
            ])?,
        );
    }
    let (server_task, addr) = start_https_user_agent_server(&standalone_server_cert).await?;
    let url = DisplaySafeUrl::from_str(&format!("https://{addr}"))?;
    let cache = Cache::temp()?.init().await?;
    let client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache).build();
    let res = client
        .cached_client()
        .uncached()
        .for_host(&url)
        .get(Url::from(url))
        .send()
        .await;
    assert!(res.is_ok());
    let _ = server_task.await?; // wait for server shutdown
    unsafe {
        std::env::remove_var(EnvVars::SSL_CERT_DIR);
    }

    // ** Set SSL_CERT_DIR to only the dir that does not exist
    // ** Then verify our request fails to establish a connection

    unsafe {
        std::env::set_var(EnvVars::SSL_CERT_DIR, does_not_exist_cert_dir.as_os_str());
    }
    let (server_task, addr) = start_https_user_agent_server(&standalone_server_cert).await?;
    let url = DisplaySafeUrl::from_str(&format!("https://{addr}"))?;
    let cache = Cache::temp()?.init().await?;
    let client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache).build();
    let res = client
        .cached_client()
        .uncached()
        .for_host(&url)
        .get(Url::from(url))
        .send()
        .await;
    unsafe {
        std::env::remove_var(EnvVars::SSL_CERT_DIR);
    }

    // Validate the client error
    let Some(reqwest_middleware::Error::Middleware(middleware_error)) = res.err() else {
        panic!("expected middleware error");
    };
    let reqwest_error = middleware_error
        .chain()
        .find_map(|err| {
            err.downcast_ref::<reqwest_middleware::Error>().map(|err| {
                if let reqwest_middleware::Error::Reqwest(inner) = err {
                    inner
                } else {
                    panic!("expected reqwest error")
                }
            })
        })
        .expect("expected reqwest error");
    assert!(reqwest_error.is_connect());

    // Validate the server error
    let server_res = server_task.await?;
    let expected_err = if let Err(anyhow_err) = server_res
        && let Some(io_err) = anyhow_err.downcast_ref::<std::io::Error>()
        && let Some(wrapped_err) = io_err.get_ref()
        && let Some(tls_err) = wrapped_err.downcast_ref::<rustls::Error>()
        && matches!(
            tls_err,
            rustls::Error::AlertReceived(AlertDescription::UnknownCA)
        ) {
        true
    } else {
        false
    };
    assert!(expected_err);

    // *** mTLS tests

    // ** Set SSL_CERT_FILE to our CA and SSL_CLIENT_CERT to our client cert
    // ** Then verify our request still successfully establishes a connection

    // We need to set SSL_CERT_FILE or SSL_CERT_DIR to our CA, as we need to tell
    // our HTTP client that we trust certificates issued by our self-signed CA.
    // This inherently also tests that our server cert is validated as part
    // of the certificate path validation algorithm.
    unsafe {
        std::env::set_var(EnvVars::SSL_CERT_FILE, ca_public_pem_path.as_os_str());
        std::env::set_var(
            EnvVars::SSL_CLIENT_CERT,
            client_combined_pem_path.as_os_str(),
        );
    }
    let (server_task, addr) = start_https_mtls_user_agent_server(&ca_cert, &server_cert).await?;
    let url = DisplaySafeUrl::from_str(&format!("https://{addr}"))?;
    let cache = Cache::temp()?.init().await?;
    let client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache).build();
    let res = client
        .cached_client()
        .uncached()
        .for_host(&url)
        .get(Url::from(url))
        .send()
        .await;
    assert!(res.is_ok());
    let _ = server_task.await?; // wait for server shutdown
    unsafe {
        std::env::remove_var(EnvVars::SSL_CERT_FILE);
        std::env::remove_var(EnvVars::SSL_CLIENT_CERT);
    }

    // ** Set SSL_CERT_FILE to our CA and leave SSL_CLIENT_CERT unset
    // ** Then verify our request fails to establish a connection

    unsafe {
        std::env::set_var(EnvVars::SSL_CERT_FILE, ca_public_pem_path.as_os_str());
    }
    let (server_task, addr) = start_https_mtls_user_agent_server(&ca_cert, &server_cert).await?;
    let url = DisplaySafeUrl::from_str(&format!("https://{addr}"))?;
    let cache = Cache::temp()?.init().await?;
    let client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache).build();
    let res = client
        .cached_client()
        .uncached()
        .for_host(&url)
        .get(Url::from(url))
        .send()
        .await;
    unsafe {
        std::env::remove_var(EnvVars::SSL_CERT_FILE);
    }

    // Validate the client error
    let Some(reqwest_middleware::Error::Middleware(middleware_error)) = res.err() else {
        panic!("expected middleware error");
    };
    let reqwest_error = middleware_error
        .chain()
        .find_map(|err| {
            err.downcast_ref::<reqwest_middleware::Error>().map(|err| {
                if let reqwest_middleware::Error::Reqwest(inner) = err {
                    inner
                } else {
                    panic!("expected reqwest error")
                }
            })
        })
        .expect("expected reqwest error");
    assert!(reqwest_error.is_connect());

    // Validate the server error
    let server_res = server_task.await?;
    let expected_err = if let Err(anyhow_err) = server_res
        && let Some(io_err) = anyhow_err.downcast_ref::<std::io::Error>()
        && let Some(wrapped_err) = io_err.get_ref()
        && let Some(tls_err) = wrapped_err.downcast_ref::<rustls::Error>()
        && matches!(tls_err, rustls::Error::NoCertificatesPresented)
    {
        true
    } else {
        false
    };
    assert!(expected_err);

    // Fin.
    Ok(())
}
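The `// SAFETY` comment above is why this test is meant for a single-threaded configuration: `std::env::set_var` and `remove_var` mutate process-global state, so a concurrently running test could observe a half-configured environment. Not part of the diff, but one conventional way to serialize such tests is a process-wide lock; this sketch is hypothetical (the `ENV_LOCK` static and `with_env_var` helper do not exist in the repository):

use std::sync::Mutex;

// Process-wide lock: only one test at a time may touch the environment.
static ENV_LOCK: Mutex<()> = Mutex::new(());

fn with_env_var<T>(key: &str, value: &str, f: impl FnOnce() -> T) -> T {
    let _guard = ENV_LOCK.lock().unwrap();
    // SAFETY: the lock ensures no other test using this helper mutates the
    // environment while `f` runs. Tests that read the environment without the
    // lock could still race, which is why uv's test runs single-threaded instead.
    unsafe { std::env::set_var(key, value) };
    let result = f();
    unsafe { std::env::remove_var(key) };
    result
}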
@@ -1,9 +1,16 @@
-use std::str::FromStr;
-
 use anyhow::Result;
+use futures::future;
+use http_body_util::Full;
+use hyper::body::Bytes;
+use hyper::header::USER_AGENT;
+use hyper::server::conn::http1;
+use hyper::service::service_fn;
+use hyper::{Request, Response};
+use hyper_util::rt::TokioIo;
 use insta::{assert_json_snapshot, assert_snapshot, with_settings};
+use std::str::FromStr;
+use tokio::net::TcpListener;
 use url::Url;

 use uv_cache::Cache;
 use uv_client::RegistryClientBuilder;
 use uv_client::{BaseClientBuilder, LineHaul};
@@ -12,15 +19,39 @@ use uv_platform_tags::{Arch, Os, Platform};
 use uv_redacted::DisplaySafeUrl;
 use uv_version::version;

-use crate::http_util::start_http_user_agent_server;
-
 #[tokio::test]
 async fn test_user_agent_has_version() -> Result<()> {
-    // Initialize dummy http server
-    let (server_task, addr) = start_http_user_agent_server().await?;
+    // Set up the TCP listener on a random available port
+    let listener = TcpListener::bind("127.0.0.1:0").await?;
+    let addr = listener.local_addr()?;
+
+    // Spawn the server loop in a background task
+    let server_task = tokio::spawn(async move {
+        let svc = service_fn(move |req: Request<hyper::body::Incoming>| {
+            // Get User Agent Header and send it back in the response
+            let user_agent = req
+                .headers()
+                .get(USER_AGENT)
+                .and_then(|v| v.to_str().ok())
+                .map(ToString::to_string)
+                .unwrap_or_default(); // Empty Default
+            future::ok::<_, hyper::Error>(Response::new(Full::new(Bytes::from(user_agent))))
+        });
+        // Start Server (not wrapped in loop {} since we want a single response server)
+        // If you want server to accept multiple connections, wrap it in loop {}
+        let (socket, _) = listener.accept().await.unwrap();
+        let socket = TokioIo::new(socket);
+        tokio::task::spawn(async move {
+            http1::Builder::new()
+                .serve_connection(socket, svc)
+                .with_upgrades()
+                .await
+                .expect("Server Started");
+        });
+    });

     // Initialize uv-client
-    let cache = Cache::temp()?.init().await?;
+    let cache = Cache::temp()?.init()?;
     let client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache).build();

     // Send request to our dummy server
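For orientation (an illustrative value, not taken from the diff): on the `main` side the User-Agent carries a version tag plus a linehaul JSON payload separated by a single space, which is why the removed assertions below split on the first space, while the `0.8.17` side expects the bare `uv/{version}` string:

// Hypothetical header value, shaped like the snapshots asserted below.
let header = r#"uv/0.8.17 {"installer":{"name":"uv","version":"0.8.17","subcommand":null}}"#;
let (uv_version, uv_linehaul) = header.split_once(' ').expect("version and linehaul");
assert_eq!(uv_version, "uv/0.8.17");
assert!(uv_linehaul.starts_with('{')); // the linehaul JSON document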
@@ -39,102 +70,45 @@ async fn test_user_agent_has_version() -> Result<()> {
     // Check User Agent
     let body = res.text().await?;

-    let (uv_version, uv_linehaul) = body
-        .split_once(' ')
-        .expect("Failed to split User-Agent header");
-
-    // Deserializing Linehaul
-    let linehaul: LineHaul = serde_json::from_str(uv_linehaul)?;
-
-    // Assert linehaul user agent
-    let filters = vec![(version(), "[VERSION]")];
-    with_settings!({
-        filters => filters
-    }, {
-        // Assert uv version
-        assert_snapshot!(uv_version, @"uv/[VERSION]");
-        // Assert linehaul json
-        assert_json_snapshot!(&linehaul.installer, @r#"
-        {
-          "name": "uv",
-          "version": "[VERSION]",
-          "subcommand": null
-        }
-        "#);
-    });
+    // Verify body matches regex
+    assert_eq!(body, format!("uv/{}", version()));

     // Wait for the server task to complete, to be a good citizen.
-    let _ = server_task.await?;
-
-    Ok(())
-}
-
-#[tokio::test]
-async fn test_user_agent_has_subcommand() -> Result<()> {
-    // Initialize dummy http server
-    let (server_task, addr) = start_http_user_agent_server().await?;
-
-    // Initialize uv-client
-    let cache = Cache::temp()?.init().await?;
-    let client = RegistryClientBuilder::new(
-        BaseClientBuilder::default().subcommand(vec!["foo".to_owned(), "bar".to_owned()]),
-        cache,
-    )
-    .build();
-
-    // Send request to our dummy server
-    let url = DisplaySafeUrl::from_str(&format!("http://{addr}"))?;
-    let res = client
-        .cached_client()
-        .uncached()
-        .for_host(&url)
-        .get(Url::from(url))
-        .send()
-        .await?;
-
-    // Check the HTTP status
-    assert!(res.status().is_success());
-
-    // Check User Agent
-    let body = res.text().await?;
-
-    let (uv_version, uv_linehaul) = body
-        .split_once(' ')
-        .expect("Failed to split User-Agent header");
-
-    // Deserializing Linehaul
-    let linehaul: LineHaul = serde_json::from_str(uv_linehaul)?;
-
-    // Assert linehaul user agent
-    let filters = vec![(version(), "[VERSION]")];
-    with_settings!({
-        filters => filters
-    }, {
-        // Assert uv version
-        assert_snapshot!(uv_version, @"uv/[VERSION]");
-        // Assert linehaul json
-        assert_json_snapshot!(&linehaul.installer, @r#"
-        {
-          "name": "uv",
-          "version": "[VERSION]",
-          "subcommand": [
-            "foo",
-            "bar"
-          ]
-        }
-        "#);
-    });
-
-    // Wait for the server task to complete, to be a good citizen.
-    let _ = server_task.await?;
+    server_task.await?;

     Ok(())
 }

 #[tokio::test]
 async fn test_user_agent_has_linehaul() -> Result<()> {
-    // Initialize dummy http server
-    let (server_task, addr) = start_http_user_agent_server().await?;
+    // Set up the TCP listener on a random available port
+    let listener = TcpListener::bind("127.0.0.1:0").await?;
+    let addr = listener.local_addr()?;
+
+    // Spawn the server loop in a background task
+    let server_task = tokio::spawn(async move {
+        let svc = service_fn(move |req: Request<hyper::body::Incoming>| {
+            // Get User Agent Header and send it back in the response
+            let user_agent = req
+                .headers()
+                .get(USER_AGENT)
+                .and_then(|v| v.to_str().ok())
+                .map(ToString::to_string)
+                .unwrap_or_default(); // Empty Default
+            future::ok::<_, hyper::Error>(Response::new(Full::new(Bytes::from(user_agent))))
+        });
+        // Start Server (not wrapped in loop {} since we want a single response server)
+        // If you want server to accept multiple connections, wrap it in loop {}
+        let (socket, _) = listener.accept().await.unwrap();
+        let socket = TokioIo::new(socket);
+        tokio::task::spawn(async move {
+            http1::Builder::new()
+                .serve_connection(socket, svc)
+                .with_upgrades()
+                .await
+                .expect("Server Started");
+        });
+    });

     // Add some representative markers for an Ubuntu CI runner
     let markers = MarkerEnvironment::try_from(MarkerEnvironmentBuilder {
@@ -149,10 +123,11 @@ async fn test_user_agent_has_linehaul() -> Result<()> {
         python_full_version: "3.12.2",
         python_version: "3.12",
         sys_platform: "linux",
-    })?;
+    })
+    .unwrap();

     // Initialize uv-client
-    let cache = Cache::temp()?.init().await?;
+    let cache = Cache::temp()?.init()?;
     let mut builder =
         RegistryClientBuilder::new(BaseClientBuilder::default(), cache).markers(&markers);
@@ -194,7 +169,7 @@ async fn test_user_agent_has_linehaul() -> Result<()> {
     let body = res.text().await?;

     // Wait for the server task to complete, to be a good citizen.
-    let _ = server_task.await?;
+    server_task.await?;

     // Unpack User-Agent with linehaul
     let (uv_version, uv_linehaul) = body
@@ -215,12 +190,11 @@ async fn test_user_agent_has_linehaul() -> Result<()> {
         assert_json_snapshot!(&linehaul, {
             ".distro" => "[distro]",
             ".ci" => "[ci]"
-        }, @r#"
+        }, @r###"
         {
           "installer": {
             "name": "uv",
-            "version": "[VERSION]",
-            "subcommand": null
+            "version": "[VERSION]"
           },
           "python": "3.12.2",
           "implementation": {
@@ -238,7 +212,7 @@ async fn test_user_agent_has_linehaul() -> Result<()> {
           "rustc_version": null,
           "ci": "[ci]"
         }
-        "#);
+        "###);
     });

     // Assert distro
@@ -1,10 +1,10 @@
 [package]
 name = "uv-configuration"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.1"
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
+documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }
@@ -1,13 +0,0 @@
<!-- This file is generated. DO NOT EDIT -->

# uv-configuration

This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
is unstable and will have frequent breaking changes.

This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-configuration).

See uv's
[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
for details on versioning.
@@ -1,135 +0,0 @@
use std::path::PathBuf;

/// A collection of `.env` file paths.
#[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct EnvFile(Vec<PathBuf>);

impl EnvFile {
    /// Parse the env file paths from command-line arguments.
    pub fn from_args(env_file: Vec<String>, no_env_file: bool) -> Self {
        if no_env_file {
            return Self::default();
        }

        if env_file.is_empty() {
            return Self::default();
        }

        let mut paths = Vec::new();

        // Split on spaces, but respect backslashes.
        for env_file in env_file {
            let mut current = String::new();
            let mut escape = false;
            for c in env_file.chars() {
                if escape {
                    current.push(c);
                    escape = false;
                } else if c == '\\' {
                    escape = true;
                } else if c.is_whitespace() {
                    if !current.is_empty() {
                        paths.push(PathBuf::from(current));
                        current = String::new();
                    }
                } else {
                    current.push(c);
                }
            }
            if !current.is_empty() {
                paths.push(PathBuf::from(current));
            }
        }

        Self(paths)
    }

    /// Iterate over the paths in the env file.
    pub fn iter(&self) -> impl DoubleEndedIterator<Item = &PathBuf> {
        self.0.iter()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_from_args_default() {
        let env_file = EnvFile::from_args(vec![], false);
        assert_eq!(env_file, EnvFile::default());
    }

    #[test]
    fn test_from_args_no_env_file() {
        let env_file = EnvFile::from_args(vec!["path1 path2".to_string()], true);
        assert_eq!(env_file, EnvFile::default());
    }

    #[test]
    fn test_from_args_empty_string() {
        let env_file = EnvFile::from_args(vec![String::new()], false);
        assert_eq!(env_file, EnvFile::default());
    }

    #[test]
    fn test_from_args_whitespace_only() {
        let env_file = EnvFile::from_args(vec![" ".to_string()], false);
        assert_eq!(env_file, EnvFile::default());
    }

    #[test]
    fn test_from_args_single_path() {
        let env_file = EnvFile::from_args(vec!["path1".to_string()], false);
        assert_eq!(env_file.0, vec![PathBuf::from("path1")]);
    }

    #[test]
    fn test_from_args_multiple_paths() {
        let env_file = EnvFile::from_args(vec!["path1 path2 path3".to_string()], false);
        assert_eq!(
            env_file.0,
            vec![
                PathBuf::from("path1"),
                PathBuf::from("path2"),
                PathBuf::from("path3")
            ]
        );
    }

    #[test]
    fn test_from_args_escaped_spaces() {
        let env_file = EnvFile::from_args(vec![r"path\ with\ spaces".to_string()], false);
        assert_eq!(env_file.0, vec![PathBuf::from("path with spaces")]);
    }

    #[test]
    fn test_from_args_mixed_escaped_and_normal() {
        let env_file =
            EnvFile::from_args(vec![r"path1 path\ with\ spaces path2".to_string()], false);
        assert_eq!(
            env_file.0,
            vec![
                PathBuf::from("path1"),
                PathBuf::from("path with spaces"),
                PathBuf::from("path2")
            ]
        );
    }

    #[test]
    fn test_from_args_escaped_backslash() {
        let env_file = EnvFile::from_args(vec![r"path\\with\\backslashes".to_string()], false);
        assert_eq!(env_file.0, vec![PathBuf::from(r"path\with\backslashes")]);
    }

    #[test]
    fn test_iter() {
        let env_file = EnvFile(vec![PathBuf::from("path1"), PathBuf::from("path2")]);
        let paths: Vec<_> = env_file.iter().collect();
        assert_eq!(
            paths,
            vec![&PathBuf::from("path1"), &PathBuf::from("path2")]
        );
    }
}
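The splitting rules above are easiest to see with a worked input. A minimal sketch, mirroring the unit tests in this file (the file names are hypothetical):

// One argument can carry several paths; backslashes escape the separator.
let env_file = EnvFile::from_args(vec![r"dev.env prod.env my\ local.env".to_string()], false);
let paths: Vec<_> = env_file.iter().collect();
assert_eq!(
    paths,
    vec![
        &PathBuf::from("dev.env"),
        &PathBuf::from("prod.env"),
        &PathBuf::from("my local.env"),
    ]
);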
@@ -1,25 +0,0 @@
use rustc_hash::FxHashSet;

use uv_normalize::PackageName;

/// A set of packages to exclude from resolution.
#[derive(Debug, Default, Clone)]
pub struct Excludes(FxHashSet<PackageName>);

impl Excludes {
    /// Return an iterator over all package names in the exclusion set.
    pub fn iter(&self) -> impl Iterator<Item = &PackageName> {
        self.0.iter()
    }

    /// Check if a package is excluded.
    pub fn contains(&self, name: &PackageName) -> bool {
        self.0.contains(name)
    }
}

impl FromIterator<PackageName> for Excludes {
    fn from_iter<I: IntoIterator<Item = PackageName>>(iter: I) -> Self {
        Self(iter.into_iter().collect())
    }
}
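Since `Excludes` only exposes `FromIterator`, `iter`, and `contains`, constructing one is a single `collect`. A minimal sketch (the package names are hypothetical, and it assumes `PackageName` implements `FromStr` as used elsewhere in this diff):

use std::str::FromStr;

let excludes: Excludes = ["numpy", "scipy"]
    .into_iter()
    .map(|name| PackageName::from_str(name).unwrap())
    .collect();
assert!(excludes.contains(&PackageName::from_str("numpy").unwrap()));
assert!(!excludes.contains(&PackageName::from_str("torch").unwrap()));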
@@ -15,30 +15,4 @@ pub enum ExportFormat {
     #[serde(rename = "pylock.toml", alias = "pylock-toml")]
     #[cfg_attr(feature = "clap", clap(name = "pylock.toml", alias = "pylock-toml"))]
     PylockToml,
-    /// Export in `CycloneDX` v1.5 JSON format.
-    #[serde(rename = "cyclonedx1.5")]
-    #[cfg_attr(
-        feature = "clap",
-        clap(name = "cyclonedx1.5", alias = "cyclonedx1.5+json")
-    )]
-    CycloneDX1_5,
-}
-
-/// The output format to use in `uv pip compile`.
-#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
-#[serde(deny_unknown_fields, rename_all = "kebab-case")]
-#[cfg_attr(feature = "clap", derive(clap::ValueEnum))]
-pub enum PipCompileFormat {
-    /// Export in `requirements.txt` format.
-    #[default]
-    #[serde(rename = "requirements.txt", alias = "requirements-txt")]
-    #[cfg_attr(
-        feature = "clap",
-        clap(name = "requirements.txt", alias = "requirements-txt")
-    )]
-    RequirementsTxt,
-    /// Export in `pylock.toml` format.
-    #[serde(rename = "pylock.toml", alias = "pylock-toml")]
-    #[cfg_attr(feature = "clap", clap(name = "pylock.toml", alias = "pylock-toml"))]
-    PylockToml,
 }
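What the serde attributes above buy is that the wire names are the literal file names, with kebab-case aliases accepted on input. A minimal sketch against the `main`-side `PipCompileFormat` (assuming `serde_json` is available):

// The canonical name deserializes...
let fmt: PipCompileFormat = serde_json::from_str(r#""pylock.toml""#).unwrap();
assert!(matches!(fmt, PipCompileFormat::PylockToml));
// ...and so does the kebab-case alias.
let fmt: PipCompileFormat = serde_json::from_str(r#""pylock-toml""#).unwrap();
assert!(matches!(fmt, PipCompileFormat::PylockToml));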
||||||
|
|
@ -17,43 +17,26 @@ pub struct InstallTarget<'a> {
|
||||||
pub struct InstallOptions {
|
pub struct InstallOptions {
|
||||||
/// Omit the project itself from the resolution.
|
/// Omit the project itself from the resolution.
|
||||||
pub no_install_project: bool,
|
pub no_install_project: bool,
|
||||||
/// Include only the project itself in the resolution.
|
|
||||||
pub only_install_project: bool,
|
|
||||||
/// Omit all workspace members (including the project itself) from the resolution.
|
/// Omit all workspace members (including the project itself) from the resolution.
|
||||||
pub no_install_workspace: bool,
|
pub no_install_workspace: bool,
|
||||||
/// Include only workspace members (including the project itself) in the resolution.
|
|
||||||
pub only_install_workspace: bool,
|
|
||||||
/// Omit all local packages from the resolution.
|
/// Omit all local packages from the resolution.
|
||||||
pub no_install_local: bool,
|
pub no_install_local: bool,
|
||||||
/// Include only local packages in the resolution.
|
|
||||||
pub only_install_local: bool,
|
|
||||||
/// Omit the specified packages from the resolution.
|
/// Omit the specified packages from the resolution.
|
||||||
pub no_install_package: Vec<PackageName>,
|
pub no_install_package: Vec<PackageName>,
|
||||||
/// Include only the specified packages in the resolution.
|
|
||||||
pub only_install_package: Vec<PackageName>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl InstallOptions {
|
impl InstallOptions {
|
||||||
#[allow(clippy::fn_params_excessive_bools)]
|
|
||||||
pub fn new(
|
pub fn new(
|
||||||
no_install_project: bool,
|
no_install_project: bool,
|
||||||
only_install_project: bool,
|
|
||||||
no_install_workspace: bool,
|
no_install_workspace: bool,
|
||||||
only_install_workspace: bool,
|
|
||||||
no_install_local: bool,
|
no_install_local: bool,
|
||||||
only_install_local: bool,
|
|
||||||
no_install_package: Vec<PackageName>,
|
no_install_package: Vec<PackageName>,
|
||||||
only_install_package: Vec<PackageName>,
|
|
||||||
) -> Self {
|
) -> Self {
|
||||||
Self {
|
Self {
|
||||||
no_install_project,
|
no_install_project,
|
||||||
only_install_project,
|
|
||||||
no_install_workspace,
|
no_install_workspace,
|
||||||
only_install_workspace,
|
|
||||||
no_install_local,
|
no_install_local,
|
||||||
only_install_local,
|
|
||||||
no_install_package,
|
no_install_package,
|
||||||
only_install_package,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@@ -65,55 +48,6 @@ impl InstallOptions {
         members: &BTreeSet<PackageName>,
     ) -> bool {
         let package_name = target.name;

-        // If `--only-install-package` is set, only include specified packages.
-        if !self.only_install_package.is_empty() {
-            if self.only_install_package.contains(package_name) {
-                return true;
-            }
-            debug!("Omitting `{package_name}` from resolution due to `--only-install-package`");
-            return false;
-        }
-
-        // If `--only-install-local` is set, only include local packages.
-        if self.only_install_local {
-            if target.is_local {
-                return true;
-            }
-            debug!("Omitting `{package_name}` from resolution due to `--only-install-local`");
-            return false;
-        }
-
-        // If `--only-install-workspace` is set, only include the project and workspace members.
-        if self.only_install_workspace {
-            // Check if it's the project itself
-            if let Some(project_name) = project_name {
-                if package_name == project_name {
-                    return true;
-                }
-            }
-
-            // Check if it's a workspace member
-            if members.contains(package_name) {
-                return true;
-            }
-
-            // Otherwise, exclude it
-            debug!("Omitting `{package_name}` from resolution due to `--only-install-workspace`");
-            return false;
-        }
-
-        // If `--only-install-project` is set, only include the project itself.
-        if self.only_install_project {
-            if let Some(project_name) = project_name {
-                if package_name == project_name {
-                    return true;
-                }
-            }
-            debug!("Omitting `{package_name}` from resolution due to `--only-install-project`");
-            return false;
-        }
-
         // If `--no-install-project` is set, remove the project itself.
         if self.no_install_project {
             if let Some(project_name) = project_name {
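The removed checks run in a fixed order — `--only-install-package`, then `--only-install-local`, then `--only-install-workspace`, then `--only-install-project` — so the first matching `only` filter decides inclusion and the rest are never consulted. An illustrative sketch against the `main`-side constructor (the package name `foo` is hypothetical):

use std::str::FromStr;

// `--only-install-local --only-install-package foo`: the package filter is
// evaluated first, so a non-local package named `foo` is still included.
let options = InstallOptions::new(
    false,                                       // no_install_project
    false,                                       // only_install_project
    false,                                       // no_install_workspace
    false,                                       // only_install_workspace
    false,                                       // no_install_local
    true,                                        // only_install_local
    vec![],                                      // no_install_package
    vec![PackageName::from_str("foo").unwrap()], // only_install_package
);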
@@ -5,8 +5,6 @@ pub use constraints::*;
 pub use dependency_groups::*;
 pub use dry_run::*;
 pub use editable::*;
-pub use env_file::*;
-pub use excludes::*;
 pub use export_format::*;
 pub use extras::*;
 pub use hash::*;
@@ -30,8 +28,6 @@ mod constraints;
 mod dependency_groups;
 mod dry_run;
 mod editable;
-mod env_file;
-mod excludes;
 mod export_format;
 mod extras;
 mod hash;