mirror of https://github.com/astral-sh/uv
Compare commits
No commits in common. "main" and "0.8.6" have entirely different histories.
@@ -1,81 +0,0 @@
-# /// script
-# requires-python = ">=3.12"
-# dependencies = []
-# ///
-
-"""Post-edit hook to auto-format files after Claude edits."""
-
-import json
-import subprocess
-import sys
-from pathlib import Path
-
-
-def format_rust(file_path: str, cwd: str) -> None:
-    """Format Rust files with cargo fmt."""
-    try:
-        subprocess.run(
-            ["cargo", "fmt", "--", file_path],
-            cwd=cwd,
-            capture_output=True,
-        )
-    except FileNotFoundError:
-        pass
-
-
-def format_python(file_path: str, cwd: str) -> None:
-    """Format Python files with ruff."""
-    try:
-        subprocess.run(
-            ["uvx", "ruff", "format", file_path],
-            cwd=cwd,
-            capture_output=True,
-        )
-    except FileNotFoundError:
-        pass
-
-
-def format_prettier(file_path: str, cwd: str, prose_wrap: bool = False) -> None:
-    """Format files with prettier."""
-    args = ["npx", "prettier", "--write"]
-    if prose_wrap:
-        args.extend(["--prose-wrap", "always"])
-    args.append(file_path)
-    try:
-        subprocess.run(args, cwd=cwd, capture_output=True)
-    except FileNotFoundError:
-        pass
-
-
-def main() -> None:
-    import os
-
-    input_data = json.load(sys.stdin)
-
-    tool_name = input_data.get("tool_name")
-    tool_input = input_data.get("tool_input", {})
-    file_path = tool_input.get("file_path")
-
-    # Only process Write, Edit, and MultiEdit tools
-    if tool_name not in ("Write", "Edit", "MultiEdit"):
-        return
-
-    if not file_path:
-        return
-
-    cwd = os.environ.get("CLAUDE_PROJECT_DIR", os.getcwd())
-    path = Path(file_path)
-    ext = path.suffix
-
-    if ext == ".rs":
-        format_rust(file_path, cwd)
-    elif ext in (".py", ".pyi"):
-        format_python(file_path, cwd)
-    elif ext in (".json5", ".yaml", ".yml"):
-        format_prettier(file_path, cwd)
-    elif ext == ".md":
-        format_prettier(file_path, cwd, prose_wrap=True)
-
-
-if __name__ == "__main__":
-    main()
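For reference, the hook above reads a JSON payload on stdin and acts only on the `tool_name` and `tool_input.file_path` fields. A minimal local smoke test might look like this (the payload shape is an assumption inferred from the fields the script reads, not a documented schema):

# Hypothetical invocation of the hook above; the payload fields mirror what
# the script reads. The exact schema Claude Code sends is an assumption.
import json
import subprocess

payload = {"tool_name": "Edit", "tool_input": {"file_path": "crates/uv/src/main.rs"}}
subprocess.run(
    ["uv", "run", ".claude/hooks/post-edit-format.py"],
    input=json.dumps(payload),
    text=True,
    check=True,
)

Because every formatter call swallows FileNotFoundError, the hook degrades to a no-op on machines without cargo, uvx, or npx installed.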
@@ -1,15 +0,0 @@
-{
-  "hooks": {
-    "PostToolUse": [
-      {
-        "matcher": "Edit|Write|MultiEdit",
-        "hooks": [
-          {
-            "type": "command",
-            "command": "uv run .claude/hooks/post-edit-format.py"
-          }
-        ]
-      }
-    ]
-  }
-}
@@ -2,14 +2,3 @@
 # Mark tests that take longer than 10s as slow.
 # Terminate after 120s as a stop-gap measure to terminate on deadlock.
 slow-timeout = { period = "10s", terminate-after = 12 }
-
-[test-groups]
-serial = { max-threads = 1 }
-
-[[profile.default.overrides]]
-filter = 'test(native_auth)'
-test-group = 'serial'
-
-[[profile.default.overrides]]
-filter = 'package(uv-keyring)'
-test-group = 'serial'
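(For context: in nextest, `terminate-after` counts slow-timeout periods, so `terminate-after = 12` with a `10s` period terminates a runaway test after 12 * 10s = 120s, which is the 120s the comment refers to. The removed `[test-groups]` machinery is how nextest serializes tests: `max-threads = 1` forces everything matched by the two overrides, the `native_auth` tests and the `uv-keyring` package, to run one at a time.)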
@@ -3,19 +3,20 @@
   dependencyDashboard: true,
   suppressNotifications: ["prEditedNotification"],
   extends: [
-    "github>astral-sh/renovate-config",
+    "config:recommended",
     // For tool versions defined in GitHub Actions:
     "customManagers:githubActionsVersions",
   ],
   labels: ["internal"],
-  schedule: ["* 0-3 * * 1"],
+  schedule: ["before 4am on Monday"],
   semanticCommits: "disabled",
   separateMajorMinor: false,
+  prHourlyLimit: 10,
   enabledManagers: ["github-actions", "pre-commit", "cargo", "custom.regex"],
   cargo: {
     // See https://docs.renovatebot.com/configuration-options/#rangestrategy
     rangeStrategy: "update-lockfile",
-    managerFilePatterns: ["/^Cargo\\.toml$/", "/^crates/.*Cargo\\.toml$/"],
+    fileMatch: ["^crates/.*Cargo\\.toml$"],
   },
   "pre-commit": {
     enabled: true,

@@ -85,61 +86,18 @@
       description: "Weekly update of pyo3 dependencies",
       enabled: false,
     },
-    {
-      groupName: "pubgrub",
-      matchManagers: ["cargo"],
-      matchDepNames: ["pubgrub", "version-ranges"],
-      description: "version-ranges and pubgrub are in the same Git repository",
-    },
-    {
-      commitMessageTopic: "MSRV",
-      matchManagers: ["custom.regex"],
-      matchDepNames: ["msrv"],
-      // We have a rolling support policy for the MSRV
-      // 2 releases back * 6 weeks per release * 7 days per week + 1
-      minimumReleaseAge: "85 days",
-      internalChecksFilter: "strict",
-      groupName: "MSRV",
-    },
-    {
-      matchManagers: ["custom.regex"],
-      matchDepNames: ["rust"],
-      commitMessageTopic: "Rust",
-    },
   ],
   customManagers: [
     // Update major GitHub actions references in documentation.
     {
       customType: "regex",
-      managerFilePatterns: ["/^docs/.*\\.md$/"],
+      fileMatch: ["^docs/.*\\.md$"],
       matchStrings: [
         "\\suses: (?<depName>[\\w-]+/[\\w-]+)(?<path>/.*)?@(?<currentValue>.+?)\\s",
       ],
       datasourceTemplate: "github-tags",
       versioningTemplate: "regex:^v(?<major>\\d+)$",
     },
-    // Minimum supported Rust toolchain version
-    {
-      customType: "regex",
-      managerFilePatterns: ["/(^|/)Cargo\\.toml?$/"],
-      matchStrings: [
-        'rust-version\\s*=\\s*"(?<currentValue>\\d+\\.\\d+(\\.\\d+)?)"',
-      ],
-      depNameTemplate: "msrv",
-      packageNameTemplate: "rust-lang/rust",
-      datasourceTemplate: "github-releases",
-    },
-    // Rust toolchain version
-    {
-      customType: "regex",
-      managerFilePatterns: ["/(^|/)rust-toolchain\\.toml?$/"],
-      matchStrings: [
-        'channel\\s*=\\s*"(?<currentValue>\\d+\\.\\d+(\\.\\d+)?)"',
-      ],
-      depNameTemplate: "rust",
-      packageNameTemplate: "rust-lang/rust",
-      datasourceTemplate: "github-releases",
-    },
   ],
   vulnerabilityAlerts: {
     commitMessageSuffix: "",
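The MSRV comment above encodes its own arithmetic: 2 releases back * 6 weeks per release * 7 days per week + 1 = 85 days. The custom regex managers can also be sanity-checked outside Renovate; a hedged example follows (Renovate's JS-style `(?<name>...)` groups become `(?P<name>...)` in Python, and the input string here is illustrative):

# Check that the MSRV custom-manager regex above captures the version from a
# Cargo.toml `rust-version` line. The input value is illustrative.
import re

pattern = r'rust-version\s*=\s*"(?P<currentValue>\d+\.\d+(\.\d+)?)"'
match = re.search(pattern, 'rust-version = "1.86"')
assert match is not None and match.group("currentValue") == "1.86"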
@@ -59,9 +59,8 @@ jobs:
       - name: "Prep README.md"
         run: python scripts/transform_readme.py --target pypi
       - name: "Build sdist"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           command: sdist
           args: --out dist
       - name: "Test sdist"

@@ -80,9 +79,8 @@ jobs:
 
       # uv-build
       - name: "Build sdist uv-build"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           command: sdist
           args: --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
       - name: "Test sdist uv-build"

@@ -98,7 +96,7 @@ jobs:
 
   macos-x86_64:
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
-    runs-on: depot-macos-14
+    runs-on: macos-14
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:

@@ -113,9 +111,8 @@ jobs:
 
       # uv
       - name: "Build wheels - x86_64"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           target: x86_64
           args: --release --locked --out dist --features self-update
       - name: "Upload wheels"

@@ -144,9 +141,8 @@ jobs:
 
       # uv-build
       - name: "Build wheels uv-build - x86_64"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           target: x86_64
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
       - name: "Upload wheels uv-build"

@@ -157,7 +153,7 @@ jobs:
 
   macos-aarch64:
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
-    runs-on: depot-macos-14
+    runs-on: macos-14
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:

@@ -172,9 +168,8 @@ jobs:
 
       # uv
       - name: "Build wheels - aarch64"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           target: aarch64
           args: --release --locked --out dist --features self-update
       - name: "Test wheel - aarch64"

@@ -209,9 +204,8 @@ jobs:
 
       # uv-build
       - name: "Build wheels uv-build - aarch64"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           target: aarch64
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
       - name: "Test wheel - aarch64"

@@ -251,9 +245,8 @@ jobs:
 
       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           args: --release --locked --out dist --features self-update,windows-gui-bin
       - name: "Test wheel"

@@ -290,9 +283,8 @@ jobs:
 
       # uv-build
       - name: "Build wheels uv-build"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
       - name: "Test wheel uv-build"

@@ -330,9 +322,8 @@ jobs:
 
       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.target }}
           # Generally, we try to build in a target docker container. In this case however, a
           # 32-bit compiler runs out of memory (4GB memory limit for 32-bit), so we cross compile

@@ -397,9 +388,8 @@ jobs:
 
       # uv-build
       - name: "Build wheels uv-build"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.target }}
           manylinux: auto
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml

@@ -417,7 +407,7 @@ jobs:
 
   linux-arm:
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
-    runs-on: depot-ubuntu-22.04-8
+    runs-on: ubuntu-latest
     timeout-minutes: 30
     strategy:
       matrix:

@@ -445,9 +435,8 @@ jobs:
 
       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           # On `aarch64`, use `manylinux: 2_28`; otherwise, use `manylinux: auto`.
           manylinux: ${{ matrix.platform.arch == 'aarch64' && '2_28' || 'auto' }}

@@ -499,9 +488,8 @@ jobs:
 
       # uv-build
       - name: "Build wheels uv-build"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           # On `aarch64`, use `manylinux: 2_28`; otherwise, use `manylinux: auto`.
           manylinux: ${{ matrix.platform.arch == 'aarch64' && '2_28' || 'auto' }}

@@ -554,14 +542,15 @@ jobs:
 
       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}
           args: --release --locked --out dist --features self-update
-          rust-toolchain: ${{ matrix.platform.toolchain || null }}
+          # Until the llvm updates hit stable
+          # https://github.com/rust-lang/rust/issues/141287
+          rust-toolchain: nightly-2025-05-25
       - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
         if: matrix.platform.arch != 'ppc64'
         name: "Test wheel"

@@ -609,9 +598,8 @@ jobs:
 
       # uv-build
       - name: "Build wheels uv-build"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}

@@ -669,9 +657,8 @@ jobs:
 
       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}

@@ -728,9 +715,8 @@ jobs:
 
       # uv-build
       - name: "Build wheels uv-build"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}

@@ -773,9 +759,8 @@ jobs:
 
       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}

@@ -827,9 +812,8 @@ jobs:
 
       # uv-build
       - name: "Build wheels uv-build"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: auto
           docker-options: ${{ matrix.platform.maturin_docker_options }}

@@ -880,9 +864,8 @@ jobs:
 
       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.target }}
           manylinux: musllinux_1_1
           args: --release --locked --out dist --features self-update

@@ -929,9 +912,8 @@ jobs:
 
       # uv-build
       - name: "Build wheels uv-build"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.target }}
           manylinux: musllinux_1_1
           args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml

@@ -956,7 +938,7 @@ jobs:
 
   musllinux-cross:
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
-    runs-on: depot-ubuntu-22.04-8
+    runs-on: ubuntu-latest
     strategy:
       matrix:
         platform:

@@ -980,9 +962,8 @@ jobs:
 
       # uv
       - name: "Build wheels"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: musllinux_1_1
           args: --release --locked --out dist --features self-update ${{ matrix.platform.arch == 'aarch64' && '--compatibility 2_17' || ''}}

@@ -1053,9 +1034,8 @@ jobs:
 
       # uv-build
       - name: "Build wheels"
-        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
+        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
         with:
-          maturin-version: v1.9.6
           target: ${{ matrix.platform.target }}
           manylinux: musllinux_1_1
           args: --profile minimal-size --locked ${{ matrix.platform.arch == 'aarch64' && '--compatibility 2_17' || ''}} --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
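A note on the `${{ ... && ... || ... }}` expressions in these hunks: GitHub Actions expressions have no ternary operator, so `cond && a || b` is the conventional substitute. It relies on short-circuit evaluation, just like the equivalent Python idiom, and is safe here because `'2_28'` and `'--compatibility 2_17'` are truthy values:

# Short-circuit "ternary" as used by the manylinux expressions above; correct
# whenever the "then" value is truthy.
arch = "aarch64"
manylinux = (arch == "aarch64") and "2_28" or "auto"
assert manylinux == "2_28"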
@@ -40,8 +40,6 @@ env:
   UV_GHCR_IMAGE: ghcr.io/${{ github.repository_owner }}/uv
   UV_DOCKERHUB_IMAGE: docker.io/astral/uv
 
-permissions: {}
-
 jobs:
   docker-plan:
     name: plan

@@ -59,13 +57,13 @@ jobs:
           IS_LOCAL_PR: ${{ github.event.pull_request.head.repo.full_name == 'astral-sh/uv' }}
         id: plan
         run: |
-          if [ "${DRY_RUN}" == "false" ]; then
+          if [ "${{ env.DRY_RUN }}" == "false" ]; then
             echo "login=true" >> "$GITHUB_OUTPUT"
             echo "push=true" >> "$GITHUB_OUTPUT"
-            echo "tag=${TAG}" >> "$GITHUB_OUTPUT"
+            echo "tag=${{ env.TAG }}" >> "$GITHUB_OUTPUT"
             echo "action=build and publish" >> "$GITHUB_OUTPUT"
           else
-            echo "login=${IS_LOCAL_PR}" >> "$GITHUB_OUTPUT"
+            echo "login=${{ env.IS_LOCAL_PR }}" >> "$GITHUB_OUTPUT"
             echo "push=false" >> "$GITHUB_OUTPUT"
             echo "tag=dry-run" >> "$GITHUB_OUTPUT"
             echo "action=build" >> "$GITHUB_OUTPUT"
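Note the direction of this change: on main, the script expands plain environment variables (`${DRY_RUN}`, `${TAG}`, `${IS_LOCAL_PR}`) supplied through `env:`, whereas 0.8.6 interpolated `${{ env.DRY_RUN }}` and friends directly into the `run:` body. Inlining `${{ ... }}` templates into shell scripts is a known script-injection hazard and one of the patterns zizmor flags; consistent with that, a zizmor workflow that exists only on main appears as a deletion later in this comparison.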
@@ -93,16 +91,15 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           submodules: recursive
-          persist-credentials: false
 
       # Login to DockerHub (when not pushing, it's to avoid rate-limiting)
-      - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         if: ${{ needs.docker-plan.outputs.login == 'true' }}
         with:
           username: ${{ needs.docker-plan.outputs.push == 'true' && 'astral' || 'astralshbot' }}
           password: ${{ needs.docker-plan.outputs.push == 'true' && secrets.DOCKERHUB_TOKEN_RW || secrets.DOCKERHUB_TOKEN_RO }}
 
-      - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}

@@ -114,20 +111,18 @@ jobs:
         if: ${{ needs.docker-plan.outputs.push == 'true' }}
         run: |
           version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
-          if [ "${TAG}" != "${version}" ]; then
+          if [ "${{ needs.docker-plan.outputs.tag }}" != "${version}" ]; then
             echo "The input tag does not match the version from pyproject.toml:" >&2
-            echo "${TAG}" >&2
+            echo "${{ needs.docker-plan.outputs.tag }}" >&2
             echo "${version}" >&2
             exit 1
           else
             echo "Releasing ${version}"
           fi
-        env:
-          TAG: ${{ needs.docker-plan.outputs.tag }}
 
       - name: Extract metadata (tags, labels) for Docker
         id: meta
-        uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
+        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
         env:
           DOCKER_METADATA_ANNOTATIONS_LEVELS: index
         with:

@@ -142,7 +137,7 @@ jobs:
 
       - name: Build and push by digest
         id: build
-        uses: depot/build-push-action@9785b135c3c76c33db102e45be96a25ab55cd507 # v1.16.2
+        uses: depot/build-push-action@2583627a84956d07561420dcc1d0eb1f2af3fac0 # v1.15.0
         with:
           project: 7hd4vdzmw5 # astral-sh/uv
           context: .

@@ -178,39 +173,24 @@ jobs:
         # Mapping of base image followed by a comma followed by one or more base tags (comma separated)
         # Note, org.opencontainers.image.version label will use the first base tag (use the most specific tag first)
         image-mapping:
-          - alpine:3.22,alpine3.22,alpine
-          - alpine:3.21,alpine3.21
-          - debian:trixie-slim,trixie-slim,debian-slim
-          - buildpack-deps:trixie,trixie,debian
-          - debian:bookworm-slim,bookworm-slim
-          - buildpack-deps:bookworm,bookworm
-          - python:3.14-alpine3.23,python3.14-alpine3.23,python3.14-alpine
-          - python:3.13-alpine3.23,python3.13-alpine3.23,python3.13-alpine
-          - python:3.12-alpine3.23,python3.12-alpine3.23,python3.12-alpine
-          - python:3.11-alpine3.23,python3.11-alpine3.23,python3.11-alpine
-          - python:3.10-alpine3.23,python3.10-alpine3.23,python3.10-alpine
-          - python:3.9-alpine3.22,python3.9-alpine3.22,python3.9-alpine
-          - python:3.8-alpine3.20,python3.8-alpine3.20,python3.8-alpine
-          - python:3.14-trixie,python3.14-trixie
-          - python:3.13-trixie,python3.13-trixie
-          - python:3.12-trixie,python3.12-trixie
-          - python:3.11-trixie,python3.11-trixie
-          - python:3.10-trixie,python3.10-trixie
-          - python:3.9-trixie,python3.9-trixie
-          - python:3.14-slim-trixie,python3.14-trixie-slim
-          - python:3.13-slim-trixie,python3.13-trixie-slim
-          - python:3.12-slim-trixie,python3.12-trixie-slim
-          - python:3.11-slim-trixie,python3.11-trixie-slim
-          - python:3.10-slim-trixie,python3.10-trixie-slim
-          - python:3.9-slim-trixie,python3.9-trixie-slim
-          - python:3.14-bookworm,python3.14-bookworm
+          - alpine:3.21,alpine3.21,alpine
+          - debian:bookworm-slim,bookworm-slim,debian-slim
+          - buildpack-deps:bookworm,bookworm,debian
+          - python:3.14-rc-alpine,python3.14-rc-alpine
+          - python:3.13-alpine,python3.13-alpine
+          - python:3.12-alpine,python3.12-alpine
+          - python:3.11-alpine,python3.11-alpine
+          - python:3.10-alpine,python3.10-alpine
+          - python:3.9-alpine,python3.9-alpine
+          - python:3.8-alpine,python3.8-alpine
+          - python:3.14-rc-bookworm,python3.14-rc-bookworm
           - python:3.13-bookworm,python3.13-bookworm
           - python:3.12-bookworm,python3.12-bookworm
           - python:3.11-bookworm,python3.11-bookworm
           - python:3.10-bookworm,python3.10-bookworm
           - python:3.9-bookworm,python3.9-bookworm
           - python:3.8-bookworm,python3.8-bookworm
-          - python:3.14-slim-bookworm,python3.14-bookworm-slim
+          - python:3.14-rc-slim-bookworm,python3.14-rc-bookworm-slim
           - python:3.13-slim-bookworm,python3.13-bookworm-slim
           - python:3.12-slim-bookworm,python3.12-bookworm-slim
           - python:3.11-slim-bookworm,python3.11-bookworm-slim

@@ -219,13 +199,13 @@ jobs:
           - python:3.8-slim-bookworm,python3.8-bookworm-slim
     steps:
       # Login to DockerHub (when not pushing, it's to avoid rate-limiting)
-      - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         if: ${{ needs.docker-plan.outputs.login == 'true' }}
         with:
           username: ${{ needs.docker-plan.outputs.push == 'true' && 'astral' || 'astralshbot' }}
           password: ${{ needs.docker-plan.outputs.push == 'true' && secrets.DOCKERHUB_TOKEN_RW || secrets.DOCKERHUB_TOKEN_RO }}
 
-      - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}

@@ -244,7 +224,7 @@ jobs:
           # Generate Dockerfile content
           cat <<EOF > Dockerfile
           FROM ${BASE_IMAGE}
-          COPY --from=${UV_GHCR_IMAGE}:latest /uv /uvx /usr/local/bin/
+          COPY --from=${{ env.UV_GHCR_IMAGE }}:latest /uv /uvx /usr/local/bin/
           ENV UV_TOOL_BIN_DIR="/usr/local/bin"
           ENTRYPOINT []
           CMD ["/usr/local/bin/uv"]

@@ -256,8 +236,8 @@ jobs:
           # Loop through all base tags and append its docker metadata pattern to the list
           # Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
           IFS=','; for TAG in ${BASE_TAGS}; do
-            TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${VERSION}\n"
-            TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${VERSION}\n"
+            TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${{ needs.docker-plan.outputs.tag }}\n"
+            TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${{ needs.docker-plan.outputs.tag }}\n"
             TAG_PATTERNS="${TAG_PATTERNS}type=raw,value=${TAG}\n"
           done

@@ -270,12 +250,10 @@ jobs:
             echo -e "${TAG_PATTERNS}"
             echo EOF
           } >> $GITHUB_ENV
-        env:
-          VERSION: ${{ needs.docker-plan.outputs.tag }}
 
       - name: Extract metadata (tags, labels) for Docker
         id: meta
-        uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
+        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
         # ghcr.io prefers index level annotations
         env:
           DOCKER_METADATA_ANNOTATIONS_LEVELS: index

@@ -290,7 +268,7 @@ jobs:
 
       - name: Build and push
         id: build-and-push
-        uses: depot/build-push-action@9785b135c3c76c33db102e45be96a25ab55cd507 # v1.16.2
+        uses: depot/build-push-action@2583627a84956d07561420dcc1d0eb1f2af3fac0 # v1.15.0
         with:
           context: .
           project: 7hd4vdzmw5 # astral-sh/uv

@@ -356,11 +334,6 @@ jobs:
   docker-annotate-base:
     name: annotate uv
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
-      id-token: write # for GHCR signing
-      packages: write # for GHCR image pushes
-      attestations: write # for GHCR attestations
     environment:
       name: ${{ needs.docker-plan.outputs.push == 'true' && 'release' || '' }}
     needs:

@@ -369,12 +342,12 @@ jobs:
       - docker-publish-extra
     if: ${{ needs.docker-plan.outputs.push == 'true' }}
     steps:
-      - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
           username: astral
           password: ${{ secrets.DOCKERHUB_TOKEN_RW }}
 
-      - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
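The tag-pattern loop above is terse shell; a Python sketch of the same expansion (illustrative only, not part of the workflow) makes the output shape easier to see:

# Re-statement of the TAG_PATTERNS loop: for each base tag, emit two pep440
# patterns (full version first, so it wins the org.opencontainers.image.version
# label) plus a raw tag.
def tag_patterns(base_tags: str, version: str) -> list[str]:
    patterns: list[str] = []
    for tag in base_tags.split(","):
        patterns.append(f"type=pep440,pattern={{{{ version }}}},suffix=-{tag},value={version}")
        patterns.append(f"type=pep440,pattern={{{{ major }}}}.{{{{ minor }}}},suffix=-{tag},value={version}")
        patterns.append(f"type=raw,value={tag}")
    return patterns

# One entry from the image-mapping list, as an example:
print("\n".join(tag_patterns("bookworm-slim,debian-slim", "0.8.6")))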
(File diff suppressed because it is too large.)
@@ -1,33 +0,0 @@
-# Publish a release to crates.io.
-#
-# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish job
-# within `cargo-dist`.
-name: "Publish to crates.io"
-
-on:
-  workflow_call:
-    inputs:
-      plan:
-        required: true
-        type: string
-
-jobs:
-  crates-publish-uv:
-    name: Upload uv to crates.io
-    runs-on: ubuntu-latest
-    environment:
-      name: release
-    permissions:
-      contents: read
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
-      # TODO(zanieb): Switch to trusted publishing once published
-      # - uses: rust-lang/crates-io-auth-action@v1
-      #   id: auth
-      - name: Publish workspace crates
-        # Note `--no-verify` is safe because we do a publish dry-run elsewhere in CI
-        run: cargo publish --workspace --no-verify
-        env:
-          CARGO_REGISTRY_TOKEN: ${{ secrets.CRATES_TOKEN }}
@@ -36,14 +36,6 @@ jobs:
         with:
           python-version: 3.12
 
-      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
-
-      - name: "Generate reference documentation"
-        run: |
-          cargo dev generate-options-reference
-          cargo dev generate-cli-reference
-          cargo dev generate-env-vars-reference
-
       - name: "Set docs display name"
         run: |
           version="${VERSION}"
@@ -18,10 +18,11 @@ jobs:
     environment:
       name: release
     permissions:
-      id-token: write # For PyPI's trusted publishing
+      # For PyPI's trusted publishing.
+      id-token: write
     steps:
       - name: "Install uv"
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
       - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
         with:
           pattern: wheels_uv-*

@@ -36,10 +37,11 @@ jobs:
     environment:
       name: release
     permissions:
-      id-token: write # For PyPI's trusted publishing
+      # For PyPI's trusted publishing.
+      id-token: write
     steps:
       - name: "Install uv"
-        uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+        uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
       - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
         with:
           pattern: wheels_uv_build-*
@@ -1,6 +1,7 @@
-# This file was autogenerated by dist: https://axodotdev.github.io/cargo-dist
+# This file was autogenerated by dist: https://github.com/astral-sh/cargo-dist
 #
 # Copyright 2022-2024, axodotdev
+# Copyright 2025 Astral Software Inc.
 # SPDX-License-Identifier: MIT or Apache-2.0
 #
 # CI that:

@@ -68,7 +69,7 @@ jobs:
         # we specify bash to get pipefail; it guards against the `curl` command
         # failing. otherwise `sh` won't catch that `curl` returned non-0
         shell: bash
-        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.2/cargo-dist-installer.sh | sh"
+        run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.7/cargo-dist-installer.sh | sh"
       - name: Cache dist
         uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
         with:

@@ -168,8 +169,8 @@ jobs:
       - custom-build-binaries
       - custom-build-docker
       - build-global-artifacts
-    # Only run if we're "publishing", and only if plan, local and global didn't fail (skipped is fine)
-    if: ${{ always() && needs.plan.result == 'success' && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
+    # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
+    if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
     env:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     runs-on: "depot-ubuntu-latest-4"

@@ -222,36 +223,17 @@ jobs:
       "id-token": "write"
       "packages": "write"
 
-  custom-publish-crates:
-    needs:
-      - plan
-      - host
-      - custom-publish-pypi # DIRTY: see #16989
-    if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
-    uses: ./.github/workflows/publish-crates.yml
-    with:
-      plan: ${{ needs.plan.outputs.val }}
-    secrets: inherit
-    # publish jobs get escalated permissions
-    permissions:
-      "contents": "read"
-
   # Create a GitHub Release while uploading all files to it
   announce:
     needs:
       - plan
       - host
       - custom-publish-pypi
-      - custom-publish-crates
     # use "always() && ..." to allow us to wait for all publish jobs while
     # still allowing individual publish jobs to skip themselves (for prereleases).
     # "host" however must run to completion, no skipping allowed!
-    if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') && (needs.custom-publish-crates.result == 'skipped' || needs.custom-publish-crates.result == 'success') }}
+    if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') }}
     runs-on: "depot-ubuntu-latest-4"
-    permissions:
-      "attestations": "write"
-      "contents": "write"
-      "id-token": "write"
     env:
       GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     steps:

@@ -270,15 +252,6 @@ jobs:
         run: |
           # Remove the granular manifests
           rm -f artifacts/*-dist-manifest.json
-      - name: Attest
-        uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2
-        with:
-          subject-path: |
-            artifacts/*.json
-            artifacts/*.sh
-            artifacts/*.ps1
-            artifacts/*.zip
-            artifacts/*.tar.gz
       - name: Create GitHub Release
         env:
           PRERELEASE_FLAG: "${{ fromJson(needs.host.outputs.val).announcement_is_prerelease && '--prerelease' || '' }}"
@@ -24,7 +24,7 @@ if ($env:DEPOT_RUNNER -eq "1") {
     # Create VHD and configure drive using diskpart
    $vhdPath = "C:\uv_dev_drive.vhdx"
    @"
-create vdisk file="$vhdPath" maximum=25600 type=expandable
+create vdisk file="$vhdPath" maximum=20480 type=expandable
 attach vdisk
 create partition primary
 active

@@ -41,9 +41,9 @@ assign letter=V
     Write-Output "Using existing drive at D:"
     $Drive = "D:"
 } else {
-    # The size (25 GB) is chosen empirically to be large enough for our
+    # The size (20 GB) is chosen empirically to be large enough for our
     # workflows; larger drives can take longer to set up.
-    $Volume = New-VHD -Path C:/uv_dev_drive.vhdx -SizeBytes 25GB |
+    $Volume = New-VHD -Path C:/uv_dev_drive.vhdx -SizeBytes 20GB |
         Mount-VHD -Passthru |
         Initialize-Disk -Passthru |
         New-Partition -AssignDriveLetter -UseMaximumSize |
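(The two code paths encode the same size: diskpart's `maximum=` is given in megabytes, so `maximum=25600` matches the 25 GB requested by `New-VHD -SizeBytes 25GB` on main, and `maximum=20480` matches the 20 GB in 0.8.6.)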
@@ -20,7 +20,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           persist-credentials: false
-      - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+      - uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
         with:
           version: "latest"
           enable-cache: true

@@ -49,4 +49,3 @@ jobs:
           title: "Sync latest Python releases"
           body: "Automated update for Python releases."
           base: "main"
-          draft: true
@@ -1,24 +0,0 @@
-name: zizmor
-
-on:
-  push:
-    branches: ["main"]
-  pull_request:
-    branches: ["**"]
-
-permissions: {}
-
-jobs:
-  zizmor:
-    name: Run zizmor
-    runs-on: ubuntu-latest
-    permissions:
-      security-events: write
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-        with:
-          persist-credentials: false
-
-      - name: Run zizmor
-        uses: zizmorcore/zizmor-action@5ca5fc7a4779c5263a3ffa0e1f693009994446d1 # v0.1.2
@@ -37,11 +37,6 @@ profile.json.gz
 # MkDocs
 /site
 
-# Generated reference docs (use `cargo dev generate-all` to regenerate)
-/docs/reference/cli.md
-/docs/reference/environment.md
-/docs/reference/settings.md
-
 # macOS
 **/.DS_Store
@@ -12,7 +12,7 @@ repos:
       - id: validate-pyproject
 
   - repo: https://github.com/crate-ci/typos
-    rev: v1.37.2
+    rev: v1.34.0
     hooks:
       - id: typos

@@ -42,7 +42,7 @@ repos:
         types_or: [yaml, json5]
 
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.13.3
+    rev: v0.12.7
     hooks:
      - id: ruff-format
      - id: ruff
@@ -4,5 +4,5 @@ PREVIEW-CHANGELOG.md
 docs/reference/cli.md
 docs/reference/settings.md
 docs/reference/environment.md
-test/ecosystem/home-assistant-core/LICENSE.md
+ecosystem/home-assistant-core/LICENSE.md
 docs/guides/integration/gitlab.md
@@ -1,14 +1,3 @@
-# These are versions of Python required for running uv's own test suite. You can add or remove
-# versions here as needed for tests; this doesn't impact uv's own functionality. They can be
-# installed through any means you like, e.g. `uv python install` if you already have a build of uv,
-# `cargo run python install`, or through some other installer.
-#
-# In uv's CI in GitHub Actions, they are bootstrapped by an existing released version of uv,
-# installed by the astral-sh/setup-uv action. If you need a newer or different version, you will
-# first need to complete a uv release capable of installing that version, get it picked up by
-# astral-sh/setup-uv, and update its hash in .github/workflows.
-
-3.14.0
 3.13.2
 3.12.9
 3.11.11

@@ -20,5 +9,3 @@
 3.9.12
 # The following is needed for `==3.13` request tests
 3.13.0
-# A pre-release version required for testing
-3.14.0rc2
778
CHANGELOG.md
778
CHANGELOG.md
|
|
@ -2,551 +2,327 @@
|
||||||
|
|
||||||
<!-- prettier-ignore-start -->
|
<!-- prettier-ignore-start -->
|
||||||
|
|
## 0.9.18

Released on 2025-12-16.

### Enhancements

- Add value hints to command line arguments to improve shell completion accuracy ([#17080](https://github.com/astral-sh/uv/pull/17080))
- Improve error handling in `uv publish` ([#17096](https://github.com/astral-sh/uv/pull/17096))
- Improve rendering of multiline error messages ([#17132](https://github.com/astral-sh/uv/pull/17132))
- Support redirects in `uv publish` ([#17130](https://github.com/astral-sh/uv/pull/17130))
- Include Docker images with the alpine version, e.g., `python3.x-alpine3.23` ([#17100](https://github.com/astral-sh/uv/pull/17100))

### Configuration

- Accept `--torch-backend` in `[tool.uv]` ([#17116](https://github.com/astral-sh/uv/pull/17116)); see the sketch below

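For example, a project can now pin the backend selection in `pyproject.toml` rather than passing `--torch-backend` on every invocation. A minimal sketch, assuming the key is spelled `torch-backend` and accepts the same values as the CLI flag (the `auto` value is illustrative):

```toml
# pyproject.toml — a sketch; key name and value mirror the CLI flag (assumed)
[tool.uv]
torch-backend = "auto"
```
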
### Performance

- Speed up `uv cache size` ([#17015](https://github.com/astral-sh/uv/pull/17015))
- Initialize S3 signer once ([#17092](https://github.com/astral-sh/uv/pull/17092))

### Bug fixes

- Avoid panics due to reads on failed requests ([#17098](https://github.com/astral-sh/uv/pull/17098))
- Enforce latest-version in `@latest` requests ([#17114](https://github.com/astral-sh/uv/pull/17114))
- Explicitly set `EntryType` for file entries in tar ([#17043](https://github.com/astral-sh/uv/pull/17043))
- Ignore `pyproject.toml` index username in lockfile comparison ([#16995](https://github.com/astral-sh/uv/pull/16995))
- Relax error when using `uv add` with `UV_GIT_LFS` set ([#17127](https://github.com/astral-sh/uv/pull/17127))
- Support file locks on ExFAT on macOS ([#17115](https://github.com/astral-sh/uv/pull/17115))
- Change schema for `exclude-newer` into optional string ([#17121](https://github.com/astral-sh/uv/pull/17121))

### Documentation

- Drop arm musl caveat from Docker documentation ([#17111](https://github.com/astral-sh/uv/pull/17111))
- Fix version reference in resolver example ([#17085](https://github.com/astral-sh/uv/pull/17085))
- Better documentation for `exclude-newer*` ([#17079](https://github.com/astral-sh/uv/pull/17079))

## 0.9.17

Released on 2025-12-09.

### Enhancements

- Add `torch-tensorrt` and `torchao` to the PyTorch list ([#17053](https://github.com/astral-sh/uv/pull/17053))
- Add hint for misplaced `--verbose` in `uv tool run` ([#17020](https://github.com/astral-sh/uv/pull/17020))
- Add support for relative durations in `exclude-newer` (a.k.a. dependency cooldowns) ([#16814](https://github.com/astral-sh/uv/pull/16814)); see the sketch after this list
- Add support for relocatable nushell activation script ([#17036](https://github.com/astral-sh/uv/pull/17036))

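A cooldown expresses "only use distributions published at least this long ago" as a duration relative to now, rather than as a fixed timestamp. A minimal sketch, assuming `exclude-newer` accepts a human-readable duration string (the `7 days` value and its exact spelling are illustrative):

```toml
# pyproject.toml — a sketch of a dependency cooldown; duration syntax is assumed
[tool.uv]
exclude-newer = "7 days"
```
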
### Bug fixes

- Respect dropped (but explicit) indexes in dependency groups ([#17012](https://github.com/astral-sh/uv/pull/17012))

### Documentation

- Improve `source-exclude` reference docs ([#16832](https://github.com/astral-sh/uv/pull/16832))
- Recommend `UV_NO_DEV` in Docker installs ([#17030](https://github.com/astral-sh/uv/pull/17030))
- Update `UV_VERSION` in docs for GitLab CI/CD ([#17040](https://github.com/astral-sh/uv/pull/17040))

## 0.9.16

Released on 2025-12-06.

### Python

- Add CPython 3.14.2
- Add CPython 3.13.11

### Enhancements

- Add a 5m default timeout to acquiring file locks to fail faster on deadlock ([#16342](https://github.com/astral-sh/uv/pull/16342))
- Add a stub `debug` subcommand to `uv pip` announcing its intentional absence ([#16966](https://github.com/astral-sh/uv/pull/16966))
- Add bounds in `uv add --script` ([#16954](https://github.com/astral-sh/uv/pull/16954))
- Add brew-specific message for `uv self update` ([#16838](https://github.com/astral-sh/uv/pull/16838))
- Error when built wheel is for the wrong platform ([#16074](https://github.com/astral-sh/uv/pull/16074))
- Filter wheels from PEP 751 files based on `--no-binary` et al. in `uv pip compile` ([#16956](https://github.com/astral-sh/uv/pull/16956))
- Support `--target` and `--prefix` in `uv pip list`, `uv pip freeze`, and `uv pip show` ([#16955](https://github.com/astral-sh/uv/pull/16955))
- Tweak language for build backend validation errors ([#16720](https://github.com/astral-sh/uv/pull/16720))
- Use explicit credentials cache instead of global static ([#16768](https://github.com/astral-sh/uv/pull/16768))
- Enable SIMD in HTML parsing ([#17010](https://github.com/astral-sh/uv/pull/17010))

### Preview features

- Fix missing preview warning in `uv workspace metadata` ([#16988](https://github.com/astral-sh/uv/pull/16988))
- Add a `uv auth helper --protocol bazel` command ([#16886](https://github.com/astral-sh/uv/pull/16886))

### Bug fixes

- Fix Pyston wheel compatibility tags ([#16972](https://github.com/astral-sh/uv/pull/16972))
- Allow redundant entries in `tool.uv.build-backend.module-name` but emit warnings ([#16928](https://github.com/astral-sh/uv/pull/16928))
- Fix infinite loop in non-attribute retreats during HTML parsing ([#17010](https://github.com/astral-sh/uv/pull/17010))

### Documentation

- Clarify `--project` flag help text to indicate project discovery ([#16965](https://github.com/astral-sh/uv/pull/16965))
- Regenerate the crates.io READMEs on release ([#16992](https://github.com/astral-sh/uv/pull/16992))
- Update Docker integration guide to prefer `COPY` over `ADD` for simple cases ([#16883](https://github.com/astral-sh/uv/pull/16883))
- Update PyTorch documentation to include information about supporting CUDA 13.0.x ([#16957](https://github.com/astral-sh/uv/pull/16957))
- Update the versioning policy ([#16710](https://github.com/astral-sh/uv/pull/16710))
- Upgrade PyTorch documentation to latest versions ([#16970](https://github.com/astral-sh/uv/pull/16970))

## 0.9.15

Released on 2025-12-02.

### Python

- Add CPython 3.14.1
- Add CPython 3.13.10

### Enhancements

- Add ROCm 6.4 to `--torch-backend=auto` ([#16919](https://github.com/astral-sh/uv/pull/16919))
- Add a Windows manifest to uv binaries ([#16894](https://github.com/astral-sh/uv/pull/16894))
- Add LFS toggle to Git sources ([#16143](https://github.com/astral-sh/uv/pull/16143)); see the sketch after this list
- Cache source reads during resolution ([#16888](https://github.com/astral-sh/uv/pull/16888))
- Allow reading requirements from scripts without an extension ([#16923](https://github.com/astral-sh/uv/pull/16923))
- Allow reading requirements from scripts with HTTP(S) paths ([#16891](https://github.com/astral-sh/uv/pull/16891))

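The LFS toggle lives on the Git source declaration itself. A minimal sketch, assuming the toggle is an `lfs` boolean on the source (the package name, URL, and flag spelling are illustrative):

```toml
# pyproject.toml — a sketch; `lfs = true` is an assumed spelling of the toggle
[tool.uv.sources]
example-pkg = { git = "https://github.com/example/example-pkg", lfs = true }
```
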
### Configuration

- Add `UV_HIDE_BUILD_OUTPUT` to omit build logs ([#16885](https://github.com/astral-sh/uv/pull/16885))

### Bug fixes

- Fix `uv-trampoline-builder` builds from crates.io by moving bundled executables ([#16922](https://github.com/astral-sh/uv/pull/16922))
- Respect `NO_COLOR` and always show the command as a header when paging `uv help` output ([#16908](https://github.com/astral-sh/uv/pull/16908))
- Use `0o666` permissions for flock files instead of `0o777` ([#16845](https://github.com/astral-sh/uv/pull/16845))
- Revert "Bump `astral-tl` to v0.7.10 (#16887)" to narrow down a regression causing hangs in metadata retrieval ([#16938](https://github.com/astral-sh/uv/pull/16938))

### Documentation

- Link to the uv version in crates.io member READMEs ([#16939](https://github.com/astral-sh/uv/pull/16939))

## 0.9.14

Released on 2025-12-01.

### Performance

- Bump `astral-tl` to v0.7.10 to enable SIMD for HTML parsing ([#16887](https://github.com/astral-sh/uv/pull/16887))

### Bug fixes

- Allow earlier post releases with exclusive ordering ([#16881](https://github.com/astral-sh/uv/pull/16881))
- Prefer updating existing `.zshenv` over creating a new one in `tool update-shell` ([#16866](https://github.com/astral-sh/uv/pull/16866))
- Respect `-e` flags in `uv add` ([#16882](https://github.com/astral-sh/uv/pull/16882))

### Enhancements

- Attach subcommand to User-Agent string ([#16837](https://github.com/astral-sh/uv/pull/16837))
- Prefer `UV_WORKING_DIR` over `UV_WORKING_DIRECTORY` for consistency ([#16884](https://github.com/astral-sh/uv/pull/16884))

## 0.9.13

Released on 2025-11-26.

### Bug fixes

- Revert "Allow `--with-requirements` to load extensionless inline-metadata scripts" to fix reading of requirements files from streams ([#16861](https://github.com/astral-sh/uv/pull/16861))
- Validate URL wheel tags against `Requires-Python` and required environments ([#16824](https://github.com/astral-sh/uv/pull/16824))

### Documentation

- Drop unpublished crates from the uv crates.io README ([#16847](https://github.com/astral-sh/uv/pull/16847))
- Fix the links to uv in crates.io member READMEs ([#16848](https://github.com/astral-sh/uv/pull/16848))

## 0.9.12

Released on 2025-11-24.

### Enhancements

- Allow `--with-requirements` to load extensionless inline-metadata scripts ([#16744](https://github.com/astral-sh/uv/pull/16744))
- Collect and upload PEP 740 attestations during `uv publish` ([#16731](https://github.com/astral-sh/uv/pull/16731))
- Prevent `uv export` from overwriting `pyproject.toml` ([#16745](https://github.com/astral-sh/uv/pull/16745))

### Documentation

- Add a crates.io README for uv ([#16809](https://github.com/astral-sh/uv/pull/16809))
- Add documentation for intermediate Docker layers in a workspace ([#16787](https://github.com/astral-sh/uv/pull/16787))
- Enumerate workspace members in the uv crate README ([#16811](https://github.com/astral-sh/uv/pull/16811))
- Fix documentation links for crates ([#16801](https://github.com/astral-sh/uv/pull/16801))
- Generate a crates.io README for uv workspace members ([#16812](https://github.com/astral-sh/uv/pull/16812))
- Move the "Export" guide to the projects concept section ([#16835](https://github.com/astral-sh/uv/pull/16835))
- Update the cargo install recommendation to use crates ([#16800](https://github.com/astral-sh/uv/pull/16800))
- Use the word "internal" in crate descriptions ([#16810](https://github.com/astral-sh/uv/pull/16810))

## 0.9.11

Released on 2025-11-20.

### Python

- Add CPython 3.15.0a2

See the [`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20251120) for details.

### Enhancements

- Add SBOM support to `uv export` ([#16523](https://github.com/astral-sh/uv/pull/16523))
- Publish to `crates.io` ([#16770](https://github.com/astral-sh/uv/pull/16770))

### Preview features

- Add `uv workspace list --paths` ([#16776](https://github.com/astral-sh/uv/pull/16776))
- Fix the preview warning on `uv workspace dir` ([#16775](https://github.com/astral-sh/uv/pull/16775))

### Bug fixes

- Fix `uv init` author serialization via `toml_edit` inline tables ([#16778](https://github.com/astral-sh/uv/pull/16778))
- Fix status messages without a TTY ([#16785](https://github.com/astral-sh/uv/pull/16785))
- Preserve end-of-line comment whitespace when editing `pyproject.toml` ([#16734](https://github.com/astral-sh/uv/pull/16734))
- Disable `always-authenticate` when running under Dependabot ([#16773](https://github.com/astral-sh/uv/pull/16773))

### Documentation

- Document the new behavior for free-threaded Python versions ([#16781](https://github.com/astral-sh/uv/pull/16781))
- Improve the note about build systems in the publish guide ([#16788](https://github.com/astral-sh/uv/pull/16788))
- Move the "do not upload" publish note out of the guide into concepts ([#16789](https://github.com/astral-sh/uv/pull/16789))

## 0.9.10

Released on 2025-11-17.

### Enhancements

- Add support for `SSL_CERT_DIR` ([#16473](https://github.com/astral-sh/uv/pull/16473))
- Enforce UTF-8-encoded license files during `uv build` ([#16699](https://github.com/astral-sh/uv/pull/16699))
- Error when a `project.license-files` glob matches nothing ([#16697](https://github.com/astral-sh/uv/pull/16697)); see the sketch after this list
- `uv pip install --target` (and `uv pip sync`) now install Python if necessary ([#16694](https://github.com/astral-sh/uv/pull/16694))
- Account for `python_downloads_json_url` in pre-release Python version warnings ([#16737](https://github.com/astral-sh/uv/pull/16737))
- Support HTTP/HTTPS URLs in `uv python --python-downloads-json-url` ([#16542](https://github.com/astral-sh/uv/pull/16542))

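Under [PEP 639](https://peps.python.org/pep-0639/), `license-files` takes a list of glob patterns; with this change, a pattern that matches no file is an error rather than being silently ignored. A minimal sketch (the globs below are illustrative):

```toml
# pyproject.toml — each glob must now match at least one file
[project]
license-files = ["LICEN[CS]E*", "AUTHORS*"]
```
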
### Preview features

- Add support for `--upgrade` in `uv python install` ([#16676](https://github.com/astral-sh/uv/pull/16676))
- Fix handling of `python install --default` for pre-release Python versions ([#16706](https://github.com/astral-sh/uv/pull/16706))
- Add `uv workspace list` to list workspace members ([#16691](https://github.com/astral-sh/uv/pull/16691))

### Bug fixes

- Don't check file URLs for ambiguously parsed credentials ([#16759](https://github.com/astral-sh/uv/pull/16759))

### Documentation

- Add a "storage" reference document ([#15954](https://github.com/astral-sh/uv/pull/15954))

## 0.9.9

Released on 2025-11-12.

### Deprecations

- Deprecate use of `--project` in `uv init` ([#16674](https://github.com/astral-sh/uv/pull/16674))

### Enhancements

- Add iOS support to Python interpreter discovery ([#16686](https://github.com/astral-sh/uv/pull/16686))
- Reject ambiguously parsed URLs ([#16622](https://github.com/astral-sh/uv/pull/16622))
- Allow explicit values in `uv version --bump` ([#16555](https://github.com/astral-sh/uv/pull/16555))
- Warn on use of managed pre-release Python versions when a stable version is available ([#16619](https://github.com/astral-sh/uv/pull/16619))
- Allow signing trampolines on Windows by using `.rcdata` to store metadata ([#15068](https://github.com/astral-sh/uv/pull/15068))
- Add `--only-emit-workspace` and similar variants to `uv export` ([#16681](https://github.com/astral-sh/uv/pull/16681))

### Preview features

- Add `uv workspace dir` command ([#16678](https://github.com/astral-sh/uv/pull/16678))
- Add `uv workspace metadata` command ([#16516](https://github.com/astral-sh/uv/pull/16516))

### Configuration

- Add `UV_NO_DEFAULT_GROUPS` environment variable ([#16645](https://github.com/astral-sh/uv/pull/16645))

### Bug fixes

- Remove `torch-model-archiver` and `torch-tb-profiler` from the PyTorch backend ([#16655](https://github.com/astral-sh/uv/pull/16655))
- Fix Pixi environment detection ([#16585](https://github.com/astral-sh/uv/pull/16585))

### Documentation

- Fix `CMD` path in FastAPI Dockerfile ([#16701](https://github.com/astral-sh/uv/pull/16701))

## 0.9.8

Released on 2025-11-07.

### Enhancements

- Accept multiple packages in `uv export` ([#16603](https://github.com/astral-sh/uv/pull/16603))
- Accept multiple packages in `uv sync` ([#16543](https://github.com/astral-sh/uv/pull/16543))
- Add a `uv cache size` command ([#16032](https://github.com/astral-sh/uv/pull/16032))
- Add prerelease guidance for build-system resolution failures ([#16550](https://github.com/astral-sh/uv/pull/16550))
- Allow Python requests to include `+gil` to require a GIL-enabled interpreter ([#16537](https://github.com/astral-sh/uv/pull/16537))
- Avoid pluralizing 'retry' for a single value ([#16535](https://github.com/astral-sh/uv/pull/16535))
- Enable first-class dependency exclusions ([#16528](https://github.com/astral-sh/uv/pull/16528))
- Fix inclusive constraints on available package versions in resolver errors ([#16629](https://github.com/astral-sh/uv/pull/16629))
- Improve `uv init` error for invalid directory names ([#16554](https://github.com/astral-sh/uv/pull/16554))
- Show help on `uv build -h` ([#16632](https://github.com/astral-sh/uv/pull/16632))
- Include the Python variant suffix in "Using Python ..." messages ([#16536](https://github.com/astral-sh/uv/pull/16536))
- Log the most recently modified file for cache-keys ([#16338](https://github.com/astral-sh/uv/pull/16338))
- Update Docker builds to use nightly Rust toolchain with musl v1.2.5 ([#16584](https://github.com/astral-sh/uv/pull/16584))
- Add GitHub attestations for uv release artifacts ([#11357](https://github.com/astral-sh/uv/pull/11357))

### Configuration

- Expose `UV_NO_GROUP` as an environment variable ([#16529](https://github.com/astral-sh/uv/pull/16529))
- Add `UV_NO_SOURCES` as an environment variable ([#15883](https://github.com/astral-sh/uv/pull/15883))

### Bug fixes

- Allow `--check` and `--locked` to be used together in `uv lock` ([#16538](https://github.com/astral-sh/uv/pull/16538))
- Allow for unnormalized names in the METADATA file (#16547) ([#16548](https://github.com/astral-sh/uv/pull/16548))
- Fix missing `value_type` for `default-groups` in schema ([#16575](https://github.com/astral-sh/uv/pull/16575))
- Respect multi-GPU outputs in `nvidia-smi` ([#15460](https://github.com/astral-sh/uv/pull/15460))
- Fix DNS lookup errors in Docker containers ([#8450](https://github.com/astral-sh/uv/issues/8450))

### Documentation

- Fix typo in `uv tool list` doc ([#16625](https://github.com/astral-sh/uv/pull/16625))
- Note `uv pip list` name normalization in docs ([#13210](https://github.com/astral-sh/uv/pull/13210))

### Other changes

- Update Rust toolchain to 1.91 and MSRV to 1.89 ([#16531](https://github.com/astral-sh/uv/pull/16531))

## 0.9.7

Released on 2025-10-30.

### Enhancements

- Add Windows x86-32 emulation support to interpreter architecture checks ([#13475](https://github.com/astral-sh/uv/pull/13475))
- Improve readability of progress bars ([#16509](https://github.com/astral-sh/uv/pull/16509))

### Bug fixes

- Drop terminal coloring from `uv auth token` output ([#16504](https://github.com/astral-sh/uv/pull/16504))
- Don't use `UV_LOCKED` to enable the `--check` flag ([#16521](https://github.com/astral-sh/uv/pull/16521))

## 0.9.6

Released on 2025-10-29.

This release contains an upgrade to Astral's fork of `async_zip`, which addresses potential sources of ZIP parsing differentials between uv and other Python packaging tooling. See [GHSA-pqhf-p39g-3x64](https://github.com/astral-sh/uv/security/advisories/GHSA-pqhf-p39g-3x64) for additional details.

### Security

- Address ZIP parsing differentials ([GHSA-pqhf-p39g-3x64](https://github.com/astral-sh/uv/security/advisories/GHSA-pqhf-p39g-3x64))

### Python

- Upgrade GraalPy to 25.0.1 ([#16401](https://github.com/astral-sh/uv/pull/16401))

### Enhancements

- Add `--clear` to `uv build` to remove old build artifacts ([#16371](https://github.com/astral-sh/uv/pull/16371))
- Add `--no-create-gitignore` to `uv build` ([#16369](https://github.com/astral-sh/uv/pull/16369))
- Do not error when a virtual environment directory cannot be removed due to a busy error ([#16394](https://github.com/astral-sh/uv/pull/16394))
- Improve hint on `pip install --system` when externally managed ([#16392](https://github.com/astral-sh/uv/pull/16392))
- Running `uv lock --check` with an outdated lockfile now reports that `--check` was passed, instead of `--locked` ([#16322](https://github.com/astral-sh/uv/pull/16322))
- Update `uv init` template for Maturin ([#16449](https://github.com/astral-sh/uv/pull/16449))
- Improve ordering of Python sources in logs ([#16463](https://github.com/astral-sh/uv/pull/16463))
- Restore DockerHub release images and annotations ([#16441](https://github.com/astral-sh/uv/pull/16441))

### Bug fixes

- Check for matching Python implementation during `uv python upgrade` ([#16420](https://github.com/astral-sh/uv/pull/16420))
- Deterministically order `--find-links` distributions ([#16446](https://github.com/astral-sh/uv/pull/16446))
- Don't panic in `uv export --frozen` when the lockfile is outdated ([#16407](https://github.com/astral-sh/uv/pull/16407))
- Fix root of `uv tree` when `--package` is used with circular dependencies ([#15908](https://github.com/astral-sh/uv/pull/15908))
- Show package list with `pip freeze --quiet` ([#16491](https://github.com/astral-sh/uv/pull/16491))
- Limit `uv auth login pyx.dev` retries to 60s ([#16498](https://github.com/astral-sh/uv/pull/16498))
- Add an empty group with `uv add --group ... -r ...` ([#16490](https://github.com/astral-sh/uv/pull/16490))

### Documentation

- Update docs for the Maturin build backend init template ([#16469](https://github.com/astral-sh/uv/pull/16469))
- Update docs to reflect previous changes to signal forwarding semantics ([#16430](https://github.com/astral-sh/uv/pull/16430))
- Add instructions for installing via MacPorts ([#16039](https://github.com/astral-sh/uv/pull/16039))

## 0.9.5

Released on 2025-10-21.

This release contains an upgrade to `astral-tokio-tar`, which addresses a vulnerability in tar extraction on malformed archives with mismatching size information between the ustar header and PAX extensions. While the `astral-tokio-tar` advisory has been graded as "high" due to its potential broader impact, the *specific* impact to uv is **low** due to a lack of novel attacker capability. Specifically, uv only processes tar archives from source distributions, which already possess the capability for full arbitrary code execution by design, meaning that an attacker gains no additional capabilities through `astral-tokio-tar`.

Regardless, we take the hypothetical risk of parser differentials very seriously. Out of an abundance of caution, we have assigned this upgrade an advisory: https://github.com/astral-sh/uv/security/advisories/GHSA-w476-p2h3-79g9

### Security

- Upgrade `astral-tokio-tar` to 0.5.6 to address a parsing differential ([#16387](https://github.com/astral-sh/uv/pull/16387))

### Enhancements

- Add required environment marker example to hint ([#16244](https://github.com/astral-sh/uv/pull/16244))
- Fix typo in `MissingTopLevel` warning ([#16351](https://github.com/astral-sh/uv/pull/16351))
- Improve 403 Forbidden error message to indicate the package may not exist ([#16353](https://github.com/astral-sh/uv/pull/16353))
- Add a hint on `uv pip install` failure if the `--system` flag is used to select an externally managed interpreter ([#16318](https://github.com/astral-sh/uv/pull/16318))

### Bug fixes

- Fix backtick escaping for PowerShell ([#16307](https://github.com/astral-sh/uv/pull/16307))

### Documentation

- Document metadata consistency expectation ([#15683](https://github.com/astral-sh/uv/pull/15683))
- Remove outdated aarch64 musl note ([#16385](https://github.com/astral-sh/uv/pull/16385))

## 0.9.4

Released on 2025-10-17.

### Enhancements

- Add CUDA 13.0 support ([#16321](https://github.com/astral-sh/uv/pull/16321))
- Add auto-detection for Intel GPU on Windows ([#16280](https://github.com/astral-sh/uv/pull/16280))
- Implement display of RFC 9457 HTTP error contexts ([#16199](https://github.com/astral-sh/uv/pull/16199))

### Bug fixes

- Avoid obfuscating pyx tokens in `uv auth token` output ([#16345](https://github.com/astral-sh/uv/pull/16345))

## 0.9.3

Released on 2025-10-14.

### Python

- Add CPython 3.15.0a1
- Add CPython 3.13.9

### Enhancements

- Obfuscate secret token values in logs ([#16164](https://github.com/astral-sh/uv/pull/16164))

### Bug fixes

- Fix workspace handling with relative paths ([#16296](https://github.com/astral-sh/uv/pull/16296))

## 0.9.2

Released on 2025-10-10.

### Python

- Add CPython 3.9.24
- Add CPython 3.10.19
- Add CPython 3.11.14
- Add CPython 3.12.12

### Enhancements

- Avoid inferring check URLs for pyx in `uv publish` ([#16234](https://github.com/astral-sh/uv/pull/16234))
- Add `uv tool list --show-python` ([#15814](https://github.com/astral-sh/uv/pull/15814))

### Documentation

- Add missing "added in" notes to new environment variables in the reference ([#16217](https://github.com/astral-sh/uv/pull/16217))

## 0.9.1

Released on 2025-10-09.

### Enhancements

- Log Python choice in `uv init` ([#16182](https://github.com/astral-sh/uv/pull/16182))
- Fix `pylock.toml` config conflict error messages ([#16211](https://github.com/astral-sh/uv/pull/16211))

### Configuration

- Add `UV_UPLOAD_HTTP_TIMEOUT` and respect `UV_HTTP_TIMEOUT` in uploads ([#16040](https://github.com/astral-sh/uv/pull/16040))
- Support `UV_WORKING_DIRECTORY` for setting `--directory` ([#16125](https://github.com/astral-sh/uv/pull/16125))

### Bug fixes

- Allow missing `Scripts` directory ([#16206](https://github.com/astral-sh/uv/pull/16206))
- Fix handling of Python requests with pre-releases in ranges ([#16208](https://github.com/astral-sh/uv/pull/16208))
- Preserve comments on version bump ([#16141](https://github.com/astral-sh/uv/pull/16141))
- Retry all HTTP/2 errors ([#16038](https://github.com/astral-sh/uv/pull/16038))
- Treat deleted Windows registry keys as equivalent to missing ones ([#16194](https://github.com/astral-sh/uv/pull/16194))
- Ignore pre-release Python versions when a patch version is requested ([#16210](https://github.com/astral-sh/uv/pull/16210))

### Documentation

- Document why uv discards upper bounds on `requires-python` ([#15927](https://github.com/astral-sh/uv/pull/15927))
- Document the uv version that environment variables were added in ([#15196](https://github.com/astral-sh/uv/pull/15196))

## 0.9.0

Released on 2025-10-07.

This breaking release is primarily motivated by the release of Python 3.14, which contains some breaking changes (we recommend reading the ["What's new in Python 3.14"](https://docs.python.org/3/whatsnew/3.14.html) page). uv may use Python 3.14 in cases where it previously used 3.13, e.g., if you have not pinned your Python version and do not have any Python versions installed on your machine. While we think this is uncommon, we prefer to be cautious. We've included some additional small changes that could break workflows.

See our [Python 3.14](https://astral.sh/blog/python-3.14) blog post for some discussion of features we're excited about!

There are no breaking changes to [`uv_build`](https://docs.astral.sh/uv/concepts/build-backend/). If you have an upper bound in your `[build-system]` table, you should update it.

### Breaking changes

- **Python 3.14 is now the default stable version**

  The default Python version has changed from 3.13 to 3.14. This applies to Python version installation when no Python version is requested, e.g., `uv python install`. By default, uv will use the system Python version if present, so this may not cause changes to general use of uv. For example, if Python 3.13 is installed already, then `uv venv` will use that version. If no Python versions are installed on a machine and automatic downloads are enabled, uv will now use 3.14 instead of 3.13, e.g., for `uv venv` or `uvx python`. This change will not affect users who are using a `.python-version` file to pin to a specific Python version.

- **Allow use of free-threaded variants in Python 3.14+ without explicit opt-in** ([#16142](https://github.com/astral-sh/uv/pull/16142))

  Previously, free-threaded variants of Python were considered experimental and required explicit opt-in (i.e., with `3.14t`) for usage. Now uv will allow use of free-threaded Python 3.14+ interpreters without explicit selection. The GIL-enabled build of Python will still be preferred, e.g., when performing an installation with `uv python install 3.14`. However, e.g., if a free-threaded interpreter comes before a GIL-enabled build on the `PATH`, it will be used. This change does not apply to free-threaded Python 3.13 interpreters, which will continue to require opt-in.

- **Use Python 3.14 stable Docker images** ([#16150](https://github.com/astral-sh/uv/pull/16150))

  Previously, the Python 3.14 images had an `-rc` suffix, e.g., `python:3.14-rc-alpine` or `python:3.14-rc-trixie`. Now, the `-rc` suffix has been removed to match the stable [upstream images](https://hub.docker.com/_/python). The `-rc` image tags will no longer be updated. This change should not break existing workflows.

- **Upgrade Alpine Docker image to Alpine 3.22**

  Previously, the `uv:alpine` Docker image was based on Alpine 3.21. Now, this image is based on Alpine 3.22. The previous image can be recovered with `uv:alpine3.21` and will continue to be updated until a future release.

- **Upgrade Debian Docker images to Debian 13 "Trixie"**

  Previously, the `uv:debian` and `uv:debian-slim` Docker images were based on Debian 12 "Bookworm". Now, these images are based on Debian 13 "Trixie". The previous images can be recovered with `uv:bookworm` and `uv:bookworm-slim` and will continue to be updated until a future release.

- **Fix incorrect output path when a trailing `/` is used in `uv build`** ([#15133](https://github.com/astral-sh/uv/pull/15133))

  When using `uv build` in a workspace, the artifacts are intended to be written to a `dist` directory in the workspace root. A bug caused workspace root determination to fail when the input path included a trailing `/`, causing the `dist` directory to be placed in the child directory. This bug has been fixed in this release. For example, if `uv build child/` is used, the output path will now be `<workspace root>/dist/` rather than `<workspace root>/child/dist/`.

### Python

- Add CPython 3.14.0
- Add CPython 3.13.8

### Enhancements

- Don't warn when a dependency is constrained by another dependency ([#16149](https://github.com/astral-sh/uv/pull/16149))

### Bug fixes

- Fix `uv python upgrade` / `uv python install` output when there is a no-op for one request ([#16158](https://github.com/astral-sh/uv/pull/16158))
- Surface the pinned-version hint when `uv tool upgrade` can't move the tool ([#16081](https://github.com/astral-sh/uv/pull/16081))
- Ban pre-release versions in `uv python upgrade` requests ([#16160](https://github.com/astral-sh/uv/pull/16160))
- Fix `uv python upgrade` replacement of installed binaries when moving from a pre-release to a stable version ([#16159](https://github.com/astral-sh/uv/pull/16159))

### Documentation

- Update `uv pip compile` args in `layout.md` ([#16155](https://github.com/astral-sh/uv/pull/16155))

## 0.8.6

This release contains hardening measures to address differentials in behavior between uv and Python's built-in ZIP parser (CVE-2025-54368).

Prior to this release, attackers could construct ZIP files that would be extracted differently by pip, uv, and other tools. As a result, ZIPs could be constructed that would be considered harmless by (e.g.) scanners, but contain a malicious payload when extracted by uv. As of v0.8.6, uv now applies additional checks to reject such ZIPs.

Thanks to a triage effort with the [Python Security Response Team](https://devguide.python.org/developer-workflow/psrt/) and PyPI maintainers, we were able to determine that these differentials **were not exploited** via PyPI during the time they were present. The PyPI team has also implemented similar checks and now guards against these parsing differentials on upload.

Although the practical risk of exploitation is low, we take the _hypothetical_ risk of parser differentials very seriously. Out of an abundance of caution, we have assigned this advisory a CVE identifier and have given it a "moderate" severity suggestion.

These changes have been validated against the top 15,000 PyPI packages; however, it's plausible that a non-malicious ZIP could be falsely rejected by this additional hardening. As an escape hatch, users who do encounter breaking changes can set `UV_INSECURE_NO_ZIP_VALIDATION` to restore the previous behavior. If you encounter such a rejection, please file an issue with uv and with the upstream package.

### Security

- Harden ZIP streaming to reject repeated entries and other malformed ZIP files ([#15136](https://github.com/astral-sh/uv/pull/15136))

### Enhancements

- Sync latest Python releases ([#15135](https://github.com/astral-sh/uv/pull/15135))

### Configuration

- Add support for per-project build-time environment variables ([#15095](https://github.com/astral-sh/uv/pull/15095))

### Bug fixes

- Avoid invalid simplification with conflict markers ([#15041](https://github.com/astral-sh/uv/pull/15041))
- Respect `UV_HTTP_RETRIES` in `uv publish` ([#15106](https://github.com/astral-sh/uv/pull/15106))
- Support `UV_NO_EDITABLE` where `--no-editable` is supported ([#15107](https://github.com/astral-sh/uv/pull/15107))
- Upgrade `cargo-dist` to add `UV_INSTALLER_URL` to PowerShell installer ([#15114](https://github.com/astral-sh/uv/pull/15114))
- Upgrade `h2` again to avoid `too_many_internal_resets` errors ([#15111](https://github.com/astral-sh/uv/pull/15111))

### Documentation

- Ensure symlink warning is shown ([#15126](https://github.com/astral-sh/uv/pull/15126))

## 0.8.5

### Enhancements

- Enable `uv run` with a GitHub Gist ([#15058](https://github.com/astral-sh/uv/pull/15058))
- Improve HTTP response caching log messages ([#15067](https://github.com/astral-sh/uv/pull/15067))
- Show wheel tag hints in install plan ([#15066](https://github.com/astral-sh/uv/pull/15066))
- Support installing additional executables in `uv tool install` ([#14014](https://github.com/astral-sh/uv/pull/14014))

### Preview features

- Enable extra build dependencies to 'match runtime' versions ([#15036](https://github.com/astral-sh/uv/pull/15036))
- Remove duplicate `extra-build-dependencies` warnings for `uv pip` ([#15088](https://github.com/astral-sh/uv/pull/15088))
- Use "option" instead of "setting" in `pylock` warning ([#15089](https://github.com/astral-sh/uv/pull/15089))
- Respect extra build requires when reading from wheel cache ([#15030](https://github.com/astral-sh/uv/pull/15030))
- Preserve lowered extra build dependencies ([#15038](https://github.com/astral-sh/uv/pull/15038))

### Bug fixes

- Add Python versions to markers implied from wheels ([#14913](https://github.com/astral-sh/uv/pull/14913))
- Ensure consistent indentation when adding dependencies ([#14991](https://github.com/astral-sh/uv/pull/14991))
- Fix handling of `python-preference = system` when managed interpreters are on the PATH ([#15059](https://github.com/astral-sh/uv/pull/15059))
- Fix symlink preservation in virtual environment creation ([#14933](https://github.com/astral-sh/uv/pull/14933))
- Gracefully handle entrypoint permission errors ([#15026](https://github.com/astral-sh/uv/pull/15026))
- Include wheel hashes from local Simple indexes ([#14993](https://github.com/astral-sh/uv/pull/14993))
- Prefer system Python installations over managed ones when `--system` is used ([#15061](https://github.com/astral-sh/uv/pull/15061))
- Remove retry wrapper when matching on error kind ([#14996](https://github.com/astral-sh/uv/pull/14996))
- Revert `h2` upgrade ([#15079](https://github.com/astral-sh/uv/pull/15079))

### Documentation

- Improve visibility of copy and line separator in dark mode ([#14987](https://github.com/astral-sh/uv/pull/14987))

## 0.8.4

### Enhancements

- Improve styling of warning cause chains ([#14934](https://github.com/astral-sh/uv/pull/14934))
- Extend wheel filtering to Android tags ([#14977](https://github.com/astral-sh/uv/pull/14977))
- Perform wheel lockfile filtering based on platform and OS intersection ([#14976](https://github.com/astral-sh/uv/pull/14976))
- Clarify messaging when a new resolution needs to be performed ([#14938](https://github.com/astral-sh/uv/pull/14938))

### Preview features

- Add support for extending a package's build dependencies with `extra-build-dependencies` ([#14735](https://github.com/astral-sh/uv/pull/14735)); see the sketch after this list
- Split preview mode into separate feature flags ([#14823](https://github.com/astral-sh/uv/pull/14823))

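The extension point is declared per package; a minimal sketch, assuming the preview table maps a package name to extra requirements injected into its build environment (the package names below are illustrative):

```toml
# pyproject.toml — a sketch; the table shape is assumed from the feature name
[tool.uv.extra-build-dependencies]
flash-attn = ["torch"]
```
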
### Configuration

- Add support for package-specific `exclude-newer` dates via `exclude-newer-package` ([#14489](https://github.com/astral-sh/uv/pull/14489)); see the sketch below

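This allows one package to be held to an older snapshot than the rest of the resolution; a minimal sketch, assuming `exclude-newer-package` maps package names to timestamps (the dates and package name are illustrative):

```toml
# pyproject.toml — a sketch; the per-package date overrides the global exclude-newer
[tool.uv]
exclude-newer = "2025-01-01T00:00:00Z"
exclude-newer-package = { tqdm = "2024-06-01T00:00:00Z" }
```
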
### Bug fixes

- Avoid invalidating lockfile when path or workspace dependencies define explicit indexes ([#14876](https://github.com/astral-sh/uv/pull/14876))
- Copy entrypoints that have a shebang that differs in `python` vs `python3` ([#14970](https://github.com/astral-sh/uv/pull/14970))
- Fix incorrect file permissions in wheel packages ([#14930](https://github.com/astral-sh/uv/pull/14930))
- Update validation for `environments` and `required-environments` in `uv.toml` ([#14905](https://github.com/astral-sh/uv/pull/14905)); see the sketch after this list

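For reference, these are the settings whose validation changed; a minimal sketch of both in a `uv.toml` (the marker expressions are illustrative):

```toml
# uv.toml — a sketch; markers restrict and require resolution environments
environments = ["sys_platform == 'darwin'", "sys_platform == 'linux'"]
required-environments = ["sys_platform == 'linux' and platform_machine == 'aarch64'"]
```
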
### Documentation

- Show `uv_build` in projects documentation ([#14968](https://github.com/astral-sh/uv/pull/14968))
- Add `UV_` prefix to installer environment variables ([#14964](https://github.com/astral-sh/uv/pull/14964))
- Un-hide `uv` from `--build-backend` options ([#14939](https://github.com/astral-sh/uv/pull/14939))
- Update documentation for preview flags ([#14902](https://github.com/astral-sh/uv/pull/14902))

## 0.8.3

### Python

- Add CPython 3.14.0rc1

See the [`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250723) for more details.

### Enhancements

- Allow non-standard entrypoint names in `uv_build` ([#14867](https://github.com/astral-sh/uv/pull/14867))
- Publish riscv64 wheels to PyPI ([#14852](https://github.com/astral-sh/uv/pull/14852))

### Bug fixes

- Avoid writing redacted credentials to tool receipt ([#14855](https://github.com/astral-sh/uv/pull/14855))
- Respect `--with` versions over base environment versions ([#14863](https://github.com/astral-sh/uv/pull/14863))
- Respect credentials from all defined indexes ([#14858](https://github.com/astral-sh/uv/pull/14858))
- Fix missed stabilization of removal of registry entry during Python uninstall ([#14859](https://github.com/astral-sh/uv/pull/14859))
- Improve concurrency safety of Python downloads into cache ([#14846](https://github.com/astral-sh/uv/pull/14846))

### Documentation

- Fix typos in `uv_build` reference documentation ([#14853](https://github.com/astral-sh/uv/pull/14853))
- Move the "Cargo" install method further down in docs ([#14842](https://github.com/astral-sh/uv/pull/14842))

## 0.8.2

### Enhancements

- Add derivation chains for dependency errors ([#14824](https://github.com/astral-sh/uv/pull/14824))

### Configuration

- Add `UV_INIT_BUILD_BACKEND` ([#14821](https://github.com/astral-sh/uv/pull/14821))

### Bug fixes

- Avoid reading files in the environment bin that are not entrypoints ([#14830](https://github.com/astral-sh/uv/pull/14830))
- Avoid removing empty directories when constructing virtual environments ([#14822](https://github.com/astral-sh/uv/pull/14822))
- Preserve index URL priority order when writing to `pyproject.toml` ([#14831](https://github.com/astral-sh/uv/pull/14831))

### Rust API

- Expose `tls_built_in_root_certs` for client ([#14816](https://github.com/astral-sh/uv/pull/14816))

### Documentation

- Archive the 0.7.x changelog ([#14819](https://github.com/astral-sh/uv/pull/14819))

## 0.8.1

### Enhancements

- Add support for `HF_TOKEN` ([#14797](https://github.com/astral-sh/uv/pull/14797))
- Allow `--config-settings-package` to apply configuration settings at the package level ([#14573](https://github.com/astral-sh/uv/pull/14573))
- Create (e.g.) `python3.13t` executables in `uv venv` ([#14764](https://github.com/astral-sh/uv/pull/14764))
- Disallow writing symlinks outside the source distribution target directory ([#12259](https://github.com/astral-sh/uv/pull/12259))
- Elide traceback when `python -m uv` is interrupted with Ctrl-C on Windows ([#14715](https://github.com/astral-sh/uv/pull/14715))
- Match `--bounds` formatting for `uv_build` bounds in `uv init` ([#14731](https://github.com/astral-sh/uv/pull/14731))
- Support `extras` and `dependency_groups` markers in PEP 508 grammar ([#14753](https://github.com/astral-sh/uv/pull/14753))
- Support `extras` and `dependency_groups` markers on `uv pip install` and `uv pip sync` ([#14755](https://github.com/astral-sh/uv/pull/14755))
- Add hint to use `uv self version` when `uv version` cannot find a project ([#14738](https://github.com/astral-sh/uv/pull/14738))
- Improve error reporting when removing Python versions from the Windows registry ([#14722](https://github.com/astral-sh/uv/pull/14722))
- Make warnings about masked `[tool.uv]` fields more precise ([#14325](https://github.com/astral-sh/uv/pull/14325))

### Preview features

- Emit JSON output in `uv sync` with `--quiet` ([#14810](https://github.com/astral-sh/uv/pull/14810))

### Bug fixes

- Allow removal of virtual environments with missing interpreters ([#14812](https://github.com/astral-sh/uv/pull/14812))
- Apply `Cache-Control` overrides to response, not request headers ([#14736](https://github.com/astral-sh/uv/pull/14736))
- Copy entry points into ephemeral environments to ensure layers are respected ([#14790](https://github.com/astral-sh/uv/pull/14790))
- Work around Jupyter Lab application directory discovery in ephemeral environments ([#14790](https://github.com/astral-sh/uv/pull/14790))
- Enforce `requires-python` in `pylock.toml` ([#14787](https://github.com/astral-sh/uv/pull/14787))
- Fix kebab casing of `README` variants in build backend ([#14762](https://github.com/astral-sh/uv/pull/14762))
- Improve concurrency resilience of removing Python versions from the Windows registry ([#14717](https://github.com/astral-sh/uv/pull/14717))
- Retry HTTP requests on invalid data errors ([#14703](https://github.com/astral-sh/uv/pull/14703))
- Update virtual environment removal to delete `pyvenv.cfg` last ([#14808](https://github.com/astral-sh/uv/pull/14808))
- Error on unknown fields in `dependency-metadata` ([#14801](https://github.com/astral-sh/uv/pull/14801))

### Documentation

- Recommend installing `setup-uv` after `setup-python` in GitHub Actions integration guide ([#14741](https://github.com/astral-sh/uv/pull/14741))
- Clarify which portions of `requires-python` behavior are consistent with pip ([#14752](https://github.com/astral-sh/uv/pull/14752))

## 0.8.0

Since we released uv [0.7.0](https://github.com/astral-sh/uv/releases/tag/0.7.0) in April, we've accumulated various changes that improve correctness and user experience, but could break some workflows. This release contains those changes; many have been marked as breaking out of an abundance of caution. We expect most users to be able to upgrade without making changes.

This release also includes the stabilization of a couple of `uv python install` features, which have been available in preview since late last year.

### Breaking changes

- **Install Python executables into a directory on the `PATH` ([#14626](https://github.com/astral-sh/uv/pull/14626))**

  `uv python install` now installs a versioned Python executable (e.g., `python3.13`) into a directory on the `PATH` (e.g., `~/.local/bin`) by default. This behavior has been available under the `--preview` flag since [Oct 2024](https://github.com/astral-sh/uv/pull/8458). This change should not be breaking unless it shadows a Python executable elsewhere on the `PATH`.

  To install unversioned executables, i.e., `python3` and `python`, use the `--default` flag. The `--default` flag has also been in preview, but is not stabilized in this release.

  Note that these executables point to the base Python installation and only include the standard library. That means they will not include dependencies from your current project (use `uv run python` instead) and you cannot install packages into their environment (use `uvx --with <package> python` instead).

  As with tool installation, the target directory respects common variables like `XDG_BIN_HOME` and can be overridden with a `UV_PYTHON_BIN_DIR` variable.

  You can opt out of this behavior with `uv python install --no-bin` or `UV_PYTHON_INSTALL_BIN=0`.

  See the [documentation on installing Python executables](https://docs.astral.sh/uv/concepts/python-versions/#installing-python-executables) for more details.

- **Register Python versions with the Windows Registry ([#14625](https://github.com/astral-sh/uv/pull/14625))**

  `uv python install` now registers the installed Python version with the Windows Registry as specified by [PEP 514](https://peps.python.org/pep-0514/). This allows using uv-installed Python versions via the `py` launcher. This behavior has been available under the `--preview` flag since [Jan 2025](https://github.com/astral-sh/uv/pull/10634). This change should not be breaking, as using the uv Python versions with `py` requires explicit opt-in.

  You can opt out of this behavior with `uv python install --no-registry` or `UV_PYTHON_INSTALL_REGISTRY=0`.

- **Prompt before removing an existing directory in `uv venv` ([#14309](https://github.com/astral-sh/uv/pull/14309))**

  Previously, `uv venv` would remove an existing virtual environment without confirmation. While this is consistent with the behavior of project commands (e.g., `uv sync`), it's surprising to users that are using imperative workflows (i.e., `uv pip`). Now, `uv venv` will prompt for confirmation before removing an existing virtual environment. **If not in an interactive context, uv will still remove the virtual environment for backwards compatibility. However, this behavior is likely to change in a future release.**

  The behavior for other commands (e.g., `uv sync`) is unchanged.

  You can opt out of this behavior by setting `UV_VENV_CLEAR=1` or passing the `--clear` flag.

- **Validate that discovered interpreters meet the Python preference ([#7934](https://github.com/astral-sh/uv/pull/7934))**

  uv allows opting out of its managed Python versions with the `--no-managed-python` and `python-preference` options.

  Previously, uv would not enforce this option for Python interpreters discovered on the `PATH`. For example, if a symlink to a managed Python interpreter was created, uv would allow it to be used even if `--no-managed-python` was provided. Now, uv ignores Python interpreters that do not match the Python preference *unless* they are in an active virtual environment or are explicitly requested, e.g., with `--python /path/to/python3.13`.

  Similarly, uv would previously not invalidate existing project environments if they did not match the Python preference. Now, uv will invalidate and recreate project environments when the Python preference changes.

  You can opt out of this behavior by providing the explicit path to the Python interpreter, or by providing `--managed-python` / `--no-managed-python` matching the interpreter you want.

- **Install dependencies without build systems when they are `path` sources ([#14413](https://github.com/astral-sh/uv/pull/14413))**

  When working on a project, uv uses the [presence of a build system](https://docs.astral.sh/uv/concepts/projects/config/#build-systems) to determine if it should be built and installed into the environment. However, when a project is a dependency of another project, it can be surprising for the dependency to be missing from the environment.

  Previously, uv would not build and install dependencies with [`path` sources](https://docs.astral.sh/uv/concepts/projects/dependencies/#path) unless they declared a build system or set `tool.uv.package = true`. Now, dependencies with `path` sources are built and installed regardless of the presence of a build system. If a build system is not present, the `setuptools.build_meta:__legacy__` backend will be used (per [PEP 517](https://peps.python.org/pep-0517/#source-trees)).

  You can opt out of this behavior by setting `package = false` in the source declaration, e.g.:

  ```toml
  [tool.uv.sources]
  foo = { path = "./foo", package = false }
  ```

  Or, by setting `tool.uv.package = false` in the dependent `pyproject.toml`.

  See the documentation on [virtual dependencies](https://docs.astral.sh/uv/concepts/projects/dependencies/#virtual-dependencies) for details.

- **Install dependencies without build systems when they are workspace members ([#14663](https://github.com/astral-sh/uv/pull/14663))**

  As described above for dependencies with `path` sources, uv previously would not build and install workspace members that did not declare a build system. Now, uv will build and install workspace members that are a dependency of *another* workspace member regardless of the presence of a build system. The behavior is unchanged for workspace members that are not included in the `project.dependencies`, `project.optional-dependencies`, or `dependency-groups` tables of another workspace member.

  See the documentation on [virtual dependencies](https://docs.astral.sh/uv/concepts/projects/dependencies/#virtual-dependencies) for details.

  You can opt out of this behavior by setting `tool.uv.package = false` in the workspace member's `pyproject.toml`, as sketched below.

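  A minimal sketch of that opt-out, in the workspace member's own `pyproject.toml`:

  ```toml
  # pyproject.toml of the workspace member that should not be built and installed
  [tool.uv]
  package = false
  ```
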
|
- **Bump `--python-platform linux` to `manylinux_2_28` ([#14300](https://github.com/astral-sh/uv/pull/14300))**
|
||||||
|
|
||||||
|
uv allows performing [platform-specific resolution](https://docs.astral.sh/uv/concepts/resolution/#platform-specific-resolution) for explicit targets and provides short aliases, e.g., `linux`, for common targets.
|
||||||
|
|
||||||
|
Previously, the default target for `--python-platform linux` was `manylinux_2_17`, which is compatible with most Linux distributions from 2014 or newer. We now default to `manylinux_2_28`, which is compatible with most Linux distributions from 2019 or newer. This change follows the lead of other tools, such as `cibuildwheel`, which changed their default to `manylinux_2_28` in [Mar 2025](https://github.com/pypa/cibuildwheel/pull/2330).
|
||||||
|
|
||||||
|
This change only affects users requesting a specific target platform. Otherwise, uv detects the `manylinux` target from your local glibc version.
|
||||||
|
|
||||||
|
You can opt out of this behavior by using `--python-platform x86_64-manylinux_2_17` instead.
|
||||||
|
- **Remove `uv version` fallback ([#14161](https://github.com/astral-sh/uv/pull/14161))**
|
||||||
|
|
||||||
|
In [Apr 2025](https://github.com/astral-sh/uv/pull/12349), uv changed the `uv version` command to an interface for viewing and updating the version of the current project. However, when outside a project, `uv version` would continue to display uv's version for backwards compatibility. Now, when used outside of a project, `uv version` will fail.
|
||||||
|
|
||||||
|
You cannot opt out of this behavior. Use `uv self version` instead.
|
||||||
|
- **Require `--global` for removal of the global Python pin ([#14169](https://github.com/astral-sh/uv/pull/14169))**
|
||||||
|
|
||||||
|
Previously, `uv python pin --rm` would allow you to remove the global Python pin without opt in. Now, uv requires the `--global` flag to remove the global Python pin.
|
||||||
|
|
||||||
|
You cannot opt out of this behavior. Use the `--global` flag instead.
|
||||||
|
- **Support conflicting editable settings across groups ([#14197](https://github.com/astral-sh/uv/pull/14197))**
|
||||||
|
|
||||||
|
Previously, uv would always treat a package as editable if any requirement requested it as editable. However, this prevented users from declaring `path` sources that toggled the `editable` setting across dependency groups. Now, uv allows declaring different `editable` values for conflicting groups. However, if a project includes a path dependency twice, once with `editable = true` and once without any editable annotation, those are now considered conflicting, and uv will exit with an error.
|
||||||
|
|
||||||
|
You cannot opt out of this behavior. Use consistent `editable` settings or [mark groups as conflicting](https://docs.astral.sh/uv/concepts/projects/config/#conflicting-dependencies).
|
||||||
|

- **Make `uv_build` the default build backend in `uv init` ([#14661](https://github.com/astral-sh/uv/pull/14661))**

  The uv build backend (`uv_build`) was [stabilized in uv 0.7.19](https://github.com/astral-sh/uv/releases/tag/0.7.19). Now, it is the default build backend for `uv init --package` and `uv init --lib`. Previously, `hatchling` was the default build backend. A build backend is still not used without opt-in in `uv init`, but we expect to change this in a future release.

  You can opt out of this behavior with `uv init --build-backend hatchling`.
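
  For example (the project name is hypothetical):

  ```shell
  # Initializes a packaged project with the uv build backend by default.
  uv init --package example-app

  # Opt out by selecting hatchling explicitly.
  uv init --package --build-backend hatchling example-app
  ```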

- **Set default `UV_TOOL_BIN_DIR` on Docker images ([#13391](https://github.com/astral-sh/uv/pull/13391))**

  Previously, `UV_TOOL_BIN_DIR` was not set in Docker images, which meant that `uv tool install` did not install tools into a directory on the `PATH` without additional configuration. Now, `UV_TOOL_BIN_DIR` is set to `/usr/local/bin` in all Docker-derived images.

  When the default image user is overridden (e.g., `USER <UID>`) with a less privileged user, this may cause `uv tool install` to fail.

  You can opt out of this behavior by setting an alternative `UV_TOOL_BIN_DIR`.
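
  A minimal sketch of the opt-out for a non-root image user (the directory is illustrative):

  ```shell
  # Redirect tool executables to a directory the image user can write to.
  export UV_TOOL_BIN_DIR=/home/app/.local/bin
  uv tool install ruff
  ```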

- **Update `--check` to return an exit code of 1 ([#14167](https://github.com/astral-sh/uv/pull/14167))**

  uv uses an exit code of 1 to indicate a "successful failure" and an exit code of 2 to indicate an "error".

  Previously, `uv lock --check` and `uv sync --check` would exit with a code of 2 when the lockfile or environment were outdated. Now, uv will exit with a code of 1.

  You cannot opt out of this behavior.
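
  For example, in a CI step (a sketch; the shell handling is up to you):

  ```shell
  # Exits with 1 (not 2) when the lockfile is outdated.
  uv lock --check || echo "lockfile is outdated (exit $?)"
  ```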

- **Use an ephemeral environment for `uv run --with` invocations ([#14447](https://github.com/astral-sh/uv/pull/14447))**

  When using `uv run --with`, uv layers the requirements requested using `--with` into another virtual environment and caches it. Previously, uv would invoke the Python interpreter in this layered environment. However, this allowed poisoning the cached environment and introduced race conditions for concurrent invocations. Now, uv will layer *another* empty virtual environment on top of the cached environment and invoke the Python interpreter there. This should only cause breakage in cases where the environment is being inspected at runtime.

  You cannot opt out of this behavior.
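
  For example (`requests` is just an illustrative package):

  ```shell
  # The interpreter now runs in a fresh layer on top of the cached
  # `--with` environment, so runtime writes cannot poison the cache.
  uv run --with requests python -c "import requests; print(requests.__version__)"
  ```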

- **Restructure the `uv venv` command output and exit codes ([#14546](https://github.com/astral-sh/uv/pull/14546))**

  Previously, uv used `miette` to format the `uv venv` output. However, this was inconsistent with most of the uv CLI. Now, the output is a little different, and the exit code has switched from 1 to 2 for some error cases.

  You cannot opt out of this behavior.

- **Default to `--workspace` when adding subdirectories ([#14529](https://github.com/astral-sh/uv/pull/14529))**

  When using `uv add` to add a subdirectory in a workspace, uv now defaults to adding the target as a workspace member.

  You can opt out of this behavior by providing `--no-workspace`.
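
  For example (the subdirectory path is hypothetical):

  ```shell
  # Adds the subdirectory as a workspace member (the new default).
  uv add ./packages/utils

  # Adds it as a plain path dependency instead.
  uv add --no-workspace ./packages/utils
  ```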

- **Add missing validations for disallowed `uv.toml` fields ([#14322](https://github.com/astral-sh/uv/pull/14322))**

  uv does not allow some settings in the `uv.toml`. Previously, some settings were silently ignored when present in the `uv.toml`. Now, uv will error.

  You cannot opt out of this behavior. Use `--no-config` or remove the invalid settings.
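
  For example, to confirm a failure is caused by configuration discovery (a sketch):

  ```shell
  # Runs without discovering any uv.toml configuration.
  uv sync --no-config
  ```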

### Configuration

- Add support for toggling Python bin and registry install options via env vars ([#14662](https://github.com/astral-sh/uv/pull/14662))

## 0.7.x

# Contributor Covenant Code of Conduct

- [Our Pledge](#our-pledge)
- [Our Standards](#our-standards)
- [Enforcement Responsibilities](#enforcement-responsibilities)
- [Scope](#scope)
- [Enforcement](#enforcement)
- [Enforcement Guidelines](#enforcement-guidelines)
  - [1. Correction](#1-correction)
  - [2. Warning](#2-warning)
  - [3. Temporary Ban](#3-temporary-ban)
  - [4. Permanent Ban](#4-permanent-ban)
- [Attribution](#attribution)

## Our Pledge

We as members, contributors, and leaders pledge to make participation in our community a
harassment-free experience for everyone, regardless of age, body size, visible or invisible
disability, ethnicity, sex characteristics, gender identity and expression, level of experience,
education, socio-economic status, nationality, personal appearance, race, religion, or sexual
identity and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and
healthy community.

## Our Standards

Examples of behavior that contributes to a positive environment for our community include:

- Demonstrating empathy and kindness toward other people
- Being respectful of differing opinions, viewpoints, and experiences
- Giving and gracefully accepting constructive feedback
- Accepting responsibility and apologizing to those affected by our mistakes, and learning from the
  experience
- Focusing on what is best not just for us as individuals, but for the overall community

Examples of unacceptable behavior include:

- The use of sexualized language or imagery, and sexual attention or advances of any kind
- Trolling, insulting or derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or email address, without their
  explicit permission
- Other conduct which could reasonably be considered inappropriate in a professional setting

## Enforcement Responsibilities

Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior
and will take appropriate and fair corrective action in response to any behavior that they deem
inappropriate, threatening, offensive, or harmful.

Community leaders have the right and responsibility to remove, edit, or reject comments, commits,
code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and
will communicate reasons for moderation decisions when appropriate.

## Scope

This Code of Conduct applies within all community spaces, and also applies when an individual is
officially representing the community in public spaces. Examples of representing our community
include using an official e-mail address, posting via an official social media account, or acting as
an appointed representative at an online or offline event.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community
leaders responsible for enforcement at <hey@astral.sh>. All complaints will be reviewed and
investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the reporter of any
incident.

## Enforcement Guidelines

Community leaders will follow these Community Impact Guidelines in determining the consequences for
any action they deem in violation of this Code of Conduct:

### 1. Correction

**Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or
unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing clarity around the
nature of the violation and an explanation of why the behavior was inappropriate. A public apology
may be requested.

### 2. Warning

**Community Impact**: A violation through a single incident or series of actions.

**Consequence**: A warning with consequences for continued behavior. No interaction with the people
involved, including unsolicited interaction with those enforcing the Code of Conduct, for a
specified period of time. This includes avoiding interactions in community spaces as well as
external channels like social media. Violating these terms may lead to a temporary or permanent ban.

### 3. Temporary Ban

**Community Impact**: A serious violation of community standards, including sustained inappropriate
behavior.

**Consequence**: A temporary ban from any sort of interaction or public communication with the
community for a specified period of time. No public or private interaction with the people involved,
including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this
period. Violating these terms may lead to a permanent ban.

### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community standards, including
sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement
of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within the community.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, available
[here](https://www.contributor-covenant.org/version/2/0/code_of_conduct.html).

Community Impact Guidelines were inspired by
[Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity).

For answers to common questions about this code of conduct, see the
[FAQ](https://www.contributor-covenant.org/faq). Translations are available
[here](https://www.contributor-covenant.org/translations).

[homepage]: https://www.contributor-covenant.org

# Contributing

## Finding ways to help

We label issues that would be good for a first-time contributor as
[`good first issue`](https://github.com/astral-sh/uv/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22).
These usually do not require significant experience with Rust or the uv code base.

We label issues that we think are a good opportunity for subsequent contributions as
[`help wanted`](https://github.com/astral-sh/uv/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22).
These require varying levels of experience with Rust and uv. Often, we want to accomplish these
tasks but do not have the resources to do so ourselves.

You don't need our permission to start on an issue we have labeled as appropriate for community
contribution as described above. However, it's a good idea to indicate that you are going to work on
an issue to avoid concurrent attempts to solve the same problem.

Please check in with us before starting work on an issue that has not been labeled as appropriate
for community contribution. We're happy to receive contributions for other issues, but it's
important to make sure we have consensus on the solution to the problem first.

Outside of issues with the labels above, issues labeled as
[`bug`](https://github.com/astral-sh/uv/issues?q=is%3Aopen+is%3Aissue+label%3A%22bug%22) are the
best candidates for contribution. In contrast, issues labeled with `needs-decision` or
`needs-design` are _not_ good candidates for contribution. Please do not open pull requests for
issues with these labels.

Please do not open pull requests for new features without prior discussion. While we appreciate
exploration of new features, we will almost always close these pull requests immediately. Adding a
new feature to uv creates a long-term maintenance burden and requires strong consensus from the uv
team before it is appropriate to begin work on an implementation.

## Setup

@@ -40,12 +16,6 @@

On Ubuntu and other Debian-based distributions, you can install a C compiler with:

```shell
sudo apt install build-essential
```

On Fedora-based distributions, you can install a C compiler with:

```shell
sudo dnf install gcc
```

## Testing

For running tests, we recommend [nextest](https://nexte.st/).
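
For a quick start, a minimal sketch (assuming nextest is already installed):

```shell
cargo nextest run
```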

@@ -86,13 +56,6 @@ cargo test --package <package> --test <test> -- <test_name> -- --exact

```shell
cargo insta review
```

### Git and Git LFS

A subset of uv tests require both [Git](https://git-scm.com) and [Git LFS](https://git-lfs.com/) to
execute properly.

These tests can be disabled by turning off either the `git` or `git-lfs` uv features.

### Local testing

You can invoke your development version of uv with `cargo run -- <args>`. For example:

@@ -102,15 +65,6 @@

```shell
cargo run -- venv
cargo run -- pip install requests
```

## Crate structure

Rust does not allow circular dependencies between crates. To visualize the crate hierarchy, install
[cargo-depgraph](https://github.com/jplatte/cargo-depgraph) and graphviz, then run:

```shell
cargo depgraph --dedup-transitive-deps --workspace-only | dot -Tpng > graph.png
```

## Running inside a Docker container

Source distributions can run arbitrary code on build and can make unwanted modifications to your

@@ -136,7 +90,7 @@ Please refer to Ruff's

it applies to uv, too.

We provide diverse sets of requirements for testing and benchmarking the resolver in
`test/requirements` and for the installer in `test/requirements/compiled`.

You can use `scripts/benchmark` to benchmark predefined workloads between uv versions and with other
tools, e.g., from the `scripts/benchmark` directory:

@@ -147,7 +101,7 @@

```shell
uv run resolver \
    --poetry \
    --benchmark \
    resolve-cold \
    ../test/requirements/trio.in
```

### Analyzing concurrency

@@ -157,7 +111,7 @@ visualize parallel requests and find any spots where uv is CPU-bound. Example usage, with `uv` and

`uv-dev` respectively:

```shell
RUST_LOG=uv=info TRACING_DURATIONS_FILE=target/traces/jupyter.ndjson cargo run --features tracing-durations-export --profile profiling -- pip compile test/requirements/jupyter.in
```

Cargo.toml

@@ -4,88 +4,85 @@ exclude = [

    "scripts",
    # Needs nightly
    "crates/uv-trampoline",
]
resolver = "2"

[workspace.package]
edition = "2024"
rust-version = "1.89"
homepage = "https://pypi.org/project/uv/"
repository = "https://github.com/astral-sh/uv"
authors = ["uv"]
license = "MIT OR Apache-2.0"

[workspace.dependencies]
uv-auth = { version = "0.0.8", path = "crates/uv-auth" }
uv-bin-install = { version = "0.0.8", path = "crates/uv-bin-install" }
uv-build-backend = { version = "0.0.8", path = "crates/uv-build-backend" }
uv-build-frontend = { version = "0.0.8", path = "crates/uv-build-frontend" }
uv-cache = { version = "0.0.8", path = "crates/uv-cache" }
uv-cache-info = { version = "0.0.8", path = "crates/uv-cache-info" }
uv-cache-key = { version = "0.0.8", path = "crates/uv-cache-key" }
uv-cli = { version = "0.0.8", path = "crates/uv-cli" }
uv-client = { version = "0.0.8", path = "crates/uv-client" }
uv-configuration = { version = "0.0.8", path = "crates/uv-configuration" }
uv-console = { version = "0.0.8", path = "crates/uv-console" }
uv-dirs = { version = "0.0.8", path = "crates/uv-dirs" }
uv-dispatch = { version = "0.0.8", path = "crates/uv-dispatch" }
uv-distribution = { version = "0.0.8", path = "crates/uv-distribution" }
uv-distribution-filename = { version = "0.0.8", path = "crates/uv-distribution-filename" }
uv-distribution-types = { version = "0.0.8", path = "crates/uv-distribution-types" }
uv-extract = { version = "0.0.8", path = "crates/uv-extract" }
uv-flags = { version = "0.0.8", path = "crates/uv-flags" }
uv-fs = { version = "0.0.8", path = "crates/uv-fs", features = ["serde", "tokio"] }
uv-git = { version = "0.0.8", path = "crates/uv-git" }
uv-git-types = { version = "0.0.8", path = "crates/uv-git-types" }
uv-globfilter = { version = "0.0.8", path = "crates/uv-globfilter" }
uv-install-wheel = { version = "0.0.8", path = "crates/uv-install-wheel", default-features = false }
uv-installer = { version = "0.0.8", path = "crates/uv-installer" }
uv-keyring = { version = "0.0.8", path = "crates/uv-keyring" }
uv-logging = { version = "0.0.8", path = "crates/uv-logging" }
uv-macros = { version = "0.0.8", path = "crates/uv-macros" }
uv-metadata = { version = "0.0.8", path = "crates/uv-metadata" }
uv-normalize = { version = "0.0.8", path = "crates/uv-normalize" }
uv-once-map = { version = "0.0.8", path = "crates/uv-once-map" }
uv-options-metadata = { version = "0.0.8", path = "crates/uv-options-metadata" }
uv-performance-memory-allocator = { version = "0.0.8", path = "crates/uv-performance-memory-allocator" }
uv-pep440 = { version = "0.0.8", path = "crates/uv-pep440", features = ["tracing", "rkyv", "version-ranges"] }
uv-pep508 = { version = "0.0.8", path = "crates/uv-pep508", features = ["non-pep508-extensions"] }
uv-platform = { version = "0.0.8", path = "crates/uv-platform" }
uv-platform-tags = { version = "0.0.8", path = "crates/uv-platform-tags" }
uv-preview = { version = "0.0.8", path = "crates/uv-preview" }
uv-publish = { version = "0.0.8", path = "crates/uv-publish" }
uv-pypi-types = { version = "0.0.8", path = "crates/uv-pypi-types" }
uv-python = { version = "0.0.8", path = "crates/uv-python" }
uv-redacted = { version = "0.0.8", path = "crates/uv-redacted" }
uv-requirements = { version = "0.0.8", path = "crates/uv-requirements" }
uv-requirements-txt = { version = "0.0.8", path = "crates/uv-requirements-txt" }
uv-resolver = { version = "0.0.8", path = "crates/uv-resolver" }
uv-scripts = { version = "0.0.8", path = "crates/uv-scripts" }
uv-settings = { version = "0.0.8", path = "crates/uv-settings" }
uv-shell = { version = "0.0.8", path = "crates/uv-shell" }
uv-small-str = { version = "0.0.8", path = "crates/uv-small-str" }
uv-state = { version = "0.0.8", path = "crates/uv-state" }
uv-static = { version = "0.0.8", path = "crates/uv-static" }
uv-tool = { version = "0.0.8", path = "crates/uv-tool" }
uv-torch = { version = "0.0.8", path = "crates/uv-torch" }
uv-trampoline-builder = { version = "0.0.8", path = "crates/uv-trampoline-builder" }
uv-types = { version = "0.0.8", path = "crates/uv-types" }
uv-version = { version = "0.9.18", path = "crates/uv-version" }
uv-virtualenv = { version = "0.0.8", path = "crates/uv-virtualenv" }
uv-warnings = { version = "0.0.8", path = "crates/uv-warnings" }
uv-workspace = { version = "0.0.8", path = "crates/uv-workspace" }

ambient-id = { version = "0.0.7", default-features = false, features = ["astral-reqwest-middleware"] }
anstream = { version = "0.6.15" }
anyhow = { version = "1.0.89" }
arcstr = { version = "1.2.0" }
arrayvec = { version = "0.7.6" }
astral-tokio-tar = { version = "0.5.6" }
async-channel = { version = "2.3.1" }
async-compression = { version = "0.4.12", features = ["bzip2", "gzip", "xz", "zstd"] }
async-trait = { version = "0.1.82" }
async_http_range_reader = { version = "0.9.1", package = "astral_async_http_range_reader" }
async_zip = { version = "0.0.17", package = "astral_async_zip", features = ["bzip2", "deflate", "lzma", "tokio", "xz", "zstd"] }
axoupdater = { version = "0.9.0", default-features = false }
backon = { version = "1.3.0" }
base64 = { version = "0.22.1" }

@@ -100,27 +97,25 @@ configparser = { version = "3.1.0" }

console = { version = "0.16.0", default-features = false, features = ["std"] }
csv = { version = "1.3.0" }
ctrlc = { version = "3.4.5" }
cyclonedx-bom = { version = "0.8.0" }
dashmap = { version = "6.1.0" }
data-encoding = { version = "2.6.0" }
diskus = { version = "0.9.0", default-features = false }
dotenvy = { version = "0.15.7" }
dunce = { version = "1.0.5" }
either = { version = "1.13.0" }
encoding_rs_io = { version = "0.1.7" }
embed-manifest = { version = "1.5.0" }
etcetera = { version = "0.11.0" }
fastrand = { version = "2.3.0" }
flate2 = { version = "1.0.33", default-features = false, features = ["zlib-rs"] }
fs-err = { version = "3.0.0", features = ["tokio"] }
futures = { version = "0.3.30" }
glob = { version = "0.3.1" }
globset = { version = "0.4.15" }
globwalk = { version = "0.9.1" }
goblin = { version = "0.10.0", default-features = false, features = ["std", "elf32", "elf64", "endian_fd"] }
h2 = { version = "0.4.7" }
hashbrown = { version = "0.16.0" }
hex = { version = "0.4.3" }
html-escape = { version = "0.2.13" }
http = { version = "1.1.0" }
indexmap = { version = "2.5.0" }

@@ -135,7 +130,7 @@ memchr = { version = "2.7.4" }

miette = { version = "7.2.0", features = ["fancy-no-backtrace"] }
nanoid = { version = "0.4.0" }
nix = { version = "0.30.0", features = ["signal"] }
open = { version = "5.3.2" }
owo-colors = { version = "4.1.0" }
path-slash = { version = "0.2.1" }
pathdiff = { version = "0.2.1" }

@@ -143,17 +138,16 @@ percent-encoding = { version = "2.3.1" }

petgraph = { version = "0.8.0" }
proc-macro2 = { version = "1.0.86" }
procfs = { version = "0.17.0", default-features = false, features = ["flate2"] }
pubgrub = { version = "0.3.3", package = "astral-pubgrub" }
quote = { version = "1.0.37" }
rayon = { version = "1.10.0" }
ref-cast = { version = "1.0.24" }
reflink-copy = { version = "0.1.19" }
regex = { version = "1.10.6" }
regex-automata = { version = "0.4.8", default-features = false, features = ["dfa-build", "dfa-search", "perf", "std", "syntax"] }
reqsign = { version = "0.18.0", features = ["aws", "default-context"], default-features = false }
reqwest = { version = "0.12.22", default-features = false, features = ["json", "gzip", "deflate", "zstd", "stream", "system-proxy", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart", "http2", "blocking"] }
reqwest-middleware = { version = "0.4.2", package = "astral-reqwest-middleware", features = ["multipart"] }
reqwest-retry = { version = "0.7.0", package = "astral-reqwest-retry" }
rkyv = { version = "0.8.8", features = ["bytecheck"] }
rmp-serde = { version = "1.3.0" }
rust-netrc = { version = "0.1.2" }

@@ -162,15 +156,13 @@ rustix = { version = "1.0.0", default-features = false, features = ["fs", "std"] }

same-file = { version = "1.0.6" }
schemars = { version = "1.0.0", features = ["url2"] }
seahash = { version = "4.1.0" }
secret-service = { version = "5.0.0", features = ["rt-tokio-crypto-rust"] }
security-framework = { version = "3" }
self-replace = { version = "1.5.0" }
serde = { version = "1.0.210", features = ["derive", "rc"] }
serde-untagged = { version = "0.1.6" }
serde_json = { version = "1.0.128" }
sha2 = { version = "0.10.8" }
smallvec = { version = "1.13.2" }
spdx = { version = "0.13.0" }
syn = { version = "2.0.77" }
sys-info = { version = "0.9.1" }
tar = { version = "0.4.43" }

@@ -178,32 +170,31 @@ target-lexicon = { version = "0.13.0" }

tempfile = { version = "3.14.0" }
textwrap = { version = "0.16.1" }
thiserror = { version = "2.0.0" }
astral-tl = { version = "0.7.11" }
tokio = { version = "1.40.0", features = ["fs", "io-util", "macros", "process", "rt", "signal", "sync", "time"] }
tokio-stream = { version = "0.1.16" }
tokio-util = { version = "0.7.12", features = ["compat", "io"] }
toml = { version = "0.9.2", features = ["fast_hash"] }
toml_edit = { version = "0.23.2", features = ["serde"] }
tracing = { version = "0.1.40" }
tracing-durations-export = { version = "0.3.0", features = ["plot"] }
tracing-subscriber = { version = "0.3.18" } # Default feature set for uv_build, uv activates extra features
tracing-test = { version = "0.2.5" }
tracing-tree = { version = "0.4.0" }
unicode-width = { version = "0.2.0" }
unscanny = { version = "0.1.0" }
url = { version = "2.5.2", features = ["serde"] }
uuid = { version = "1.16.0" }
version-ranges = { version = "0.1.3", package = "astral-version-ranges" }
walkdir = { version = "2.5.0" }
which = { version = "8.0.0", features = ["regex"] }
windows = { version = "0.59.0", features = ["std", "Win32_Globalization", "Win32_System_LibraryLoader", "Win32_System_Console", "Win32_System_Kernel", "Win32_System_Diagnostics_Debug", "Win32_Storage_FileSystem", "Win32_Security", "Win32_System_Registry", "Win32_System_IO", "Win32_System_Ioctl"] }
windows-registry = { version = "0.5.0" }
wiremock = { version = "0.6.4" }
wmi = { version = "0.16.0", default-features = false }
xz2 = { version = "0.1.7" }
zeroize = { version = "1.8.1" }
zip = { version = "2.2.3", default-features = false, features = ["deflate", "zstd", "bzip2", "lzma", "xz"] }
zstd = { version = "0.13.3" }

# dev-dependencies
assert_cmd = { version = "2.0.16" }

@@ -212,19 +203,19 @@ byteorder = { version = "1.5.0" }

filetime = { version = "0.2.25" }
http-body-util = { version = "0.1.2" }
hyper = { version = "1.4.1", features = ["server", "http1"] }
hyper-util = { version = "0.1.8", features = ["tokio", "server", "http1"] }
ignore = { version = "0.4.23" }
insta = { version = "1.40.0", features = ["json", "filters", "redactions"] }
predicates = { version = "3.1.2" }
rcgen = { version = "0.14.5", features = ["crypto", "pem", "ring"], default-features = false }
rustls = { version = "0.23.29", default-features = false }
similar = { version = "2.6.0" }
temp-env = { version = "0.3.6" }
test-case = { version = "3.3.1" }
test-log = { version = "0.2.16", features = ["trace"], default-features = false }
tokio-rustls = { version = "0.26.2", default-features = false }
whoami = { version = "1.6.0" }

[workspace.lints.rust]
unsafe_code = "warn"
unreachable_pub = "warn"

@@ -310,18 +301,8 @@ strip = false

debug = "full"
lto = false

# Profile for fast test execution: Skip debug info generation, and
# apply basic optimization, which speed up build and running tests.
[profile.fast-build]
inherits = "dev"
opt-level = 1
debug = 0
strip = "debuginfo"

# Profile for faster builds: Skip debug info generation, for faster
# builds of smaller binaries.
[profile.no-debug]
inherits = "dev"
debug = 0
strip = "debuginfo"

@@ -336,3 +317,7 @@ codegen-units = 1

# The profile that 'cargo dist' will build with.
[profile.dist]
inherits = "release"

Dockerfile

@@ -7,6 +7,7 @@ RUN apt update \

    build-essential \
    curl \
    python3-venv \
    && apt clean \
    && rm -rf /var/lib/apt/lists/*

@@ -23,15 +24,8 @@ RUN case "$TARGETPLATFORM" in \

    *) exit 1 ;; \
esac

# Temporarily using nightly-2025-11-02 for bundled musl v1.2.5
# Ref: https://github.com/rust-lang/rust/pull/142682
# TODO(samypr100): Remove when toolchain updates to 1.93
COPY <<EOF rust-toolchain.toml
[toolchain]
channel = "nightly-2025-11-02"
EOF
# Update rustup whenever we bump the rust version
# COPY rust-toolchain.toml rust-toolchain.toml
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --target $(cat rust_target.txt) --profile minimal --default-toolchain none
ENV PATH="$HOME/.cargo/bin:$PATH"
# Install the toolchain then the musl target

README.md

@@ -42,7 +42,7 @@ An extremely fast Python package and project manager, written in Rust.

- 🖥️ Supports macOS, Linux, and Windows.

uv is backed by [Astral](https://astral.sh), the creators of
[Ruff](https://github.com/astral-sh/ruff) and [ty](https://github.com/astral-sh/ty).

## Installation

@@ -192,12 +192,14 @@ uv installs Python and allows quickly switching between versions.

Install multiple Python versions:

```console
$ uv python install 3.12 3.13 3.14
Installed 3 versions in 972ms
 + cpython-3.12.12-macos-aarch64-none (python3.12)
 + cpython-3.13.9-macos-aarch64-none (python3.13)
 + cpython-3.14.0-macos-aarch64-none (python3.14)
```

Download Python versions as needed:

@@ -268,6 +270,14 @@ Installed 43 packages in 208ms

See the [pip interface documentation](https://docs.astral.sh/uv/pip/index/) to get started.

## Contributing

We are passionate about supporting contributors of all levels of experience and would love to see

@@ -284,15 +294,6 @@ It's pronounced as "you - vee" ([`/juː viː/`](https://en.wikipedia.org/wiki/Help:IPA/English))

Just "uv", please. See the [style guide](./STYLE.md#styling-uv) for details.

#### What platforms does uv support?

See uv's [platform support](https://docs.astral.sh/uv/reference/platforms/) document.

#### Is uv ready for production?

Yes, uv is stable and widely used in production. See uv's
[versioning policy](https://docs.astral.sh/uv/reference/versioning/) document for details.

## Acknowledgements

uv's dependency resolver uses [PubGrub](https://github.com/pubgrub-rs/pubgrub) under the hood. We're

STYLE.md

@@ -16,7 +16,7 @@ documentation_.

1. If a message ends with a single relevant value, precede it with a colon, e.g.,
   `This is the value: value`. If the value is a literal, wrap it in backticks.
1. Markdown files should be wrapped at 100 characters.
1. Use a space, not an equals sign, for command-line arguments with a value, e.g.
   `--resolution lowest`, not `--resolution=lowest`.

## Styling uv

@@ -1,9 +1,8 @@

[files]
extend-exclude = [
    "**/snapshots/",
    "test/ecosystem/**",
    "test/requirements/**/*.in",
    "crates/uv-build-frontend/src/pipreqs/mapping",
]
ignore-hidden = false

@@ -982,7 +982,7 @@ for more details.

([#9135](https://github.com/astral-sh/uv/pull/9135))
- Tweak script `--no-project` comment ([#10331](https://github.com/astral-sh/uv/pull/10331))
- Update copyright year ([#10297](https://github.com/astral-sh/uv/pull/10297))
- Add instructions for installing with Scoop ([#10332](https://github.com/astral-sh/uv/pull/10332))

## 0.5.16

clippy.toml

@@ -8,7 +8,6 @@ doc-valid-idents = [

    "PyTorch",
    "ROCm",
    "XPU",
    "PowerShell",
    ".." # Include the defaults
]

@@ -17,11 +16,6 @@ disallowed-types = [

    "std::fs::File",
    "std::fs::OpenOptions",
    "std::fs::ReadDir",
    "tokio::fs::DirBuilder",
    "tokio::fs::DirEntry",
    "tokio::fs::File",
    "tokio::fs::OpenOptions",
    "tokio::fs::ReadDir",
]

disallowed-methods = [

@@ -43,28 +37,7 @@ disallowed-methods = [

    "std::fs::soft_link",
    "std::fs::symlink_metadata",
    "std::fs::write",
    "tokio::fs::canonicalize",
    "tokio::fs::copy",
    "tokio::fs::create_dir",
    "tokio::fs::create_dir_all",
    "tokio::fs::hard_link",
    "tokio::fs::metadata",
    "tokio::fs::read",
    "tokio::fs::read_dir",
    "tokio::fs::read_link",
    "tokio::fs::read_to_string",
    "tokio::fs::remove_dir",
    "tokio::fs::remove_dir_all",
    "tokio::fs::remove_file",
    "tokio::fs::rename",
    "tokio::fs::set_permissions",
    "tokio::fs::symlink_metadata",
    "tokio::fs::try_exists",
    "tokio::fs::write",
    { path = "std::os::unix::fs::symlink", allow-invalid = true },
    { path = "std::os::windows::fs::symlink_dir", allow-invalid = true },
    { path = "std::os::windows::fs::symlink_file", allow-invalid = true },
    { path = "tokio::fs::symlink", allow-invalid = true },
    { path = "tokio::fs::symlink_dir", allow-invalid = true },
    { path = "tokio::fs::symlink_file", allow-invalid = true },
]

crates/uv-auth/Cargo.toml

@@ -1,13 +1,7 @@

[package]
name = "uv-auth"
version = "0.0.8"
description = "This is an internal component crate of uv"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

[lib]
doctest = false

@@ -16,38 +10,25 @@ doctest = false

workspace = true

[dependencies]
uv-cache-key = { workspace = true }
uv-fs = { workspace = true }
uv-keyring = { workspace = true, features = ["apple-native", "secret-service", "windows-native"] }
uv-once-map = { workspace = true }
uv-preview = { workspace = true }
uv-redacted = { workspace = true }
uv-small-str = { workspace = true }
uv-state = { workspace = true }
uv-static = { workspace = true }
uv-warnings = { workspace = true }

anyhow = { workspace = true }
arcstr = { workspace = true }
async-trait = { workspace = true }
base64 = { workspace = true }
etcetera = { workspace = true }
fs-err = { workspace = true, features = ["tokio"] }
futures = { workspace = true }
http = { workspace = true }
jiff = { workspace = true }
percent-encoding = { workspace = true }
reqsign = { workspace = true }
reqwest = { workspace = true }
reqwest-middleware = { workspace = true }
rust-netrc = { workspace = true }
rustc-hash = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
toml = { workspace = true }
tracing = { workspace = true }
url = { workspace = true }

crates/uv-auth/README.md

@@ -1,13 +0,0 @@

<!-- This file is generated. DO NOT EDIT -->

# uv-auth

This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
is unstable and will have frequent breaking changes.

This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-auth).

See uv's
[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
for details on versioning.

@@ -1,34 +0,0 @@

/// An encoded JWT access token.
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
#[serde(transparent)]
pub struct AccessToken(String);

impl AccessToken {
    /// Return the [`AccessToken`] as a vector of bytes.
    pub fn into_bytes(self) -> Vec<u8> {
        self.0.into_bytes()
    }

    /// Return the [`AccessToken`] as a string slice.
    pub fn as_str(&self) -> &str {
        &self.0
    }
}

impl From<String> for AccessToken {
    fn from(value: String) -> Self {
        Self(value)
    }
}

impl AsRef<[u8]> for AccessToken {
    fn as_ref(&self) -> &[u8] {
        self.0.as_bytes()
    }
}

impl std::fmt::Display for AccessToken {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "****")
    }
}
@@ -11,8 +11,8 @@ use url::Url;
 use uv_once_map::OnceMap;
 use uv_redacted::DisplaySafeUrl;

-use crate::credentials::{Authentication, Username};
-use crate::{Credentials, Realm};
+use crate::Realm;
+use crate::credentials::{Credentials, Username};

 type FxOnceMap<K, V> = OnceMap<K, V, BuildHasherDefault<FxHasher>>;

@@ -33,14 +33,13 @@ impl Display for FetchUrl
     }
 }

-#[derive(Debug)] // All internal types are redacted.
 pub struct CredentialsCache {
     /// A cache per realm and username
-    realms: RwLock<FxHashMap<(Realm, Username), Arc<Authentication>>>,
+    realms: RwLock<FxHashMap<(Realm, Username), Arc<Credentials>>>,
     /// A cache tracking the result of realm or index URL fetches from external services
-    pub(crate) fetches: FxOnceMap<(FetchUrl, Username), Option<Arc<Authentication>>>,
+    pub(crate) fetches: FxOnceMap<(FetchUrl, Username), Option<Arc<Credentials>>>,
     /// A cache per URL, uses a trie for efficient prefix queries.
-    urls: RwLock<UrlTrie<Arc<Authentication>>>,
+    urls: RwLock<UrlTrie>,
 }

 impl Default for CredentialsCache {

@@ -59,33 +58,8 @@ impl CredentialsCache
         }
     }

-    /// Populate the global authentication store with credentials on a URL, if there are any.
-    ///
-    /// Returns `true` if the store was updated.
-    pub fn store_credentials_from_url(&self, url: &DisplaySafeUrl) -> bool {
-        if let Some(credentials) = Credentials::from_url(url) {
-            trace!("Caching credentials for {url}");
-            self.insert(url, Arc::new(Authentication::from(credentials)));
-            true
-        } else {
-            false
-        }
-    }
-
-    /// Populate the global authentication store with credentials on a URL, if there are any.
-    ///
-    /// Returns `true` if the store was updated.
-    pub fn store_credentials(&self, url: &DisplaySafeUrl, credentials: Credentials) {
-        trace!("Caching credentials for {url}");
-        self.insert(url, Arc::new(Authentication::from(credentials)));
-    }
-
     /// Return the credentials that should be used for a realm and username, if any.
-    pub(crate) fn get_realm(
-        &self,
-        realm: Realm,
-        username: Username,
-    ) -> Option<Arc<Authentication>> {
+    pub(crate) fn get_realm(&self, realm: Realm, username: Username) -> Option<Arc<Credentials>> {
         let realms = self.realms.read().unwrap();
         let given_username = username.is_some();
         let key = (realm, username);

@@ -119,7 +93,7 @@ impl CredentialsCache
     /// Note we do not cache per username, but if a username is passed we will confirm that the
     /// cached credentials have a username equal to the provided one — otherwise `None` is returned.
     /// If multiple usernames are used per URL, the realm cache should be queried instead.
-    pub(crate) fn get_url(&self, url: &Url, username: &Username) -> Option<Arc<Authentication>> {
+    pub(crate) fn get_url(&self, url: &Url, username: &Username) -> Option<Arc<Credentials>> {
         let urls = self.urls.read().unwrap();
         let credentials = urls.get(url);
         if let Some(credentials) = credentials {

@@ -138,7 +112,7 @@ impl CredentialsCache
     }

     /// Update the cache with the given credentials.
-    pub(crate) fn insert(&self, url: &Url, credentials: Arc<Authentication>) {
+    pub(crate) fn insert(&self, url: &Url, credentials: Arc<Credentials>) {
         // Do not cache empty credentials
         if credentials.is_empty() {
             return;

@@ -165,8 +139,8 @@ impl CredentialsCache
     fn insert_realm(
         &self,
         key: (Realm, Username),
-        credentials: &Arc<Authentication>,
-    ) -> Option<Arc<Authentication>> {
+        credentials: &Arc<Credentials>,
+    ) -> Option<Arc<Credentials>> {
         // Do not cache empty credentials
         if credentials.is_empty() {
             return None;

@@ -174,8 +148,8 @@ impl CredentialsCache

         let mut realms = self.realms.write().unwrap();

-        // Always replace existing entries if we have a password or token
-        if credentials.is_authenticated() {
+        // Always replace existing entries if we have a password
+        if credentials.password().is_some() {
             return realms.insert(key, credentials.clone());
         }

@@ -192,33 +166,24 @@ impl CredentialsCache
 }

 #[derive(Debug)]
-struct UrlTrie<T> {
-    states: Vec<TrieState<T>>,
+struct UrlTrie {
+    states: Vec<TrieState>,
 }

-#[derive(Debug)]
-struct TrieState<T> {
+#[derive(Debug, Default)]
+struct TrieState {
     children: Vec<(String, usize)>,
-    value: Option<T>,
+    value: Option<Arc<Credentials>>,
 }

-impl<T> Default for TrieState<T> {
-    fn default() -> Self {
-        Self {
-            children: vec![],
-            value: None,
-        }
-    }
-}
-
-impl<T> UrlTrie<T> {
+impl UrlTrie {
     fn new() -> Self {
         let mut trie = Self { states: vec![] };
         trie.alloc();
         trie
     }

-    fn get(&self, url: &Url) -> Option<&T> {
+    fn get(&self, url: &Url) -> Option<&Arc<Credentials>> {
         let mut state = 0;
         let realm = Realm::from(url).to_string();
         for component in [realm.as_str()]

@@ -233,7 +198,7 @@ impl<T> UrlTrie<T>
         self.states[state].value.as_ref()
     }

-    fn insert(&mut self, url: &Url, value: T) {
+    fn insert(&mut self, url: &Url, value: Arc<Credentials>) {
         let mut state = 0;
         let realm = Realm::from(url).to_string();
         for component in [realm.as_str()]

@@ -261,7 +226,7 @@ impl<T> UrlTrie<T>
     }
 }

-impl<T> TrieState<T> {
+impl TrieState {
     fn get(&self, component: &str) -> Option<usize> {
         let i = self.index(component).ok()?;
         Some(self.children[i].1)

@@ -295,21 +260,28 @@ impl From<(Realm, Username)> for RealmUsername

 #[cfg(test)]
 mod tests {
-    use crate::Credentials;
     use crate::credentials::Password;

     use super::*;

     #[test]
     fn test_trie() {
-        let credentials1 =
-            Credentials::basic(Some("username1".to_string()), Some("password1".to_string()));
-        let credentials2 =
-            Credentials::basic(Some("username2".to_string()), Some("password2".to_string()));
-        let credentials3 =
-            Credentials::basic(Some("username3".to_string()), Some("password3".to_string()));
-        let credentials4 =
-            Credentials::basic(Some("username4".to_string()), Some("password4".to_string()));
+        let credentials1 = Arc::new(Credentials::basic(
+            Some("username1".to_string()),
+            Some("password1".to_string()),
+        ));
+        let credentials2 = Arc::new(Credentials::basic(
+            Some("username2".to_string()),
+            Some("password2".to_string()),
+        ));
+        let credentials3 = Arc::new(Credentials::basic(
+            Some("username3".to_string()),
+            Some("password3".to_string()),
+        ));
+        let credentials4 = Arc::new(Credentials::basic(
+            Some("username4".to_string()),
+            Some("password4".to_string()),
+        ));

         let mut trie = UrlTrie::new();
         trie.insert(

@@ -367,10 +339,10 @@ mod tests
     fn test_url_with_credentials() {
         let username = Username::new(Some(String::from("username")));
         let password = Password::new(String::from("password"));
-        let credentials = Arc::new(Authentication::from(Credentials::Basic {
+        let credentials = Arc::new(Credentials::Basic {
             username: username.clone(),
             password: Some(password),
-        }));
+        });
         let cache = CredentialsCache::default();
         // Insert with URL with credentials and get with redacted URL.
         let url = Url::parse("https://username:password@example.com/foobar").unwrap();
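
Aside: both sides of the UrlTrie change share the same lookup scheme: a URL is walked as a sequence of components, starting with its realm and continuing through its path segments, so a value stored for a URL prefix is found for any longer URL beneath it. A rough sketch of that component sequence, assuming (the loop body is elided in the hunks above) that the realm is chained with the path segments, and that a realm renders roughly as scheme plus authority:

use url::Url;

/// Hypothetical helper: the component sequence a URL-prefix trie walks.
fn components(url: &Url) -> Vec<String> {
    // The realm is the first component, e.g. "https://example.com".
    let realm = format!("{}://{}", url.scheme(), url.authority());
    std::iter::once(realm)
        .chain(
            url.path_segments()
                .into_iter()
                .flatten()
                .filter(|segment| !segment.is_empty())
                .map(String::from),
        )
        .collect()
}

fn main() {
    let url = Url::parse("https://example.com/foo/bar").unwrap();
    assert_eq!(components(&url), ["https://example.com", "foo", "bar"]);
}
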
@@ -1,41 +1,34 @@
-use std::borrow::Cow;
-use std::fmt;
-use std::io::Read;
-use std::io::Write;
-use std::str::FromStr;
-
 use base64::prelude::BASE64_STANDARD;
 use base64::read::DecoderReader;
 use base64::write::EncoderWriter;
-use http::Uri;
+use std::borrow::Cow;
+use std::fmt;
+use uv_redacted::DisplaySafeUrl;
+
 use netrc::Netrc;
-use reqsign::aws::DefaultSigner;
 use reqwest::Request;
 use reqwest::header::HeaderValue;
-use serde::{Deserialize, Serialize};
+use std::io::Read;
+use std::io::Write;
 use url::Url;

-use uv_redacted::DisplaySafeUrl;
 use uv_static::EnvVars;

-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq)]
 pub enum Credentials {
-    /// RFC 7617 HTTP Basic Authentication
     Basic {
         /// The username to use for authentication.
         username: Username,
         /// The password to use for authentication.
         password: Option<Password>,
     },
-    /// RFC 6750 Bearer Token Authentication
     Bearer {
         /// The token to use for authentication.
-        token: Token,
+        token: Vec<u8>,
     },
 }

-#[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Hash, Default, Serialize, Deserialize)]
-#[serde(transparent)]
+#[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Hash, Default)]
 pub struct Username(Option<String>);

 impl Username {

@@ -76,8 +69,7 @@ impl From<Option<String>> for Username
     }
 }

-#[derive(Clone, PartialEq, Eq, Ord, PartialOrd, Hash, Default, Serialize, Deserialize)]
-#[serde(transparent)]
+#[derive(Clone, PartialEq, Eq, Ord, PartialOrd, Hash, Default)]
 pub struct Password(String);

 impl Password {

@@ -85,15 +77,9 @@ impl Password
         Self(password)
     }

-    /// Return the [`Password`] as a string slice.
     pub fn as_str(&self) -> &str {
         self.0.as_str()
     }

-    /// Convert the [`Password`] into its underlying [`String`].
-    pub fn into_string(self) -> String {
-        self.0
-    }
 }

 impl fmt::Debug for Password {

@@ -102,36 +88,6 @@ impl fmt::Debug for Password
     }
 }

-#[derive(Clone, PartialEq, Eq, Ord, PartialOrd, Hash, Default, Deserialize)]
-#[serde(transparent)]
-pub struct Token(Vec<u8>);
-
-impl Token {
-    pub fn new(token: Vec<u8>) -> Self {
-        Self(token)
-    }
-
-    /// Return the [`Token`] as a byte slice.
-    pub fn as_slice(&self) -> &[u8] {
-        self.0.as_slice()
-    }
-
-    /// Convert the [`Token`] into its underlying [`Vec<u8>`].
-    pub fn into_bytes(self) -> Vec<u8> {
-        self.0
-    }
-
-    /// Return whether the [`Token`] is empty.
-    pub fn is_empty(&self) -> bool {
-        self.0.is_empty()
-    }
-}
-
-impl fmt::Debug for Token {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "****")
-    }
-}
-
 impl Credentials {
     /// Create a set of HTTP Basic Authentication credentials.
     #[allow(dead_code)]

@@ -145,9 +101,7 @@ impl Credentials
     /// Create a set of Bearer Authentication credentials.
     #[allow(dead_code)]
     pub fn bearer(token: Vec<u8>) -> Self {
-        Self::Bearer {
-            token: Token::new(token),
-        }
+        Self::Bearer { token }
     }

     pub fn username(&self) -> Option<&str> {

@@ -178,16 +132,6 @@ impl Credentials
         }
     }

-    pub fn is_authenticated(&self) -> bool {
-        match self {
-            Self::Basic {
-                username: _,
-                password,
-            } => password.is_some(),
-            Self::Bearer { token } => !token.is_empty(),
-        }
-    }
-
     pub(crate) fn is_empty(&self) -> bool {
         match self {
             Self::Basic { username, password } => username.is_none() && password.is_none(),

@@ -318,7 +262,7 @@ impl Credentials
         // Parse a `Bearer` authentication header.
         if let Some(token) = header.as_bytes().strip_prefix(b"Bearer ") {
             return Some(Self::Bearer {
-                token: Token::new(token.to_vec()),
+                token: token.to_vec(),
            });
         }
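
Aside: the two Credentials variants map onto the two header schemes named in the removed doc comments, RFC 7617 Basic (base64 of user:password) and RFC 6750 Bearer (the token itself). A minimal sketch of how those header values are formed; these helpers are illustrative only, not the crate's API:

use base64::Engine;
use base64::prelude::BASE64_STANDARD;

/// Illustrative: an RFC 7617 `Authorization: Basic` value.
fn basic_header(username: &str, password: Option<&str>) -> String {
    let raw = format!("{username}:{}", password.unwrap_or(""));
    format!("Basic {}", BASE64_STANDARD.encode(raw))
}

/// Illustrative: an RFC 6750 `Authorization: Bearer` value.
fn bearer_header(token: &[u8]) -> String {
    format!("Bearer {}", String::from_utf8_lossy(token))
}

fn main() {
    assert_eq!(basic_header("user", Some("pass")), "Basic dXNlcjpwYXNz");
    assert_eq!(bearer_header(b"token"), "Bearer token");
}
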
@@ -382,127 +326,6 @@ impl Credentials
     }
 }

-#[derive(Clone, Debug)]
-pub(crate) enum Authentication {
-    /// HTTP Basic or Bearer Authentication credentials.
-    Credentials(Credentials),
-
-    /// AWS Signature Version 4 signing.
-    Signer(DefaultSigner),
-}
-
-impl PartialEq for Authentication {
-    fn eq(&self, other: &Self) -> bool {
-        match (self, other) {
-            (Self::Credentials(a), Self::Credentials(b)) => a == b,
-            (Self::Signer(..), Self::Signer(..)) => true,
-            _ => false,
-        }
-    }
-}
-
-impl Eq for Authentication {}
-
-impl From<Credentials> for Authentication {
-    fn from(credentials: Credentials) -> Self {
-        Self::Credentials(credentials)
-    }
-}
-
-impl From<DefaultSigner> for Authentication {
-    fn from(signer: DefaultSigner) -> Self {
-        Self::Signer(signer)
-    }
-}
-
-impl Authentication {
-    /// Return the password used for authentication, if any.
-    pub(crate) fn password(&self) -> Option<&str> {
-        match self {
-            Self::Credentials(credentials) => credentials.password(),
-            Self::Signer(..) => None,
-        }
-    }
-
-    /// Return the username used for authentication, if any.
-    pub(crate) fn username(&self) -> Option<&str> {
-        match self {
-            Self::Credentials(credentials) => credentials.username(),
-            Self::Signer(..) => None,
-        }
-    }
-
-    /// Return the username used for authentication, if any.
-    pub(crate) fn as_username(&self) -> Cow<'_, Username> {
-        match self {
-            Self::Credentials(credentials) => credentials.as_username(),
-            Self::Signer(..) => Cow::Owned(Username::none()),
-        }
-    }
-
-    /// Return the username used for authentication, if any.
-    pub(crate) fn to_username(&self) -> Username {
-        match self {
-            Self::Credentials(credentials) => credentials.to_username(),
-            Self::Signer(..) => Username::none(),
-        }
-    }
-
-    /// Return `true` if the object contains a means of authenticating.
-    pub(crate) fn is_authenticated(&self) -> bool {
-        match self {
-            Self::Credentials(credentials) => credentials.is_authenticated(),
-            Self::Signer(..) => true,
-        }
-    }
-
-    /// Return `true` if the object contains no credentials.
-    pub(crate) fn is_empty(&self) -> bool {
-        match self {
-            Self::Credentials(credentials) => credentials.is_empty(),
-            Self::Signer(..) => false,
-        }
-    }
-
-    /// Apply the authentication to the given request.
-    ///
-    /// Any existing credentials will be overridden.
-    #[must_use]
-    pub(crate) async fn authenticate(&self, mut request: Request) -> Request {
-        match self {
-            Self::Credentials(credentials) => credentials.authenticate(request),
-            Self::Signer(signer) => {
-                // Build an `http::Request` from the `reqwest::Request`.
-                // SAFETY: If we have a valid `reqwest::Request`, we expect (e.g.) the URL to be valid.
-                let uri = Uri::from_str(request.url().as_str()).unwrap();
-                let mut http_req = http::Request::builder()
-                    .method(request.method().clone())
-                    .uri(uri)
-                    .body(())
-                    .unwrap();
-                *http_req.headers_mut() = request.headers().clone();
-
-                // Sign the parts.
-                let (mut parts, ()) = http_req.into_parts();
-                signer
-                    .sign(&mut parts, None)
-                    .await
-                    .expect("AWS signing should succeed");
-
-                // Copy over the signed headers.
-                request.headers_mut().extend(parts.headers);
-
-                // Copy over the signed path and query, if any.
-                if let Some(path_and_query) = parts.uri.path_and_query() {
-                    request.url_mut().set_path(path_and_query.path());
-                    request.url_mut().set_query(path_and_query.query());
-                }
-                request
-            }
-        }
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use insta::assert_debug_snapshot;

@@ -623,15 +446,4 @@ mod tests
             "Basic { username: Username(Some(\"user\")), password: Some(****) }"
         );
     }
-
-    #[test]
-    fn test_bearer_token_obfuscation() {
-        let token = "super_secret_token";
-        let credentials = Credentials::bearer(token.into());
-        let debugged = format!("{credentials:?}");
-        assert!(
-            !debugged.contains(token),
-            "Token should be obfuscated in Debug impl: {debugged}"
-        );
-    }
 }
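
Aside on the removed Authentication enum: a signer carries no comparable state, so the manual PartialEq above treats any two Signer values as equal while comparing the Credentials variant structurally. A condensed sketch of that shape, with hypothetical stand-in types:

/// Hypothetical stand-ins for `Credentials` and reqsign's `DefaultSigner`.
#[derive(Clone, PartialEq)]
struct Creds(String);
#[derive(Clone)]
struct Signer;

#[derive(Clone)]
enum Auth {
    Credentials(Creds),
    Signer(Signer),
}

impl PartialEq for Auth {
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            // Structural comparison for credentials...
            (Self::Credentials(a), Self::Credentials(b)) => a == b,
            // ...but signers are opaque, so compare by variant only.
            (Self::Signer(_), Self::Signer(_)) => true,
            _ => false,
        }
    }
}

impl Eq for Auth {}
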
@@ -95,9 +95,9 @@ impl Indexes
         index_urls
     }

-    /// Get the index for a URL if one exists.
-    pub fn index_for(&self, url: &Url) -> Option<&Index> {
-        self.find_prefix_index(url)
+    /// Get the index URL prefix for a URL if one exists.
+    pub fn index_url_for(&self, url: &Url) -> Option<&DisplaySafeUrl> {
+        self.find_prefix_index(url).map(|index| &index.url)
     }

     /// Get the [`AuthPolicy`] for a URL.
@@ -1,14 +1,11 @@
 use std::{io::Write, process::Stdio};
 use tokio::process::Command;
-use tracing::{debug, instrument, trace, warn};
+use tracing::{instrument, trace, warn};
 use uv_redacted::DisplaySafeUrl;
 use uv_warnings::warn_user_once;

 use crate::credentials::Credentials;

-/// Service name prefix for storing credentials in a keyring.
-static UV_SERVICE_PREFIX: &str = "uv:";
-
 /// A backend for retrieving credentials from a keyring.
 ///
 /// See pip's implementation for reference

@@ -18,47 +15,15 @@ pub struct KeyringProvider
     backend: KeyringProviderBackend,
 }

-#[derive(thiserror::Error, Debug)]
-pub enum Error {
-    #[error(transparent)]
-    Keyring(#[from] uv_keyring::Error),
-
-    #[error("The '{0}' keyring provider does not support storing credentials")]
-    StoreUnsupported(KeyringProviderBackend),
-
-    #[error("The '{0}' keyring provider does not support removing credentials")]
-    RemoveUnsupported(KeyringProviderBackend),
-}
-
-#[derive(Debug, Clone)]
-pub enum KeyringProviderBackend {
-    /// Use a native system keyring integration for credentials.
-    Native,
-    /// Use the external `keyring` command for credentials.
+#[derive(Debug)]
+pub(crate) enum KeyringProviderBackend {
+    /// Use the `keyring` command to fetch credentials.
     Subprocess,
     #[cfg(test)]
     Dummy(Vec<(String, &'static str, &'static str)>),
 }

-impl std::fmt::Display for KeyringProviderBackend {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            Self::Native => write!(f, "native"),
-            Self::Subprocess => write!(f, "subprocess"),
-            #[cfg(test)]
-            Self::Dummy(_) => write!(f, "dummy"),
-        }
-    }
-}
-
 impl KeyringProvider {
-    /// Create a new [`KeyringProvider::Native`].
-    pub fn native() -> Self {
-        Self {
-            backend: KeyringProviderBackend::Native,
-        }
-    }
-
     /// Create a new [`KeyringProvider::Subprocess`].
     pub fn subprocess() -> Self {
         Self {
@@ -66,124 +31,6 @@ impl KeyringProvider
         }
     }

-    /// Store credentials for the given [`DisplaySafeUrl`] to the keyring.
-    ///
-    /// Only [`KeyringProviderBackend::Native`] is supported at this time.
-    #[instrument(skip_all, fields(url = % url.to_string(), username))]
-    pub async fn store(
-        &self,
-        url: &DisplaySafeUrl,
-        credentials: &Credentials,
-    ) -> Result<bool, Error> {
-        let Some(username) = credentials.username() else {
-            trace!("Unable to store credentials in keyring for {url} due to missing username");
-            return Ok(false);
-        };
-        let Some(password) = credentials.password() else {
-            trace!("Unable to store credentials in keyring for {url} due to missing password");
-            return Ok(false);
-        };
-
-        // Ensure we strip credentials from the URL before storing
-        let url = url.without_credentials();
-
-        // If there's no path, we'll perform a host-level login
-        let target = if let Some(host) = url.host_str().filter(|_| !url.path().is_empty()) {
-            let mut target = String::new();
-            if url.scheme() != "https" {
-                target.push_str(url.scheme());
-                target.push_str("://");
-            }
-            target.push_str(host);
-            if let Some(port) = url.port() {
-                target.push(':');
-                target.push_str(&port.to_string());
-            }
-            target
-        } else {
-            url.to_string()
-        };
-
-        match &self.backend {
-            KeyringProviderBackend::Native => {
-                self.store_native(&target, username, password).await?;
-                Ok(true)
-            }
-            KeyringProviderBackend::Subprocess => {
-                Err(Error::StoreUnsupported(self.backend.clone()))
-            }
-            #[cfg(test)]
-            KeyringProviderBackend::Dummy(_) => Err(Error::StoreUnsupported(self.backend.clone())),
-        }
-    }
-
-    /// Store credentials to the system keyring.
-    #[instrument(skip(self))]
-    async fn store_native(
-        &self,
-        service: &str,
-        username: &str,
-        password: &str,
-    ) -> Result<(), Error> {
-        let prefixed_service = format!("{UV_SERVICE_PREFIX}{service}");
-        let entry = uv_keyring::Entry::new(&prefixed_service, username)?;
-        entry.set_password(password).await?;
-        Ok(())
-    }
-
-    /// Remove credentials for the given [`DisplaySafeUrl`] and username from the keyring.
-    ///
-    /// Only [`KeyringProviderBackend::Native`] is supported at this time.
-    #[instrument(skip_all, fields(url = % url.to_string(), username))]
-    pub async fn remove(&self, url: &DisplaySafeUrl, username: &str) -> Result<(), Error> {
-        // Ensure we strip credentials from the URL before storing
-        let url = url.without_credentials();
-
-        // If there's no path, we'll perform a host-level login
-        let target = if let Some(host) = url.host_str().filter(|_| !url.path().is_empty()) {
-            let mut target = String::new();
-            if url.scheme() != "https" {
-                target.push_str(url.scheme());
-                target.push_str("://");
-            }
-            target.push_str(host);
-            if let Some(port) = url.port() {
-                target.push(':');
-                target.push_str(&port.to_string());
-            }
-            target
-        } else {
-            url.to_string()
-        };
-
-        match &self.backend {
-            KeyringProviderBackend::Native => {
-                self.remove_native(&target, username).await?;
-                Ok(())
-            }
-            KeyringProviderBackend::Subprocess => {
-                Err(Error::RemoveUnsupported(self.backend.clone()))
-            }
-            #[cfg(test)]
-            KeyringProviderBackend::Dummy(_) => Err(Error::RemoveUnsupported(self.backend.clone())),
-        }
-    }
-
-    /// Remove credentials from the system keyring for the given `service_name`/`username`
-    /// pair.
-    #[instrument(skip(self))]
-    async fn remove_native(
-        &self,
-        service_name: &str,
-        username: &str,
-    ) -> Result<(), uv_keyring::Error> {
-        let prefixed_service = format!("{UV_SERVICE_PREFIX}{service_name}");
-        let entry = uv_keyring::Entry::new(&prefixed_service, username)?;
-        entry.delete_credential().await?;
-        trace!("Removed credentials for {username}@{service_name} from system keyring");
-        Ok(())
-    }
-
     /// Fetch credentials for the given [`Url`] from the keyring.
     ///
     /// Returns [`None`] if no password was found for the username or if any errors
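
Aside: the removed store/remove pair derives the keyring "service" string from the URL in the same way. Condensed into a standalone helper (hypothetical; the real code is inline above), the construction reads:

use url::Url;

/// Hypothetical helper mirroring the removed target construction:
/// "host[:port]" for https URLs with a host, "scheme://host[:port]"
/// for other schemes, and the whole URL when there is no host to key on.
fn keyring_target(url: &Url) -> String {
    match url.host_str().filter(|_| !url.path().is_empty()) {
        Some(host) => {
            let mut target = String::new();
            if url.scheme() != "https" {
                target.push_str(url.scheme());
                target.push_str("://");
            }
            target.push_str(host);
            if let Some(port) = url.port() {
                target.push(':');
                target.push_str(&port.to_string());
            }
            target
        }
        None => url.to_string(),
    }
}

fn main() {
    let url = Url::parse("https://example.com/simple").unwrap();
    assert_eq!(keyring_target(&url), "example.com");
    let url = Url::parse("http://localhost:8080/simple").unwrap();
    assert_eq!(keyring_target(&url), "http://localhost:8080");
}
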
@@ -193,11 +40,11 @@ impl KeyringProvider
         // Validate the request
         debug_assert!(
             url.host_str().is_some(),
-            "Should only use keyring for URLs with host"
+            "Should only use keyring for urls with host"
         );
         debug_assert!(
             url.password().is_none(),
-            "Should only use keyring for URLs without a password"
+            "Should only use keyring for urls without a password"
         );
         debug_assert!(
             !username.map(str::is_empty).unwrap_or(false),

@@ -208,7 +55,6 @@ impl KeyringProvider
         // <https://github.com/pypa/pip/blob/ae5fff36b0aad6e5e0037884927eaa29163c0611/src/pip/_internal/network/auth.py#L376C1-L379C14>
         trace!("Checking keyring for URL {url}");
         let mut credentials = match self.backend {
-            KeyringProviderBackend::Native => self.fetch_native(url.as_str(), username).await,
             KeyringProviderBackend::Subprocess => {
                 self.fetch_subprocess(url.as_str(), username).await
             }

@@ -226,7 +72,6 @@ impl KeyringProvider
         };
         trace!("Checking keyring for host {host}");
         credentials = match self.backend {
-            KeyringProviderBackend::Native => self.fetch_native(&host, username).await,
             KeyringProviderBackend::Subprocess => self.fetch_subprocess(&host, username).await,
             #[cfg(test)]
             KeyringProviderBackend::Dummy(ref store) => {

@@ -320,7 +165,7 @@ impl KeyringProvider
             // N.B. We do not show the `service_name` here because we'll show the warning twice
             // otherwise, once for the URL and once for the realm.
             warn_user_once!(
-                "Attempted to fetch credentials using the `keyring` command, but it does not support `--mode creds`; upgrade to `keyring>=v25.2.1` or provide a username"
+                "Attempted to fetch credentials using the `keyring` command, but it does not support `--mode creds`; upgrade to `keyring>=v25.2.1` for support or provide a username"
             );
         } else if username.is_none() {
             // If we captured stderr, display it in case it's helpful to the user
@@ -330,31 +175,6 @@ impl KeyringProvider
         }
     }

-    #[instrument(skip(self))]
-    async fn fetch_native(
-        &self,
-        service: &str,
-        username: Option<&str>,
-    ) -> Option<(String, String)> {
-        let prefixed_service = format!("{UV_SERVICE_PREFIX}{service}");
-        let username = username?;
-        let Ok(entry) = uv_keyring::Entry::new(&prefixed_service, username) else {
-            return None;
-        };
-        match entry.get_password().await {
-            Ok(password) => return Some((username.to_string(), password)),
-            Err(uv_keyring::Error::NoEntry) => {
-                debug!("No entry found in system keyring for {service}");
-            }
-            Err(err) => {
-                warn_user_once!(
-                    "Unable to fetch credentials for {service} from system keyring: {err}"
-                );
-            }
-        }
-        None
-    }
-
     #[cfg(test)]
     fn fetch_dummy(
         store: &Vec<(String, &'static str, &'static str)>,
@@ -404,13 +224,12 @@ mod tests
         let url = Url::parse("file:/etc/bin/").unwrap();
         let keyring = KeyringProvider::empty();
         // Panics due to debug assertion; returns `None` in production
-        let fetch = keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some("user"));
-        if cfg!(debug_assertions) {
-            let result = std::panic::AssertUnwindSafe(fetch).catch_unwind().await;
-            assert!(result.is_err());
-        } else {
-            assert_eq!(fetch.await, None);
-        }
+        let result = std::panic::AssertUnwindSafe(
+            keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some("user")),
+        )
+        .catch_unwind()
+        .await;
+        assert!(result.is_err());
     }

     #[tokio::test]

@@ -418,13 +237,12 @@ mod tests
         let url = Url::parse("https://user:password@example.com").unwrap();
         let keyring = KeyringProvider::empty();
         // Panics due to debug assertion; returns `None` in production
-        let fetch = keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some(url.username()));
-        if cfg!(debug_assertions) {
-            let result = std::panic::AssertUnwindSafe(fetch).catch_unwind().await;
-            assert!(result.is_err());
-        } else {
-            assert_eq!(fetch.await, None);
-        }
+        let result = std::panic::AssertUnwindSafe(
+            keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some(url.username())),
+        )
+        .catch_unwind()
+        .await;
+        assert!(result.is_err());
     }

     #[tokio::test]

@@ -432,13 +250,12 @@ mod tests
         let url = Url::parse("https://example.com").unwrap();
         let keyring = KeyringProvider::empty();
         // Panics due to debug assertion; returns `None` in production
-        let fetch = keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some(url.username()));
-        if cfg!(debug_assertions) {
-            let result = std::panic::AssertUnwindSafe(fetch).catch_unwind().await;
-            assert!(result.is_err());
-        } else {
-            assert_eq!(fetch.await, None);
-        }
+        let result = std::panic::AssertUnwindSafe(
+            keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some(url.username())),
+        )
+        .catch_unwind()
+        .await;
+        assert!(result.is_err());
     }

     #[tokio::test]
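
Aside: the three rewritten tests differ because a debug_assert! only fires when debug assertions are compiled in; main's version therefore branches on cfg!(debug_assertions) where 0.8.6 always expected a panic. A minimal sketch of the pattern, assuming the futures crate for catch_unwind on a future:

use futures::FutureExt;

#[tokio::test]
async fn debug_assertion_behavior() {
    let fut = async { debug_assert!(false, "fires only in debug builds") };
    if cfg!(debug_assertions) {
        // Debug build: the assertion panics, and we catch the unwind.
        let result = std::panic::AssertUnwindSafe(fut).catch_unwind().await;
        assert!(result.is_err());
    } else {
        // Release build: the assertion is compiled out entirely.
        fut.await;
    }
}
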
@@ -1,24 +1,48 @@
-pub use access_token::AccessToken;
-pub use cache::CredentialsCache;
-pub use credentials::{Credentials, Username};
+use std::sync::{Arc, LazyLock};
+use tracing::trace;
+
+use cache::CredentialsCache;
+pub use credentials::Credentials;
 pub use index::{AuthPolicy, Index, Indexes};
 pub use keyring::KeyringProvider;
 pub use middleware::AuthMiddleware;
-pub use pyx::{
-    DEFAULT_TOLERANCE_SECS, PyxJwt, PyxOAuthTokens, PyxTokenStore, PyxTokens, TokenStoreError,
-};
-pub use realm::{Realm, RealmRef};
-pub use service::{Service, ServiceParseError};
-pub use store::{AuthBackend, AuthScheme, TextCredentialStore, TomlCredentialError};
+use realm::Realm;
+use uv_redacted::DisplaySafeUrl;

-mod access_token;
 mod cache;
 mod credentials;
 mod index;
 mod keyring;
 mod middleware;
 mod providers;
-mod pyx;
 mod realm;
-mod service;
-mod store;
+
+// TODO(zanieb): Consider passing a cache explicitly throughout
+
+/// Global authentication cache for a uv invocation
+///
+/// This is used to share credentials across uv clients.
+pub(crate) static CREDENTIALS_CACHE: LazyLock<CredentialsCache> =
+    LazyLock::new(CredentialsCache::default);
+
+/// Populate the global authentication store with credentials on a URL, if there are any.
+///
+/// Returns `true` if the store was updated.
+pub fn store_credentials_from_url(url: &DisplaySafeUrl) -> bool {
+    if let Some(credentials) = Credentials::from_url(url) {
+        trace!("Caching credentials for {url}");
+        CREDENTIALS_CACHE.insert(url, Arc::new(credentials));
+        true
+    } else {
+        false
+    }
+}
+
+/// Populate the global authentication store with credentials on a URL, if there are any.
+///
+/// Returns `true` if the store was updated.
+pub fn store_credentials(url: &DisplaySafeUrl, credentials: Arc<Credentials>) {
+    trace!("Caching credentials for {url}");
+    CREDENTIALS_CACHE.insert(url, credentials);
+}
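
Aside: the 0.8.6 side replaces main's per-middleware Arc<CredentialsCache> with a process-wide static. A minimal sketch of that LazyLock pattern; the map type below is hypothetical, though the real cache likewise wraps its maps in locks, as the cache.rs hunks earlier in this diff show:

use std::collections::HashMap;
use std::sync::{LazyLock, RwLock};

/// Initialized on first use and shared by everything in the process.
static CACHE: LazyLock<RwLock<HashMap<String, String>>> =
    LazyLock::new(|| RwLock::new(HashMap::new()));

fn remember(key: &str, value: &str) {
    CACHE
        .write()
        .unwrap()
        .insert(key.to_string(), value.to_string());
}

fn recall(key: &str) -> Option<String> {
    CACHE.read().unwrap().get(key).cloned()
}
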
@@ -4,30 +4,18 @@ use anyhow::{anyhow, format_err};
 use http::{Extensions, StatusCode};
 use netrc::Netrc;
 use reqwest::{Request, Response};
-use reqwest_middleware::{ClientWithMiddleware, Error, Middleware, Next};
-use tokio::sync::Mutex;
+use reqwest_middleware::{Error, Middleware, Next};
 use tracing::{debug, trace, warn};

-use uv_preview::{Preview, PreviewFeatures};
-use uv_redacted::DisplaySafeUrl;
-use uv_static::EnvVars;
-use uv_warnings::owo_colors::OwoColorize;
-
-use crate::credentials::Authentication;
-use crate::providers::{HuggingFaceProvider, S3EndpointProvider};
-use crate::pyx::{DEFAULT_TOLERANCE_SECS, PyxTokenStore};
+use crate::providers::HuggingFaceProvider;
 use crate::{
-    AccessToken, CredentialsCache, KeyringProvider,
+    CREDENTIALS_CACHE, CredentialsCache, KeyringProvider,
     cache::FetchUrl,
     credentials::{Credentials, Username},
     index::{AuthPolicy, Indexes},
     realm::Realm,
 };
-use crate::{Index, TextCredentialStore};
+use uv_redacted::DisplaySafeUrl;

-/// Cached check for whether we're running in Dependabot.
-static IS_DEPENDABOT: LazyLock<bool> =
-    LazyLock::new(|| std::env::var(EnvVars::DEPENDABOT).is_ok_and(|value| value == "true"));
-
 /// Strategy for loading netrc files.
 enum NetrcMode {
@@ -63,128 +51,29 @@ impl NetrcMode
     }
 }

-/// Strategy for loading text-based credential files.
-enum TextStoreMode {
-    Automatic(tokio::sync::OnceCell<Option<TextCredentialStore>>),
-    Enabled(TextCredentialStore),
-    Disabled,
-}
-
-impl Default for TextStoreMode {
-    fn default() -> Self {
-        Self::Automatic(tokio::sync::OnceCell::new())
-    }
-}
-
-impl TextStoreMode {
-    async fn load_default_store() -> Option<TextCredentialStore> {
-        let path = TextCredentialStore::default_file()
-            .inspect_err(|err| {
-                warn!("Failed to determine credentials file path: {}", err);
-            })
-            .ok()?;
-
-        match TextCredentialStore::read(&path).await {
-            Ok((store, _lock)) => {
-                debug!("Loaded credential file {}", path.display());
-                Some(store)
-            }
-            Err(err)
-                if err
-                    .as_io_error()
-                    .is_some_and(|err| err.kind() == std::io::ErrorKind::NotFound) =>
-            {
-                debug!("No credentials file found at {}", path.display());
-                None
-            }
-            Err(err) => {
-                warn!(
-                    "Failed to load credentials from {}: {}",
-                    path.display(),
-                    err
-                );
-                None
-            }
-        }
-    }
-
-    /// Get the parsed credential store, if enabled.
-    async fn get(&self) -> Option<&TextCredentialStore> {
-        match self {
-            // TODO(zanieb): Reconsider this pattern. We're just mirroring the [`NetrcMode`]
-            // implementation for now.
-            Self::Automatic(lock) => lock.get_or_init(Self::load_default_store).await.as_ref(),
-            Self::Enabled(store) => Some(store),
-            Self::Disabled => None,
-        }
-    }
-}
-
-#[derive(Debug, Clone)]
-enum TokenState {
-    /// The token state has not yet been initialized from the store.
-    Uninitialized,
-    /// The token state has been initialized, and the store either returned tokens or `None` if
-    /// the user has not yet authenticated.
-    Initialized(Option<AccessToken>),
-}
-
-#[derive(Clone)]
-enum S3CredentialState {
-    /// The S3 credential state has not yet been initialized.
-    Uninitialized,
-    /// The S3 credential state has been initialized, with either a signer or `None` if
-    /// no S3 endpoint is configured.
-    Initialized(Option<Arc<Authentication>>),
-}
-
 /// A middleware that adds basic authentication to requests.
 ///
 /// Uses a cache to propagate credentials from previously seen requests and
-/// fetches credentials from a netrc file, TOML file, and the keyring.
+/// fetches credentials from a netrc file and the keyring.
 pub struct AuthMiddleware {
     netrc: NetrcMode,
-    text_store: TextStoreMode,
     keyring: Option<KeyringProvider>,
-    /// Global authentication cache for a uv invocation to share credentials across uv clients.
-    cache: Arc<CredentialsCache>,
+    cache: Option<CredentialsCache>,
     /// Auth policies for specific URLs.
     indexes: Indexes,
     /// Set all endpoints as needing authentication. We never try to send an
     /// unauthenticated request, avoiding cloning an uncloneable request.
     only_authenticated: bool,
-    /// The base client to use for requests within the middleware.
-    base_client: Option<ClientWithMiddleware>,
-    /// The pyx token store to use for persistent credentials.
-    pyx_token_store: Option<PyxTokenStore>,
-    /// Tokens to use for persistent credentials.
-    pyx_token_state: Mutex<TokenState>,
-    /// Cached S3 credentials to avoid running the credential helper multiple times.
-    s3_credential_state: Mutex<S3CredentialState>,
-    preview: Preview,
-}
-
-impl Default for AuthMiddleware {
-    fn default() -> Self {
-        Self::new()
-    }
 }

 impl AuthMiddleware {
     pub fn new() -> Self {
         Self {
             netrc: NetrcMode::default(),
-            text_store: TextStoreMode::default(),
             keyring: None,
-            // TODO(konsti): There shouldn't be a credential cache without that in the initializer.
-            cache: Arc::new(CredentialsCache::default()),
+            cache: None,
             indexes: Indexes::new(),
             only_authenticated: false,
-            base_client: None,
-            pyx_token_store: None,
-            pyx_token_state: Mutex::new(TokenState::Uninitialized),
-            s3_credential_state: Mutex::new(S3CredentialState::Uninitialized),
-            preview: Preview::default(),
         }
     }
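
Aside: the removed TextStoreMode::Automatic variant (mirroring NetrcMode, per its own TODO) leans on tokio's OnceCell to run an async loader at most once, with the inner Option separating "loaded" from "absent". A minimal sketch of that lazy-load shape; the file name below is a placeholder, not the crate's actual default path:

use tokio::sync::OnceCell;

struct LazyStore {
    cell: OnceCell<Option<String>>,
}

impl LazyStore {
    /// Load on the first call; every later call reuses the result.
    async fn get(&self) -> Option<&String> {
        self.cell
            .get_or_init(|| async {
                // Placeholder read; `None` means no store was found.
                tokio::fs::read_to_string("credentials.toml").await.ok()
            })
            .await
            .as_ref()
    }
}
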
@@ -201,19 +90,6 @@ impl AuthMiddleware
         self
     }

-    /// Configure the text credential store to use.
-    ///
-    /// `None` disables authentication via text store.
-    #[must_use]
-    pub fn with_text_store(mut self, store: Option<TextCredentialStore>) -> Self {
-        self.text_store = if let Some(store) = store {
-            TextStoreMode::Enabled(store)
-        } else {
-            TextStoreMode::Disabled
-        };
-        self
-    }
-
     /// Configure the [`KeyringProvider`] to use.
     #[must_use]
     pub fn with_keyring(mut self, keyring: Option<KeyringProvider>) -> Self {
@@ -221,24 +97,10 @@ impl AuthMiddleware
         self
     }

-    /// Configure the [`Preview`] features to use.
-    #[must_use]
-    pub fn with_preview(mut self, preview: Preview) -> Self {
-        self.preview = preview;
-        self
-    }
-
     /// Configure the [`CredentialsCache`] to use.
     #[must_use]
     pub fn with_cache(mut self, cache: CredentialsCache) -> Self {
-        self.cache = Arc::new(cache);
-        self
-    }
-
-    /// Configure the [`CredentialsCache`] to use from an existing [`Arc`].
-    #[must_use]
-    pub fn with_cache_arc(mut self, cache: Arc<CredentialsCache>) -> Self {
-        self.cache = cache;
+        self.cache = Some(cache);
         self
     }
@@ -257,23 +119,17 @@ impl AuthMiddleware
         self
     }

-    /// Configure the [`ClientWithMiddleware`] to use for requests within the middleware.
-    #[must_use]
-    pub fn with_base_client(mut self, client: ClientWithMiddleware) -> Self {
-        self.base_client = Some(client);
-        self
-    }
-
-    /// Configure the [`PyxTokenStore`] to use for persistent credentials.
-    #[must_use]
-    pub fn with_pyx_token_store(mut self, token_store: PyxTokenStore) -> Self {
-        self.pyx_token_store = Some(token_store);
-        self
-    }
-
-    /// Global authentication cache for a uv invocation to share credentials across uv clients.
+    /// Get the configured authentication store.
+    ///
+    /// If not set, the global store is used.
     fn cache(&self) -> &CredentialsCache {
-        &self.cache
+        self.cache.as_ref().unwrap_or(&CREDENTIALS_CACHE)
+    }
+}
+
+impl Default for AuthMiddleware {
+    fn default() -> Self {
+        Self::new()
     }
 }
||||||
|
|
@ -322,16 +178,16 @@ impl Middleware for AuthMiddleware {
|
||||||
next: Next<'_>,
|
next: Next<'_>,
|
||||||
) -> reqwest_middleware::Result<Response> {
|
) -> reqwest_middleware::Result<Response> {
|
||||||
// Check for credentials attached to the request already
|
// Check for credentials attached to the request already
|
||||||
let request_credentials = Credentials::from_request(&request).map(Authentication::from);
|
let request_credentials = Credentials::from_request(&request);
|
||||||
|
|
||||||
// In the middleware, existing credentials are already moved from the URL
|
// In the middleware, existing credentials are already moved from the URL
|
||||||
// to the headers so for display purposes we restore some information
|
// to the headers so for display purposes we restore some information
|
||||||
let url = tracing_url(&request, request_credentials.as_ref());
|
let url = tracing_url(&request, request_credentials.as_ref());
|
||||||
let index = self.indexes.index_for(request.url());
|
let maybe_index_url = self.indexes.index_url_for(request.url());
|
||||||
let auth_policy = self.indexes.auth_policy_for(request.url());
|
let auth_policy = self.indexes.auth_policy_for(request.url());
|
||||||
trace!("Handling request for {url} with authentication policy {auth_policy}");
|
trace!("Handling request for {url} with authentication policy {auth_policy}");
|
||||||
|
|
||||||
let credentials: Option<Arc<Authentication>> = if matches!(auth_policy, AuthPolicy::Never) {
|
let credentials: Option<Arc<Credentials>> = if matches!(auth_policy, AuthPolicy::Never) {
|
||||||
None
|
None
|
||||||
} else {
|
} else {
|
||||||
if let Some(request_credentials) = request_credentials {
|
if let Some(request_credentials) = request_credentials {
|
||||||
|
|
@ -342,7 +198,7 @@ impl Middleware for AuthMiddleware {
|
||||||
extensions,
|
extensions,
|
||||||
next,
|
next,
|
||||||
&url,
|
&url,
|
||||||
index,
|
maybe_index_url,
|
||||||
auth_policy,
|
auth_policy,
|
||||||
)
|
)
|
||||||
.await;
|
.await;
|
||||||
|
|
@@ -355,10 +211,10 @@ impl Middleware for AuthMiddleware
             // making a failing request
             let credentials = self.cache().get_url(request.url(), &Username::none());
             if let Some(credentials) = credentials.as_ref() {
-                request = credentials.authenticate(request).await;
+                request = credentials.authenticate(request);

                 // If it's fully authenticated, finish the request
-                if credentials.is_authenticated() {
+                if credentials.password().is_some() {
                     trace!("Request for {url} is fully authenticated");
                     return self
                         .complete_request(None, request, extensions, next, auth_policy)
@@ -375,24 +231,9 @@ impl Middleware for AuthMiddleware
             .as_ref()
             .is_some_and(|credentials| credentials.username().is_some());

-        // Determine whether this is a "known" URL.
-        let is_known_url = self
-            .pyx_token_store
-            .as_ref()
-            .is_some_and(|token_store| token_store.is_known_url(request.url()));
-
-        let must_authenticate = self.only_authenticated
-            || (match auth_policy {
-                AuthPolicy::Auto => is_known_url,
-                AuthPolicy::Always => true,
-                AuthPolicy::Never => false,
-            }
-            // Dependabot intercepts HTTP requests and injects credentials, which means that we
-            // cannot eagerly enforce an `AuthPolicy` as we don't know whether credentials will be
-            // added outside of uv.
-            && !*IS_DEPENDABOT);
-
-        let (mut retry_request, response) = if !must_authenticate {
+        let retry_unauthenticated =
+            !self.only_authenticated && !matches!(auth_policy, AuthPolicy::Always);
+        let (mut retry_request, response) = if retry_unauthenticated {
             let url = tracing_url(&request, credentials.as_deref());
             if credentials.is_none() {
                 trace!("Attempting unauthenticated request for {url}");
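
Aside: condensing the branch removed above, main decides whether an unauthenticated attempt is allowed from four inputs, where 0.8.6's retry_unauthenticated keeps only the first two. A sketch of main's predicate; the standalone types are stand-ins for the crate's own:

#[derive(Clone, Copy)]
enum AuthPolicy {
    Auto,
    Always,
    Never,
}

/// Stand-in for the removed logic: authenticate eagerly when forced,
/// when the policy demands it, or when the URL is known to need it,
/// except under Dependabot, which injects credentials outside of uv.
fn must_authenticate(
    only_authenticated: bool,
    auth_policy: AuthPolicy,
    is_known_url: bool,
    is_dependabot: bool,
) -> bool {
    only_authenticated
        || (match auth_policy {
            AuthPolicy::Auto => is_known_url,
            AuthPolicy::Always => true,
            AuthPolicy::Never => false,
        } && !is_dependabot)
}
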
@@ -440,8 +281,8 @@ impl Middleware for AuthMiddleware
             .as_ref()
             .map(|credentials| credentials.to_username())
             .unwrap_or(Username::none());
-        let credentials = if let Some(index) = index {
-            self.cache().get_url(&index.url, &username).or_else(|| {
+        let credentials = if let Some(index_url) = maybe_index_url {
+            self.cache().get_url(index_url, &username).or_else(|| {
                 self.cache()
                     .get_realm(Realm::from(&**retry_request_url), username)
             })

@@ -454,9 +295,9 @@ impl Middleware for AuthMiddleware
             .or(credentials);

         if let Some(credentials) = credentials.as_ref() {
-            if credentials.is_authenticated() {
+            if credentials.password().is_some() {
                 trace!("Retrying request for {url} with credentials from cache {credentials:?}");
-                retry_request = credentials.authenticate(retry_request).await;
+                retry_request = credentials.authenticate(retry_request);
                 return self
                     .complete_request(None, retry_request, extensions, next, auth_policy)
                     .await;
@ -469,12 +310,12 @@ impl Middleware for AuthMiddleware {
|
||||||
.fetch_credentials(
|
.fetch_credentials(
|
||||||
credentials.as_deref(),
|
credentials.as_deref(),
|
||||||
retry_request_url,
|
retry_request_url,
|
||||||
index,
|
maybe_index_url,
|
||||||
auth_policy,
|
auth_policy,
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
{
|
{
|
||||||
retry_request = credentials.authenticate(retry_request).await;
|
retry_request = credentials.authenticate(retry_request);
|
||||||
trace!("Retrying request for {url} with {credentials:?}");
|
trace!("Retrying request for {url} with {credentials:?}");
|
||||||
return self
|
return self
|
||||||
.complete_request(
|
.complete_request(
|
||||||
|
|
@ -490,7 +331,7 @@ impl Middleware for AuthMiddleware {
|
||||||
if let Some(credentials) = credentials.as_ref() {
|
if let Some(credentials) = credentials.as_ref() {
|
||||||
if !attempt_has_username {
|
if !attempt_has_username {
|
||||||
trace!("Retrying request for {url} with username from cache {credentials:?}");
|
trace!("Retrying request for {url} with username from cache {credentials:?}");
|
||||||
retry_request = credentials.authenticate(retry_request).await;
|
retry_request = credentials.authenticate(retry_request);
|
||||||
return self
|
return self
|
||||||
.complete_request(None, retry_request, extensions, next, auth_policy)
|
.complete_request(None, retry_request, extensions, next, auth_policy)
|
||||||
.await;
|
.await;
|
||||||
|
|
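The hunks above look up cached credentials by exact index URL first and fall back to the wider realm. A toy sketch of that two-level lookup, with plain strings standing in for uv's URL and `Realm` key types (invented names throughout):

```rust
use std::collections::HashMap;

struct CredentialCache {
    by_url: HashMap<String, String>,
    by_realm: HashMap<String, String>,
}

impl CredentialCache {
    /// Exact index URL wins; otherwise fall back to the realm
    /// (scheme + host + port) entry.
    fn get(&self, index_url: Option<&str>, realm: &str) -> Option<&String> {
        index_url
            .and_then(|url| self.by_url.get(url))
            .or_else(|| self.by_realm.get(realm))
    }
}

fn main() {
    let mut cache = CredentialCache {
        by_url: HashMap::new(),
        by_realm: HashMap::new(),
    };
    cache.by_realm.insert("https://example.com".into(), "realm-creds".into());
    // No exact index entry, so the realm entry is used as a fallback.
    assert_eq!(
        cache.get(Some("https://example.com/simple"), "https://example.com"),
        Some(&"realm-creds".to_string())
    );
}
```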
@@ -499,19 +340,6 @@ impl Middleware for AuthMiddleware {

 if let Some(response) = response {
 Ok(response)
-} else if let Some(store) = is_known_url
-.then_some(self.pyx_token_store.as_ref())
-.flatten()
-{
-let domain = store
-.api()
-.domain()
-.unwrap_or("pyx.dev")
-.trim_start_matches("api.");
-Err(Error::Middleware(format_err!(
-"Run `{}` to authenticate uv with pyx",
-format!("uv auth login {domain}").green()
-)))
 } else {
 Err(Error::Middleware(format_err!(
 "Missing credentials for {url}"
@@ -526,7 +354,7 @@ impl AuthMiddleware {
 /// If credentials are present, insert them into the cache on success.
 async fn complete_request(
 &self,
-credentials: Option<Arc<Authentication>>,
+credentials: Option<Arc<Credentials>>,
 request: Request,
 extensions: &mut Extensions,
 next: Next<'_>,
@@ -536,7 +364,7 @@ impl AuthMiddleware {
 // Nothing to insert into the cache if we don't have credentials
 return next.run(request, extensions).await;
 };
-let url = DisplaySafeUrl::from_url(request.url().clone());
+let url = DisplaySafeUrl::from(request.url().clone());
 if matches!(auth_policy, AuthPolicy::Always) && credentials.password().is_none() {
 return Err(Error::Middleware(format_err!("Missing password for {url}")));
 }
@@ -547,7 +375,6 @@ impl AuthMiddleware {
 .as_ref()
 .is_ok_and(|response| response.error_for_status_ref().is_ok())
 {
-// TODO(zanieb): Consider also updating the system keyring after successful use
 trace!("Updating cached credentials for {url} to {credentials:?}");
 self.cache().insert(&url, credentials);
 }
@@ -558,18 +385,18 @@ impl AuthMiddleware {
 /// Use known request credentials to complete the request.
 async fn complete_request_with_request_credentials(
 &self,
-credentials: Authentication,
+credentials: Credentials,
 mut request: Request,
 extensions: &mut Extensions,
 next: Next<'_>,
 url: &DisplaySafeUrl,
-index: Option<&Index>,
+index_url: Option<&DisplaySafeUrl>,
 auth_policy: AuthPolicy,
 ) -> reqwest_middleware::Result<Response> {
 let credentials = Arc::new(credentials);

 // If there's a password, send the request and cache
-if credentials.is_authenticated() {
+if credentials.password().is_some() {
 trace!("Request for {url} already contains username and password");
 return self
 .complete_request(Some(credentials), request, extensions, next, auth_policy)
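In both versions, `complete_request` only inserts credentials into the cache after the response succeeds, so a rejected password is never replayed. A dependency-free sketch of that rule, with invented types:

```rust
use std::collections::HashMap;

struct Cache(HashMap<String, String>);

/// Cache credentials for `url` only when the server accepted them.
fn complete_request(cache: &mut Cache, url: &str, credentials: Option<String>, status: u16) {
    // Nothing to insert if the request carried no credentials.
    let Some(credentials) = credentials else { return };
    // Only a successful (2xx) response proves the credentials are valid.
    if (200..300).contains(&status) {
        cache.0.insert(url.to_string(), credentials);
    }
}

fn main() {
    let mut cache = Cache(HashMap::new());
    complete_request(&mut cache, "https://example.com", Some("secret".into()), 401);
    assert!(cache.0.is_empty()); // rejected credentials are not cached
    complete_request(&mut cache, "https://example.com", Some("secret".into()), 200);
    assert_eq!(cache.0.len(), 1);
}
```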
@@ -579,21 +406,17 @@ impl AuthMiddleware {
 trace!("Request for {url} is missing a password, looking for credentials");

 // There's just a username, try to find a password.
-// If we have an index, check the cache for that URL. Otherwise,
+// If we have an index URL, check the cache for that URL. Otherwise,
 // check for the realm.
-let maybe_cached_credentials = if let Some(index) = index {
+let maybe_cached_credentials = if let Some(index_url) = index_url {
 self.cache()
-.get_url(&index.url, credentials.as_username().as_ref())
-.or_else(|| {
-self.cache()
-.get_url(&index.root_url, credentials.as_username().as_ref())
-})
+.get_url(index_url, credentials.as_username().as_ref())
 } else {
 self.cache()
 .get_realm(Realm::from(request.url()), credentials.to_username())
 };
 if let Some(credentials) = maybe_cached_credentials {
-request = credentials.authenticate(request).await;
+request = credentials.authenticate(request);
 // Do not insert already-cached credentials
 let credentials = None;
 return self
@@ -605,27 +428,27 @@ impl AuthMiddleware {
 .cache()
 .get_url(request.url(), credentials.as_username().as_ref())
 {
-request = credentials.authenticate(request).await;
+request = credentials.authenticate(request);
 // Do not insert already-cached credentials
 None
 } else if let Some(credentials) = self
 .fetch_credentials(
 Some(&credentials),
 DisplaySafeUrl::ref_cast(request.url()),
-index,
+index_url,
 auth_policy,
 )
 .await
 {
-request = credentials.authenticate(request).await;
+request = credentials.authenticate(request);
 Some(credentials)
-} else if index.is_some() {
+} else if index_url.is_some() {
 // If this is a known index, we fall back to checking for the realm.
 if let Some(credentials) = self
 .cache()
 .get_realm(Realm::from(request.url()), credentials.to_username())
 {
-request = credentials.authenticate(request).await;
+request = credentials.authenticate(request);
 Some(credentials)
 } else {
 Some(credentials)
@@ -644,19 +467,19 @@ impl AuthMiddleware {
 /// Supports netrc file and keyring lookups.
 async fn fetch_credentials(
 &self,
-credentials: Option<&Authentication>,
+credentials: Option<&Credentials>,
 url: &DisplaySafeUrl,
-index: Option<&Index>,
+maybe_index_url: Option<&DisplaySafeUrl>,
 auth_policy: AuthPolicy,
-) -> Option<Arc<Authentication>> {
+) -> Option<Arc<Credentials>> {
 let username = Username::from(
 credentials.map(|credentials| credentials.username().unwrap_or_default().to_string()),
 );

 // Fetches can be expensive, so we will only run them _once_ per realm or index URL and username combination
 // All other requests for the same realm or index URL will wait until the first one completes
-let key = if let Some(index) = index {
-(FetchUrl::Index(index.url.clone()), username)
+let key = if let Some(index_url) = maybe_index_url {
+(FetchUrl::Index(index_url.clone()), username)
 } else {
 (FetchUrl::Realm(Realm::from(&**url)), username)
 };
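The "run the fetch once per (realm or index URL, username) key" comment above is the core of the dedup scheme. A synchronous sketch using `OnceLock` slots, assuming invented names; uv's real implementation is async and lets concurrent waiters block until the first fetch completes:

```rust
use std::collections::HashMap;
use std::sync::{Arc, Mutex, OnceLock};

// (realm or index URL, username)
type Key = (String, String);

#[derive(Default)]
struct Fetches {
    slots: Mutex<HashMap<Key, Arc<OnceLock<Option<String>>>>>,
}

impl Fetches {
    /// Run `fetch` at most once per key; later callers reuse the stored result.
    fn fetch_once(&self, key: Key, fetch: impl FnOnce() -> Option<String>) -> Option<String> {
        let slot = self
            .slots
            .lock()
            .unwrap()
            .entry(key)
            .or_insert_with(|| Arc::new(OnceLock::new()))
            .clone();
        slot.get_or_init(fetch).clone()
    }
}

fn main() {
    let fetches = Fetches::default();
    let key = ("https://example.com".to_string(), "ferris".to_string());
    let first = fetches.fetch_once(key.clone(), || Some("from-keyring".into()));
    // The closure is not invoked again; the memoized result is returned.
    let second = fetches.fetch_once(key, || unreachable!("already fetched"));
    assert_eq!(first, second);
}
```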
@@ -680,78 +503,13 @@ impl AuthMiddleware {
 return credentials;
 }

-// Support for known providers, like Hugging Face and S3.
-if let Some(credentials) = HuggingFaceProvider::credentials_for(url)
-.map(Authentication::from)
-.map(Arc::new)
-{
+// Support for known providers, like Hugging Face.
+if let Some(credentials) = HuggingFaceProvider::credentials_for(url).map(Arc::new) {
 debug!("Found Hugging Face credentials for {url}");
 self.cache().fetches.done(key, Some(credentials.clone()));
 return Some(credentials);
 }

-if S3EndpointProvider::is_s3_endpoint(url, self.preview) {
-let mut s3_state = self.s3_credential_state.lock().await;
-
-// If the S3 credential state is uninitialized, initialize it.
-let credentials = match &*s3_state {
-S3CredentialState::Uninitialized => {
-trace!("Initializing S3 credentials for {url}");
-let signer = S3EndpointProvider::create_signer();
-let credentials = Arc::new(Authentication::from(signer));
-*s3_state = S3CredentialState::Initialized(Some(credentials.clone()));
-Some(credentials)
-}
-S3CredentialState::Initialized(credentials) => credentials.clone(),
-};
-
-if let Some(credentials) = credentials {
-debug!("Found S3 credentials for {url}");
-self.cache().fetches.done(key, Some(credentials.clone()));
-return Some(credentials);
-}
-}
-
-// If this is a known URL, authenticate it via the token store.
-if let Some(base_client) = self.base_client.as_ref() {
-if let Some(token_store) = self.pyx_token_store.as_ref() {
-if token_store.is_known_url(url) {
-let mut token_state = self.pyx_token_state.lock().await;
-
-// If the token store is uninitialized, initialize it.
-let token = match *token_state {
-TokenState::Uninitialized => {
-trace!("Initializing token store for {url}");
-let generated = match token_store
-.access_token(base_client, DEFAULT_TOLERANCE_SECS)
-.await
-{
-Ok(Some(token)) => Some(token),
-Ok(None) => None,
-Err(err) => {
-warn!("Failed to generate access tokens: {err}");
-None
-}
-};
-*token_state = TokenState::Initialized(generated.clone());
-generated
-}
-TokenState::Initialized(ref tokens) => tokens.clone(),
-};
-
-let credentials = token.map(|token| {
-trace!("Using credentials from token store for {url}");
-Arc::new(Authentication::from(Credentials::from(token)))
-});
-
-// Register the fetch for this key
-self.cache().fetches.done(key.clone(), credentials.clone());
-
-return credentials;
-}
-}
-}
-
 // Netrc support based on: <https://github.com/gribouille/netrc>.
 let credentials = if let Some(credentials) = self.netrc.get().and_then(|netrc| {
 debug!("Checking netrc for credentials for {url}");
@@ -766,51 +524,6 @@ impl AuthMiddleware {
 debug!("Found credentials in netrc file for {url}");
 Some(credentials)

-// Text credential store support.
-} else if let Some(credentials) = self.text_store.get().await.and_then(|text_store| {
-debug!("Checking text store for credentials for {url}");
-text_store
-.get_credentials(
-url,
-credentials
-.as_ref()
-.and_then(|credentials| credentials.username()),
-)
-.cloned()
-}) {
-debug!("Found credentials in plaintext store for {url}");
-Some(credentials)
-} else if let Some(credentials) = {
-if self.preview.is_enabled(PreviewFeatures::NATIVE_AUTH) {
-let native_store = KeyringProvider::native();
-let username = credentials.and_then(|credentials| credentials.username());
-let display_username = if let Some(username) = username {
-format!("{username}@")
-} else {
-String::new()
-};
-if let Some(index) = index {
-// N.B. The native store performs an exact look up right now, so we use the root
-// URL of the index instead of relying on prefix-matching.
-debug!(
-"Checking native store for credentials for index URL {}{}",
-display_username, index.root_url
-);
-native_store.fetch(&index.root_url, username).await
-} else {
-debug!(
-"Checking native store for credentials for URL {}{}",
-display_username, url
-);
-native_store.fetch(url, username).await
-}
-// TODO(zanieb): We should have a realm fallback here too
-} else {
-None
-}
-} {
-debug!("Found credentials in native store for {url}");
-Some(credentials)
 // N.B. The keyring provider performs lookups for the exact URL then falls back to the host.
 // But, in the absence of an index URL, we cache the result per realm. So in that case,
 // if a keyring implementation returns different credentials for different URLs in the
@@ -821,37 +534,24 @@ impl AuthMiddleware {
 // URLs; instead, we fetch if there's a username or if the user has requested to
 // always authenticate.
 if let Some(username) = credentials.and_then(|credentials| credentials.username()) {
-if let Some(index) = index {
-debug!(
-"Checking keyring for credentials for index URL {}@{}",
-username, index.url
-);
-keyring
-.fetch(DisplaySafeUrl::ref_cast(&index.url), Some(username))
-.await
+if let Some(index_url) = maybe_index_url {
+debug!("Checking keyring for credentials for index URL {}@{}", username, index_url);
+keyring.fetch(DisplaySafeUrl::ref_cast(index_url), Some(username)).await
 } else {
-debug!(
-"Checking keyring for credentials for full URL {}@{}",
-username, url
-);
+debug!("Checking keyring for credentials for full URL {}@{}", username, url);
 keyring.fetch(url, Some(username)).await
 }
 } else if matches!(auth_policy, AuthPolicy::Always) {
-if let Some(index) = index {
+if let Some(index_url) = maybe_index_url {
 debug!(
-"Checking keyring for credentials for index URL {} without username due to `authenticate = always`",
-index.url
+"Checking keyring for credentials for index URL {index_url} without username due to `authenticate = always`"
 );
-keyring
-.fetch(DisplaySafeUrl::ref_cast(&index.url), None)
-.await
+keyring.fetch(DisplaySafeUrl::ref_cast(index_url), None).await
 } else {
 None
 }
 } else {
-debug!(
-"Skipping keyring fetch for {url} without username; use `authenticate = always` to force"
-);
+debug!("Skipping keyring fetch for {url} without username; use `authenticate = always` to force");
 None
 }
 }
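The removed `TokenState::Uninitialized`/`Initialized` pattern above memoizes a possibly-absent token behind a lock, so only the first request pays the cost of minting one. A synchronous sketch with stub types (the real code is async and mints a pyx access token):

```rust
use std::sync::Mutex;

enum TokenState {
    Uninitialized,
    Initialized(Option<String>),
}

struct TokenCache {
    state: Mutex<TokenState>,
}

impl TokenCache {
    /// Mint a token on first use; memoize the result, including `None`
    /// for "no token available", so later calls skip the work.
    fn get_or_init(&self, mint: impl FnOnce() -> Option<String>) -> Option<String> {
        let mut state = self.state.lock().unwrap();
        match &*state {
            TokenState::Uninitialized => {
                let token = mint();
                *state = TokenState::Initialized(token.clone());
                token
            }
            TokenState::Initialized(token) => token.clone(),
        }
    }
}

fn main() {
    let cache = TokenCache { state: Mutex::new(TokenState::Uninitialized) };
    assert_eq!(cache.get_or_init(|| Some("jwt".into())), Some("jwt".into()));
    // Second call returns the memoized token without re-minting.
    assert_eq!(cache.get_or_init(|| unreachable!()), Some("jwt".into()));
}
```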
@@ -861,9 +561,8 @@ impl AuthMiddleware {
 Some(credentials)
 } else {
 None
-};
-let credentials = credentials.map(Authentication::from).map(Arc::new);
+}
+.map(Arc::new);

 // Register the fetch for this key
 self.cache().fetches.done(key, credentials.clone());
@@ -872,9 +571,9 @@ impl AuthMiddleware {
 }
 }

-fn tracing_url(request: &Request, credentials: Option<&Authentication>) -> DisplaySafeUrl {
-let mut url = DisplaySafeUrl::from_url(request.url().clone());
-if let Some(Authentication::Credentials(creds)) = credentials {
+fn tracing_url(request: &Request, credentials: Option<&Credentials>) -> DisplaySafeUrl {
+let mut url = DisplaySafeUrl::from(request.url().clone());
+if let Some(creds) = credentials {
 if let Some(username) = creds.username() {
 let _ = url.set_username(username);
 }
@@ -1015,10 +714,10 @@ mod tests {
 let cache = CredentialsCache::new();
 cache.insert(
 &base_url,
-Arc::new(Authentication::from(Credentials::basic(
+Arc::new(Credentials::basic(
 Some(username.to_string()),
 Some(password.to_string()),
-))),
+)),
 );

 let client = test_client_builder()
@@ -1069,10 +768,7 @@ mod tests {
 let cache = CredentialsCache::new();
 cache.insert(
 &base_url,
-Arc::new(Authentication::from(Credentials::basic(
-Some(username.to_string()),
-None,
-))),
+Arc::new(Credentials::basic(Some(username.to_string()), None)),
 );

 let client = test_client_builder()
@@ -1465,10 +1161,7 @@ mod tests {
 // URL.
 cache.insert(
 &base_url,
-Arc::new(Authentication::from(Credentials::basic(
-Some(username.to_string()),
-None,
-))),
+Arc::new(Credentials::basic(Some(username.to_string()), None)),
 );
 let client = test_client_builder()
 .with(AuthMiddleware::new().with_cache(cache).with_keyring(Some(
@@ -1517,17 +1210,17 @@ mod tests {
 // Seed the cache with our credentials
 cache.insert(
 &base_url_1,
-Arc::new(Authentication::from(Credentials::basic(
+Arc::new(Credentials::basic(
 Some(username_1.to_string()),
 Some(password_1.to_string()),
-))),
+)),
 );
 cache.insert(
 &base_url_2,
-Arc::new(Authentication::from(Credentials::basic(
+Arc::new(Credentials::basic(
 Some(username_2.to_string()),
 Some(password_2.to_string()),
-))),
+)),
 );

 let client = test_client_builder()
@@ -1712,17 +1405,17 @@ mod tests {
 // Seed the cache with our credentials
 cache.insert(
 &base_url_1,
-Arc::new(Authentication::from(Credentials::basic(
+Arc::new(Credentials::basic(
 Some(username_1.to_string()),
 Some(password_1.to_string()),
-))),
+)),
 );
 cache.insert(
 &base_url_2,
-Arc::new(Authentication::from(Credentials::basic(
+Arc::new(Credentials::basic(
 Some(username_2.to_string()),
 Some(password_2.to_string()),
-))),
+)),
 );

 let client = test_client_builder()
@@ -2062,13 +1755,13 @@ mod tests {
 let base_url_2 = base_url.join("prefix_2")?;
 let indexes = Indexes::from_indexes(vec![
 Index {
-url: DisplaySafeUrl::from_url(base_url_1.clone()),
-root_url: DisplaySafeUrl::from_url(base_url_1.clone()),
+url: DisplaySafeUrl::from(base_url_1.clone()),
+root_url: DisplaySafeUrl::from(base_url_1.clone()),
 auth_policy: AuthPolicy::Auto,
 },
 Index {
-url: DisplaySafeUrl::from_url(base_url_2.clone()),
-root_url: DisplaySafeUrl::from_url(base_url_2.clone()),
+url: DisplaySafeUrl::from(base_url_2.clone()),
+root_url: DisplaySafeUrl::from(base_url_2.clone()),
 auth_policy: AuthPolicy::Auto,
 },
 ]);
@@ -2170,8 +1863,8 @@ mod tests {
 let base_url = Url::parse(&server.uri())?;
 let index_url = base_url.join("prefix_1")?;
 let indexes = Indexes::from_indexes(vec![Index {
-url: DisplaySafeUrl::from_url(index_url.clone()),
-root_url: DisplaySafeUrl::from_url(index_url.clone()),
+url: DisplaySafeUrl::from(index_url.clone()),
+root_url: DisplaySafeUrl::from(index_url.clone()),
 auth_policy: AuthPolicy::Auto,
 }]);

@@ -2225,7 +1918,7 @@ mod tests {
 }

 fn indexes_for(url: &Url, policy: AuthPolicy) -> Indexes {
-let mut url = DisplaySafeUrl::from_url(url.clone());
+let mut url = DisplaySafeUrl::from(url.clone());
 url.set_password(None).ok();
 url.set_username("").ok();
 Indexes::from_indexes(vec![Index {
@@ -2326,7 +2019,7 @@ mod tests {
 assert!(matches!(
 client.get(server.uri()).send().await,
 Err(reqwest_middleware::Error::Middleware(_))
-));
+),);

 Ok(())
 }
@@ -2425,20 +2118,20 @@ mod tests {
 DisplaySafeUrl::parse("https://pypi-proxy.fly.dev/basic-auth/simple").unwrap()
 );

-let creds = Authentication::from(Credentials::Basic {
+let creds = Credentials::Basic {
 username: Username::new(Some(String::from("user"))),
 password: None,
-});
+};
 let req = create_request("https://pypi-proxy.fly.dev/basic-auth/simple");
 assert_eq!(
 tracing_url(&req, Some(&creds)),
 DisplaySafeUrl::parse("https://user@pypi-proxy.fly.dev/basic-auth/simple").unwrap()
 );

-let creds = Authentication::from(Credentials::Basic {
+let creds = Credentials::Basic {
 username: Username::new(Some(String::from("user"))),
 password: Some(Password::new(String::from("password"))),
-});
+};
 let req = create_request("https://pypi-proxy.fly.dev/basic-auth/simple");
 assert_eq!(
 tracing_url(&req, Some(&creds)),
@@ -2447,132 +2140,6 @@ mod tests {
 );
 }

-#[test(tokio::test)]
-async fn test_text_store_basic_auth() -> Result<(), Error> {
-let username = "user";
-let password = "password";
-
-let server = start_test_server(username, password).await;
-let base_url = Url::parse(&server.uri())?;
-
-// Create a text credential store with matching credentials
-let mut store = TextCredentialStore::default();
-let service = crate::Service::try_from(base_url.to_string()).unwrap();
-let credentials =
-Credentials::basic(Some(username.to_string()), Some(password.to_string()));
-store.insert(service.clone(), credentials);
-
-let client = test_client_builder()
-.with(
-AuthMiddleware::new()
-.with_cache(CredentialsCache::new())
-.with_text_store(Some(store)),
-)
-.build();
-
-assert_eq!(
-client.get(server.uri()).send().await?.status(),
-200,
-"Credentials should be pulled from the text store"
-);
-
-Ok(())
-}
-
-#[test(tokio::test)]
-async fn test_text_store_disabled() -> Result<(), Error> {
-let username = "user";
-let password = "password";
-let server = start_test_server(username, password).await;
-
-let client = test_client_builder()
-.with(
-AuthMiddleware::new()
-.with_cache(CredentialsCache::new())
-.with_text_store(None), // Explicitly disable text store
-)
-.build();
-
-assert_eq!(
-client.get(server.uri()).send().await?.status(),
-401,
-"Credentials should not be found when text store is disabled"
-);
-
-Ok(())
-}
-
-#[test(tokio::test)]
-async fn test_text_store_by_username() -> Result<(), Error> {
-let username = "testuser";
-let password = "testpass";
-let wrong_username = "wronguser";
-
-let server = start_test_server(username, password).await;
-let base_url = Url::parse(&server.uri())?;
-
-let mut store = TextCredentialStore::default();
-let service = crate::Service::try_from(base_url.to_string()).unwrap();
-let credentials =
-crate::Credentials::basic(Some(username.to_string()), Some(password.to_string()));
-store.insert(service.clone(), credentials);
-
-let client = test_client_builder()
-.with(
-AuthMiddleware::new()
-.with_cache(CredentialsCache::new())
-.with_text_store(Some(store)),
-)
-.build();
-
-// Request with matching username should succeed
-let url_with_username = format!(
-"{}://{}@{}",
-base_url.scheme(),
-username,
-base_url.host_str().unwrap()
-);
-let url_with_port = if let Some(port) = base_url.port() {
-format!("{}:{}{}", url_with_username, port, base_url.path())
-} else {
-format!("{}{}", url_with_username, base_url.path())
-};
-
-assert_eq!(
-client.get(&url_with_port).send().await?.status(),
-200,
-"Request with matching username should succeed"
-);
-
-// Request with non-matching username should fail
-let url_with_wrong_username = format!(
-"{}://{}@{}",
-base_url.scheme(),
-wrong_username,
-base_url.host_str().unwrap()
-);
-let url_with_port = if let Some(port) = base_url.port() {
-format!("{}:{}{}", url_with_wrong_username, port, base_url.path())
-} else {
-format!("{}{}", url_with_wrong_username, base_url.path())
-};
-
-assert_eq!(
-client.get(&url_with_port).send().await?.status(),
-401,
-"Request with non-matching username should fail"
-);
-
-// Request without username should succeed
-assert_eq!(
-client.get(server.uri()).send().await?.status(),
-200,
-"Request with no username should succeed"
-);
-
-Ok(())
-}
-
 fn create_request(url: &str) -> Request {
 Request::new(Method::GET, Url::parse(url).unwrap())
 }
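Both versions of `tracing_url` build a log-safe URL that keeps the username for context but never carries the password. A self-contained sketch using only the `url` crate, matching the behavior the tests above assert:

```rust
use url::Url;

/// Produce a URL for log output: attach the username, never the password.
fn tracing_url(request_url: &Url, username: Option<&str>) -> Url {
    let mut url = request_url.clone();
    if let Some(username) = username {
        // `set_username` fails for URLs that cannot carry credentials
        // (e.g., `mailto:`); ignoring the error matches the diff above.
        let _ = url.set_username(username);
    }
    url
}

fn main() {
    let url = Url::parse("https://pypi-proxy.fly.dev/basic-auth/simple").unwrap();
    let traced = tracing_url(&url, Some("user"));
    assert_eq!(traced.as_str(), "https://user@pypi-proxy.fly.dev/basic-auth/simple");
}
```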
@@ -1,16 +1,10 @@
-use std::borrow::Cow;
 use std::sync::LazyLock;

-use reqsign::aws::DefaultSigner;
 use tracing::debug;
 use url::Url;

-use uv_preview::{Preview, PreviewFeatures};
 use uv_static::EnvVars;
-use uv_warnings::warn_user_once;

 use crate::Credentials;
-use crate::credentials::Token;
 use crate::realm::{Realm, RealmRef};

 /// The [`Realm`] for the Hugging Face platform.
@@ -46,59 +40,10 @@ impl HuggingFaceProvider {
 if RealmRef::from(url) == *HUGGING_FACE_REALM {
 if let Some(token) = HUGGING_FACE_TOKEN.as_ref() {
 return Some(Credentials::Bearer {
-token: Token::new(token.clone()),
+token: token.clone(),
 });
 }
 }
 None
 }
 }
-
-/// The [`Url`] for the S3 endpoint, if set.
-static S3_ENDPOINT_REALM: LazyLock<Option<Realm>> = LazyLock::new(|| {
-let s3_endpoint_url = std::env::var(EnvVars::UV_S3_ENDPOINT_URL).ok()?;
-let url = Url::parse(&s3_endpoint_url).expect("Failed to parse S3 endpoint URL");
-Some(Realm::from(&url))
-});
-
-/// A provider for authentication credentials for S3 endpoints.
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub(crate) struct S3EndpointProvider;
-
-impl S3EndpointProvider {
-/// Returns `true` if the URL matches the configured S3 endpoint.
-pub(crate) fn is_s3_endpoint(url: &Url, preview: Preview) -> bool {
-if let Some(s3_endpoint_realm) = S3_ENDPOINT_REALM.as_ref().map(RealmRef::from) {
-if !preview.is_enabled(PreviewFeatures::S3_ENDPOINT) {
-warn_user_once!(
-"The `s3-endpoint` option is experimental and may change without warning. Pass `--preview-features {}` to disable this warning.",
-PreviewFeatures::S3_ENDPOINT
-);
-}
-
-// Treat any URL on the same domain or subdomain as available for S3 signing.
-let realm = RealmRef::from(url);
-if realm == s3_endpoint_realm || realm.is_subdomain_of(s3_endpoint_realm) {
-return true;
-}
-}
-false
-}
-
-/// Creates a new S3 signer with the configured region.
-///
-/// This is potentially expensive as it may invoke credential helpers, so the result
-/// should be cached.
-pub(crate) fn create_signer() -> DefaultSigner {
-// TODO(charlie): Can `reqsign` infer the region for us? Profiles, for example,
-// often have a region set already.
-let region = std::env::var(EnvVars::AWS_REGION)
-.map(Cow::Owned)
-.unwrap_or_else(|_| {
-std::env::var(EnvVars::AWS_DEFAULT_REGION)
-.map(Cow::Owned)
-.unwrap_or_else(|_| Cow::Borrowed("us-east-1"))
-});
-reqsign::aws::default_signer("s3", &region)
-}
-}
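The removed `create_signer` resolves the AWS region through a fallback chain: `AWS_REGION`, then `AWS_DEFAULT_REGION`, then `us-east-1`. A dependency-free sketch of just that chain (the `reqsign` signer itself is omitted):

```rust
use std::borrow::Cow;

/// Resolve the AWS region: prefer `AWS_REGION`, then `AWS_DEFAULT_REGION`,
/// then fall back to `us-east-1`. `Cow` avoids allocating for the static
/// fallback value.
fn resolve_region() -> Cow<'static, str> {
    std::env::var("AWS_REGION")
        .map(Cow::Owned)
        .or_else(|_| std::env::var("AWS_DEFAULT_REGION").map(Cow::Owned))
        .unwrap_or(Cow::Borrowed("us-east-1"))
}

fn main() {
    // With neither variable set, the default applies.
    println!("resolved region: {}", resolve_region());
}
```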
@@ -1,737 +0,0 @@
-use std::io;
-use std::path::{Path, PathBuf};
-use std::time::Duration;
-
-use base64::Engine;
-use base64::prelude::BASE64_URL_SAFE_NO_PAD;
-use etcetera::BaseStrategy;
-use reqwest_middleware::ClientWithMiddleware;
-use tracing::debug;
-use url::Url;
-
-use uv_cache_key::CanonicalUrl;
-use uv_redacted::{DisplaySafeUrl, DisplaySafeUrlError};
-use uv_small_str::SmallString;
-use uv_state::{StateBucket, StateStore};
-use uv_static::EnvVars;
-
-use crate::credentials::Token;
-use crate::{AccessToken, Credentials, Realm};
-
-/// Retrieve the pyx API key from the environment variable, or return `None`.
-fn read_pyx_api_key() -> Option<String> {
-std::env::var(EnvVars::PYX_API_KEY)
-.ok()
-.or_else(|| std::env::var(EnvVars::UV_API_KEY).ok())
-}
-
-/// Retrieve the pyx authentication token (JWT) from the environment variable, or return `None`.
-fn read_pyx_auth_token() -> Option<AccessToken> {
-std::env::var(EnvVars::PYX_AUTH_TOKEN)
-.ok()
-.or_else(|| std::env::var(EnvVars::UV_AUTH_TOKEN).ok())
-.map(AccessToken::from)
-}
-
-/// An access token with an accompanying refresh token.
-///
-/// Refresh tokens are single-use tokens that can be exchanged for a renewed access token
-/// and a new refresh token.
-#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
-pub struct PyxOAuthTokens {
-pub access_token: AccessToken,
-pub refresh_token: String,
-}
-
-/// An access token with an accompanying API key.
-#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
-pub struct PyxApiKeyTokens {
-pub access_token: AccessToken,
-pub api_key: String,
-}
-
-#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
-pub enum PyxTokens {
-/// An access token with an accompanying refresh token.
-///
-/// Refresh tokens are single-use tokens that can be exchanged for a renewed access token
-/// and a new refresh token.
-OAuth(PyxOAuthTokens),
-/// An access token with an accompanying API key.
-///
-/// API keys are long-lived tokens that can be exchanged for an access token.
-ApiKey(PyxApiKeyTokens),
-}
-
-impl From<PyxTokens> for AccessToken {
-fn from(tokens: PyxTokens) -> Self {
-match tokens {
-PyxTokens::OAuth(PyxOAuthTokens { access_token, .. }) => access_token,
-PyxTokens::ApiKey(PyxApiKeyTokens { access_token, .. }) => access_token,
-}
-}
-}
-
-impl From<PyxTokens> for Credentials {
-fn from(tokens: PyxTokens) -> Self {
-let access_token = match tokens {
-PyxTokens::OAuth(PyxOAuthTokens { access_token, .. }) => access_token,
-PyxTokens::ApiKey(PyxApiKeyTokens { access_token, .. }) => access_token,
-};
-Self::from(access_token)
-}
-}
-
-impl From<AccessToken> for Credentials {
-fn from(access_token: AccessToken) -> Self {
-Self::Bearer {
-token: Token::new(access_token.into_bytes()),
-}
-}
-}
-
-/// The default tolerance for the access token expiration.
-pub const DEFAULT_TOLERANCE_SECS: u64 = 60 * 5;
-
-#[derive(Debug, Clone)]
-struct PyxDirectories {
-/// The root directory for the token store (e.g., `/Users/ferris/.local/share/pyx/credentials`).
-root: PathBuf,
-/// The subdirectory for the token store (e.g., `/Users/ferris/.local/share/uv/credentials/3859a629b26fda96`).
-subdirectory: PathBuf,
-}
-
-impl PyxDirectories {
-/// Detect the [`PyxDirectories`] for a given API URL.
-fn from_api(api: &DisplaySafeUrl) -> Result<Self, io::Error> {
-// Store credentials in a subdirectory based on the API URL.
-let digest = uv_cache_key::cache_digest(&CanonicalUrl::new(api));
-
-// If the user explicitly set `PYX_CREDENTIALS_DIR`, use that.
-if let Some(root) = std::env::var_os(EnvVars::PYX_CREDENTIALS_DIR) {
-let root = std::path::absolute(root)?;
-let subdirectory = root.join(&digest);
-return Ok(Self { root, subdirectory });
-}
-
-// If the user has pyx credentials in their uv credentials directory, read them for
-// backwards compatibility.
-let root = if let Some(tool_dir) = std::env::var_os(EnvVars::UV_CREDENTIALS_DIR) {
-std::path::absolute(tool_dir)?
-} else {
-StateStore::from_settings(None)?.bucket(StateBucket::Credentials)
-};
-let subdirectory = root.join(&digest);
-if subdirectory.exists() {
-return Ok(Self { root, subdirectory });
-}
-
-// Otherwise, use (e.g.) `~/.local/share/pyx`.
-let Ok(xdg) = etcetera::base_strategy::choose_base_strategy() else {
-return Err(io::Error::new(
-io::ErrorKind::NotFound,
-"Could not determine user data directory",
-));
-};
-
-let root = xdg.data_dir().join("pyx").join("credentials");
-let subdirectory = root.join(&digest);
-Ok(Self { root, subdirectory })
-}
-}
-
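`PyxDirectories::from_api` keys the on-disk credentials layout on a digest of the API URL, so tokens for different servers never collide. A sketch of that layout rule; `DefaultHasher` stands in for uv's `cache_digest` (which, unlike this one, is stable across runs):

```rust
use std::hash::{DefaultHasher, Hash, Hasher};
use std::path::PathBuf;

/// Derive a per-server credentials subdirectory from the API URL.
fn credentials_dir(root: PathBuf, api_url: &str) -> PathBuf {
    let mut hasher = DefaultHasher::new();
    api_url.hash(&mut hasher);
    // A fixed-width hex digest keeps directory names short and unambiguous.
    root.join(format!("{:016x}", hasher.finish()))
}

fn main() {
    let root = PathBuf::from("/home/ferris/.local/share/pyx/credentials");
    let dir = credentials_dir(root, "https://api.pyx.dev");
    println!("{}", dir.display()); // .../credentials/<16-hex-digit digest>
}
```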
-#[derive(Debug, Clone)]
-pub struct PyxTokenStore {
-/// The root directory for the token store (e.g., `/Users/ferris/.local/share/pyx/credentials`).
-root: PathBuf,
-/// The subdirectory for the token store (e.g., `/Users/ferris/.local/share/uv/credentials/3859a629b26fda96`).
-subdirectory: PathBuf,
-/// The API URL for the token store (e.g., `https://api.pyx.dev`).
-api: DisplaySafeUrl,
-/// The CDN domain for the token store (e.g., `astralhosted.com`).
-cdn: SmallString,
-}
-
-impl PyxTokenStore {
-/// Create a new [`PyxTokenStore`] from settings.
-pub fn from_settings() -> Result<Self, TokenStoreError> {
-// Read the API URL and CDN domain from the environment variables, or fallback to the
-// defaults.
-let api = if let Ok(api_url) = std::env::var(EnvVars::PYX_API_URL) {
-DisplaySafeUrl::parse(&api_url)
-} else {
-DisplaySafeUrl::parse("https://api.pyx.dev")
-}?;
-let cdn = std::env::var(EnvVars::PYX_CDN_DOMAIN)
-.ok()
-.map(SmallString::from)
-.unwrap_or_else(|| SmallString::from(arcstr::literal!("astralhosted.com")));
-
-// Determine the root directory for the token store.
-let PyxDirectories { root, subdirectory } = PyxDirectories::from_api(&api)?;
-
-Ok(Self {
-root,
-subdirectory,
-api,
-cdn,
-})
-}
-
-/// Return the root directory for the token store.
-pub fn root(&self) -> &Path {
-&self.root
-}
-
-/// Return the API URL for the token store.
-pub fn api(&self) -> &DisplaySafeUrl {
-&self.api
-}
-
-/// Get or initialize an [`AccessToken`] from the store.
-///
-/// If an access token is set in the environment, it will be returned as-is.
-///
-/// If an access token is present on-disk, it will be returned (and refreshed, if necessary).
-///
-/// If no access token is found, but an API key is present, the API key will be used to
-/// bootstrap an access token.
-pub async fn access_token(
-&self,
-client: &ClientWithMiddleware,
-tolerance_secs: u64,
-) -> Result<Option<AccessToken>, TokenStoreError> {
-// If the access token is already set in the environment, return it.
-if let Some(access_token) = read_pyx_auth_token() {
-return Ok(Some(access_token));
-}
-
-// Initialize the tokens from the store.
-let tokens = self.init(client, tolerance_secs).await?;
-
-// Extract the access token from the OAuth tokens or API key.
-Ok(tokens.map(AccessToken::from))
-}
-
-/// Initialize the [`PyxTokens`] from the store.
-///
-/// If an access token is already present, it will be returned (and refreshed, if necessary).
-///
-/// If no access token is found, but an API key is present, the API key will be used to
-/// bootstrap an access token.
-pub async fn init(
-&self,
-client: &ClientWithMiddleware,
-tolerance_secs: u64,
-) -> Result<Option<PyxTokens>, TokenStoreError> {
-match self.read().await? {
-Some(tokens) => {
-// Refresh the tokens if they are expired.
-let tokens = self.refresh(tokens, client, tolerance_secs).await?;
-Ok(Some(tokens))
-}
-None => {
-// If no tokens are present, bootstrap them from an API key.
-self.bootstrap(client).await
-}
-}
-}
-
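`access_token` and `init` above encode a strict precedence: an environment token short-circuits everything, then tokens on disk are used (refreshed if near expiry), and finally an API key can bootstrap a fresh access token. A sketch of that ordering with all three sources stubbed as `Option<String>` (hypothetical names):

```rust
/// Resolve an access token: environment wins, then disk, then bootstrap.
fn resolve_access_token(
    env_token: Option<String>,
    disk_tokens: Option<String>,
    api_key: Option<String>,
) -> Option<String> {
    env_token
        .or(disk_tokens) // would be refreshed here if near expiry
        .or_else(|| api_key.map(|key| format!("minted-via:{key}")))
}

fn main() {
    assert_eq!(
        resolve_access_token(None, None, Some("pyx-key".into())),
        Some("minted-via:pyx-key".to_string())
    );
    // The environment always wins when set.
    assert_eq!(
        resolve_access_token(Some("env".into()), Some("disk".into()), None),
        Some("env".to_string())
    );
}
```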
-/// Write the tokens to the store.
-pub async fn write(&self, tokens: &PyxTokens) -> Result<(), TokenStoreError> {
-fs_err::tokio::create_dir_all(&self.subdirectory).await?;
-match tokens {
-PyxTokens::OAuth(tokens) => {
-// Write OAuth tokens to a generic `tokens.json` file.
-fs_err::tokio::write(
-self.subdirectory.join("tokens.json"),
-serde_json::to_vec(tokens)?,
-)
-.await?;
-}
-PyxTokens::ApiKey(tokens) => {
-// Write API key tokens to a file based on the API key.
-let digest = uv_cache_key::cache_digest(&tokens.api_key);
-fs_err::tokio::write(
-self.subdirectory.join(format!("{digest}.json")),
-&tokens.access_token,
-)
-.await?;
-}
-}
-Ok(())
-}
-
-/// Returns `true` if the user appears to have an authentication token set.
-pub fn has_auth_token(&self) -> bool {
-read_pyx_auth_token().is_some()
-}
-
-/// Returns `true` if the user appears to have an API key set.
-pub fn has_api_key(&self) -> bool {
-read_pyx_api_key().is_some()
-}
-
-/// Returns `true` if the user appears to have OAuth tokens stored on disk.
-pub fn has_oauth_tokens(&self) -> bool {
-self.subdirectory.join("tokens.json").is_file()
-}
-
-/// Returns `true` if the user appears to have credentials (which may be invalid).
-pub fn has_credentials(&self) -> bool {
-self.has_auth_token() || self.has_api_key() || self.has_oauth_tokens()
-}
-
-/// Read the tokens from the store.
-pub async fn read(&self) -> Result<Option<PyxTokens>, TokenStoreError> {
-if let Some(api_key) = read_pyx_api_key() {
-// Read the API key tokens from a file based on the API key.
-let digest = uv_cache_key::cache_digest(&api_key);
-match fs_err::tokio::read(self.subdirectory.join(format!("{digest}.json"))).await {
-Ok(data) => {
-let access_token =
-AccessToken::from(String::from_utf8(data).expect("Invalid UTF-8"));
-Ok(Some(PyxTokens::ApiKey(PyxApiKeyTokens {
-access_token,
-api_key,
-})))
-}
-Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(None),
-Err(err) => Err(err.into()),
-}
-} else {
-match fs_err::tokio::read(self.subdirectory.join("tokens.json")).await {
-Ok(data) => {
-let tokens: PyxOAuthTokens = serde_json::from_slice(&data)?;
-Ok(Some(PyxTokens::OAuth(tokens)))
-}
-Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(None),
-Err(err) => Err(err.into()),
-}
-}
-}
-
-/// Remove the tokens from the store.
-pub async fn delete(&self) -> Result<(), io::Error> {
-fs_err::tokio::remove_dir_all(&self.subdirectory).await?;
-Ok(())
-}
-
-/// Bootstrap the tokens from the store.
-async fn bootstrap(
-&self,
-client: &ClientWithMiddleware,
-) -> Result<Option<PyxTokens>, TokenStoreError> {
-#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
-struct Payload {
-access_token: AccessToken,
-}
-
-// Retrieve the API key from the environment variable, if set.
-let Some(api_key) = read_pyx_api_key() else {
-return Ok(None);
-};
-
-debug!("Bootstrapping access token from an API key");
-
-// Parse the API URL.
-let mut url = self.api.clone();
-url.set_path("auth/cli/access-token");
-
-let mut request = reqwest::Request::new(reqwest::Method::POST, Url::from(url));
-request.headers_mut().insert(
-"Authorization",
-reqwest::header::HeaderValue::from_str(&format!("Bearer {api_key}"))?,
-);
-
-let response = client.execute(request).await?;
-let Payload { access_token } = response.error_for_status()?.json::<Payload>().await?;
-let tokens = PyxTokens::ApiKey(PyxApiKeyTokens {
-access_token,
-api_key,
-});
-
-// Write the tokens to disk.
-self.write(&tokens).await?;
-
-Ok(Some(tokens))
-}
-
-/// Refresh the tokens in the store, if they are expired.
-///
-/// In theory, we should _also_ refresh if we hit a 401; but for now, we only refresh ahead of
-/// time.
-async fn refresh(
-&self,
-tokens: PyxTokens,
-client: &ClientWithMiddleware,
-tolerance_secs: u64,
-) -> Result<PyxTokens, TokenStoreError> {
-// Decode the access token.
-let jwt = PyxJwt::decode(match &tokens {
-PyxTokens::OAuth(PyxOAuthTokens { access_token, .. }) => access_token,
-PyxTokens::ApiKey(PyxApiKeyTokens { access_token, .. }) => access_token,
-})?;
-
-// If the access token is expired, refresh it.
-let is_up_to_date = match jwt.exp {
-None => {
-debug!("Access token has no expiration; refreshing...");
-false
-}
-Some(..) if tolerance_secs == 0 => {
-debug!("Refreshing access token due to zero tolerance...");
-false
-}
-Some(jwt) => {
-let exp = jiff::Timestamp::from_second(jwt)?;
-let now = jiff::Timestamp::now();
-if exp < now {
-debug!("Access token is expired (`{exp}`); refreshing...");
-false
-} else if exp < now + Duration::from_secs(tolerance_secs) {
-debug!(
-"Access token will expire within the tolerance (`{exp}`); refreshing..."
-);
-false
-} else {
-debug!("Access token is up-to-date (`{exp}`)");
-true
-}
-}
-};
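The freshness test in `refresh` treats a token as stale not only once `exp` has passed but already when it falls inside a tolerance window, so it is renewed before it can expire mid-request. A self-contained sketch using `std::time` in place of `jiff`:

```rust
use std::time::{Duration, SystemTime, UNIX_EPOCH};

/// Is the token still usable, given a refresh-ahead tolerance window?
fn is_fresh(exp_unix_secs: Option<u64>, tolerance: Duration) -> bool {
    let Some(exp) = exp_unix_secs else {
        return false; // no expiration claim: refresh defensively
    };
    let now = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .expect("clock before 1970")
        .as_secs();
    // Stale as soon as `exp` falls within `tolerance` of now.
    exp > now + tolerance.as_secs()
}

fn main() {
    let now = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_secs();
    assert!(is_fresh(Some(now + 3600), Duration::from_secs(300)));
    assert!(!is_fresh(Some(now + 60), Duration::from_secs(300))); // inside the window
}
```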
|
|
||||||
if is_up_to_date {
|
|
||||||
return Ok(tokens);
|
|
||||||
}
|
|
||||||
|
|
||||||
let tokens = match tokens {
|
|
||||||
PyxTokens::OAuth(PyxOAuthTokens { refresh_token, .. }) => {
|
|
||||||
// Parse the API URL.
|
|
||||||
let mut url = self.api.clone();
|
|
||||||
url.set_path("auth/cli/refresh");
|
|
||||||
|
|
||||||
let mut request = reqwest::Request::new(reqwest::Method::POST, Url::from(url));
|
|
||||||
let body = serde_json::json!({
|
|
||||||
"refresh_token": refresh_token
|
|
||||||
});
|
|
||||||
*request.body_mut() = Some(body.to_string().into());
|
|
||||||
|
|
||||||
let response = client.execute(request).await?;
|
|
||||||
let tokens = response
|
|
||||||
.error_for_status()?
|
|
||||||
.json::<PyxOAuthTokens>()
|
|
||||||
.await?;
|
|
||||||
PyxTokens::OAuth(tokens)
|
|
||||||
}
|
|
||||||
PyxTokens::ApiKey(PyxApiKeyTokens { api_key, .. }) => {
|
|
||||||
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
|
|
||||||
struct Payload {
|
|
||||||
access_token: AccessToken,
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse the API URL.
|
|
||||||
let mut url = self.api.clone();
|
|
||||||
url.set_path("auth/cli/access-token");
|
|
||||||
|
|
||||||
let mut request = reqwest::Request::new(reqwest::Method::POST, Url::from(url));
|
|
||||||
request.headers_mut().insert(
|
|
||||||
"Authorization",
|
|
||||||
reqwest::header::HeaderValue::from_str(&format!("Bearer {api_key}"))?,
|
|
||||||
);
|
|
||||||
|
|
||||||
let response = client.execute(request).await?;
|
|
||||||
let Payload { access_token } =
|
|
||||||
response.error_for_status()?.json::<Payload>().await?;
|
|
||||||
PyxTokens::ApiKey(PyxApiKeyTokens {
|
|
||||||
access_token,
|
|
||||||
api_key,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Write the new tokens to disk.
|
|
||||||
self.write(&tokens).await?;
|
|
||||||
Ok(tokens)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns `true` if the given URL is "known" to this token store (i.e., should be
|
|
||||||
/// authenticated using the store's tokens).
|
|
||||||
pub fn is_known_url(&self, url: &Url) -> bool {
|
|
||||||
is_known_url(url, &self.api, &self.cdn)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Returns `true` if the URL is on a "known" domain (i.e., the same domain as the API or CDN).
|
|
||||||
///
|
|
||||||
/// Like [`is_known_url`](Self::is_known_url), but also returns `true` if the API is on the
|
|
||||||
/// subdomain of the URL (e.g., if the API is `api.pyx.dev` and the URL is `pyx.dev`).
|
|
||||||
pub fn is_known_domain(&self, url: &Url) -> bool {
|
|
||||||
is_known_domain(url, &self.api, &self.cdn)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(thiserror::Error, Debug)]
|
|
||||||
pub enum TokenStoreError {
|
|
||||||
#[error(transparent)]
|
|
||||||
Url(#[from] DisplaySafeUrlError),
|
|
||||||
#[error(transparent)]
|
|
||||||
Io(#[from] io::Error),
|
|
||||||
#[error(transparent)]
|
|
||||||
Serialization(#[from] serde_json::Error),
|
|
||||||
#[error(transparent)]
|
|
||||||
Reqwest(#[from] reqwest::Error),
|
|
||||||
#[error(transparent)]
|
|
||||||
ReqwestMiddleware(#[from] reqwest_middleware::Error),
|
|
||||||
#[error(transparent)]
|
|
||||||
InvalidHeaderValue(#[from] reqwest::header::InvalidHeaderValue),
|
|
||||||
#[error(transparent)]
|
|
||||||
Jiff(#[from] jiff::Error),
|
|
||||||
#[error(transparent)]
|
|
||||||
Jwt(#[from] JwtError),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TokenStoreError {
|
|
||||||
/// Returns `true` if the error is a 401 (Unauthorized) error.
|
|
||||||
pub fn is_unauthorized(&self) -> bool {
|
|
||||||
match self {
|
|
||||||
Self::Reqwest(err) => err.status() == Some(reqwest::StatusCode::UNAUTHORIZED),
|
|
||||||
Self::ReqwestMiddleware(err) => err.status() == Some(reqwest::StatusCode::UNAUTHORIZED),
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}

/// The payload of the JWT.
#[derive(Debug, serde::Deserialize)]
pub struct PyxJwt {
    /// The expiration time of the JWT, as a Unix timestamp.
    pub exp: Option<i64>,
    /// The issuer of the JWT.
    pub iss: Option<String>,
    /// The name of the organization, if any.
    #[serde(rename = "urn:pyx:org_name")]
    pub name: Option<String>,
}

impl PyxJwt {
    /// Decode the JWT from the access token.
    pub fn decode(access_token: &AccessToken) -> Result<Self, JwtError> {
        // Split into header, payload, and signature. Note `split` (rather than
        // `splitn(3, ..)`) is required here: with `splitn`, the third segment would
        // swallow any extra `.`-separated segments and the check below could never fire.
        let mut token_segments = access_token.as_str().split('.');

        let _header = token_segments.next().ok_or(JwtError::MissingHeader)?;
        let payload = token_segments.next().ok_or(JwtError::MissingPayload)?;
        let _signature = token_segments.next().ok_or(JwtError::MissingSignature)?;
        if token_segments.next().is_some() {
            return Err(JwtError::TooManySegments);
        }

        let decoded = BASE64_URL_SAFE_NO_PAD.decode(payload)?;

        let jwt = serde_json::from_slice::<Self>(&decoded)?;
        Ok(jwt)
    }
}
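
// A minimal sketch of the decode path (hypothetical claims; `e30` is the
// base64url encoding of `{}` and stands in for a real JWT header; the
// signature segment is never verified here):
//
//     let payload = BASE64_URL_SAFE_NO_PAD.encode(r#"{"exp":1735689600,"iss":"pyx"}"#);
//     let token = format!("e30.{payload}.signature");
//     // Decoding yields `exp == Some(1735689600)`, `iss == Some("pyx")`, `name == None`.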

#[derive(thiserror::Error, Debug)]
pub enum JwtError {
    #[error("JWT is missing a header")]
    MissingHeader,
    #[error("JWT is missing a payload")]
    MissingPayload,
    #[error("JWT is missing a signature")]
    MissingSignature,
    #[error("JWT has too many segments")]
    TooManySegments,
    #[error(transparent)]
    Base64(#[from] base64::DecodeError),
    #[error(transparent)]
    Serde(#[from] serde_json::Error),
}

fn is_known_url(url: &Url, api: &DisplaySafeUrl, cdn: &str) -> bool {
    // Determine whether the URL matches the API realm.
    if Realm::from(url) == Realm::from(&**api) {
        return true;
    }

    // Determine whether the URL matches the CDN domain (or a subdomain of it).
    //
    // For example, if the URL is on `files.astralhosted.com` and the CDN domain is
    // `astralhosted.com`, consider it known.
    if matches!(url.scheme(), "https") && matches_domain(url, cdn) {
        return true;
    }

    false
}

fn is_known_domain(url: &Url, api: &DisplaySafeUrl, cdn: &str) -> bool {
    // Determine whether the API is on the given URL's domain (or a subdomain of it).
    if let Some(domain) = url.domain() {
        if matches_domain(api, domain) {
            return true;
        }
    }
    is_known_url(url, api, cdn)
}

/// Returns `true` if the target URL is on the given domain.
fn matches_domain(url: &Url, domain: &str) -> bool {
    url.domain().is_some_and(|subdomain| {
        subdomain == domain
            || subdomain
                .strip_suffix(domain)
                .is_some_and(|prefix| prefix.ends_with('.'))
    })
}
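
// For example, `matches_domain` accepts `astralhosted.com` itself and any true
// subdomain (`files.astralhosted.com`), but rejects lookalike suffixes such as
// `badastralhosted.com`, because the stripped prefix must end with a `.`.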

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_is_known_url() {
        let api_url = DisplaySafeUrl::parse("https://api.pyx.dev").unwrap();
        let cdn_domain = "astralhosted.com";

        // Same realm as API.
        assert!(is_known_url(
            &Url::parse("https://api.pyx.dev/simple/").unwrap(),
            &api_url,
            cdn_domain
        ));

        // Different path on same API domain.
        assert!(is_known_url(
            &Url::parse("https://api.pyx.dev/v1/").unwrap(),
            &api_url,
            cdn_domain
        ));

        // CDN domain.
        assert!(is_known_url(
            &Url::parse("https://astralhosted.com/packages/").unwrap(),
            &api_url,
            cdn_domain
        ));

        // CDN subdomain.
        assert!(is_known_url(
            &Url::parse("https://files.astralhosted.com/packages/").unwrap(),
            &api_url,
            cdn_domain
        ));

        // CDN on HTTP.
        assert!(!is_known_url(
            &Url::parse("http://astralhosted.com/packages/").unwrap(),
            &api_url,
            cdn_domain
        ));

        // Unknown domain.
        assert!(!is_known_url(
            &Url::parse("https://pypi.org/simple/").unwrap(),
            &api_url,
            cdn_domain
        ));

        // Similar but not matching domain.
        assert!(!is_known_url(
            &Url::parse("https://badastralhosted.com/packages/").unwrap(),
            &api_url,
            cdn_domain
        ));
    }

    #[test]
    fn test_is_known_domain() {
        let api_url = DisplaySafeUrl::parse("https://api.pyx.dev").unwrap();
        let cdn_domain = "astralhosted.com";

        // Same realm as API.
        assert!(is_known_domain(
            &Url::parse("https://api.pyx.dev/simple/").unwrap(),
            &api_url,
            cdn_domain
        ));

        // API super-domain.
        assert!(is_known_domain(
            &Url::parse("https://pyx.dev").unwrap(),
            &api_url,
            cdn_domain
        ));

        // API subdomain.
        assert!(!is_known_domain(
            &Url::parse("https://foo.api.pyx.dev").unwrap(),
            &api_url,
            cdn_domain
        ));

        // Different subdomain.
        assert!(!is_known_domain(
            &Url::parse("https://beta.pyx.dev/").unwrap(),
            &api_url,
            cdn_domain
        ));

        // CDN domain.
        assert!(is_known_domain(
            &Url::parse("https://astralhosted.com/packages/").unwrap(),
            &api_url,
            cdn_domain
        ));

        // CDN subdomain.
        assert!(is_known_domain(
            &Url::parse("https://files.astralhosted.com/packages/").unwrap(),
            &api_url,
            cdn_domain
        ));

        // Unknown domain.
        assert!(!is_known_domain(
            &Url::parse("https://pypi.org/simple/").unwrap(),
            &api_url,
            cdn_domain
        ));

        // Different TLD.
        assert!(!is_known_domain(
            &Url::parse("https://pyx.com/").unwrap(),
            &api_url,
            cdn_domain
        ));
    }

    #[test]
    fn test_matches_domain() {
        assert!(matches_domain(
            &Url::parse("https://example.com").unwrap(),
            "example.com"
        ));
        assert!(matches_domain(
            &Url::parse("https://foo.example.com").unwrap(),
            "example.com"
        ));
        assert!(matches_domain(
            &Url::parse("https://bar.foo.example.com").unwrap(),
            "example.com"
        ));

        assert!(!matches_domain(
            &Url::parse("https://example.com").unwrap(),
            "other.com"
        ));
        assert!(!matches_domain(
            &Url::parse("https://example.org").unwrap(),
            "example.com"
        ));
        assert!(!matches_domain(
            &Url::parse("https://badexample.com").unwrap(),
            "example.com"
        ));
    }
}

@@ -1,7 +1,6 @@
 use std::hash::{Hash, Hasher};
 use std::{fmt::Display, fmt::Formatter};
 use url::Url;
-use uv_redacted::DisplaySafeUrl;
 use uv_small_str::SmallString;

 /// Used to determine if authentication information should be retained on a new URL.
@@ -24,18 +23,12 @@ use uv_small_str::SmallString;
 // However, `url` (and therefore `reqwest`) sets the `port` to `None` if it matches the default port
 // so we do not need any special handling here.
 #[derive(Debug, Clone)]
-pub struct Realm {
+pub(crate) struct Realm {
     scheme: SmallString,
     host: Option<SmallString>,
     port: Option<u16>,
 }

-impl From<&DisplaySafeUrl> for Realm {
-    fn from(url: &DisplaySafeUrl) -> Self {
-        Self::from(&**url)
-    }
-}
-
 impl From<&Url> for Realm {
     fn from(url: &Url) -> Self {
         Self {
@@ -82,27 +75,12 @@ impl Hash for Realm {

 /// A reference to a [`Realm`] that can be used for zero-allocation comparisons.
 #[derive(Debug, Copy, Clone)]
-pub struct RealmRef<'a> {
+pub(crate) struct RealmRef<'a> {
     scheme: &'a str,
     host: Option<&'a str>,
     port: Option<u16>,
 }

-impl RealmRef<'_> {
-    /// Returns true if this realm is a subdomain of the other realm.
-    pub(crate) fn is_subdomain_of(&self, other: Self) -> bool {
-        other.scheme == self.scheme
-            && other.port == self.port
-            && other.host.is_some_and(|other_host| {
-                self.host.is_some_and(|self_host| {
-                    self_host
-                        .strip_suffix(other_host)
-                        .is_some_and(|prefix| prefix.ends_with('.'))
-                })
-            })
-    }
-}
-
 impl<'a> From<&'a Url> for RealmRef<'a> {
     fn from(url: &'a Url) -> Self {
         Self {
@@ -237,87 +215,4 @@ mod tests {

         Ok(())
     }
-
-    #[test]
-    fn test_is_subdomain_of() -> Result<(), ParseError> {
-        use crate::realm::RealmRef;
-
-        // Subdomain relationship: sub.example.com is a subdomain of example.com
-        let subdomain_url = Url::parse("https://sub.example.com")?;
-        let domain_url = Url::parse("https://example.com")?;
-        let subdomain = RealmRef::from(&subdomain_url);
-        let domain = RealmRef::from(&domain_url);
-        assert!(subdomain.is_subdomain_of(domain));
-
-        // Deeper subdomain: foo.bar.example.com is a subdomain of example.com
-        let deep_subdomain_url = Url::parse("https://foo.bar.example.com")?;
-        let deep_subdomain = RealmRef::from(&deep_subdomain_url);
-        assert!(deep_subdomain.is_subdomain_of(domain));
-
-        // Deeper subdomain: foo.bar.example.com is also a subdomain of bar.example.com
-        let parent_subdomain_url = Url::parse("https://bar.example.com")?;
-        let parent_subdomain = RealmRef::from(&parent_subdomain_url);
-        assert!(deep_subdomain.is_subdomain_of(parent_subdomain));
-
-        // Not a subdomain: example.com is not a subdomain of sub.example.com
-        assert!(!domain.is_subdomain_of(subdomain));
-
-        // Same domain is not a subdomain of itself
-        assert!(!domain.is_subdomain_of(domain));
-
-        // Different TLD: example.org is not a subdomain of example.com
-        let different_tld_url = Url::parse("https://example.org")?;
-        let different_tld = RealmRef::from(&different_tld_url);
-        assert!(!different_tld.is_subdomain_of(domain));
-
-        // Partial match but not a subdomain: notexample.com is not a subdomain of example.com
-        let partial_match_url = Url::parse("https://notexample.com")?;
-        let partial_match = RealmRef::from(&partial_match_url);
-        assert!(!partial_match.is_subdomain_of(domain));
-
-        // Different scheme: http subdomain is not a subdomain of https domain
-        let http_subdomain_url = Url::parse("http://sub.example.com")?;
-        let https_domain_url = Url::parse("https://example.com")?;
-        let http_subdomain = RealmRef::from(&http_subdomain_url);
-        let https_domain = RealmRef::from(&https_domain_url);
-        assert!(!http_subdomain.is_subdomain_of(https_domain));
-
-        // Different port: same subdomain with different port is not a subdomain
-        let subdomain_port_8080_url = Url::parse("https://sub.example.com:8080")?;
-        let domain_port_9090_url = Url::parse("https://example.com:9090")?;
-        let subdomain_port_8080 = RealmRef::from(&subdomain_port_8080_url);
-        let domain_port_9090 = RealmRef::from(&domain_port_9090_url);
-        assert!(!subdomain_port_8080.is_subdomain_of(domain_port_9090));
-
-        // Same port: subdomain with same explicit port is a subdomain
-        let subdomain_with_port_url = Url::parse("https://sub.example.com:8080")?;
-        let domain_with_port_url = Url::parse("https://example.com:8080")?;
-        let subdomain_with_port = RealmRef::from(&subdomain_with_port_url);
-        let domain_with_port = RealmRef::from(&domain_with_port_url);
-        assert!(subdomain_with_port.is_subdomain_of(domain_with_port));
-
-        // Default port handling: subdomain with implicit port is a subdomain
-        let subdomain_default_url = Url::parse("https://sub.example.com")?;
-        let domain_explicit_443_url = Url::parse("https://example.com:443")?;
-        let subdomain_default = RealmRef::from(&subdomain_default_url);
-        let domain_explicit_443 = RealmRef::from(&domain_explicit_443_url);
-        assert!(subdomain_default.is_subdomain_of(domain_explicit_443));
-
-        // Edge case: empty host (shouldn't happen with valid URLs but testing defensive code)
-        let file_url = Url::parse("file:///path/to/file")?;
-        let https_url = Url::parse("https://example.com")?;
-        let file_realm = RealmRef::from(&file_url);
-        let https_realm = RealmRef::from(&https_url);
-        assert!(!file_realm.is_subdomain_of(https_realm));
-        assert!(!https_realm.is_subdomain_of(file_realm));
-
-        // Subdomain with path (path should be ignored)
-        let subdomain_with_path_url = Url::parse("https://sub.example.com/path")?;
-        let domain_with_path_url = Url::parse("https://example.com/other")?;
-        let subdomain_with_path = RealmRef::from(&subdomain_with_path_url);
-        let domain_with_path = RealmRef::from(&domain_with_path_url);
-        assert!(subdomain_with_path.is_subdomain_of(domain_with_path));
-
-        Ok(())
-    }
 }

@@ -1,95 +0,0 @@
use serde::{Deserialize, Serialize};
use std::str::FromStr;
use thiserror::Error;
use url::Url;
use uv_redacted::{DisplaySafeUrl, DisplaySafeUrlError};

#[derive(Error, Debug)]
pub enum ServiceParseError {
    #[error(transparent)]
    InvalidUrl(#[from] DisplaySafeUrlError),
    #[error("Unsupported scheme: {0}")]
    UnsupportedScheme(String),
    #[error("HTTPS is required for non-local hosts")]
    HttpsRequired,
}

/// A service URL that wraps [`DisplaySafeUrl`] for CLI usage.
///
/// This type provides automatic URL parsing and validation when used as a CLI argument,
/// eliminating the need for manual parsing in command functions.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[serde(transparent)]
pub struct Service(DisplaySafeUrl);

impl Service {
    /// Get the underlying [`DisplaySafeUrl`].
    pub fn url(&self) -> &DisplaySafeUrl {
        &self.0
    }

    /// Convert into the underlying [`DisplaySafeUrl`].
    pub fn into_url(self) -> DisplaySafeUrl {
        self.0
    }

    /// Validate that the URL scheme is supported.
    fn check_scheme(url: &Url) -> Result<(), ServiceParseError> {
        match url.scheme() {
            "https" => Ok(()),
            "http" if matches!(url.host_str(), Some("localhost" | "127.0.0.1")) => Ok(()),
            "http" => Err(ServiceParseError::HttpsRequired),
            value => Err(ServiceParseError::UnsupportedScheme(value.to_string())),
        }
    }
}

impl FromStr for Service {
    type Err = ServiceParseError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // First, try parsing as-is.
        let url = match DisplaySafeUrl::parse(s) {
            Ok(url) => url,
            Err(DisplaySafeUrlError::Url(url::ParseError::RelativeUrlWithoutBase)) => {
                // If it's a relative URL, try prepending `https://`.
                let with_https = format!("https://{s}");
                DisplaySafeUrl::parse(&with_https)?
            }
            Err(err) => return Err(err.into()),
        };

        Self::check_scheme(&url)?;

        Ok(Self(url))
    }
}

impl std::fmt::Display for Service {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.0.fmt(f)
    }
}

impl TryFrom<String> for Service {
    type Error = ServiceParseError;

    fn try_from(value: String) -> Result<Self, Self::Error> {
        Self::from_str(&value)
    }
}

impl From<Service> for String {
    fn from(service: Service) -> Self {
        service.to_string()
    }
}

impl TryFrom<DisplaySafeUrl> for Service {
    type Error = ServiceParseError;

    fn try_from(value: DisplaySafeUrl) -> Result<Self, Self::Error> {
        Self::check_scheme(&value)?;
        Ok(Self(value))
    }
}
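
// A minimal parsing sketch (illustrative inputs): a bare host is promoted to
// HTTPS, while plain HTTP is rejected unless the host is local:
//
//     Service::from_str("example.com")?;            // -> https://example.com/
//     Service::from_str("http://localhost:8000")?;  // Ok: localhost may use HTTP
//     Service::from_str("http://example.com");      // Err(HttpsRequired)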

@@ -1,688 +0,0 @@
use std::ops::Deref;
use std::path::{Path, PathBuf};

use fs_err as fs;
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use uv_fs::{LockedFile, LockedFileError, LockedFileMode, with_added_extension};
use uv_preview::{Preview, PreviewFeatures};
use uv_redacted::DisplaySafeUrl;

use uv_state::{StateBucket, StateStore};
use uv_static::EnvVars;

use crate::credentials::{Password, Token, Username};
use crate::realm::Realm;
use crate::service::Service;
use crate::{Credentials, KeyringProvider};

/// The storage backend to use in `uv auth` commands.
#[derive(Debug)]
pub enum AuthBackend {
    // TODO(zanieb): Right now, we're using a keyring provider for the system store but that's just
    // where the native implementation is living at the moment. We should consider refactoring these
    // into a shared API in the future.
    System(KeyringProvider),
    TextStore(TextCredentialStore, LockedFile),
}

impl AuthBackend {
    pub async fn from_settings(preview: Preview) -> Result<Self, TomlCredentialError> {
        // If preview is enabled, we'll use the system-native store.
        if preview.is_enabled(PreviewFeatures::NATIVE_AUTH) {
            return Ok(Self::System(KeyringProvider::native()));
        }

        // Otherwise, we'll use the plaintext credential store.
        let path = TextCredentialStore::default_file()?;
        match TextCredentialStore::read(&path).await {
            Ok((store, lock)) => Ok(Self::TextStore(store, lock)),
            Err(err)
                if err
                    .as_io_error()
                    .is_some_and(|err| err.kind() == std::io::ErrorKind::NotFound) =>
            {
                Ok(Self::TextStore(
                    TextCredentialStore::default(),
                    TextCredentialStore::lock(&path).await?,
                ))
            }
            Err(err) => Err(err),
        }
    }
}

/// Authentication scheme to use.
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum AuthScheme {
    /// HTTP Basic Authentication.
    ///
    /// Uses a username and password.
    #[default]
    Basic,
    /// Bearer token authentication.
    ///
    /// Uses a token provided as `Bearer <token>` in the `Authorization` header.
    Bearer,
}

/// Errors that can occur when working with TOML credential storage.
#[derive(Debug, Error)]
pub enum TomlCredentialError {
    #[error(transparent)]
    Io(#[from] std::io::Error),
    #[error(transparent)]
    LockedFile(#[from] LockedFileError),
    #[error("Failed to parse TOML credential file: {0}")]
    ParseError(#[from] toml::de::Error),
    #[error("Failed to serialize credentials to TOML")]
    SerializeError(#[from] toml::ser::Error),
    #[error(transparent)]
    BasicAuthError(#[from] BasicAuthError),
    #[error(transparent)]
    BearerAuthError(#[from] BearerAuthError),
    #[error("Failed to determine credentials directory")]
    CredentialsDirError,
    #[error("Token is not valid unicode")]
    TokenNotUnicode(#[from] std::string::FromUtf8Error),
}

impl TomlCredentialError {
    pub fn as_io_error(&self) -> Option<&std::io::Error> {
        match self {
            Self::Io(err) => Some(err),
            Self::LockedFile(err) => err.as_io_error(),
            Self::ParseError(_)
            | Self::SerializeError(_)
            | Self::BasicAuthError(_)
            | Self::BearerAuthError(_)
            | Self::CredentialsDirError
            | Self::TokenNotUnicode(_) => None,
        }
    }
}

#[derive(Debug, Error)]
pub enum BasicAuthError {
    #[error("`username` is required with `scheme = basic`")]
    MissingUsername,
    #[error("`token` cannot be provided with `scheme = basic`")]
    UnexpectedToken,
}

#[derive(Debug, Error)]
pub enum BearerAuthError {
    #[error("`token` is required with `scheme = bearer`")]
    MissingToken,
    #[error("`username` cannot be provided with `scheme = bearer`")]
    UnexpectedUsername,
    #[error("`password` cannot be provided with `scheme = bearer`")]
    UnexpectedPassword,
}

/// A single credential entry in a TOML credentials file.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(try_from = "TomlCredentialWire", into = "TomlCredentialWire")]
struct TomlCredential {
    /// The service URL for this credential.
    service: Service,
    /// The credentials for this entry.
    credentials: Credentials,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
struct TomlCredentialWire {
    /// The service URL for this credential.
    service: Service,
    /// The username to use. Only allowed with [`AuthScheme::Basic`].
    username: Username,
    /// The authentication scheme.
    #[serde(default)]
    scheme: AuthScheme,
    /// The password to use. Only allowed with [`AuthScheme::Basic`].
    password: Option<Password>,
    /// The token to use. Only allowed with [`AuthScheme::Bearer`].
    token: Option<String>,
}
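
// For reference, an entry in the wire format above serializes to TOML like
// this (hypothetical values):
//
//     [[credential]]
//     service = "https://example.com"
//     username = "user"
//     scheme = "basic"
//     password = "secret"
//
// `scheme` defaults to `basic` when omitted; a `token` field replaces
// `username`/`password` when `scheme = "bearer"`.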

impl From<TomlCredential> for TomlCredentialWire {
    fn from(value: TomlCredential) -> Self {
        match value.credentials {
            Credentials::Basic { username, password } => Self {
                service: value.service,
                username,
                scheme: AuthScheme::Basic,
                password,
                token: None,
            },
            Credentials::Bearer { token } => Self {
                service: value.service,
                username: Username::new(None),
                scheme: AuthScheme::Bearer,
                password: None,
                token: Some(String::from_utf8(token.into_bytes()).expect("Token is valid UTF-8")),
            },
        }
    }
}

impl TryFrom<TomlCredentialWire> for TomlCredential {
    type Error = TomlCredentialError;

    fn try_from(value: TomlCredentialWire) -> Result<Self, Self::Error> {
        match value.scheme {
            AuthScheme::Basic => {
                if value.username.as_deref().is_none() {
                    return Err(TomlCredentialError::BasicAuthError(
                        BasicAuthError::MissingUsername,
                    ));
                }
                if value.token.is_some() {
                    return Err(TomlCredentialError::BasicAuthError(
                        BasicAuthError::UnexpectedToken,
                    ));
                }
                let credentials = Credentials::Basic {
                    username: value.username,
                    password: value.password,
                };
                Ok(Self {
                    service: value.service,
                    credentials,
                })
            }
            AuthScheme::Bearer => {
                if value.username.is_some() {
                    return Err(TomlCredentialError::BearerAuthError(
                        BearerAuthError::UnexpectedUsername,
                    ));
                }
                if value.password.is_some() {
                    return Err(TomlCredentialError::BearerAuthError(
                        BearerAuthError::UnexpectedPassword,
                    ));
                }
                let Some(token) = value.token else {
                    return Err(TomlCredentialError::BearerAuthError(
                        BearerAuthError::MissingToken,
                    ));
                };
                let credentials = Credentials::Bearer {
                    token: Token::new(token.into_bytes()),
                };
                Ok(Self {
                    service: value.service,
                    credentials,
                })
            }
        }
    }
}

#[derive(Debug, Clone, Serialize, Deserialize, Default)]
struct TomlCredentials {
    /// Array of credential entries.
    #[serde(rename = "credential")]
    credentials: Vec<TomlCredential>,
}

/// A credential store with a plain-text storage backend.
#[derive(Debug, Default)]
pub struct TextCredentialStore {
    credentials: FxHashMap<(Service, Username), Credentials>,
}

impl TextCredentialStore {
    /// Return the directory for storing credentials.
    pub fn directory_path() -> Result<PathBuf, TomlCredentialError> {
        if let Some(dir) = std::env::var_os(EnvVars::UV_CREDENTIALS_DIR)
            .filter(|s| !s.is_empty())
            .map(PathBuf::from)
        {
            return Ok(dir);
        }

        Ok(StateStore::from_settings(None)?.bucket(StateBucket::Credentials))
    }

    /// Return the standard file path for storing credentials.
    pub fn default_file() -> Result<PathBuf, TomlCredentialError> {
        let dir = Self::directory_path()?;
        Ok(dir.join("credentials.toml"))
    }

    /// Acquire a lock on the credentials file at the given path.
    pub async fn lock(path: &Path) -> Result<LockedFile, TomlCredentialError> {
        if let Some(parent) = path.parent() {
            fs::create_dir_all(parent)?;
        }
        let lock = with_added_extension(path, ".lock");
        Ok(LockedFile::acquire(lock, LockedFileMode::Exclusive, "credentials store").await?)
    }

    /// Read credentials from a file, without acquiring the lock.
    fn from_file<P: AsRef<Path>>(path: P) -> Result<Self, TomlCredentialError> {
        let content = fs::read_to_string(path)?;
        let credentials: TomlCredentials = toml::from_str(&content)?;

        let credentials: FxHashMap<(Service, Username), Credentials> = credentials
            .credentials
            .into_iter()
            .map(|credential| {
                let username = match &credential.credentials {
                    Credentials::Basic { username, .. } => username.clone(),
                    Credentials::Bearer { .. } => Username::none(),
                };
                (
                    (credential.service.clone(), username),
                    credential.credentials,
                )
            })
            .collect();

        Ok(Self { credentials })
    }

    /// Read credentials from a file.
    ///
    /// Returns the [`TextCredentialStore`] and a [`LockedFile`] to hold while mutating the store.
    ///
    /// If the store will not be written to following the read, the lock can be dropped.
    pub async fn read<P: AsRef<Path>>(path: P) -> Result<(Self, LockedFile), TomlCredentialError> {
        let lock = Self::lock(path.as_ref()).await?;
        let store = Self::from_file(path)?;
        Ok((store, lock))
    }

    /// Persist credentials to a file.
    ///
    /// Requires a [`LockedFile`] from [`TextCredentialStore::lock`] or
    /// [`TextCredentialStore::read`] to ensure exclusive access.
    pub fn write<P: AsRef<Path>>(
        self,
        path: P,
        _lock: LockedFile,
    ) -> Result<(), TomlCredentialError> {
        let credentials = self
            .credentials
            .into_iter()
            .map(|((service, _username), credentials)| TomlCredential {
                service,
                credentials,
            })
            .collect::<Vec<_>>();

        let toml_creds = TomlCredentials { credentials };
        let content = toml::to_string_pretty(&toml_creds)?;
        fs::create_dir_all(
            path.as_ref()
                .parent()
                .ok_or(TomlCredentialError::CredentialsDirError)?,
        )?;

        // TODO(zanieb): We should use an atomic write here.
        fs::write(path, content)?;
        Ok(())
    }

    /// Get credentials for a given URL and username.
    ///
    /// The most specific URL prefix match in the same [`Realm`] is returned, if any.
    pub fn get_credentials(
        &self,
        url: &DisplaySafeUrl,
        username: Option<&str>,
    ) -> Option<&Credentials> {
        let request_realm = Realm::from(url);

        // Perform an exact lookup first.
        // TODO(zanieb): Consider adding `DisplaySafeUrlRef` so we can avoid this clone.
        // TODO(zanieb): We could also return early here if we can't normalize to a `Service`.
        if let Ok(url_service) = Service::try_from(url.clone()) {
            if let Some(credential) = self.credentials.get(&(
                url_service.clone(),
                Username::from(username.map(str::to_string)),
            )) {
                return Some(credential);
            }
        }

        // If that fails, iterate through to find a prefix match.
        let mut best: Option<(usize, &Service, &Credentials)> = None;

        for ((service, stored_username), credential) in &self.credentials {
            let service_realm = Realm::from(service.url().deref());

            // Only consider services in the same realm.
            if service_realm != request_realm {
                continue;
            }

            // The service path must be a prefix of the request path.
            if !url.path().starts_with(service.url().path()) {
                continue;
            }

            // If a username is provided, it must match.
            if let Some(request_username) = username {
                if Some(request_username) != stored_username.as_deref() {
                    continue;
                }
            }

            // Update the best matching credential based on prefix length.
            let specificity = service.url().path().len();
            if best.is_none_or(|(best_specificity, _, _)| specificity > best_specificity) {
                best = Some((specificity, service, credential));
            }
        }

        // Return the most specific match.
        if let Some((_, _, credential)) = best {
            return Some(credential);
        }

        None
    }
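
    // A resolution sketch for the lookup above (hypothetical entries): given
    // stored services `https://example.com/api` and `https://example.com/api/v1`,
    // a request for `https://example.com/api/v1/users` returns the `/api/v1`
    // credentials (the longest matching path prefix in the same realm), while
    // `https://example.com/api/v2` falls back to the `/api` entry.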

    /// Store credentials for a given service.
    pub fn insert(&mut self, service: Service, credentials: Credentials) -> Option<Credentials> {
        let username = match &credentials {
            Credentials::Basic { username, .. } => username.clone(),
            Credentials::Bearer { .. } => Username::none(),
        };
        self.credentials.insert((service, username), credentials)
    }

    /// Remove credentials for a given service.
    pub fn remove(&mut self, service: &Service, username: Username) -> Option<Credentials> {
        // Remove the specific credential for this service and username.
        self.credentials.remove(&(service.clone(), username))
    }
}

#[cfg(test)]
mod tests {
    use std::io::Write;
    use std::str::FromStr;

    use tempfile::NamedTempFile;

    use super::*;

    #[test]
    fn test_toml_serialization() {
        let credentials = TomlCredentials {
            credentials: vec![
                TomlCredential {
                    service: Service::from_str("https://example.com").unwrap(),
                    credentials: Credentials::Basic {
                        username: Username::new(Some("user1".to_string())),
                        password: Some(Password::new("pass1".to_string())),
                    },
                },
                TomlCredential {
                    service: Service::from_str("https://test.org").unwrap(),
                    credentials: Credentials::Basic {
                        username: Username::new(Some("user2".to_string())),
                        password: Some(Password::new("pass2".to_string())),
                    },
                },
            ],
        };

        let toml_str = toml::to_string_pretty(&credentials).unwrap();
        let parsed: TomlCredentials = toml::from_str(&toml_str).unwrap();

        assert_eq!(parsed.credentials.len(), 2);
        assert_eq!(
            parsed.credentials[0].service.to_string(),
            "https://example.com/"
        );
        assert_eq!(
            parsed.credentials[1].service.to_string(),
            "https://test.org/"
        );
    }

    #[test]
    fn test_credential_store_operations() {
        let mut store = TextCredentialStore::default();
        let credentials = Credentials::basic(Some("user".to_string()), Some("pass".to_string()));

        let service = Service::from_str("https://example.com").unwrap();
        store.insert(service.clone(), credentials.clone());
        let url = DisplaySafeUrl::parse("https://example.com/").unwrap();
        assert!(store.get_credentials(&url, None).is_some());

        let url = DisplaySafeUrl::parse("https://example.com/path").unwrap();
        let retrieved = store.get_credentials(&url, None).unwrap();
        assert_eq!(retrieved.username(), Some("user"));
        assert_eq!(retrieved.password(), Some("pass"));

        assert!(
            store
                .remove(&service, Username::from(Some("user".to_string())))
                .is_some()
        );
        let url = DisplaySafeUrl::parse("https://example.com/").unwrap();
        assert!(store.get_credentials(&url, None).is_none());
    }

    #[tokio::test]
    async fn test_file_operations() {
        let mut temp_file = NamedTempFile::new().unwrap();
        writeln!(
            temp_file,
            r#"
[[credential]]
service = "https://example.com"
username = "testuser"
scheme = "basic"
password = "testpass"

[[credential]]
service = "https://test.org"
username = "user2"
password = "pass2"
"#
        )
        .unwrap();

        let store = TextCredentialStore::from_file(temp_file.path()).unwrap();

        let url = DisplaySafeUrl::parse("https://example.com/").unwrap();
        assert!(store.get_credentials(&url, None).is_some());
        let url = DisplaySafeUrl::parse("https://test.org/").unwrap();
        assert!(store.get_credentials(&url, None).is_some());

        let url = DisplaySafeUrl::parse("https://example.com").unwrap();
        let cred = store.get_credentials(&url, None).unwrap();
        assert_eq!(cred.username(), Some("testuser"));
        assert_eq!(cred.password(), Some("testpass"));

        // Test saving.
        let temp_output = NamedTempFile::new().unwrap();
        store
            .write(
                temp_output.path(),
                TextCredentialStore::lock(temp_file.path()).await.unwrap(),
            )
            .unwrap();

        let content = fs::read_to_string(temp_output.path()).unwrap();
        assert!(content.contains("example.com"));
        assert!(content.contains("testuser"));
    }

    #[test]
    fn test_prefix_matching() {
        let mut store = TextCredentialStore::default();
        let credentials = Credentials::basic(Some("user".to_string()), Some("pass".to_string()));

        // Store credentials for a specific path prefix.
        let service = Service::from_str("https://example.com/api").unwrap();
        store.insert(service.clone(), credentials.clone());

        // Should match URLs that are prefixes of the stored service.
        let matching_urls = [
            "https://example.com/api",
            "https://example.com/api/v1",
            "https://example.com/api/v1/users",
        ];

        for url_str in matching_urls {
            let url = DisplaySafeUrl::parse(url_str).unwrap();
            let cred = store.get_credentials(&url, None);
            assert!(cred.is_some(), "Failed to match URL with prefix: {url_str}");
        }

        // Should NOT match URLs that are not prefixes.
        let non_matching_urls = [
            "https://example.com/different",
            "https://example.com/ap", // Not a complete path segment match
            "https://example.com",    // Shorter than the stored prefix
        ];

        for url_str in non_matching_urls {
            let url = DisplaySafeUrl::parse(url_str).unwrap();
            let cred = store.get_credentials(&url, None);
            assert!(cred.is_none(), "Should not match non-prefix URL: {url_str}");
        }
    }

    #[test]
    fn test_realm_based_matching() {
        let mut store = TextCredentialStore::default();
        let credentials = Credentials::basic(Some("user".to_string()), Some("pass".to_string()));

        // Store by full URL (realm).
        let service = Service::from_str("https://example.com").unwrap();
        store.insert(service.clone(), credentials.clone());

        // Should match URLs in the same realm.
        let matching_urls = [
            "https://example.com",
            "https://example.com/path",
            "https://example.com/different/path",
            "https://example.com:443/path", // Default HTTPS port
        ];

        for url_str in matching_urls {
            let url = DisplaySafeUrl::parse(url_str).unwrap();
            let cred = store.get_credentials(&url, None);
            assert!(
                cred.is_some(),
                "Failed to match URL in same realm: {url_str}"
            );
        }

        // Should NOT match URLs in different realms.
        let non_matching_urls = [
            "http://example.com",       // Different scheme
            "https://different.com",    // Different host
            "https://example.com:8080", // Different port
        ];

        for url_str in non_matching_urls {
            let url = DisplaySafeUrl::parse(url_str).unwrap();
            let cred = store.get_credentials(&url, None);
            assert!(
                cred.is_none(),
                "Should not match URL in different realm: {url_str}"
            );
        }
    }

    #[test]
    fn test_most_specific_prefix_matching() {
        let mut store = TextCredentialStore::default();
        let general_cred =
            Credentials::basic(Some("general".to_string()), Some("pass1".to_string()));
        let specific_cred =
            Credentials::basic(Some("specific".to_string()), Some("pass2".to_string()));

        // Store credentials with different prefix lengths.
        let general_service = Service::from_str("https://example.com/api").unwrap();
        let specific_service = Service::from_str("https://example.com/api/v1").unwrap();
        store.insert(general_service.clone(), general_cred);
        store.insert(specific_service.clone(), specific_cred);

        // Should match the most specific prefix.
        let url = DisplaySafeUrl::parse("https://example.com/api/v1/users").unwrap();
        let cred = store.get_credentials(&url, None).unwrap();
        assert_eq!(cred.username(), Some("specific"));

        // Should match the general prefix for non-specific paths.
        let url = DisplaySafeUrl::parse("https://example.com/api/v2").unwrap();
        let cred = store.get_credentials(&url, None).unwrap();
        assert_eq!(cred.username(), Some("general"));
    }

    #[test]
    fn test_username_exact_url_match() {
        let mut store = TextCredentialStore::default();
        let url = DisplaySafeUrl::parse("https://example.com").unwrap();
        let service = Service::from_str("https://example.com").unwrap();
        let user1_creds = Credentials::basic(Some("user1".to_string()), Some("pass1".to_string()));
        store.insert(service.clone(), user1_creds.clone());

        // Should return credentials when the username matches.
        let result = store.get_credentials(&url, Some("user1"));
        assert!(result.is_some());
        assert_eq!(result.unwrap().username(), Some("user1"));
        assert_eq!(result.unwrap().password(), Some("pass1"));

        // Should not return credentials when the username doesn't match.
        let result = store.get_credentials(&url, Some("user2"));
        assert!(result.is_none());

        // Should return credentials when no username is specified.
        let result = store.get_credentials(&url, None);
        assert!(result.is_some());
        assert_eq!(result.unwrap().username(), Some("user1"));
    }

    #[test]
    fn test_username_prefix_url_match() {
        let mut store = TextCredentialStore::default();

        // Add credentials with different usernames for overlapping URL prefixes.
        let general_service = Service::from_str("https://example.com/api").unwrap();
        let specific_service = Service::from_str("https://example.com/api/v1").unwrap();

        let general_creds = Credentials::basic(
            Some("general_user".to_string()),
            Some("general_pass".to_string()),
        );
        let specific_creds = Credentials::basic(
            Some("specific_user".to_string()),
            Some("specific_pass".to_string()),
        );

        store.insert(general_service, general_creds);
        store.insert(specific_service, specific_creds);

        let url = DisplaySafeUrl::parse("https://example.com/api/v1/users").unwrap();

        // Should match specific credentials when the username matches.
        let result = store.get_credentials(&url, Some("specific_user"));
        assert!(result.is_some());
        assert_eq!(result.unwrap().username(), Some("specific_user"));

        // Should match the general credentials when requesting general_user (falls back to less specific prefix).
        let result = store.get_credentials(&url, Some("general_user"));
        assert!(
            result.is_some(),
            "Should match general_user from less specific prefix"
        );
        assert_eq!(result.unwrap().username(), Some("general_user"));

        // Should match the most specific entry when no username is specified.
        let result = store.get_credentials(&url, None);
        assert!(result.is_some());
        assert_eq!(result.unwrap().username(), Some("specific_user"));
    }
}

@@ -1,12 +1,13 @@
[package]
name = "uv-bench"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.0"
+description = "uv Micro-benchmarks"
publish = false
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
+documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }
@@ -22,19 +23,18 @@ name = "uv"
path = "benches/uv.rs"
harness = false

-[dev-dependencies]
+[dependencies]
uv-cache = { workspace = true }
uv-client = { workspace = true }
uv-configuration = { workspace = true }
uv-dispatch = { workspace = true }
uv-distribution = { workspace = true }
uv-distribution-types = { workspace = true }
-uv-extract = { workspace = true }
+uv-extract = { workspace = true, optional = true }
uv-install-wheel = { workspace = true }
uv-pep440 = { workspace = true }
uv-pep508 = { workspace = true }
uv-platform-tags = { workspace = true }
-uv-preview = { workspace = true }
uv-pypi-types = { workspace = true }
uv-python = { workspace = true }
uv-resolver = { workspace = true }
@@ -42,7 +42,10 @@ uv-types = { workspace = true }
uv-workspace = { workspace = true }

anyhow = { workspace = true }
-criterion = { version = "4.0.3", default-features = false, package = "codspeed-criterion-compat", features = ["async_tokio"] }
+codspeed-criterion-compat = { version = "3.0.2", default-features = false, optional = true }
+criterion = { version = "0.7.0", default-features = false, features = [
+    "async_tokio",
+] }
jiff = { workspace = true }
tokio = { workspace = true }

@@ -50,4 +53,5 @@ tokio = { workspace = true }
ignored = ["uv-extract"]

[features]
+codspeed = ["codspeed-criterion-compat"]
static = ["uv-extract/static"]

@@ -1,13 +0,0 @@
<!-- This file is generated. DO NOT EDIT -->

# uv-bench

This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
is unstable and will have frequent breaking changes.

This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-bench).

See uv's
[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
for details on versioning.

@@ -1,9 +1,9 @@
use std::hint::black_box;
use std::str::FromStr;

-use criterion::{Criterion, criterion_group, criterion_main, measurement::WallTime};
+use uv_bench::criterion::{Criterion, criterion_group, criterion_main, measurement::WallTime};
use uv_cache::Cache;
-use uv_client::{BaseClientBuilder, RegistryClientBuilder};
+use uv_client::RegistryClientBuilder;
use uv_distribution_types::Requirement;
use uv_python::PythonEnvironment;
use uv_resolver::Manifest;
@@ -59,14 +59,11 @@ fn setup(manifest: Manifest) -> impl Fn(bool) {
        .build()
        .unwrap();

-    let cache = Cache::from_path("../../.cache")
-        .init_no_wait()
-        .expect("No cache contention when running benchmarks")
-        .unwrap();
+    let cache = Cache::from_path("../../.cache").init().unwrap();
    let interpreter = PythonEnvironment::from_root("../../.venv", &cache)
        .unwrap()
        .into_interpreter();
-    let client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache.clone()).build();
+    let client = RegistryClientBuilder::new(cache.clone()).build();

    move |universal| {
        runtime
@@ -88,18 +85,19 @@ mod resolver {

    use uv_cache::Cache;
    use uv_client::RegistryClient;
-    use uv_configuration::{BuildOptions, Concurrency, Constraints, IndexStrategy, SourceStrategy};
+    use uv_configuration::{
+        BuildOptions, Concurrency, ConfigSettings, Constraints, IndexStrategy,
+        PackageConfigSettings, Preview, SourceStrategy,
+    };
    use uv_dispatch::{BuildDispatch, SharedState};
    use uv_distribution::DistributionDatabase;
    use uv_distribution_types::{
-        ConfigSettings, DependencyMetadata, ExtraBuildRequires, ExtraBuildVariables,
-        IndexLocations, PackageConfigSettings, RequiresPython,
+        DependencyMetadata, ExtraBuildRequires, ExtraBuildVariables, IndexLocations, RequiresPython,
    };
    use uv_install_wheel::LinkMode;
    use uv_pep440::Version;
    use uv_pep508::{MarkerEnvironment, MarkerEnvironmentBuilder};
    use uv_platform_tags::{Arch, Os, Platform, Tags};
-    use uv_preview::Preview;
    use uv_pypi_types::{Conflicts, ResolverMarkerEnvironment};
    use uv_python::Interpreter;
    use uv_resolver::{
@@ -134,7 +132,7 @@ mod resolver {
    );

    static TAGS: LazyLock<Tags> = LazyLock::new(|| {
-        Tags::from_env(&PLATFORM, (3, 11), "cpython", (3, 11), false, false, false).unwrap()
+        Tags::from_env(&PLATFORM, (3, 11), "cpython", (3, 11), false, false).unwrap()
    });

    pub(crate) async fn resolve(

@@ -1 +1,10 @@
pub mod criterion {
    //! This module re-exports the criterion API but picks the right backend depending on whether
    //! the benchmarks are built to run locally or with codspeed.

    #[cfg(not(feature = "codspeed"))]
    pub use criterion::*;

    #[cfg(feature = "codspeed")]
    pub use codspeed_criterion_compat::*;
}

@@ -1,37 +0,0 @@
[package]
name = "uv-bin-install"
version = "0.0.8"
description = "This is an internal component crate of uv"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

[lib]
doctest = false

[lints]
workspace = true

[dependencies]
uv-cache = { workspace = true }
uv-client = { workspace = true }
uv-distribution-filename = { workspace = true }
uv-extract = { workspace = true }
uv-pep440 = { workspace = true }
uv-platform = { workspace = true }
uv-redacted = { workspace = true }

fs-err = { workspace = true, features = ["tokio"] }
futures = { workspace = true }
reqwest = { workspace = true }
reqwest-middleware = { workspace = true }
reqwest-retry = { workspace = true }
tempfile = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
tokio-util = { workspace = true }
tracing = { workspace = true }
url = { workspace = true }

@@ -1,13 +0,0 @@
<!-- This file is generated. DO NOT EDIT -->

# uv-bin-install

This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
is unstable and will have frequent breaking changes.

This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-bin-install).

See uv's
[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
for details on versioning.

@@ -1,438 +0,0 @@
//! Binary download and installation utilities for uv.
//!
//! These utilities are specifically for consuming distributions that are _not_ Python packages,
//! e.g., `ruff` (which does have a Python package, but also has standalone binaries on GitHub).

use std::fmt;
use std::path::PathBuf;
use std::pin::Pin;
use std::task::{Context, Poll};
use std::time::{Duration, SystemTime};

use futures::TryStreamExt;
use reqwest_retry::RetryPolicy;
use reqwest_retry::policies::ExponentialBackoff;
use thiserror::Error;
use tokio::io::{AsyncRead, ReadBuf};
use tokio_util::compat::FuturesAsyncReadCompatExt;
use tracing::debug;
use url::Url;

use uv_cache::{Cache, CacheBucket, CacheEntry, Error as CacheError};
use uv_client::{BaseClient, is_transient_network_error};
use uv_distribution_filename::SourceDistExtension;
use uv_extract::{Error as ExtractError, stream};
use uv_pep440::Version;
use uv_platform::Platform;
use uv_redacted::DisplaySafeUrl;

/// Binary tools that can be installed.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Binary {
    Ruff,
}

impl Binary {
    /// Get the default version for this binary.
    pub fn default_version(&self) -> Version {
        match self {
            // TODO(zanieb): Figure out a nice way to automate updating this
            Self::Ruff => Version::new([0, 12, 5]),
        }
    }

    /// The name of the binary.
    ///
    /// See [`Binary::executable`] for the platform-specific executable name.
    pub fn name(&self) -> &'static str {
        match self {
            Self::Ruff => "ruff",
        }
    }

    /// Get the download URL for a specific version and platform.
    pub fn download_url(
        &self,
        version: &Version,
        platform: &str,
        format: ArchiveFormat,
    ) -> Result<Url, Error> {
        match self {
            Self::Ruff => {
                let url = format!(
                    "https://github.com/astral-sh/ruff/releases/download/{version}/ruff-{platform}.{}",
                    format.extension()
                );
                Url::parse(&url).map_err(|err| Error::UrlParse { url, source: err })
            }
        }
    }

    /// Get the executable name.
    pub fn executable(&self) -> String {
        format!("{}{}", self.name(), std::env::consts::EXE_SUFFIX)
    }
}
|
|
||||||
|
|
||||||
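For orientation, the URL template above resolves to an ordinary GitHub release asset path. A minimal sketch of what `download_url` yields (the target triple and the expected string are illustrative, not taken from uv's test suite):

```rust
let version = Version::new([0, 12, 5]);
let url = Binary::Ruff
    .download_url(&version, "x86_64-unknown-linux-gnu", ArchiveFormat::TarGz)
    .unwrap();
// `{version}` and `{platform}` interpolate directly into the release asset name.
assert_eq!(
    url.as_str(),
    "https://github.com/astral-sh/ruff/releases/download/0.12.5/ruff-x86_64-unknown-linux-gnu.tar.gz"
);
```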
-impl fmt::Display for Binary {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str(self.name())
-    }
-}
-
-/// Archive formats for binary downloads.
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-pub enum ArchiveFormat {
-    Zip,
-    TarGz,
-}
-
-impl ArchiveFormat {
-    /// Get the file extension for this archive format.
-    pub fn extension(&self) -> &'static str {
-        match self {
-            Self::Zip => "zip",
-            Self::TarGz => "tar.gz",
-        }
-    }
-}
-
-impl From<ArchiveFormat> for SourceDistExtension {
-    fn from(val: ArchiveFormat) -> Self {
-        match val {
-            ArchiveFormat::Zip => Self::Zip,
-            ArchiveFormat::TarGz => Self::TarGz,
-        }
-    }
-}
-
-/// Errors that can occur during binary download and installation.
-#[derive(Debug, Error)]
-pub enum Error {
-    #[error("Failed to download from: {url}")]
-    Download {
-        url: Url,
-        #[source]
-        source: reqwest_middleware::Error,
-    },
-
-    #[error("Failed to parse URL: {url}")]
-    UrlParse {
-        url: String,
-        #[source]
-        source: url::ParseError,
-    },
-
-    #[error("Failed to extract archive")]
-    Extract {
-        #[source]
-        source: ExtractError,
-    },
-
-    #[error("Binary not found in archive at expected location: {expected}")]
-    BinaryNotFound { expected: PathBuf },
-
-    #[error(transparent)]
-    Io(#[from] std::io::Error),
-
-    #[error(transparent)]
-    Cache(#[from] CacheError),
-
-    #[error("Failed to detect platform")]
-    Platform(#[from] uv_platform::Error),
-
-    #[error("Attempt failed after {retries} {subject}", subject = if *retries > 1 { "retries" } else { "retry" })]
-    RetriedError {
-        #[source]
-        err: Box<Error>,
-        retries: u32,
-    },
-}
-
-impl Error {
-    /// Return the number of attempts that were made to complete this request before this error was
-    /// returned. Note that e.g. 3 retries equates to 4 attempts.
-    fn attempts(&self) -> u32 {
-        if let Self::RetriedError { retries, .. } = self {
-            return retries + 1;
-        }
-        1
-    }
-}
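The attempt arithmetic is easy to misread, so here is a hypothetical check of it (module-local, since `attempts` is private; not part of the diff):

```rust
// A plain error counts as a single attempt.
let plain = Error::Io(std::io::Error::other("connection reset"));
assert_eq!(plain.attempts(), 1);

// A wrapped error adds its retries on top of the original attempt: 3 retries == 4 attempts.
let retried = Error::RetriedError {
    err: Box::new(plain),
    retries: 3,
};
assert_eq!(retried.attempts(), 4);
```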
-/// Install the given binary.
-pub async fn bin_install(
-    binary: Binary,
-    version: &Version,
-    client: &BaseClient,
-    retry_policy: &ExponentialBackoff,
-    cache: &Cache,
-    reporter: &dyn Reporter,
-) -> Result<PathBuf, Error> {
-    let platform = Platform::from_env()?;
-    let platform_name = platform.as_cargo_dist_triple();
-    let cache_entry = CacheEntry::new(
-        cache
-            .bucket(CacheBucket::Binaries)
-            .join(binary.name())
-            .join(version.to_string())
-            .join(&platform_name),
-        binary.executable(),
-    );
-
-    // Lock the directory to prevent racing installs
-    let _lock = cache_entry.with_file(".lock").lock().await?;
-    if cache_entry.path().exists() {
-        return Ok(cache_entry.into_path_buf());
-    }
-
-    let format = if platform.os.is_windows() {
-        ArchiveFormat::Zip
-    } else {
-        ArchiveFormat::TarGz
-    };
-
-    let download_url = binary.download_url(version, &platform_name, format)?;
-
-    let cache_dir = cache_entry.dir();
-    fs_err::tokio::create_dir_all(&cache_dir).await?;
-
-    let path = download_and_unpack_with_retry(
-        binary,
-        version,
-        client,
-        retry_policy,
-        cache,
-        reporter,
-        &platform_name,
-        format,
-        &download_url,
-        &cache_entry,
-    )
-    .await?;
-
-    // Add executable bit
-    #[cfg(unix)]
-    {
-        use std::fs::Permissions;
-        use std::os::unix::fs::PermissionsExt;
-        let permissions = fs_err::tokio::metadata(&path).await?.permissions();
-        if permissions.mode() & 0o111 != 0o111 {
-            fs_err::tokio::set_permissions(
-                &path,
-                Permissions::from_mode(permissions.mode() | 0o111),
-            )
-            .await?;
-        }
-    }
-
-    Ok(path)
-}
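The cache key built above is just nested directories under the `Binaries` bucket. A hypothetical sketch of the resulting on-disk layout (the concrete name, version, and triple are invented for the example):

```rust
use std::path::{Path, PathBuf};

// bucket / binary.name() / version / platform.as_cargo_dist_triple() / binary.executable()
fn expected_cache_path(binaries_bucket: &Path) -> PathBuf {
    binaries_bucket
        .join("ruff")
        .join("0.12.5")
        .join("x86_64-unknown-linux-gnu")
        .join("ruff")
}
```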
-/// Download and unpack a binary with retry on stream failures.
-async fn download_and_unpack_with_retry(
-    binary: Binary,
-    version: &Version,
-    client: &BaseClient,
-    retry_policy: &ExponentialBackoff,
-    cache: &Cache,
-    reporter: &dyn Reporter,
-    platform_name: &str,
-    format: ArchiveFormat,
-    download_url: &Url,
-    cache_entry: &CacheEntry,
-) -> Result<PathBuf, Error> {
-    let mut total_attempts = 0;
-    let mut retried_here = false;
-    let start_time = SystemTime::now();
-
-    loop {
-        let result = download_and_unpack(
-            binary,
-            version,
-            client,
-            cache,
-            reporter,
-            platform_name,
-            format,
-            download_url,
-            cache_entry,
-        )
-        .await;
-
-        let result = match result {
-            Ok(path) => Ok(path),
-            Err(err) => {
-                total_attempts += err.attempts();
-                let past_retries = total_attempts - 1;
-
-                if is_transient_network_error(&err) {
-                    let retry_decision = retry_policy.should_retry(start_time, past_retries);
-                    if let reqwest_retry::RetryDecision::Retry { execute_after } = retry_decision {
-                        debug!(
-                            "Transient failure while installing {} {}; retrying...",
-                            binary.name(),
-                            version
-                        );
-                        let duration = execute_after
-                            .duration_since(SystemTime::now())
-                            .unwrap_or_else(|_| Duration::default());
-                        tokio::time::sleep(duration).await;
-                        retried_here = true;
-                        continue;
-                    }
-                }
-
-                if retried_here {
-                    Err(Error::RetriedError {
-                        err: Box::new(err),
-                        retries: past_retries,
-                    })
-                } else {
-                    Err(err)
-                }
-            }
-        };
-        return result;
-    }
-}
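The `retry_policy` consumed by the loop above is a stock `reqwest-retry` policy; `should_retry` is queried with the loop's start time and the number of past retries, and answers with a `RetryDecision`. A minimal sketch of constructing one (the retry count is illustrative):

```rust
use reqwest_retry::policies::ExponentialBackoff;

// Up to 3 retries with exponentially growing pauses between attempts.
let retry_policy = ExponentialBackoff::builder().build_with_max_retries(3);
```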
-/// Download and unpackage a binary,
-///
-/// NOTE [`download_and_unpack_with_retry`] should be used instead.
-async fn download_and_unpack(
-    binary: Binary,
-    version: &Version,
-    client: &BaseClient,
-    cache: &Cache,
-    reporter: &dyn Reporter,
-    platform_name: &str,
-    format: ArchiveFormat,
-    download_url: &Url,
-    cache_entry: &CacheEntry,
-) -> Result<PathBuf, Error> {
-    // Create a temporary directory for extraction
-    let temp_dir = tempfile::tempdir_in(cache.bucket(CacheBucket::Binaries))?;
-
-    let response = client
-        .for_host(&DisplaySafeUrl::from_url(download_url.clone()))
-        .get(download_url.clone())
-        .send()
-        .await
-        .map_err(|err| Error::Download {
-            url: download_url.clone(),
-            source: err,
-        })?;
-
-    let inner_retries = response
-        .extensions()
-        .get::<reqwest_retry::RetryCount>()
-        .map(|retries| retries.value());
-
-    if let Err(status_error) = response.error_for_status_ref() {
-        let err = Error::Download {
-            url: download_url.clone(),
-            source: reqwest_middleware::Error::from(status_error),
-        };
-        if let Some(retries) = inner_retries {
-            return Err(Error::RetriedError {
-                err: Box::new(err),
-                retries,
-            });
-        }
-        return Err(err);
-    }
-
-    // Get the download size from headers if available
-    let size = response
-        .headers()
-        .get(reqwest::header::CONTENT_LENGTH)
-        .and_then(|val| val.to_str().ok())
-        .and_then(|val| val.parse::<u64>().ok());
-
-    // Stream download directly to extraction
-    let reader = response
-        .bytes_stream()
-        .map_err(std::io::Error::other)
-        .into_async_read()
-        .compat();
-
-    let id = reporter.on_download_start(binary.name(), version, size);
-    let mut progress_reader = ProgressReader::new(reader, id, reporter);
-    stream::archive(&mut progress_reader, format.into(), temp_dir.path())
-        .await
-        .map_err(|e| Error::Extract { source: e })?;
-    reporter.on_download_complete(id);
-
-    // Find the binary in the extracted files
-    let extracted_binary = match format {
-        ArchiveFormat::Zip => {
-            // Windows ZIP archives contain the binary directly in the root
-            temp_dir.path().join(binary.executable())
-        }
-        ArchiveFormat::TarGz => {
-            // tar.gz archives contain the binary in a subdirectory
-            temp_dir
-                .path()
-                .join(format!("{}-{platform_name}", binary.name()))
-                .join(binary.executable())
-        }
-    };
-
-    if !extracted_binary.exists() {
-        return Err(Error::BinaryNotFound {
-            expected: extracted_binary,
-        });
-    }
-
-    // Move the binary to its final location before the temp directory is dropped
-    fs_err::tokio::rename(&extracted_binary, cache_entry.path()).await?;
-
-    Ok(cache_entry.path().to_path_buf())
-}
-/// Progress reporter for binary downloads.
-pub trait Reporter: Send + Sync {
-    /// Called when a download starts.
-    fn on_download_start(&self, name: &str, version: &Version, size: Option<u64>) -> usize;
-    /// Called when download progress is made.
-    fn on_download_progress(&self, id: usize, inc: u64);
-    /// Called when a download completes.
-    fn on_download_complete(&self, id: usize);
-}
-
-/// An asynchronous reader that reports progress as bytes are read.
-struct ProgressReader<'a, R> {
-    reader: R,
-    index: usize,
-    reporter: &'a dyn Reporter,
-}
-
-impl<'a, R> ProgressReader<'a, R> {
-    /// Create a new [`ProgressReader`] that wraps another reader.
-    fn new(reader: R, index: usize, reporter: &'a dyn Reporter) -> Self {
-        Self {
-            reader,
-            index,
-            reporter,
-        }
-    }
-}
-
-impl<R> AsyncRead for ProgressReader<'_, R>
-where
-    R: AsyncRead + Unpin,
-{
-    fn poll_read(
-        mut self: Pin<&mut Self>,
-        cx: &mut Context<'_>,
-        buf: &mut ReadBuf<'_>,
-    ) -> Poll<std::io::Result<()>> {
-        Pin::new(&mut self.as_mut().reader)
-            .poll_read(cx, buf)
-            .map_ok(|()| {
-                self.reporter
-                    .on_download_progress(self.index, buf.filled().len() as u64);
-            })
-    }
-}
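A minimal `Reporter` implementation is enough to drive the deleted API above. This hypothetical logger (not part of the diff) just prints byte counts:

```rust
struct LogReporter;

impl Reporter for LogReporter {
    fn on_download_start(&self, name: &str, version: &Version, size: Option<u64>) -> usize {
        println!("downloading {name} {version} (size: {size:?})");
        0 // a single concurrent download only needs one id
    }
    fn on_download_progress(&self, _id: usize, inc: u64) {
        println!("read {inc} bytes");
    }
    fn on_download_complete(&self, _id: usize) {
        println!("done");
    }
}
```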
@@ -1,10 +1,10 @@
 [package]
 name = "uv-build-backend"
-version = "0.0.8"
+version = "0.1.0"
-description = "This is an internal component crate of uv"
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
+documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }

@@ -26,7 +26,6 @@ uv-pypi-types = { workspace = true }
 uv-version = { workspace = true }
 uv-warnings = { workspace = true }

-base64 = { workspace = true }
 csv = { workspace = true }
 flate2 = { workspace = true, default-features = false }
 fs-err = { workspace = true }
@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-build-backend
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-build-backend).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.
@@ -1,4 +1,3 @@
-use itertools::Itertools;
 mod metadata;
 mod serde_verbatim;
 mod settings;

@@ -8,11 +7,8 @@ mod wheel;
 pub use metadata::{PyProjectToml, check_direct_build};
 pub use settings::{BuildBackendSettings, WheelDataIncludes};
 pub use source_dist::{build_source_dist, list_source_dist};
-use uv_warnings::warn_user_once;
 pub use wheel::{build_editable, build_wheel, list_wheel, metadata};

-use std::collections::HashSet;
-use std::ffi::OsStr;
 use std::io;
 use std::path::{Path, PathBuf};
 use std::str::FromStr;

@@ -32,20 +28,20 @@ use crate::settings::ModuleName;
 pub enum Error {
     #[error(transparent)]
     Io(#[from] io::Error),
-    #[error("Invalid metadata format in: {}", _0.user_display())]
-    Toml(PathBuf, #[source] toml::de::Error),
-    #[error("Invalid project metadata")]
+    #[error("Invalid pyproject.toml")]
+    Toml(#[from] toml::de::Error),
+    #[error("Invalid pyproject.toml")]
     Validation(#[from] ValidationError),
     #[error("Invalid module name: {0}")]
     InvalidModuleName(String, #[source] IdentifierParseError),
-    #[error("Unsupported glob expression in: {field}")]
+    #[error("Unsupported glob expression in: `{field}`")]
     PortableGlob {
         field: String,
         #[source]
         source: PortableGlobError,
     },
     /// <https://github.com/BurntSushi/ripgrep/discussions/2927>
-    #[error("Glob expressions caused to large regex in: {field}")]
+    #[error("Glob expressions caused to large regex in: `{field}`")]
     GlobSetTooLarge {
         field: String,
         #[source]

@@ -53,7 +49,7 @@ pub enum Error {
     },
     #[error("`pyproject.toml` must not be excluded from source distribution build")]
     PyprojectTomlExcluded,
-    #[error("Failed to walk source tree: {}", root.user_display())]
+    #[error("Failed to walk source tree: `{}`", root.user_display())]
     WalkDir {
         root: PathBuf,
         #[source]

@@ -63,19 +59,14 @@ pub enum Error {
     Zip(#[from] zip::result::ZipError),
     #[error("Failed to write RECORD file")]
     Csv(#[from] csv::Error),
-    #[error("Expected a Python module at: {}", _0.user_display())]
+    #[error("Expected a Python module at: `{}`", _0.user_display())]
     MissingInitPy(PathBuf),
-    #[error("For namespace packages, `__init__.py[i]` is not allowed in parent directory: {}", _0.user_display())]
+    #[error("For namespace packages, `__init__.py[i]` is not allowed in parent directory: `{}`", _0.user_display())]
     NotANamespace(PathBuf),
     /// Either an absolute path or a parent path through `..`.
-    #[error("Module root must be inside the project: {}", _0.user_display())]
+    #[error("Module root must be inside the project: `{}`", _0.user_display())]
     InvalidModuleRoot(PathBuf),
-    /// Either an absolute path or a parent path through `..`.
-    #[error("The path for the data directory {} must be inside the project: {}", name, path.user_display())]
-    InvalidDataRoot { name: String, path: PathBuf },
-    #[error("Virtual environments must not be added to source distributions or wheels, remove the directory or exclude it from the build: {}", _0.user_display())]
-    VenvInSourceTree(PathBuf),
-    #[error("Inconsistent metadata between prepare and build step: {0}")]
+    #[error("Inconsistent metadata between prepare and build step: `{0}`")]
     InconsistentSteps(&'static str),
     #[error("Failed to write to {}", _0.user_display())]
     TarWrite(PathBuf, #[source] io::Error),
@@ -194,60 +185,6 @@ fn check_metadata_directory(
     Ok(())
 }

-/// Returns the list of module names without names which would be included twice
-///
-/// In normal cases it should do nothing:
-///
-/// * `["aaa"] -> ["aaa"]`
-/// * `["aaa", "bbb"] -> ["aaa", "bbb"]`
-///
-/// Duplicate elements are removed:
-///
-/// * `["aaa", "aaa"] -> ["aaa"]`
-/// * `["bbb", "aaa", "bbb"] -> ["aaa", "bbb"]`
-///
-/// Names with more specific paths are removed in favour of more general paths:
-///
-/// * `["aaa.foo", "aaa"] -> ["aaa"]`
-/// * `["bbb", "aaa", "bbb.foo", "ccc.foo", "ccc.foo.bar", "aaa"] -> ["aaa", "bbb.foo", "ccc.foo"]`
-///
-/// This does not preserve the order of the elements.
-fn prune_redundant_modules(mut names: Vec<String>) -> Vec<String> {
-    names.sort();
-    let mut pruned = Vec::with_capacity(names.len());
-    for name in names {
-        if let Some(last) = pruned.last() {
-            if name == *last {
-                continue;
-            }
-            // This is a more specific (narrow) module name than what came before
-            if name
-                .strip_prefix(last)
-                .is_some_and(|suffix| suffix.starts_with('.'))
-            {
-                continue;
-            }
-        }
-        pruned.push(name);
-    }
-    pruned
-}
-
-/// Wraps [`prune_redundant_modules`] with a conditional warning when modules are ignored
-fn prune_redundant_modules_warn(names: &[String], show_warnings: bool) -> Vec<String> {
-    let pruned = prune_redundant_modules(names.to_vec());
-    if show_warnings && names.len() != pruned.len() {
-        let mut pruned: HashSet<_> = pruned.iter().collect();
-        let ignored: Vec<_> = names.iter().filter(|name| !pruned.remove(name)).collect();
-        let s = if ignored.len() == 1 { "" } else { "s" };
-        warn_user_once!(
-            "Ignoring redundant module name{s} in `tool.uv.build-backend.module-name`: `{}`",
-            ignored.into_iter().join("`, `")
-        );
-    }
-    pruned
-}
-
 /// Returns the source root and the module path(s) with the `__init__.py[i]` below to it while
 /// checking the project layout and names.
 ///
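The doc examples above read naturally as assertions. A hypothetical check of the removed helper's behaviour (module-local, since the function is private):

```rust
// Shadowing and duplicates collapse to the most general module name.
assert_eq!(
    prune_redundant_modules(vec!["foo.bar".to_string(), "foo".to_string(), "foo".to_string()]),
    vec!["foo".to_string()]
);
// Lexical prefixes without a `.` separator are kept: `foobar` is not inside `foo`.
assert_eq!(
    prune_redundant_modules(vec!["foo".to_string(), "foobar".to_string()]),
    vec!["foo".to_string(), "foobar".to_string()]
);
```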
@@ -270,13 +207,10 @@ fn find_roots(
     relative_module_root: &Path,
     module_name: Option<&ModuleName>,
     namespace: bool,
-    show_warnings: bool,
 ) -> Result<(PathBuf, Vec<PathBuf>), Error> {
     let relative_module_root = uv_fs::normalize_path(relative_module_root);
-    // Check that even if a path contains `..`, we only include files below the module root.
-    if !uv_fs::normalize_path(&source_tree.join(&relative_module_root))
-        .starts_with(uv_fs::normalize_path(source_tree))
-    {
+    let src_root = source_tree.join(&relative_module_root);
+    if !src_root.starts_with(source_tree) {
         return Err(Error::InvalidModuleRoot(relative_module_root.to_path_buf()));
     }
     let src_root = source_tree.join(&relative_module_root);

@@ -289,8 +223,8 @@ fn find_roots(
             ModuleName::Name(name) => {
                 vec![name.split('.').collect::<PathBuf>()]
             }
-            ModuleName::Names(names) => prune_redundant_modules_warn(names, show_warnings)
-                .into_iter()
+            ModuleName::Names(names) => names
+                .iter()
                 .map(|name| name.split('.').collect::<PathBuf>())
                 .collect(),
         }

@@ -308,9 +242,9 @@ fn find_roots(
     let modules_relative = if let Some(module_name) = module_name {
         match module_name {
             ModuleName::Name(name) => vec![module_path_from_module_name(&src_root, name)?],
-            ModuleName::Names(names) => prune_redundant_modules_warn(names, show_warnings)
-                .into_iter()
-                .map(|name| module_path_from_module_name(&src_root, &name))
+            ModuleName::Names(names) => names
+                .iter()
+                .map(|name| module_path_from_module_name(&src_root, name))
                 .collect::<Result<_, _>>()?,
         }
     } else {

@@ -413,27 +347,6 @@ fn module_path_from_module_name(src_root: &Path, module_name: &str) -> Result<PathBuf, Error> {
     Ok(module_relative)
 }

-/// Error if we're adding a venv to a distribution.
-pub(crate) fn error_on_venv(file_name: &OsStr, path: &Path) -> Result<(), Error> {
-    // On 64-bit Unix, `lib64` is a (compatibility) symlink to lib. If we traverse `lib64` before
-    // `pyvenv.cfg`, we show a generic error for symlink directories instead.
-    if !(file_name == "pyvenv.cfg" || file_name == "lib64") {
-        return Ok(());
-    }
-
-    let Some(parent) = path.parent() else {
-        return Ok(());
-    };
-
-    if parent.join("bin").join("python").is_symlink()
-        || parent.join("Scripts").join("python.exe").is_file()
-    {
-        return Err(Error::VenvInSourceTree(parent.to_path_buf()));
-    }
-
-    Ok(())
-}
-
 #[cfg(test)]
 mod tests {
     use super::*;
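The removed `error_on_venv` check keys off the same two markers that identify a virtual environment on disk. A standalone sketch of the heuristic (a simplified restatement, not the deleted function itself):

```rust
use std::path::Path;

/// Sketch: a directory is treated as a virtual environment if it holds a
/// `pyvenv.cfg` next to a `bin/python` symlink (POSIX) or a
/// `Scripts/python.exe` file (Windows).
fn looks_like_venv(dir: &Path) -> bool {
    dir.join("pyvenv.cfg").is_file()
        && (dir.join("bin").join("python").is_symlink()
            || dir.join("Scripts").join("python.exe").is_file())
}
```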
@@ -478,20 +391,19 @@ mod tests {
     fn build(source_root: &Path, dist: &Path) -> Result<BuildResults, Error> {
         // Build a direct wheel, capture all its properties to compare it with the indirect wheel
         // latest and remove it since it has the same filename as the indirect wheel.
-        let (_name, direct_wheel_list_files) = list_wheel(source_root, MOCK_UV_VERSION, false)?;
-        let direct_wheel_filename = build_wheel(source_root, dist, None, MOCK_UV_VERSION, false)?;
+        let (_name, direct_wheel_list_files) = list_wheel(source_root, MOCK_UV_VERSION)?;
+        let direct_wheel_filename = build_wheel(source_root, dist, None, MOCK_UV_VERSION)?;
         let direct_wheel_path = dist.join(direct_wheel_filename.to_string());
         let direct_wheel_contents = wheel_contents(&direct_wheel_path);
         let direct_wheel_hash = sha2::Sha256::digest(fs_err::read(&direct_wheel_path)?);
         fs_err::remove_file(&direct_wheel_path)?;

         // Build a source distribution.
-        let (_name, source_dist_list_files) =
-            list_source_dist(source_root, MOCK_UV_VERSION, false)?;
+        let (_name, source_dist_list_files) = list_source_dist(source_root, MOCK_UV_VERSION)?;
         // TODO(konsti): This should run in the unpacked source dist tempdir, but we need to
         // normalize the path.
-        let (_name, wheel_list_files) = list_wheel(source_root, MOCK_UV_VERSION, false)?;
-        let source_dist_filename = build_source_dist(source_root, dist, MOCK_UV_VERSION, false)?;
+        let (_name, wheel_list_files) = list_wheel(source_root, MOCK_UV_VERSION)?;
+        let source_dist_filename = build_source_dist(source_root, dist, MOCK_UV_VERSION)?;
         let source_dist_path = dist.join(source_dist_filename.to_string());
         let source_dist_contents = sdist_contents(&source_dist_path);

@@ -505,13 +417,7 @@ mod tests {
             source_dist_filename.name.as_dist_info_name(),
             source_dist_filename.version
         ));
-        let wheel_filename = build_wheel(
-            &sdist_top_level_directory,
-            dist,
-            None,
-            MOCK_UV_VERSION,
-            false,
-        )?;
+        let wheel_filename = build_wheel(&sdist_top_level_directory, dist, None, MOCK_UV_VERSION)?;
         let wheel_contents = wheel_contents(&dist.join(wheel_filename.to_string()));

         // Check that direct and indirect wheels are identical.

@@ -599,7 +505,7 @@ mod tests {
     /// platform-independent deterministic builds.
     #[test]
     fn built_by_uv_building() {
-        let built_by_uv = Path::new("../../test/packages/built-by-uv");
+        let built_by_uv = Path::new("../../scripts/packages/built-by-uv");
         let src = TempDir::new().unwrap();
         for dir in [
             "src",

@@ -662,7 +568,7 @@ mod tests {
         // Check that the source dist is reproducible across platforms.
         assert_snapshot!(
            format!("{:x}", sha2::Sha256::digest(fs_err::read(&source_dist_path).unwrap())),
-            @"bb74bff575b135bb39e5c9bce56349441fb0923bb8857e32a5eaf34ec1843967"
+            @"871d1f859140721b67cbeaca074e7a2740c88c38028d0509eba87d1285f1da9e"
         );
         // Check both the files we report and the actual files
         assert_snapshot!(format_file_list(build.source_dist_list_files, src.path()), @r"

@@ -716,7 +622,7 @@ mod tests {
         // Check that the wheel is reproducible across platforms.
         assert_snapshot!(
            format!("{:x}", sha2::Sha256::digest(fs_err::read(&wheel_path).unwrap())),
-            @"319afb04e87caf894b1362b508ec745253c6d241423ea59021694d2015e821da"
+            @"342bf60c8406144f459358cde92408686c1631fe22389d042ce80379e589d6ec"
         );
         assert_snapshot!(build.wheel_contents.join("\n"), @r"
         built_by_uv-0.1.0.data/data/
@@ -759,31 +665,6 @@ mod tests {
         built_by_uv-0.1.0.dist-info/entry_points.txt (generated)
         built_by_uv-0.1.0.dist-info/METADATA (generated)
         ");
-
-        let mut wheel = zip::ZipArchive::new(File::open(wheel_path).unwrap()).unwrap();
-        let mut record = String::new();
-        wheel
-            .by_name("built_by_uv-0.1.0.dist-info/RECORD")
-            .unwrap()
-            .read_to_string(&mut record)
-            .unwrap();
-        assert_snapshot!(record, @r###"
-        built_by_uv/__init__.py,sha256=AJ7XpTNWxYktP97ydb81UpnNqoebH7K4sHRakAMQKG4,44
-        built_by_uv/arithmetic/__init__.py,sha256=x2agwFbJAafc9Z6TdJ0K6b6bLMApQdvRSQjP4iy7IEI,67
-        built_by_uv/arithmetic/circle.py,sha256=FYZkv6KwrF9nJcwGOKigjke1dm1Fkie7qW1lWJoh3AE,287
-        built_by_uv/arithmetic/pi.txt,sha256=-4HqoLoIrSKGf0JdTrM8BTTiIz8rq-MSCDL6LeF0iuU,8
-        built_by_uv/cli.py,sha256=Jcm3PxSb8wTAN3dGm5vKEDQwCgoUXkoeggZeF34QyKM,44
-        built_by_uv-0.1.0.dist-info/licenses/LICENSE-APACHE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
-        built_by_uv-0.1.0.dist-info/licenses/LICENSE-MIT,sha256=F5Z0Cpu8QWyblXwXhrSo0b9WmYXQxd1LwLjVLJZwbiI,1077
-        built_by_uv-0.1.0.dist-info/licenses/third-party-licenses/PEP-401.txt,sha256=KN-KAx829G2saLjVmByc08RFFtIDWvHulqPyD0qEBZI,270
-        built_by_uv-0.1.0.data/headers/built_by_uv.h,sha256=p5-HBunJ1dY-xd4dMn03PnRClmGyRosScIp8rT46kg4,144
-        built_by_uv-0.1.0.data/scripts/whoami.sh,sha256=T2cmhuDFuX-dTkiSkuAmNyIzvv8AKopjnuTCcr9o-eE,20
-        built_by_uv-0.1.0.data/data/data.csv,sha256=7z7u-wXu7Qr2eBZFVpBILlNUiGSngv_1vYqZHVWOU94,265
-        built_by_uv-0.1.0.dist-info/WHEEL,sha256=PaG_oOj9G2zCRqoLK0SjWBVZbGAMtIXDmm-MEGw9Wo0,83
-        built_by_uv-0.1.0.dist-info/entry_points.txt,sha256=-IO6yaq6x6HSl-zWH96rZmgYvfyHlH00L5WQoCpz-YI,50
-        built_by_uv-0.1.0.dist-info/METADATA,sha256=m6EkVvKrGmqx43b_VR45LHD37IZxPYC0NI6Qx9_UXLE,474
-        built_by_uv-0.1.0.dist-info/RECORD,,
-        "###);
     }

     /// Test that `license = { file = "LICENSE" }` is supported.
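The snapshot hashes in these tests are plain SHA-256 digests of the artifact bytes, rendered lowercase-hex exactly as in the assertions above. A minimal sketch of reproducing one outside the test (the file path is illustrative):

```rust
use sha2::Digest;

let bytes = fs_err::read("dist/built_by_uv-0.1.0.tar.gz").unwrap();
// `{:x}` formats the digest as the lowercase hex string the snapshots store.
let digest = format!("{:x}", sha2::Sha256::digest(&bytes));
println!("{digest}");
```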
@@ -821,7 +702,7 @@ mod tests {

         // Build a wheel from a source distribution
         let output_dir = TempDir::new().unwrap();
-        build_source_dist(src.path(), output_dir.path(), "0.5.15", false).unwrap();
+        build_source_dist(src.path(), output_dir.path(), "0.5.15").unwrap();
         let sdist_tree = TempDir::new().unwrap();
         let source_dist_path = output_dir.path().join("pep_pep639_license-1.0.0.tar.gz");
         let sdist_reader = BufReader::new(File::open(&source_dist_path).unwrap());

@@ -832,7 +713,6 @@ mod tests {
             output_dir.path(),
             None,
             "0.5.15",
-            false,
         )
         .unwrap();
         let wheel = output_dir

@@ -897,7 +777,6 @@ mod tests {
             output_dir.path(),
             Some(&metadata_dir.path().join(&dist_info_dir)),
             "0.5.15",
-            false,
         )
         .unwrap();
         let wheel = output_dir

@@ -1036,7 +915,7 @@ mod tests {
             .replace('\\', "/");
         assert_snapshot!(
             err_message,
-            @"Expected a Python module at: [TEMP_PATH]/src/camel_case/__init__.py"
+            @"Expected a Python module at: `[TEMP_PATH]/src/camel_case/__init__.py`"
         );
     }

@@ -1101,7 +980,7 @@ mod tests {
             .replace('\\', "/");
         assert_snapshot!(
             err_message,
-            @"Expected a Python module at: [TEMP_PATH]/src/stuffed_bird-stubs/__init__.pyi"
+            @"Expected a Python module at: `[TEMP_PATH]/src/stuffed_bird-stubs/__init__.pyi`"
         );

         // Create the correct file

@@ -1167,7 +1046,7 @@ mod tests {

         assert_snapshot!(
             build_err(src.path()),
-            @"Expected a Python module at: [TEMP_PATH]/src/simple_namespace/part/__init__.py"
+            @"Expected a Python module at: `[TEMP_PATH]/src/simple_namespace/part/__init__.py`"
         );

         // Create the correct file

@@ -1189,7 +1068,7 @@ mod tests {
         File::create(&bogus_init_py).unwrap();
         assert_snapshot!(
             build_err(src.path()),
-            @"For namespace packages, `__init__.py[i]` is not allowed in parent directory: [TEMP_PATH]/src/simple_namespace"
+            @"For namespace packages, `__init__.py[i]` is not allowed in parent directory: `[TEMP_PATH]/src/simple_namespace`"
         );
         fs_err::remove_file(bogus_init_py).unwrap();

@@ -1409,7 +1288,7 @@ mod tests {
         // The first module is missing an `__init__.py`.
         assert_snapshot!(
             build_err(src.path()),
-            @"Expected a Python module at: [TEMP_PATH]/src/foo/__init__.py"
+            @"Expected a Python module at: `[TEMP_PATH]/src/foo/__init__.py`"
         );

         // Create the first correct `__init__.py` file

@@ -1418,7 +1297,7 @@ mod tests {
         // The second module, a namespace, is missing an `__init__.py`.
         assert_snapshot!(
             build_err(src.path()),
-            @"Expected a Python module at: [TEMP_PATH]/src/simple_namespace/part_a/__init__.py"
+            @"Expected a Python module at: `[TEMP_PATH]/src/simple_namespace/part_a/__init__.py`"
         );

         // Create the other two correct `__init__.py` files

@@ -1448,7 +1327,7 @@ mod tests {
         File::create(&bogus_init_py).unwrap();
         assert_snapshot!(
             build_err(src.path()),
-            @"For namespace packages, `__init__.py[i]` is not allowed in parent directory: [TEMP_PATH]/src/simple_namespace"
+            @"For namespace packages, `__init__.py[i]` is not allowed in parent directory: `[TEMP_PATH]/src/simple_namespace`"
         );
         fs_err::remove_file(bogus_init_py).unwrap();
@@ -1481,114 +1360,4 @@ mod tests {
         simple_namespace_part-1.0.0.dist-info/WHEEL
         ");
     }
-
-    /// `prune_redundant_modules` should remove modules which are already
-    /// included (either directly or via their parent)
-    #[test]
-    fn test_prune_redundant_modules() {
-        fn check(input: &[&str], expect: &[&str]) {
-            let input = input.iter().map(|s| (*s).to_string()).collect();
-            let expect: Vec<_> = expect.iter().map(|s| (*s).to_string()).collect();
-            assert_eq!(prune_redundant_modules(input), expect);
-        }
-
-        // Basic cases
-        check(&[], &[]);
-        check(&["foo"], &["foo"]);
-        check(&["foo", "bar"], &["bar", "foo"]);
-
-        // Deshadowing
-        check(&["foo", "foo.bar"], &["foo"]);
-        check(&["foo.bar", "foo"], &["foo"]);
-        check(
-            &["foo.bar.a", "foo.bar.b", "foo.bar", "foo", "foo.bar.a.c"],
-            &["foo"],
-        );
-        check(
-            &["bar.one", "bar.two", "baz", "bar", "baz.one"],
-            &["bar", "baz"],
-        );
-
-        // Potential false positives
-        check(&["foo", "foobar"], &["foo", "foobar"]);
-        check(
-            &["foo", "foobar", "foo.bar", "foobar.baz"],
-            &["foo", "foobar"],
-        );
-        check(&["foo.bar", "foo.baz"], &["foo.bar", "foo.baz"]);
-        check(&["foo", "foo", "foo.bar", "foo.bar"], &["foo"]);
-
-        // Everything
-        check(
-            &[
-                "foo.inner",
-                "foo.inner.deeper",
-                "foo",
-                "bar",
-                "bar.sub",
-                "bar.sub.deep",
-                "foobar",
-                "baz.baz.bar",
-                "baz.baz",
-                "qux",
-            ],
-            &["bar", "baz.baz", "foo", "foobar", "qux"],
-        );
-    }
-
-    /// A package with duplicate module names.
-    #[test]
-    fn duplicate_module_names() {
-        let src = TempDir::new().unwrap();
-        let pyproject_toml = indoc! {r#"
-            [project]
-            name = "duplicate"
-            version = "1.0.0"
-
-            [tool.uv.build-backend]
-            module-name = ["foo", "foo", "bar.baz", "bar.baz.submodule"]
-
-            [build-system]
-            requires = ["uv_build>=0.5.15,<0.6.0"]
-            build-backend = "uv_build"
-        "#
-        };
-        fs_err::write(src.path().join("pyproject.toml"), pyproject_toml).unwrap();
-        fs_err::create_dir_all(src.path().join("src").join("foo")).unwrap();
-        File::create(src.path().join("src").join("foo").join("__init__.py")).unwrap();
-        fs_err::create_dir_all(src.path().join("src").join("bar").join("baz")).unwrap();
-        File::create(
-            src.path()
-                .join("src")
-                .join("bar")
-                .join("baz")
-                .join("__init__.py"),
-        )
-        .unwrap();
-
-        let dist = TempDir::new().unwrap();
-        let build = build(src.path(), dist.path()).unwrap();
-        assert_snapshot!(build.source_dist_contents.join("\n"), @r"
-        duplicate-1.0.0/
-        duplicate-1.0.0/PKG-INFO
-        duplicate-1.0.0/pyproject.toml
-        duplicate-1.0.0/src
-        duplicate-1.0.0/src/bar
-        duplicate-1.0.0/src/bar/baz
-        duplicate-1.0.0/src/bar/baz/__init__.py
-        duplicate-1.0.0/src/foo
-        duplicate-1.0.0/src/foo/__init__.py
-        ");
-        assert_snapshot!(build.wheel_contents.join("\n"), @r"
-        bar/
-        bar/baz/
-        bar/baz/__init__.py
-        duplicate-1.0.0.dist-info/
-        duplicate-1.0.0.dist-info/METADATA
-        duplicate-1.0.0.dist-info/RECORD
-        duplicate-1.0.0.dist-info/WHEEL
-        foo/
-        foo/__init__.py
-        ");
-    }
 }
@@ -3,10 +3,10 @@ use std::ffi::OsStr;
 use std::fmt::Display;
 use std::fmt::Write;
 use std::path::{Path, PathBuf};
-use std::str::{self, FromStr};
+use std::str::FromStr;

 use itertools::Itertools;
-use serde::{Deserialize, Deserializer};
+use serde::Deserialize;
 use tracing::{debug, trace, warn};
 use version_ranges::Ranges;
 use walkdir::WalkDir;

@@ -21,7 +21,7 @@ use uv_pep508::{
 use uv_pypi_types::{Metadata23, VerbatimParsedUrl};

 use crate::serde_verbatim::SerdeVerbatim;
-use crate::{BuildBackendSettings, Error, error_on_venv};
+use crate::{BuildBackendSettings, Error};

 /// By default, we ignore generated python files.
 pub(crate) const DEFAULT_EXCLUDES: &[&str] = &["__pycache__", "*.pyc", "*.pyo"];

@@ -40,7 +40,7 @@ pub enum ValidationError {
     UnknownExtension(String),
     #[error("Can't infer content type because `{}` does not have an extension. Please use a support extension (`.md`, `.rst`, `.txt`) or set the content type manually.", _0.user_display())]
     MissingExtension(PathBuf),
-    #[error("Unsupported content type: {0}")]
+    #[error("Unsupported content type: `{0}`")]
     UnsupportedContentType(String),
     #[error("`project.description` must be a single line")]
     DescriptionNewlines,

@@ -51,29 +51,19 @@ pub enum ValidationError {
     )]
     MixedLicenseGenerations,
     #[error(
-        "Entrypoint groups must consist of letters and numbers separated by dots, invalid group: {0}"
+        "Entrypoint groups must consist of letters and numbers separated by dots, invalid group: `{0}`"
     )]
     InvalidGroup(String),
     #[error("Use `project.scripts` instead of `project.entry-points.console_scripts`")]
     ReservedScripts,
     #[error("Use `project.gui-scripts` instead of `project.entry-points.gui_scripts`")]
     ReservedGuiScripts,
-    #[error("`project.license` is not a valid SPDX expression: {0}")]
+    #[error("`project.license` is not a valid SPDX expression: `{0}`")]
     InvalidSpdx(String, #[source] spdx::error::ParseError),
-    #[error("`{field}` glob `{glob}` did not match any files")]
-    LicenseGlobNoMatches { field: String, glob: String },
-    #[error("License file `{}` must be UTF-8 encoded", _0)]
-    LicenseFileNotUtf8(String),
 }

 /// Check if the build backend is matching the currently running uv version.
 pub fn check_direct_build(source_tree: &Path, name: impl Display) -> bool {
-    #[derive(Deserialize)]
-    #[serde(rename_all = "kebab-case")]
-    struct PyProjectToml {
-        build_system: BuildSystem,
-    }
-
     let pyproject_toml: PyProjectToml =
         match fs_err::read_to_string(source_tree.join("pyproject.toml"))
             .map_err(|err| err.to_string())

@@ -83,14 +73,12 @@ pub fn check_direct_build(source_tree: &Path, name: impl Display) -> bool {
             Ok(pyproject_toml) => pyproject_toml,
             Err(err) => {
                 debug!(
-                    "Not using uv build backend direct build for source tree `{name}`, \
-                    failed to parse pyproject.toml: {err}"
+                    "Not using uv build backend direct build of {name}, no pyproject.toml: {err}"
                 );
                 return false;
             }
         };
     match pyproject_toml
-        .build_system
         .check_build_system(uv_version::version())
         .as_slice()
     {

@@ -99,36 +87,16 @@ pub fn check_direct_build(source_tree: &Path, name: impl Display) -> bool {
         // Any warning -> no match
         [first, others @ ..] => {
             debug!(
-                "Not using uv build backend direct build of `{name}`, pyproject.toml does not match: {first}"
+                "Not using uv build backend direct build of {name}, pyproject.toml does not match: {first}"
             );
             for other in others {
-                trace!("Further uv build backend direct build of `{name}` mismatch: {other}");
+                trace!("Further uv build backend direct build of {name} mismatch: {other}");
             }
             false
         }
     }
 }

-/// A package name as provided in a `pyproject.toml`.
-#[derive(Debug, Clone)]
-struct VerbatimPackageName {
-    /// The package name as given in the `pyproject.toml`.
-    given: String,
-    /// The normalized package name.
-    normalized: PackageName,
-}
-
-impl<'de> Deserialize<'de> for VerbatimPackageName {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
-        let given = String::deserialize(deserializer)?;
-        let normalized = PackageName::from_str(&given).map_err(serde::de::Error::custom)?;
-        Ok(Self { given, normalized })
-    }
-}
-
 /// A `pyproject.toml` as specified in PEP 517.
 #[derive(Deserialize, Debug, Clone)]
 #[serde(
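The removed `VerbatimPackageName` kept both spellings because name normalization is lossy: PEP 503-style normalization folds case, dots, and underscores into dashes, so the verbatim form must be stored separately if METADATA should echo the user's input. A hypothetical illustration, assuming `PackageName::from_str` applies that normalization:

```rust
use std::str::FromStr;

let given = "My.Cool_Package";
let normalized = PackageName::from_str(given).unwrap();
// The normalized form is what resolvers compare; `name.given` preserved the original.
assert_eq!(normalized.to_string(), "my-cool-package");
```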
@@ -147,18 +115,15 @@ pub struct PyProjectToml {

 impl PyProjectToml {
     pub(crate) fn name(&self) -> &PackageName {
-        &self.project.name.normalized
+        &self.project.name
     }

     pub(crate) fn version(&self) -> &Version {
         &self.project.version
     }

-    pub(crate) fn parse(path: &Path) -> Result<Self, Error> {
-        let contents = fs_err::read_to_string(path)?;
-        let pyproject_toml =
-            toml::from_str(&contents).map_err(|err| Error::Toml(path.to_path_buf(), err))?;
-        Ok(pyproject_toml)
+    pub(crate) fn parse(contents: &str) -> Result<Self, Error> {
+        Ok(toml::from_str(contents)?)
     }

     pub(crate) fn readme(&self) -> Option<&Readme> {

@@ -196,9 +161,83 @@ impl PyProjectToml {
         self.tool.as_ref()?.uv.as_ref()?.build_backend.as_ref()
     }

-    /// See [`BuildSystem::check_build_system`].
+    /// Returns user-facing warnings if the `[build-system]` table looks suspicious.
+    ///
+    /// Example of a valid table:
+    ///
+    /// ```toml
+    /// [build-system]
+    /// requires = ["uv_build>=0.4.15,<0.5.0"]
+    /// build-backend = "uv_build"
+    /// ```
     pub fn check_build_system(&self, uv_version: &str) -> Vec<String> {
-        self.build_system.check_build_system(uv_version)
+        let mut warnings = Vec::new();
+        if self.build_system.build_backend.as_deref() != Some("uv_build") {
+            warnings.push(format!(
+                r#"The value for `build_system.build-backend` should be `"uv_build"`, not `"{}"`"#,
+                self.build_system.build_backend.clone().unwrap_or_default()
+            ));
+        }
+
+        let uv_version =
+            Version::from_str(uv_version).expect("uv's own version is not PEP 440 compliant");
+        let next_minor = uv_version.release().get(1).copied().unwrap_or_default() + 1;
+        let next_breaking = Version::new([0, next_minor]);
+
+        let expected = || {
+            format!(
+                "Expected a single uv requirement in `build-system.requires`, found `{}`",
+                toml::to_string(&self.build_system.requires).unwrap_or_default()
+            )
+        };
+
+        let [uv_requirement] = &self.build_system.requires.as_slice() else {
+            warnings.push(expected());
+            return warnings;
+        };
+        if uv_requirement.name.as_str() != "uv-build" {
+            warnings.push(expected());
+            return warnings;
+        }
+        let bounded = match &uv_requirement.version_or_url {
+            None => false,
+            Some(VersionOrUrl::Url(_)) => {
+                // We can't validate the url
+                true
+            }
+            Some(VersionOrUrl::VersionSpecifier(specifier)) => {
+                // We don't check how wide the range is (that's up to the user), we just
+                // check that the current version is compliant, to avoid accidentally using a
+                // too new or too old uv, and we check that an upper bound exists. The latter
+                // is very important to allow making breaking changes in uv without breaking
+                // the existing immutable source distributions on pypi.
+                if !specifier.contains(&uv_version) {
+                    // This is allowed to happen when testing prereleases, but we should still warn.
+                    warnings.push(format!(
+                        r#"`build_system.requires = ["{uv_requirement}"]` does not contain the
+                        current uv version {uv_version}"#,
+                    ));
+                }
+                Ranges::from(specifier.clone())
+                    .bounding_range()
+                    .map(|bounding_range| bounding_range.1 != Bound::Unbounded)
+                    .unwrap_or(false)
+            }
+        };
+
+        if !bounded {
+            warnings.push(format!(
+                "`build_system.requires = [\"{}\"]` is missing an \
+                upper bound on the `uv_build` version such as `<{next_breaking}`. \
+                Without bounding the `uv_build` version, the source distribution will break \
+                when a future, breaking version of `uv_build` is released.",
+                // Use an underscore consistently, to avoid confusing users between a package name with dash and a
+                // module name with underscore
+                uv_requirement.verbatim()
+            ));
+        }
+
+        warnings
     }
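A sketch of how the inlined validation above reacts to an unbounded requirement (hypothetical values; `pyproject_toml` stands in for a parsed document whose `[build-system]` declares `requires = ["uv_build>=0.8.0"]`):

```rust
// The requirement matches the running version, so only the missing upper
// bound is flagged; the warning suggests `<0.9` as a cap.
let warnings = pyproject_toml.check_build_system("0.8.6");
assert_eq!(warnings.len(), 1);
assert!(warnings[0].contains("missing an upper bound"));
```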
@ -346,7 +385,97 @@ impl PyProjectToml {
|
||||||
"2.3"
|
"2.3"
|
||||||
};
|
};
|
||||||
|
|
||||||
let (license, license_expression, license_files) = self.license_metadata(root)?;
|
// TODO(konsti): Issue a warning on old license metadata once PEP 639 is universal.
|
||||||
|
let (license, license_expression, license_files) =
|
||||||
|
if let Some(license_globs) = &self.project.license_files {
|
||||||
|
let license_expression = match &self.project.license {
|
||||||
|
None => None,
|
||||||
|
Some(License::Spdx(license_expression)) => Some(license_expression.clone()),
|
||||||
|
Some(License::Text { .. } | License::File { .. }) => {
|
||||||
|
return Err(ValidationError::MixedLicenseGenerations.into());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut license_files = Vec::new();
|
||||||
|
let mut license_globs_parsed = Vec::new();
|
||||||
|
for license_glob in license_globs {
|
||||||
|
let pep639_glob =
|
||||||
|
PortableGlobParser::Pep639
|
||||||
|
.parse(license_glob)
|
||||||
|
.map_err(|err| Error::PortableGlob {
|
||||||
|
field: license_glob.to_string(),
|
||||||
|
source: err,
|
||||||
|
})?;
|
||||||
|
license_globs_parsed.push(pep639_glob);
|
||||||
|
}
|
||||||
|
let license_globs =
|
||||||
|
GlobDirFilter::from_globs(&license_globs_parsed).map_err(|err| {
|
||||||
|
Error::GlobSetTooLarge {
|
||||||
|
field: "tool.uv.build-backend.source-include".to_string(),
|
||||||
|
source: err,
|
||||||
|
}
|
||||||
|
})?;
|
||||||
|
|
||||||
|
for entry in WalkDir::new(root)
|
||||||
|
.sort_by_file_name()
|
||||||
|
.into_iter()
|
||||||
|
.filter_entry(|entry| {
|
||||||
|
license_globs.match_directory(
|
||||||
|
entry
|
||||||
|
.path()
|
||||||
|
.strip_prefix(root)
|
||||||
|
.expect("walkdir starts with root"),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
{
|
||||||
|
let entry = entry.map_err(|err| Error::WalkDir {
|
||||||
|
root: root.to_path_buf(),
|
||||||
|
err,
|
||||||
|
})?;
|
||||||
|
let relative = entry
|
||||||
|
.path()
|
||||||
|
.strip_prefix(root)
|
||||||
|
.expect("walkdir starts with root");
|
||||||
|
if !license_globs.match_path(relative) {
|
||||||
|
trace!("Not a license files match: `{}`", relative.user_display());
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if !entry.file_type().is_file() {
|
||||||
|
trace!(
|
||||||
|
"Not a file in license files match: `{}`",
|
||||||
|
relative.user_display()
|
||||||
|
);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
debug!("License files match: `{}`", relative.user_display());
|
||||||
|
license_files.push(relative.portable_display().to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
// The glob order may be unstable
|
||||||
|
license_files.sort();
|
||||||
|
|
||||||
|
(None, license_expression, license_files)
|
||||||
|
} else {
|
||||||
|
match &self.project.license {
|
||||||
|
None => (None, None, Vec::new()),
|
||||||
|
Some(License::Spdx(license_expression)) => {
|
||||||
|
(None, Some(license_expression.clone()), Vec::new())
|
||||||
|
}
|
||||||
|
Some(License::Text { text }) => (Some(text.clone()), None, Vec::new()),
|
||||||
|
Some(License::File { file }) => {
|
||||||
|
let text = fs_err::read_to_string(root.join(file))?;
|
||||||
|
(Some(text), None, Vec::new())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Check that the license expression is a valid SPDX identifier.
|
||||||
|
if let Some(license_expression) = &license_expression {
|
||||||
|
if let Err(err) = spdx::Expression::parse(license_expression) {
|
||||||
|
return Err(ValidationError::InvalidSpdx(license_expression.clone(), err).into());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
         // TODO(konsti): https://peps.python.org/pep-0753/#label-normalization (Draft)
         let project_urls = self
@@ -391,7 +520,7 @@ impl PyProjectToml {
 
         Ok(Metadata23 {
             metadata_version: metadata_version.to_string(),
-            name: self.project.name.given.clone(),
+            name: self.project.name.to_string(),
             version: self.project.version.to_string(),
             // Not supported.
             platforms: vec![],
@@ -416,7 +545,7 @@ impl PyProjectToml {
             license_files,
             classifiers: self.project.classifiers.clone().unwrap_or_default(),
             requires_dist: requires_dist.iter().map(ToString::to_string).collect(),
-            provides_extra: extras.iter().map(ToString::to_string).collect(),
+            provides_extras: extras.iter().map(ToString::to_string).collect(),
             // Not commonly set.
             provides_dist: vec![],
             // Not supported.
@@ -433,156 +562,6 @@ impl PyProjectToml {
         })
     }
-
-    /// Parse and validate the old (PEP 621) and new (PEP 639) license files.
-    #[allow(clippy::type_complexity)]
-    fn license_metadata(
-        &self,
-        root: &Path,
-    ) -> Result<(Option<String>, Option<String>, Vec<String>), Error> {
-        // TODO(konsti): Issue a warning on old license metadata once PEP 639 is universal.
-        let (license, license_expression, license_files) = if let Some(license_globs) =
-            &self.project.license_files
-        {
-            let license_expression = match &self.project.license {
-                None => None,
-                Some(License::Spdx(license_expression)) => Some(license_expression.clone()),
-                Some(License::Text { .. } | License::File { .. }) => {
-                    return Err(ValidationError::MixedLicenseGenerations.into());
-                }
-            };
-
-            let mut license_files = Vec::new();
-            let mut license_globs_parsed = Vec::with_capacity(license_globs.len());
-            let mut license_glob_matchers = Vec::with_capacity(license_globs.len());
-
-            for license_glob in license_globs {
-                let pep639_glob = PortableGlobParser::Pep639
-                    .parse(license_glob)
-                    .map_err(|err| Error::PortableGlob {
-                        field: license_glob.to_owned(),
-                        source: err,
-                    })?;
-                license_glob_matchers.push(pep639_glob.compile_matcher());
-                license_globs_parsed.push(pep639_glob);
-            }
-
-            // Track whether each user-specified glob matched so we can flag the unmatched ones.
-            let mut license_globs_matched = vec![false; license_globs_parsed.len()];
-
-            let license_globs =
-                GlobDirFilter::from_globs(&license_globs_parsed).map_err(|err| {
-                    Error::GlobSetTooLarge {
-                        field: "project.license-files".to_string(),
-                        source: err,
-                    }
-                })?;
-
-            for entry in WalkDir::new(root)
-                .sort_by_file_name()
-                .into_iter()
-                .filter_entry(|entry| {
-                    license_globs.match_directory(
-                        entry
-                            .path()
-                            .strip_prefix(root)
-                            .expect("walkdir starts with root"),
-                    )
-                })
-            {
-                let entry = entry.map_err(|err| Error::WalkDir {
-                    root: root.to_path_buf(),
-                    err,
-                })?;
-
-                let relative = entry
-                    .path()
-                    .strip_prefix(root)
-                    .expect("walkdir starts with root");
-
-                if !license_globs.match_path(relative) {
-                    trace!("Not a license files match: {}", relative.user_display());
-                    continue;
-                }
-
-                let file_type = entry.file_type();
-
-                if !(file_type.is_file() || file_type.is_symlink()) {
-                    trace!(
-                        "Not a file or symlink in license files match: {}",
-                        relative.user_display()
-                    );
-                    continue;
-                }
-
-                error_on_venv(entry.file_name(), entry.path())?;
-
-                debug!("License files match: {}", relative.user_display());
-
-                for (matched, matcher) in license_globs_matched
-                    .iter_mut()
-                    .zip(license_glob_matchers.iter())
-                {
-                    if *matched {
-                        continue;
-                    }
-
-                    if matcher.is_match(relative) {
-                        *matched = true;
-                    }
-                }
-
-                license_files.push(relative.portable_display().to_string());
-            }
-
-            if let Some((pattern, _)) = license_globs_parsed
-                .into_iter()
-                .zip(license_globs_matched)
-                .find(|(_, matched)| !matched)
-            {
-                return Err(ValidationError::LicenseGlobNoMatches {
-                    field: "project.license-files".to_string(),
-                    glob: pattern.to_string(),
-                }
-                .into());
-            }
-
-            for license_file in &license_files {
-                let file_path = root.join(license_file);
-                let bytes = fs_err::read(&file_path)?;
-                if str::from_utf8(&bytes).is_err() {
-                    return Err(ValidationError::LicenseFileNotUtf8(license_file.clone()).into());
-                }
-            }
-
-            // The glob order may be unstable
-            license_files.sort();
-
-            (None, license_expression, license_files)
-        } else {
-            match &self.project.license {
-                None => (None, None, Vec::new()),
-                Some(License::Spdx(license_expression)) => {
-                    (None, Some(license_expression.clone()), Vec::new())
-                }
-                Some(License::Text { text }) => (Some(text.clone()), None, Vec::new()),
-                Some(License::File { file }) => {
-                    let text = fs_err::read_to_string(root.join(file))?;
-                    (Some(text), None, Vec::new())
-                }
-            }
-        };
-
-        // Check that the license expression is a valid SPDX identifier.
-        if let Some(license_expression) = &license_expression {
-            if let Err(err) = spdx::Expression::parse(license_expression) {
-                return Err(ValidationError::InvalidSpdx(license_expression.clone(), err).into());
-            }
-        }
-
-        Ok((license, license_expression, license_files))
-    }
 
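Both variants above implement PEP 639 `license-files` handling on top of the older PEP 621 `license` field. A minimal sketch of the input they validate (the package name and glob patterns are illustrative, not taken from this diff):

```toml
[project]
name = "example"
version = "0.1.0"
license = "MIT OR Apache-2.0"
license-files = ["LICENSE*", "licenses/*"]
```

Combining `license-files` with the legacy `license = { file = ... }` or `license = { text = ... }` table forms is exactly what `ValidationError::MixedLicenseGenerations` rejects.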
 /// Validate and convert the entrypoints in `pyproject.toml`, including console and GUI scripts,
 /// to an `entry_points.txt`.
 ///
@@ -643,7 +622,7 @@ impl PyProjectToml {
             {
                 warn!(
                     "Entrypoint names should consist of letters, numbers, dots, underscores and \
-                    dashes; non-compliant name: {name}"
+                    dashes; non-compliant name: `{name}`"
                 );
             }
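For orientation, the entrypoint validation above guards the conversion from `[project.scripts]`-style tables into the wheel's `entry_points.txt`. A rough sketch of that mapping (the names are illustrative, not taken from this diff):

```toml
[project.scripts]
hello = "example_pkg:main"
```

would be rendered as:

```
[console_scripts]
hello = example_pkg:main
```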
@@ -664,7 +643,7 @@ impl PyProjectToml {
 #[serde(rename_all = "kebab-case")]
 struct Project {
     /// The name of the project.
-    name: VerbatimPackageName,
+    name: PackageName,
     /// The version of the project.
     version: Version,
     /// The summary description of the project in one line.
@@ -801,6 +780,18 @@ pub(crate) enum Contact {
     Email { email: String },
 }
+
+/// The `[build-system]` section of a pyproject.toml as specified in PEP 517.
+#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
+#[serde(rename_all = "kebab-case")]
+struct BuildSystem {
+    /// PEP 508 dependencies required to execute the build system.
+    requires: Vec<SerdeVerbatim<Requirement<VerbatimParsedUrl>>>,
+    /// A string naming a Python object that will be used to perform the build.
+    build_backend: Option<String>,
+    /// <https://peps.python.org/pep-0517/#in-tree-build-backends>
+    backend_path: Option<Vec<String>>,
+}
+
 /// The `tool` section as specified in PEP 517.
 #[derive(Deserialize, Debug, Clone)]
 #[serde(rename_all = "kebab-case")]
@@ -817,100 +808,6 @@ pub(crate) struct ToolUv {
     build_backend: Option<BuildBackendSettings>,
 }
-
-/// The `[build-system]` section of a pyproject.toml as specified in PEP 517.
-#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
-#[serde(rename_all = "kebab-case")]
-struct BuildSystem {
-    /// PEP 508 dependencies required to execute the build system.
-    requires: Vec<SerdeVerbatim<Requirement<VerbatimParsedUrl>>>,
-    /// A string naming a Python object that will be used to perform the build.
-    build_backend: Option<String>,
-    /// <https://peps.python.org/pep-0517/#in-tree-build-backends>
-    backend_path: Option<Vec<String>>,
-}
-
-impl BuildSystem {
-    /// Check if the `[build-system]` table matches the uv build backend expectations and return
-    /// a list of warnings if it looks suspicious.
-    ///
-    /// Example of a valid table:
-    ///
-    /// ```toml
-    /// [build-system]
-    /// requires = ["uv_build>=0.4.15,<0.5.0"]
-    /// build-backend = "uv_build"
-    /// ```
-    pub(crate) fn check_build_system(&self, uv_version: &str) -> Vec<String> {
-        let mut warnings = Vec::new();
-        if self.build_backend.as_deref() != Some("uv_build") {
-            warnings.push(format!(
-                r#"The value for `build_system.build-backend` should be `"uv_build"`, not `"{}"`"#,
-                self.build_backend.clone().unwrap_or_default()
-            ));
-        }
-
-        let uv_version =
-            Version::from_str(uv_version).expect("uv's own version is not PEP 440 compliant");
-        let next_minor = uv_version.release().get(1).copied().unwrap_or_default() + 1;
-        let next_breaking = Version::new([0, next_minor]);
-
-        let expected = || {
-            format!(
-                "Expected a single uv requirement in `build-system.requires`, found `{}`",
-                toml::to_string(&self.requires).unwrap_or_default()
-            )
-        };
-
-        let [uv_requirement] = &self.requires.as_slice() else {
-            warnings.push(expected());
-            return warnings;
-        };
-        if uv_requirement.name.as_str() != "uv-build" {
-            warnings.push(expected());
-            return warnings;
-        }
-        let bounded = match &uv_requirement.version_or_url {
-            None => false,
-            Some(VersionOrUrl::Url(_)) => {
-                // We can't validate the url
-                true
-            }
-            Some(VersionOrUrl::VersionSpecifier(specifier)) => {
-                // We don't check how wide the range is (that's up to the user), we just
-                // check that the current version is compliant, to avoid accidentally using a
-                // too new or too old uv, and we check that an upper bound exists. The latter
-                // is very important to allow making breaking changes in uv without breaking
-                // the existing immutable source distributions on pypi.
-                if !specifier.contains(&uv_version) {
-                    // This is allowed to happen when testing prereleases, but we should still warn.
-                    warnings.push(format!(
-                        r#"`build_system.requires = ["{uv_requirement}"]` does not contain the
-                        current uv version {uv_version}"#,
-                    ));
-                }
-                Ranges::from(specifier.clone())
-                    .bounding_range()
-                    .map(|bounding_range| bounding_range.1 != Bound::Unbounded)
-                    .unwrap_or(false)
-            }
-        };
-
-        if !bounded {
-            warnings.push(format!(
-                "`build_system.requires = [\"{}\"]` is missing an \
-                upper bound on the `uv_build` version such as `<{next_breaking}`. \
-                Without bounding the `uv_build` version, the source distribution will break \
-                when a future, breaking version of `uv_build` is released.",
-                // Use an underscore consistently, to avoid confusing users between a package name with dash and a
-                // module name with underscore
-                uv_requirement.verbatim()
-            ));
-        }
-
-        warnings
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -941,28 +838,6 @@ mod tests {
         formatted
     }
-
-    #[test]
-    fn uppercase_package_name() {
-        let contents = r#"
-[project]
-name = "Hello-World"
-version = "0.1.0"
-
-[build-system]
-requires = ["uv_build>=0.4.15,<0.5.0"]
-build-backend = "uv_build"
-"#;
-        let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
-        let temp_dir = TempDir::new().unwrap();
-
-        let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();
-        assert_snapshot!(metadata.core_metadata_format(), @r"
-        Metadata-Version: 2.3
-        Name: Hello-World
-        Version: 0.1.0
-        ");
-    }
 
     #[test]
     fn valid() {
         let temp_dir = TempDir::new().unwrap();
@@ -1037,7 +912,7 @@ mod tests {
         "#
         };
 
-        let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
+        let pyproject_toml = PyProjectToml::parse(contents).unwrap();
         let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();
 
         assert_snapshot!(metadata.core_metadata_format(), @r###"
@@ -1131,7 +1006,7 @@ mod tests {
         "#
         };
 
-        let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
+        let pyproject_toml = PyProjectToml::parse(contents).unwrap();
         let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();
 
         assert_snapshot!(metadata.core_metadata_format(), @r"
@@ -1223,7 +1098,7 @@ mod tests {
         "#
         };
 
-        let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
+        let pyproject_toml = PyProjectToml::parse(contents).unwrap();
         let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();
 
         assert_snapshot!(metadata.core_metadata_format(), @r###"
@@ -1284,7 +1159,7 @@ mod tests {
     #[test]
     fn build_system_valid() {
         let contents = extend_project("");
-        let pyproject_toml: PyProjectToml = toml::from_str(&contents).unwrap();
+        let pyproject_toml = PyProjectToml::parse(&contents).unwrap();
         assert_snapshot!(
             pyproject_toml.check_build_system("0.4.15+test").join("\n"),
             @""
@@ -1302,7 +1177,7 @@ mod tests {
             requires = ["uv_build"]
             build-backend = "uv_build"
         "#};
-        let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
+        let pyproject_toml = PyProjectToml::parse(contents).unwrap();
         assert_snapshot!(
             pyproject_toml.check_build_system("0.4.15+test").join("\n"),
             @r###"`build_system.requires = ["uv_build"]` is missing an upper bound on the `uv_build` version such as `<0.5`. Without bounding the `uv_build` version, the source distribution will break when a future, breaking version of `uv_build` is released."###
@@ -1320,7 +1195,7 @@ mod tests {
             requires = ["uv_build>=0.4.15,<0.5.0", "wheel"]
             build-backend = "uv_build"
         "#};
-        let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
+        let pyproject_toml = PyProjectToml::parse(contents).unwrap();
         assert_snapshot!(
             pyproject_toml.check_build_system("0.4.15+test").join("\n"),
             @"Expected a single uv requirement in `build-system.requires`, found ``"
@@ -1338,7 +1213,7 @@ mod tests {
             requires = ["setuptools"]
             build-backend = "uv_build"
         "#};
-        let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
+        let pyproject_toml = PyProjectToml::parse(contents).unwrap();
         assert_snapshot!(
             pyproject_toml.check_build_system("0.4.15+test").join("\n"),
             @"Expected a single uv requirement in `build-system.requires`, found ``"
@@ -1356,7 +1231,7 @@ mod tests {
             requires = ["uv_build>=0.4.15,<0.5.0"]
             build-backend = "setuptools"
         "#};
-        let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
+        let pyproject_toml = PyProjectToml::parse(contents).unwrap();
         assert_snapshot!(
             pyproject_toml.check_build_system("0.4.15+test").join("\n"),
             @r###"The value for `build_system.build-backend` should be `"uv_build"`, not `"setuptools"`"###
@@ -1367,7 +1242,7 @@ mod tests {
     fn minimal() {
         let contents = extend_project("");
 
-        let metadata = toml::from_str::<PyProjectToml>(&contents)
+        let metadata = PyProjectToml::parse(&contents)
             .unwrap()
             .to_metadata(Path::new("/do/not/read"))
             .unwrap();
@@ -1386,14 +1261,15 @@ mod tests {
         "#
         });
 
-        let err = toml::from_str::<PyProjectToml>(&contents).unwrap_err();
-        assert_snapshot!(format_err(err), @r#"
-        TOML parse error at line 4, column 10
+        let err = PyProjectToml::parse(&contents).unwrap_err();
+        assert_snapshot!(format_err(err), @r###"
+        Invalid pyproject.toml
+        Caused by: TOML parse error at line 4, column 10
           |
         4 | readme = { path = "Readme.md" }
          |          ^^^^^^^^^^^^^^^^^^^^^^
        data did not match any variant of untagged enum Readme
-        "#);
+        "###);
     }
 
     #[test]
@@ -1403,7 +1279,7 @@ mod tests {
         "#
         });
 
-        let err = toml::from_str::<PyProjectToml>(&contents)
+        let err = PyProjectToml::parse(&contents)
             .unwrap()
             .to_metadata(Path::new("/do/not/read"))
             .unwrap_err();
@@ -1425,14 +1301,14 @@ mod tests {
         "#
         });
 
-        let err = toml::from_str::<PyProjectToml>(&contents)
+        let err = PyProjectToml::parse(&contents)
             .unwrap()
             .to_metadata(Path::new("/do/not/read"))
             .unwrap_err();
-        assert_snapshot!(format_err(err), @r"
-        Invalid project metadata
+        assert_snapshot!(format_err(err), @r###"
+        Invalid pyproject.toml
         Caused by: `project.description` must be a single line
-        ");
+        "###);
     }
 
     #[test]
@@ -1443,14 +1319,14 @@ mod tests {
         "#
         });
 
-        let err = toml::from_str::<PyProjectToml>(&contents)
+        let err = PyProjectToml::parse(&contents)
             .unwrap()
             .to_metadata(Path::new("/do/not/read"))
             .unwrap_err();
-        assert_snapshot!(format_err(err), @r"
-        Invalid project metadata
+        assert_snapshot!(format_err(err), @r###"
+        Invalid pyproject.toml
        Caused by: When `project.license-files` is defined, `project.license` must be an SPDX expression string
-        ");
+        "###);
     }
 
     #[test]
@@ -1459,7 +1335,7 @@ mod tests {
             license = "MIT OR Apache-2.0"
         "#
         });
-        let metadata = toml::from_str::<PyProjectToml>(&contents)
+        let metadata = PyProjectToml::parse(&contents)
             .unwrap()
             .to_metadata(Path::new("/do/not/read"))
             .unwrap();
@@ -1477,17 +1353,17 @@ mod tests {
             license = "MIT XOR Apache-2"
         "#
         });
-        let err = toml::from_str::<PyProjectToml>(&contents)
+        let err = PyProjectToml::parse(&contents)
             .unwrap()
             .to_metadata(Path::new("/do/not/read"))
             .unwrap_err();
         // TODO(konsti): We mess up the indentation in the error.
-        assert_snapshot!(format_err(err), @r"
-        Invalid project metadata
-        Caused by: `project.license` is not a valid SPDX expression: MIT XOR Apache-2
+        assert_snapshot!(format_err(err), @r###"
+        Invalid pyproject.toml
+        Caused by: `project.license` is not a valid SPDX expression: `MIT XOR Apache-2`
         Caused by: MIT XOR Apache-2
                        ^^^ unknown term
-        ");
+        "###);
     }
 
     #[test]
@@ -1497,18 +1373,18 @@ mod tests {
         "#
         });
 
-        let err = toml::from_str::<PyProjectToml>(&contents)
+        let err = PyProjectToml::parse(&contents)
             .unwrap()
             .to_metadata(Path::new("/do/not/read"))
             .unwrap_err();
-        assert_snapshot!(format_err(err), @r"
-        Invalid project metadata
+        assert_snapshot!(format_err(err), @r###"
+        Invalid pyproject.toml
         Caused by: Dynamic metadata is not supported
-        ");
+        "###);
     }
 
     fn script_error(contents: &str) -> String {
-        let err = toml::from_str::<PyProjectToml>(contents)
+        let err = PyProjectToml::parse(contents)
             .unwrap()
             .to_entry_points()
             .unwrap_err();
@@ -1522,7 +1398,7 @@ mod tests {
             foo = "bar"
         "#
         });
-        assert_snapshot!(script_error(&contents), @"Entrypoint groups must consist of letters and numbers separated by dots, invalid group: a@b");
+        assert_snapshot!(script_error(&contents), @"Entrypoint groups must consist of letters and numbers separated by dots, invalid group: `a@b`");
     }
 
     #[test]
@@ -1,5 +1,5 @@
 use serde::{Deserialize, Serialize};
-use std::path::{Path, PathBuf};
+use std::path::PathBuf;
 use uv_macros::OptionsMetadata;
 
 /// Settings for the uv build backend (`uv_build`).
@@ -70,9 +70,6 @@ pub struct BuildBackendSettings {
     pub default_excludes: bool,
 
     /// Glob expressions which files and directories to exclude from the source distribution.
-    ///
-    /// These exclusions are also applied to wheels to ensure that a wheel built from a source tree
-    /// is consistent with a wheel built from a source distribution.
     #[option(
         default = r#"[]"#,
         value_type = "list[str]",
@@ -207,16 +204,16 @@ pub enum ModuleName {
 #[serde(default, rename_all = "kebab-case", deny_unknown_fields)]
 #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
 pub struct WheelDataIncludes {
-    purelib: Option<PathBuf>,
-    platlib: Option<PathBuf>,
-    headers: Option<PathBuf>,
-    scripts: Option<PathBuf>,
-    data: Option<PathBuf>,
+    purelib: Option<String>,
+    platlib: Option<String>,
+    headers: Option<String>,
+    scripts: Option<String>,
+    data: Option<String>,
 }
 
 impl WheelDataIncludes {
     /// Yield all data directories name and corresponding paths.
-    pub fn iter(&self) -> impl Iterator<Item = (&'static str, &Path)> {
+    pub fn iter(&self) -> impl Iterator<Item = (&'static str, &str)> {
         [
             ("purelib", self.purelib.as_deref()),
             ("platlib", self.platlib.as_deref()),
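The `WheelDataIncludes` fields above correspond to the backend's data-directory settings. A rough sketch of the configuration shape (the key names follow the struct's kebab-case serde fields; the directory paths are illustrative):

```toml
[tool.uv.build-backend.data]
scripts = "scripts"
headers = "include"
data = "data"
```

Each configured directory ends up under the wheel's `<name>-<version>.data/<key>/` tree, matching the `data_dir` construction in the wheel-writing code later in this diff.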
@@ -1,8 +1,7 @@
 use crate::metadata::DEFAULT_EXCLUDES;
 use crate::wheel::build_exclude_matcher;
 use crate::{
-    BuildBackendSettings, DirectoryWriter, Error, FileList, ListWriter, PyProjectToml,
-    error_on_venv, find_roots,
+    BuildBackendSettings, DirectoryWriter, Error, FileList, ListWriter, PyProjectToml, find_roots,
 };
 use flate2::Compression;
 use flate2::write::GzEncoder;
@@ -10,7 +9,7 @@ use fs_err::File;
 use globset::{Glob, GlobSet};
 use std::io;
 use std::io::{BufReader, Cursor};
-use std::path::{Component, Path, PathBuf};
+use std::path::{Path, PathBuf};
 use tar::{EntryType, Header};
 use tracing::{debug, trace};
 use uv_distribution_filename::{SourceDistExtension, SourceDistFilename};
@@ -24,9 +23,9 @@ pub fn build_source_dist(
     source_tree: &Path,
     source_dist_directory: &Path,
     uv_version: &str,
-    show_warnings: bool,
 ) -> Result<SourceDistFilename, Error> {
-    let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
+    let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
+    let pyproject_toml = PyProjectToml::parse(&contents)?;
     let filename = SourceDistFilename {
         name: pyproject_toml.name().clone(),
         version: pyproject_toml.version().clone(),
@@ -34,7 +33,7 @@ pub fn build_source_dist(
     };
     let source_dist_path = source_dist_directory.join(filename.to_string());
     let writer = TarGzWriter::new(&source_dist_path)?;
-    write_source_dist(source_tree, writer, uv_version, show_warnings)?;
+    write_source_dist(source_tree, writer, uv_version)?;
     Ok(filename)
 }
@@ -42,9 +41,9 @@ pub fn build_source_dist(
 pub fn list_source_dist(
     source_tree: &Path,
     uv_version: &str,
-    show_warnings: bool,
 ) -> Result<(SourceDistFilename, FileList), Error> {
-    let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
+    let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
+    let pyproject_toml = PyProjectToml::parse(&contents)?;
     let filename = SourceDistFilename {
         name: pyproject_toml.name().clone(),
         version: pyproject_toml.version().clone(),
@@ -52,7 +51,7 @@ pub fn list_source_dist(
     };
     let mut files = FileList::new();
     let writer = ListWriter::new(&mut files);
-    write_source_dist(source_tree, writer, uv_version, show_warnings)?;
+    write_source_dist(source_tree, writer, uv_version)?;
     Ok((filename, files))
 }
@@ -61,7 +60,6 @@ fn source_dist_matcher(
     source_tree: &Path,
     pyproject_toml: &PyProjectToml,
     settings: BuildBackendSettings,
-    show_warnings: bool,
 ) -> Result<(GlobDirFilter, GlobSet), Error> {
     // File and directories to include in the source directory
     let mut include_globs = Vec::new();
@@ -76,7 +74,6 @@ fn source_dist_matcher(
         &settings.module_root,
         settings.module_name.as_ref(),
         settings.namespace,
-        show_warnings,
     )?;
     for module_relative in modules_relative {
         // The wheel must not include any files included by the source distribution (at least until we
@@ -106,7 +103,7 @@ fn source_dist_matcher(
         .and_then(|readme| readme.path())
     {
         let readme = uv_fs::normalize_path(readme);
-        trace!("Including readme at: {}", readme.user_display());
+        trace!("Including readme at: `{}`", readme.user_display());
         let readme = readme.portable_display().to_string();
         let glob = Glob::new(&globset::escape(&readme)).expect("escaped globset is parseable");
         include_globs.push(glob);
@@ -114,7 +111,7 @@ fn source_dist_matcher(
 
     // Include the license files
     for license_files in pyproject_toml.license_files_source_dist() {
-        trace!("Including license files at: {license_files}`");
+        trace!("Including license files at: `{license_files}`");
         let glob = PortableGlobParser::Pep639
             .parse(license_files)
             .map_err(|err| Error::PortableGlob {
@@ -126,18 +123,12 @@ fn source_dist_matcher(
 
     // Include the data files
     for (name, directory) in settings.data.iter() {
-        let directory = uv_fs::normalize_path(directory);
-        trace!("Including data ({}) at: {}", name, directory.user_display());
-        if directory
-            .components()
-            .next()
-            .is_some_and(|component| !matches!(component, Component::CurDir | Component::Normal(_)))
-        {
-            return Err(Error::InvalidDataRoot {
-                name: name.to_string(),
-                path: directory.to_path_buf(),
-            });
-        }
+        let directory = uv_fs::normalize_path(Path::new(directory));
+        trace!(
+            "Including data ({}) at: `{}`",
+            name,
+            directory.user_display()
+        );
         let directory = directory.portable_display().to_string();
         let glob = PortableGlobParser::Uv
             .parse(&format!("{}/**", globset::escape(&directory)))
@@ -149,7 +140,7 @@ fn source_dist_matcher(
     }
 
     debug!(
-        "Source distribution includes: {:?}",
+        "Source distribution includes: `{:?}`",
         include_globs
             .iter()
             .map(ToString::to_string)
@@ -184,9 +175,9 @@ fn write_source_dist(
     source_tree: &Path,
     mut writer: impl DirectoryWriter,
     uv_version: &str,
-    show_warnings: bool,
 ) -> Result<SourceDistFilename, Error> {
-    let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
+    let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
+    let pyproject_toml = PyProjectToml::parse(&contents)?;
     for warning in pyproject_toml.check_build_system(uv_version) {
         warn_user_once!("{warning}");
     }
@@ -220,7 +211,7 @@ fn write_source_dist(
     )?;
 
     let (include_matcher, exclude_matcher) =
-        source_dist_matcher(source_tree, &pyproject_toml, settings, show_warnings)?;
+        source_dist_matcher(source_tree, &pyproject_toml, settings)?;
 
     let mut files_visited = 0;
     for entry in WalkDir::new(source_tree)
@@ -261,12 +252,10 @@ fn write_source_dist(
             .expect("walkdir starts with root");
 
         if !include_matcher.match_path(relative) || exclude_matcher.is_match(relative) {
-            trace!("Excluding from sdist: {}", relative.user_display());
+            trace!("Excluding from sdist: `{}`", relative.user_display());
             continue;
         }
 
-        error_on_venv(entry.file_name(), entry.path())?;
-
         let entry_path = Path::new(&top_level)
             .join(relative)
             .portable_display()
@@ -299,10 +288,6 @@ impl TarGzWriter {
 impl DirectoryWriter for TarGzWriter {
     fn write_bytes(&mut self, path: &str, bytes: &[u8]) -> Result<(), Error> {
         let mut header = Header::new_gnu();
-        // Work around bug in Python's std tar module
-        // https://github.com/python/cpython/issues/141707
-        // https://github.com/astral-sh/uv/pull/17043#issuecomment-3636841022
-        header.set_entry_type(EntryType::Regular);
         header.set_size(bytes.len() as u64);
         // Reasonable default to avoid 0o000 permissions, the user's umask will be applied on
         // unpacking.
@@ -316,10 +301,6 @@ impl DirectoryWriter for TarGzWriter {
     fn write_file(&mut self, path: &str, file: &Path) -> Result<(), Error> {
         let metadata = fs_err::metadata(file)?;
         let mut header = Header::new_gnu();
-        // Work around bug in Python's std tar module
-        // https://github.com/python/cpython/issues/141707
-        // https://github.com/astral-sh/uv/pull/17043#issuecomment-3636841022
-        header.set_entry_type(EntryType::Regular);
         // Preserve the executable bit, especially for scripts
         #[cfg(unix)]
         let executable_bit = {
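The four removed lines in each `TarGzWriter` method above are a workaround, present on the main side, for a CPython `tarfile` bug. A minimal sketch of the pattern using the same `tar` crate APIs the diff touches (the `0o644` mode here is an assumption mirroring the "reasonable default" comment, not taken from the diff):

```rust
use tar::{EntryType, Header};

/// Build a GNU tar header for a regular file of the given size.
fn regular_file_header(size: u64) -> Header {
    let mut header = Header::new_gnu();
    // Set the entry type explicitly instead of relying on the default,
    // per the CPython issue referenced in the removed comments.
    header.set_entry_type(EntryType::Regular);
    header.set_size(size);
    // Assumed default mode to avoid 0o000 permissions; the user's umask
    // applies on unpacking.
    header.set_mode(0o644);
    header.set_cksum();
    header
}
```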
@@ -1,11 +1,10 @@
-use base64::{Engine, prelude::BASE64_URL_SAFE_NO_PAD as base64};
 use fs_err::File;
 use globset::{GlobSet, GlobSetBuilder};
 use itertools::Itertools;
 use rustc_hash::FxHashSet;
 use sha2::{Digest, Sha256};
 use std::io::{BufReader, Read, Write};
-use std::path::{Component, Path, PathBuf};
+use std::path::{Path, PathBuf};
 use std::{io, mem};
 use tracing::{debug, trace};
 use walkdir::WalkDir;
@@ -19,8 +18,7 @@ use uv_warnings::warn_user_once;
 
 use crate::metadata::DEFAULT_EXCLUDES;
 use crate::{
-    BuildBackendSettings, DirectoryWriter, Error, FileList, ListWriter, PyProjectToml,
-    error_on_venv, find_roots,
+    BuildBackendSettings, DirectoryWriter, Error, FileList, ListWriter, PyProjectToml, find_roots,
 };
 
 /// Build a wheel from the source tree and place it in the output directory.
@@ -29,9 +27,9 @@ pub fn build_wheel(
     wheel_dir: &Path,
     metadata_directory: Option<&Path>,
     uv_version: &str,
-    show_warnings: bool,
 ) -> Result<WheelFilename, Error> {
-    let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
+    let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
+    let pyproject_toml = PyProjectToml::parse(&contents)?;
     for warning in pyproject_toml.check_build_system(uv_version) {
         warn_user_once!("{warning}");
     }
@@ -58,7 +56,6 @@ pub fn build_wheel(
         &filename,
         uv_version,
         wheel_writer,
-        show_warnings,
     )?;
 
     Ok(filename)
@@ -68,9 +65,9 @@ pub fn build_wheel(
 pub fn list_wheel(
     source_tree: &Path,
     uv_version: &str,
-    show_warnings: bool,
 ) -> Result<(WheelFilename, FileList), Error> {
-    let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
+    let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
+    let pyproject_toml = PyProjectToml::parse(&contents)?;
     for warning in pyproject_toml.check_build_system(uv_version) {
         warn_user_once!("{warning}");
     }
@@ -88,14 +85,7 @@ pub fn list_wheel(
 
     let mut files = FileList::new();
     let writer = ListWriter::new(&mut files);
-    write_wheel(
-        source_tree,
-        &pyproject_toml,
-        &filename,
-        uv_version,
-        writer,
-        show_warnings,
-    )?;
+    write_wheel(source_tree, &pyproject_toml, &filename, uv_version, writer)?;
     Ok((filename, files))
 }
@@ -105,7 +95,6 @@ fn write_wheel(
     filename: &WheelFilename,
     uv_version: &str,
     mut wheel_writer: impl DirectoryWriter,
-    show_warnings: bool,
 ) -> Result<(), Error> {
     let settings = pyproject_toml
         .settings()
@@ -141,7 +130,6 @@ fn write_wheel(
         &settings.module_root,
         settings.module_name.as_ref(),
         settings.namespace,
-        show_warnings,
     )?;
 
     let mut files_visited = 0;
@@ -187,12 +175,10 @@ fn write_wheel(
             .strip_prefix(&src_root)
             .expect("walkdir starts with root");
         if exclude_matcher.is_match(match_path) {
-            trace!("Excluding from module: {}", match_path.user_display());
+            trace!("Excluding from module: `{}`", match_path.user_display());
            continue;
        }
 
-        error_on_venv(entry.file_name(), entry.path())?;
-
         let entry_path = entry_path.portable_display().to_string();
         debug!("Adding to wheel: {entry_path}");
         wheel_writer.write_dir_entry(&entry, &entry_path)?;
@@ -220,20 +206,7 @@ fn write_wheel(
 
     // Add the data files
     for (name, directory) in settings.data.iter() {
-        debug!(
-            "Adding {name} data files from: {}",
-            directory.user_display()
-        );
-        if directory
-            .components()
-            .next()
-            .is_some_and(|component| !matches!(component, Component::CurDir | Component::Normal(_)))
-        {
-            return Err(Error::InvalidDataRoot {
-                name: name.to_string(),
-                path: directory.to_path_buf(),
-            });
-        }
+        debug!("Adding {name} data files from: `{directory}`");
         let data_dir = format!(
             "{}-{}.data/{}/",
             pyproject_toml.name().as_dist_info_name(),
@@ -269,9 +242,9 @@ pub fn build_editable(
     wheel_dir: &Path,
     metadata_directory: Option<&Path>,
     uv_version: &str,
-    show_warnings: bool,
 ) -> Result<WheelFilename, Error> {
-    let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
+    let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
+    let pyproject_toml = PyProjectToml::parse(&contents)?;
     for warning in pyproject_toml.check_build_system(uv_version) {
         warn_user_once!("{warning}");
     }
@@ -305,7 +278,6 @@ pub fn build_editable(
         &settings.module_root,
         settings.module_name.as_ref(),
         settings.namespace,
-        show_warnings,
     )?;
 
     wheel_writer.write_bytes(
@@ -313,7 +285,7 @@ pub fn build_editable(
         src_root.as_os_str().as_encoded_bytes(),
     )?;
 
-    debug!("Adding metadata files to: {}", wheel_path.user_display());
+    debug!("Adding metadata files to: `{}`", wheel_path.user_display());
     let dist_info_dir = write_dist_info(
         &mut wheel_writer,
         &pyproject_toml,
@@ -332,7 +304,8 @@ pub fn metadata(
     metadata_directory: &Path,
     uv_version: &str,
 ) -> Result<String, Error> {
-    let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
+    let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
+    let pyproject_toml = PyProjectToml::parse(&contents)?;
     for warning in pyproject_toml.check_build_system(uv_version) {
         warn_user_once!("{warning}");
     }
@@ -373,7 +346,7 @@ struct RecordEntry {
     ///
     /// While the spec would allow backslashes, we always use portable paths with forward slashes.
     path: String,
-    /// The urlsafe-base64-nopad encoded SHA256 of the files.
+    /// The SHA256 of the files.
     hash: String,
     /// The size of the file in bytes.
     size: usize,
@@ -408,7 +381,7 @@ fn write_hashed(
     }
     Ok(RecordEntry {
         path: path.to_string(),
-        hash: base64.encode(hasher.finalize()),
+        hash: format!("{:x}", hasher.finalize()),
         size,
     })
 }
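The `hash` changes above swap the RECORD digest between urlsafe base64 without padding (main) and lowercase hex (0.8.6). A small self-contained sketch of the two encodings, using the same `sha2` and `base64` APIs imported at the top of this file:

```rust
use base64::{Engine, prelude::BASE64_URL_SAFE_NO_PAD as base64};
use sha2::{Digest, Sha256};

fn main() {
    // Hash some example file contents.
    let digest = Sha256::new().chain_update(b"print('hello')\n").finalize();
    // Lowercase hex, as on the 0.8.6 side of this diff.
    let hex = format!("{digest:x}");
    // Urlsafe base64 without padding, as on the main side; this is the
    // encoding the wheel RECORD spec expects for `sha256=` entries.
    let b64 = base64.encode(digest);
    println!("hex:    {hex}");
    println!("base64: {b64}");
}
```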
@ -538,17 +511,15 @@ fn wheel_subdir_from_globs(
|
||||||
.expect("walkdir starts with root");
|
.expect("walkdir starts with root");
|
||||||
|
|
||||||
if !matcher.match_path(relative) {
|
if !matcher.match_path(relative) {
|
||||||
trace!("Excluding {}: {}", globs_field, relative.user_display());
|
trace!("Excluding {}: `{}`", globs_field, relative.user_display());
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
error_on_venv(entry.file_name(), entry.path())?;
|
|
||||||
|
|
||||||
let license_path = Path::new(target)
|
let license_path = Path::new(target)
|
||||||
.join(relative)
|
.join(relative)
|
||||||
.portable_display()
|
.portable_display()
|
||||||
.to_string();
|
.to_string();
|
||||||
debug!("Adding for {}: {}", globs_field, relative.user_display());
|
debug!("Adding for {}: `{}`", globs_field, relative.user_display());
|
||||||
wheel_writer.write_dir_entry(&entry, &license_path)?;
|
wheel_writer.write_dir_entry(&entry, &license_path)?;
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
|
|
@ -670,7 +641,7 @@ impl DirectoryWriter for ZipDirectoryWriter {
|
||||||
self.writer.start_file(path, options)?;
|
self.writer.start_file(path, options)?;
|
||||||
self.writer.write_all(bytes)?;
|
self.writer.write_all(bytes)?;
|
||||||
|
|
||||||
let hash = base64.encode(Sha256::new().chain_update(bytes).finalize());
|
let hash = format!("{:x}", Sha256::new().chain_update(bytes).finalize());
|
||||||
self.record.push(RecordEntry {
|
self.record.push(RecordEntry {
|
||||||
path: path.to_string(),
|
path: path.to_string(),
|
||||||
hash,
|
hash,
|
||||||
|
|
@ -748,7 +719,7 @@ impl FilesystemWriter {
|
||||||
impl DirectoryWriter for FilesystemWriter {
|
impl DirectoryWriter for FilesystemWriter {
|
||||||
fn write_bytes(&mut self, path: &str, bytes: &[u8]) -> Result<(), Error> {
|
fn write_bytes(&mut self, path: &str, bytes: &[u8]) -> Result<(), Error> {
|
||||||
trace!("Adding {}", path);
|
trace!("Adding {}", path);
|
||||||
let hash = base64.encode(Sha256::new().chain_update(bytes).finalize());
|
let hash = format!("{:x}", Sha256::new().chain_update(bytes).finalize());
|
||||||
self.record.push(RecordEntry {
|
self.record.push(RecordEntry {
|
||||||
path: path.to_string(),
|
path: path.to_string(),
|
||||||
hash,
|
hash,
|
||||||
|
|
@ -824,14 +795,14 @@ mod test {
|
||||||
fn test_record() {
|
fn test_record() {
|
||||||
let record = vec![RecordEntry {
|
let record = vec![RecordEntry {
|
||||||
path: "built_by_uv/__init__.py".to_string(),
|
path: "built_by_uv/__init__.py".to_string(),
|
||||||
hash: "ifhp5To6AGGlLAIz5kQtTXLegKii00BtnqC_05fteGU".to_string(),
|
hash: "89f869e53a3a0061a52c0233e6442d4d72de80a8a2d3406d9ea0bfd397ed7865".to_string(),
|
||||||
size: 37,
|
size: 37,
|
||||||
}];
|
}];
|
||||||
|
|
||||||
let mut writer = Vec::new();
|
let mut writer = Vec::new();
|
||||||
write_record(&mut writer, "built_by_uv-0.1.0", record).unwrap();
|
write_record(&mut writer, "built_by_uv-0.1.0", record).unwrap();
|
||||||
assert_snapshot!(String::from_utf8(writer).unwrap(), @r"
|
assert_snapshot!(String::from_utf8(writer).unwrap(), @r"
|
||||||
built_by_uv/__init__.py,sha256=ifhp5To6AGGlLAIz5kQtTXLegKii00BtnqC_05fteGU,37
|
built_by_uv/__init__.py,sha256=89f869e53a3a0061a52c0233e6442d4d72de80a8a2d3406d9ea0bfd397ed7865,37
|
||||||
built_by_uv-0.1.0/RECORD,,
|
built_by_uv-0.1.0/RECORD,,
|
||||||
");
|
");
|
||||||
}
|
}
|
||||||
|
|
@ -840,7 +811,7 @@ mod test {
|
||||||
#[test]
|
#[test]
|
||||||
fn test_prepare_metadata() {
|
fn test_prepare_metadata() {
|
||||||
let metadata_dir = TempDir::new().unwrap();
|
let metadata_dir = TempDir::new().unwrap();
|
||||||
let built_by_uv = Path::new("../../test/packages/built-by-uv");
|
let built_by_uv = Path::new("../../scripts/packages/built-by-uv");
|
||||||
metadata(built_by_uv, metadata_dir.path(), "1.0.0+test").unwrap();
|
metadata(built_by_uv, metadata_dir.path(), "1.0.0+test").unwrap();
|
||||||
|
|
||||||
let mut files: Vec<_> = WalkDir::new(metadata_dir.path())
|
let mut files: Vec<_> = WalkDir::new(metadata_dir.path())
|
||||||
|
|
@ -890,9 +861,9 @@ mod test {
|
||||||
.path()
|
.path()
|
||||||
.join("built_by_uv-0.1.0.dist-info/RECORD");
|
.join("built_by_uv-0.1.0.dist-info/RECORD");
|
||||||
assert_snapshot!(fs_err::read_to_string(record_file).unwrap(), @r###"
|
assert_snapshot!(fs_err::read_to_string(record_file).unwrap(), @r###"
|
||||||
built_by_uv-0.1.0.dist-info/WHEEL,sha256=PaG_oOj9G2zCRqoLK0SjWBVZbGAMtIXDmm-MEGw9Wo0,83
|
built_by_uv-0.1.0.dist-info/WHEEL,sha256=3da1bfa0e8fd1b6cc246aa0b2b44a35815596c600cb485c39a6f8c106c3d5a8d,83
|
||||||
built_by_uv-0.1.0.dist-info/entry_points.txt,sha256=-IO6yaq6x6HSl-zWH96rZmgYvfyHlH00L5WQoCpz-YI,50
|
built_by_uv-0.1.0.dist-info/entry_points.txt,sha256=f883bac9aabac7a1d297ecd61fdeab666818bdfc87947d342f9590a02a73f982,50
|
||||||
built_by_uv-0.1.0.dist-info/METADATA,sha256=m6EkVvKrGmqx43b_VR45LHD37IZxPYC0NI6Qx9_UXLE,474
|
built_by_uv-0.1.0.dist-info/METADATA,sha256=9ba12456f2ab1a6ab1e376ff551e392c70f7ec86713d80b4348e90c7dfd45cb1,474
|
||||||
built_by_uv-0.1.0.dist-info/RECORD,,
|
built_by_uv-0.1.0.dist-info/RECORD,,
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
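Aside: the hash change above is the substantive difference between the two sides. Per the wheel specification, RECORD entries carry `sha256=` plus an unpadded urlsafe-base64 digest, which is what the base64 side computes; the other side emits a hex digest. A minimal sketch of the two encodings, assuming the `sha2` and `base64` crates (the function names are illustrative, not from the repo):

```rust
use base64::Engine;
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
use sha2::{Digest, Sha256};

/// Unpadded urlsafe-base64 digest, the form the wheel spec's RECORD expects.
fn record_hash_base64(bytes: &[u8]) -> String {
    URL_SAFE_NO_PAD.encode(Sha256::new().chain_update(bytes).finalize())
}

/// Hex digest, the form emitted on the 0.8.6 side of this diff.
fn record_hash_hex(bytes: &[u8]) -> String {
    format!("{:x}", Sha256::new().chain_update(bytes).finalize())
}

fn main() {
    let bytes = b"def greet():\n    print('hello')\n";
    // A RECORD row would read: <path>,sha256=<digest>,<size>
    println!("sha256={}", record_hash_base64(bytes));
    println!("sha256={}", record_hash_hex(bytes));
}
```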
@@ -1,10 +1,11 @@
 [package]
 name = "uv-build-frontend"
-version = "0.0.8"
+version = "0.0.1"
-description = "This is an internal component crate of uv"
+description = "Build wheels from source distributions"
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
+documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }

@@ -16,16 +17,13 @@ doctest = false
 workspace = true
 
 [dependencies]
-uv-auth = { workspace = true }
 uv-cache-key = { workspace = true }
 uv-configuration = { workspace = true }
 uv-distribution = { workspace = true }
 uv-distribution-types = { workspace = true }
 uv-fs = { workspace = true }
-uv-normalize = { workspace = true }
 uv-pep440 = { workspace = true }
 uv-pep508 = { workspace = true }
-uv-preview = { workspace = true }
 uv-pypi-types = { workspace = true }
 uv-python = { workspace = true }
 uv-static = { workspace = true }

@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-build-frontend
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-build-frontend).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.

@@ -13,8 +13,8 @@ use tracing::error;
 use uv_configuration::BuildOutput;
 use uv_distribution_types::IsBuildBackendError;
 use uv_fs::Simplified;
-use uv_normalize::PackageName;
 use uv_pep440::Version;
+use uv_pep508::PackageName;
 use uv_types::AnyErrorBuild;
 
 /// e.g. `pygraphviz/graphviz_wrap.c:3020:10: fatal error: graphviz/cgraph.h: No such file or directory`

@@ -46,10 +46,9 @@ static LD_NOT_FOUND_RE: LazyLock<Regex> = LazyLock::new(|| {
 static WHEEL_NOT_FOUND_RE: LazyLock<Regex> =
     LazyLock::new(|| Regex::new(r"error: invalid command 'bdist_wheel'").unwrap());
 
-/// e.g. `ModuleNotFoundError`
+/// e.g. `ModuleNotFoundError: No module named 'torch'`
-static MODULE_NOT_FOUND: LazyLock<Regex> = LazyLock::new(|| {
+static TORCH_NOT_FOUND_RE: LazyLock<Regex> =
-    Regex::new("ModuleNotFoundError: No module named ['\"]([^'\"]+)['\"]").unwrap()
+    LazyLock::new(|| Regex::new(r"ModuleNotFoundError: No module named 'torch'").unwrap());
-});
 
 /// e.g. `ModuleNotFoundError: No module named 'distutils'`
 static DISTUTILS_NOT_FOUND_RE: LazyLock<Regex> =

@@ -91,10 +90,6 @@ pub enum Error {
     NoSourceDistBuilds,
     #[error("Cyclic build dependency detected for `{0}`")]
     CyclicBuildDependency(PackageName),
-    #[error(
-        "Extra build requirement `{0}` was declared with `match-runtime = true`, but `{1}` does not declare static metadata, making runtime-matching impossible"
-    )]
-    UnmatchedRuntime(PackageName, PackageName),
 }
 
 impl IsBuildBackendError for Error {

@@ -110,8 +105,7 @@ impl IsBuildBackendError for Error {
             | Self::Virtualenv(_)
             | Self::NoSourceDistBuild(_)
             | Self::NoSourceDistBuilds
-            | Self::CyclicBuildDependency(_)
+            | Self::CyclicBuildDependency(_) => false,
-            | Self::UnmatchedRuntime(_, _) => false,
             Self::CommandFailed(_, _)
             | Self::BuildBackend(_)
             | Self::MissingHeader(_)

@@ -136,59 +130,6 @@ pub struct MissingHeaderCause {
     version_id: Option<String>,
 }
 
-/// Extract the package name from a version specifier string.
-/// Uses PEP 508 naming rules but more lenient for hinting purposes.
-fn extract_package_name(version_id: &str) -> &str {
-    // https://peps.python.org/pep-0508/#names
-    // ^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$ with re.IGNORECASE
-    // Since we're only using this for a hint, we're more lenient than what we would be doing if this was used for parsing
-    let end = version_id
-        .char_indices()
-        .take_while(|(_, char)| matches!(char, 'A'..='Z' | 'a'..='z' | '0'..='9' | '.' | '-' | '_'))
-        .last()
-        .map_or(0, |(i, c)| i + c.len_utf8());
-
-    if end == 0 {
-        version_id
-    } else {
-        &version_id[..end]
-    }
-}
-
-/// Write a hint about missing build dependencies.
-fn hint_build_dependency(
-    f: &mut std::fmt::Formatter<'_>,
-    display_name: &str,
-    package_name: &str,
-    package: &str,
-) -> std::fmt::Result {
-    let table_key = if package_name.contains('.') {
-        format!("\"{package_name}\"")
-    } else {
-        package_name.to_string()
-    };
-    write!(
-        f,
-        "This error likely indicates that `{}` depends on `{}`, but doesn't declare it as a build dependency. \
-        If `{}` is a first-party package, consider adding `{}` to its `{}`. \
-        Otherwise, either add it to your `pyproject.toml` under:\n\
-        \n\
-        [tool.uv.extra-build-dependencies]\n\
-        {} = [\"{}\"]\n\
-        \n\
-        or `{}` into the environment and re-run with `{}`.",
-        display_name.cyan(),
-        package.cyan(),
-        package_name.cyan(),
-        package.cyan(),
-        "build-system.requires".green(),
-        table_key.cyan(),
-        package.cyan(),
-        format!("uv pip install {package}").green(),
-        "--no-build-isolation".green(),
-    )
-}
 
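The removed `extract_package_name` above scans until the first character outside the lenient name set `[A-Za-z0-9._-]` and falls back to the whole input when nothing at all matches. A hypothetical test, not from the repo, sketching that behavior:

```rust
#[test]
fn extract_package_name_sketch() {
    // Hyphens and dots are allowed, so a `name-version` id passes through whole.
    assert_eq!(extract_package_name("pygraphviz-1.11"), "pygraphviz-1.11");
    // A direct reference is cut at the first space.
    assert_eq!(extract_package_name("foo @ file:///tmp/foo"), "foo");
    // No leading name characters at all: fall back to the whole input.
    assert_eq!(extract_package_name("@weird"), "@weird");
}
```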
 impl Display for MissingHeaderCause {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
         match &self.missing_library {

@@ -249,15 +190,29 @@ impl Display for MissingHeaderCause {
                 if let (Some(package_name), Some(package_version)) =
                     (&self.package_name, &self.package_version)
                 {
-                    hint_build_dependency(
+                    write!(
                         f,
-                        &format!("{package_name}@{package_version}"),
+                        "This error likely indicates that `{}` depends on `{}`, but doesn't declare it as a build dependency. If `{}` is a first-party package, consider adding `{}` to its `{}`. Otherwise, `{}` into the environment and re-run with `{}`.",
-                        package_name.as_str(),
+                        format!("{package_name}@{package_version}").cyan(),
-                        package,
+                        package.cyan(),
+                        package_name.cyan(),
+                        package.cyan(),
+                        "build-system.requires".green(),
+                        format!("uv pip install {package}").green(),
+                        "--no-build-isolation".green(),
                     )
                 } else if let Some(version_id) = &self.version_id {
-                    let package_name = extract_package_name(version_id);
+                    write!(
-                    hint_build_dependency(f, package_name, package_name, package)
+                        f,
+                        "This error likely indicates that `{}` depends on `{}`, but doesn't declare it as a build dependency. If `{}` is a first-party package, consider adding `{}` to its `{}`. Otherwise, `{}` into the environment and re-run with `{}`.",
+                        version_id.cyan(),
+                        package.cyan(),
+                        version_id.cyan(),
+                        package.cyan(),
+                        "build-system.requires".green(),
+                        format!("uv pip install {package}").green(),
+                        "--no-build-isolation".green(),
+                    )
                 } else {
                     write!(
                         f,

@@ -392,22 +347,13 @@ impl Error {
             Some(MissingLibrary::Linker(library.to_string()))
         } else if WHEEL_NOT_FOUND_RE.is_match(line.trim()) {
             Some(MissingLibrary::BuildDependency("wheel".to_string()))
+        } else if TORCH_NOT_FOUND_RE.is_match(line.trim()) {
+            Some(MissingLibrary::BuildDependency("torch".to_string()))
         } else if DISTUTILS_NOT_FOUND_RE.is_match(line.trim()) {
             Some(MissingLibrary::DeprecatedModule(
                 "distutils".to_string(),
                 Version::new([3, 12]),
             ))
-        } else if let Some(caps) = MODULE_NOT_FOUND.captures(line.trim()) {
-            if let Some(module_match) = caps.get(1) {
-                let module_name = module_match.as_str();
-                let package_name = match crate::pipreqs::MODULE_MAPPING.lookup(module_name) {
-                    Some(package) => package.to_string(),
-                    None => module_name.to_string(),
-                };
-                Some(MissingLibrary::BuildDependency(package_name))
-            } else {
-                None
-            }
         } else {
             None
         }
@@ -468,8 +414,8 @@ mod test {
     use std::process::ExitStatus;
     use std::str::FromStr;
     use uv_configuration::BuildOutput;
-    use uv_normalize::PackageName;
     use uv_pep440::Version;
+    use uv_pep508::PackageName;
 
     #[test]
     fn missing_header() {

@@ -619,7 +565,7 @@ mod test {
             .to_string()
             .replace("exit status: ", "exit code: ");
         let formatted = anstream::adapter::strip_str(&formatted);
-        insta::assert_snapshot!(formatted, @r#"
+        insta::assert_snapshot!(formatted, @r###"
         Failed building wheel through setup.py (exit code: 0)
 
         [stderr]

@@ -630,13 +576,8 @@ mod test {
 
         error: invalid command 'bdist_wheel'
 
-        hint: This error likely indicates that `pygraphviz-1.11` depends on `wheel`, but doesn't declare it as a build dependency. If `pygraphviz-1.11` is a first-party package, consider adding `wheel` to its `build-system.requires`. Otherwise, either add it to your `pyproject.toml` under:
+        hint: This error likely indicates that `pygraphviz-1.11` depends on `wheel`, but doesn't declare it as a build dependency. If `pygraphviz-1.11` is a first-party package, consider adding `wheel` to its `build-system.requires`. Otherwise, `uv pip install wheel` into the environment and re-run with `--no-build-isolation`.
+        "###);
-
-        [tool.uv.extra-build-dependencies]
-        "pygraphviz-1.11" = ["wheel"]
-
-        or `uv pip install wheel` into the environment and re-run with `--no-build-isolation`.
-        "#);
     }
 
     #[test]
 
@@ -3,7 +3,6 @@
 //! <https://packaging.python.org/en/latest/specifications/source-distribution-format/>
 
 mod error;
-mod pipreqs;
 
 use std::borrow::Cow;
 use std::ffi::OsString;

@@ -28,19 +27,16 @@ use tokio::io::AsyncBufReadExt;
 use tokio::process::Command;
 use tokio::sync::{Mutex, Semaphore};
 use tracing::{Instrument, debug, info_span, instrument, warn};
-use uv_auth::CredentialsCache;
 use uv_cache_key::cache_digest;
-use uv_configuration::{BuildKind, BuildOutput, SourceStrategy};
+use uv_configuration::Preview;
+use uv_configuration::{BuildKind, BuildOutput, ConfigSettings, SourceStrategy};
 use uv_distribution::BuildRequires;
-use uv_distribution_types::{
+use uv_distribution_types::{ExtraBuildRequires, IndexLocations, Requirement, Resolution};
-    ConfigSettings, ExtraBuildRequirement, ExtraBuildRequires, IndexLocations, Requirement,
+use uv_fs::LockedFile;
-    Resolution,
-};
-use uv_fs::{LockedFile, LockedFileMode};
 use uv_fs::{PythonExt, Simplified};
-use uv_normalize::PackageName;
 use uv_pep440::Version;
-use uv_preview::Preview;
+use uv_pep508::PackageName;
 use uv_pypi_types::VerbatimParsedUrl;
 use uv_python::{Interpreter, PythonEnvironment};
 use uv_static::EnvVars;

@@ -292,7 +288,6 @@ impl SourceBuild {
         mut environment_variables: FxHashMap<OsString, OsString>,
         level: BuildOutput,
         concurrent_builds: usize,
-        credentials_cache: &CredentialsCache,
         preview: Preview,
     ) -> Result<Self, Error> {
         let temp_dir = build_context.cache().venv_dir()?;

@@ -303,6 +298,7 @@ impl SourceBuild {
             source.to_path_buf()
         };
 
+        let default_backend: Pep517Backend = DEFAULT_BACKEND.clone();
         // Check if we have a PEP 517 build backend.
         let (pep517_backend, project) = Self::extract_pep517_backend(
             &source_tree,

@@ -311,7 +307,7 @@ impl SourceBuild {
             locations,
             source_strategy,
             workspace_cache,
-            credentials_cache,
+            &default_backend,
         )
         .await
         .map_err(|err| *err)?;

@@ -327,28 +323,13 @@ impl SourceBuild {
             .or(fallback_package_version)
             .cloned();
 
-        let extra_build_dependencies = package_name
+        let extra_build_dependencies: Vec<Requirement> = package_name
             .as_ref()
             .and_then(|name| extra_build_requires.get(name).cloned())
             .unwrap_or_default()
             .into_iter()
-            .map(|requirement| {
+            .map(Requirement::from)
-                match requirement {
+            .collect();
-                    ExtraBuildRequirement {
-                        requirement,
-                        match_runtime: true,
-                    } if requirement.source.is_empty() => {
-                        Err(Error::UnmatchedRuntime(
-                            requirement.name.clone(),
-                            // SAFETY: if `package_name` is `None`, the iterator is empty.
-                            package_name.clone().unwrap(),
-                        ))
-                    }
-                    requirement => Ok(requirement),
-                }
-            })
-            .map_ok(Requirement::from)
-            .collect::<Result<Vec<_>, _>>()?;
 
         // Create a virtual environment, or install into the shared environment if requested.
         let venv = if let Some(venv) = build_isolation.shared_environment(package_name.as_ref()) {

@@ -359,9 +340,7 @@ impl SourceBuild {
                 interpreter.clone(),
                 uv_virtualenv::Prompt::None,
                 false,
-                uv_virtualenv::OnExisting::Remove(
+                uv_virtualenv::OnExisting::Remove,
-                    uv_virtualenv::RemovalReason::TemporaryEnvironment,
-                ),
                 false,
                 false,
                 false,

@@ -383,6 +362,7 @@ impl SourceBuild {
             let resolved_requirements = Self::get_resolved_requirements(
                 build_context,
                 source_build_context,
+                &default_backend,
                 &pep517_backend,
                 extra_build_dependencies,
                 build_stack,

@@ -454,7 +434,6 @@ impl SourceBuild {
                 &environment_variables,
                 &modified_path,
                 &temp_dir,
-                credentials_cache,
             )
             .await?;
         }

@@ -493,16 +472,12 @@ impl SourceBuild {
                 "uv-setuptools-{}.lock",
                 cache_digest(&canonical_source_path)
             ));
-            source_tree_lock = LockedFile::acquire(
+            source_tree_lock = LockedFile::acquire(lock_path, self.source_tree.to_string_lossy())
-                lock_path,
+                .await
-                LockedFileMode::Exclusive,
+                .inspect_err(|err| {
-                self.source_tree.to_string_lossy(),
+                    warn!("Failed to acquire build lock: {err}");
-            )
+                })
-            .await
+                .ok();
-            .inspect_err(|err| {
-                warn!("Failed to acquire build lock: {err}");
-            })
-            .ok();
         }
         Ok(source_tree_lock)
     }

@@ -510,12 +485,13 @@ impl SourceBuild {
     async fn get_resolved_requirements(
         build_context: &impl BuildContext,
         source_build_context: SourceBuildContext,
+        default_backend: &Pep517Backend,
         pep517_backend: &Pep517Backend,
         extra_build_dependencies: Vec<Requirement>,
         build_stack: &BuildStack,
     ) -> Result<Resolution, Error> {
         Ok(
-            if pep517_backend.requirements == DEFAULT_BACKEND.requirements
+            if pep517_backend.requirements == default_backend.requirements
                 && extra_build_dependencies.is_empty()
             {
                 let mut resolution = source_build_context.default_resolution.lock().await;

@@ -523,7 +499,7 @@ impl SourceBuild {
                 resolved_requirements.clone()
             } else {
                 let resolved_requirements = build_context
-                    .resolve(&DEFAULT_BACKEND.requirements, build_stack)
+                    .resolve(&default_backend.requirements, build_stack)
                     .await
                     .map_err(|err| {
                         Error::RequirementsResolve("`setup.py` build", err.into())

@@ -563,7 +539,7 @@ impl SourceBuild {
         locations: &IndexLocations,
         source_strategy: SourceStrategy,
         workspace_cache: &WorkspaceCache,
-        credentials_cache: &CredentialsCache,
+        default_backend: &Pep517Backend,
     ) -> Result<(Pep517Backend, Option<Project>), Box<Error>> {
         match fs::read_to_string(source_tree.join("pyproject.toml")) {
             Ok(toml) => {

@@ -592,7 +568,6 @@ impl SourceBuild {
                     locations,
                     source_strategy,
                     workspace_cache,
-                    credentials_cache,
                 )
                 .await
                 .map_err(Error::Lowering)?;

@@ -662,7 +637,7 @@ impl SourceBuild {
                 }
             }
 
-            DEFAULT_BACKEND.clone()
+            default_backend.clone()
         };
         Ok((backend, pyproject_toml.project))
     }

@@ -678,7 +653,7 @@ impl SourceBuild {
             // the default backend, to match `build`. `pip` uses `setup.py` directly in this
             // case, but plans to make PEP 517 builds the default in the future.
             // See: https://github.com/pypa/pip/issues/9175.
-            Ok((DEFAULT_BACKEND.clone(), None))
+            Ok((default_backend.clone(), None))
         }
         Err(err) => Err(Box::new(err.into())),
     }

@@ -965,7 +940,6 @@ async fn create_pep517_build_environment(
     environment_variables: &FxHashMap<OsString, OsString>,
     modified_path: &OsString,
     temp_dir: &TempDir,
-    credentials_cache: &CredentialsCache,
 ) -> Result<(), Error> {
     // Write the hook output to a file so that we can read it back reliably.
     let outfile = temp_dir

@@ -1060,7 +1034,6 @@ async fn create_pep517_build_environment(
         locations,
         source_strategy,
         workspace_cache,
-        credentials_cache,
     )
     .await
     .map_err(Error::Lowering)?;

@@ -1167,16 +1140,8 @@ impl PythonRunner {
             .envs(environment_variables)
             .env(EnvVars::PATH, modified_path)
             .env(EnvVars::VIRTUAL_ENV, venv.root())
-            // NOTE: it would be nice to get colored output from build backends,
+            .env(EnvVars::CLICOLOR_FORCE, "1")
-            // but setting CLICOLOR_FORCE=1 changes the output of underlying
-            // tools, which might mess with wrappers trying to parse their
-            // output.
             .env(EnvVars::PYTHONIOENCODING, "utf-8:backslashreplace")
-            // Remove potentially-sensitive environment variables.
-            .env_remove(EnvVars::PYX_API_KEY)
-            .env_remove(EnvVars::UV_API_KEY)
-            .env_remove(EnvVars::PYX_AUTH_TOKEN)
-            .env_remove(EnvVars::UV_AUTH_TOKEN)
             .stdout(std::process::Stdio::piped())
             .stderr(std::process::Stdio::piped())
             .spawn()
 
@@ -1,32 +0,0 @@
-use std::str::FromStr;
-use std::sync::LazyLock;
-
-use rustc_hash::FxHashMap;
-use uv_normalize::PackageName;
-
-/// A mapping from module name to PyPI package name.
-pub(crate) struct ModuleMap<'a>(FxHashMap<&'a str, PackageName>);
-
-impl<'a> ModuleMap<'a> {
-    /// Generate a [`ModuleMap`] from a string representation, encoded in `${module}:{package}` format.
-    fn from_str(source: &'a str) -> Self {
-        let mut mapping = FxHashMap::default();
-        for line in source.lines() {
-            if let Some((module, package)) = line.split_once(':') {
-                let module = module.trim();
-                let package = PackageName::from_str(package.trim()).unwrap();
-                mapping.insert(module, package);
-            }
-        }
-        Self(mapping)
-    }
-
-    /// Look up a PyPI package name for a given module name.
-    pub(crate) fn lookup(&self, module: &str) -> Option<&PackageName> {
-        self.0.get(module)
-    }
-}
-
-/// A mapping from module name to PyPI package name.
-pub(crate) static MODULE_MAPPING: LazyLock<ModuleMap> =
-    LazyLock::new(|| ModuleMap::from_str(include_str!("pipreqs/mapping")));
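The file removed above backs the `MODULE_NOT_FOUND` hint in `error.rs`: a module that failed to import is looked up in a module-to-package table so the hint can name the right distribution. A minimal sketch with a toy inline table in place of the real `pipreqs/mapping` data file (whose diff is suppressed further below); the `yaml`-to-`PyYAML` pair is an illustrative assumption:

```rust
use std::str::FromStr;
use std::sync::LazyLock;

use rustc_hash::FxHashMap;
use uv_normalize::PackageName;

// Toy stand-in for `include_str!("pipreqs/mapping")`, using the same
// `${module}:{package}` line format the removed `ModuleMap::from_str` parses.
static TOY_MAPPING: LazyLock<FxHashMap<&'static str, PackageName>> = LazyLock::new(|| {
    let mut mapping = FxHashMap::default();
    for line in "yaml:PyYAML\ncv2:opencv-python".lines() {
        if let Some((module, package)) = line.split_once(':') {
            mapping.insert(module, PackageName::from_str(package).unwrap());
        }
    }
    mapping
});

/// Map a failed import back to the distribution that likely provides it,
/// falling back to the module name itself when the table has no entry.
fn hint_package(module: &str) -> String {
    TOY_MAPPING
        .get(module)
        .map(ToString::to_string)
        .unwrap_or_else(|| module.to_string())
}
```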
@@ -1,201 +0,0 @@
-[Apache License, Version 2.0: 201 lines of standard license text deleted; boilerplate elided here.]

File diff suppressed because it is too large
@@ -1,22 +1,19 @@
 [package]
 name = "uv-build"
-version = "0.9.18"
+version = "0.8.6"
-description = "A Python build backend"
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
+documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }
 
 [dependencies]
 uv-build-backend = { workspace = true }
-uv-logging = { workspace = true }
 uv-version = { workspace = true }
 
-anstream = { workspace = true }
 anyhow = { workspace = true }
-tracing-subscriber = { workspace = true, features = ["env-filter"] }
 
 [lints]
 workspace = true

@@ -1,6 +1,6 @@
 [project]
 name = "uv-build"
-version = "0.9.18"
+version = "0.8.6"
 description = "The uv build backend"
 authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
 requires-python = ">=3.8"

@@ -7,7 +7,7 @@ def main():
         "Use `uv build` or another build frontend instead.",
         file=sys.stderr,
     )
-    if "--help" in sys.argv or "-h" in sys.argv:
+    if "--help" in sys.argv:
         sys.exit(0)
     else:
         sys.exit(1)

@@ -1,32 +1,10 @@
+use anyhow::{Context, Result, bail};
 use std::env;
 use std::io::Write;
 use std::path::PathBuf;
 
-use anyhow::{Context, Result, bail};
-use tracing_subscriber::filter::LevelFilter;
-use tracing_subscriber::layer::SubscriberExt;
-use tracing_subscriber::util::SubscriberInitExt;
-use tracing_subscriber::{EnvFilter, Layer};
-
-use uv_logging::UvFormat;
-
 /// Entrypoint for the `uv-build` Python package.
 fn main() -> Result<()> {
-    // Support configuring the log level with `RUST_LOG` (shows only the error level by default) and
-    // color.
-    //
-    // This configuration is a simplified version of the uv logging configuration. When using
-    // uv_build through uv proper, the uv logging configuration applies.
-    let filter = EnvFilter::builder()
-        .with_default_directive(LevelFilter::OFF.into())
-        .from_env()
-        .context("Invalid RUST_LOG directives")?;
-    let stderr_layer = tracing_subscriber::fmt::layer()
-        .event_format(UvFormat::default())
-        .with_writer(std::sync::Mutex::new(anstream::stderr()))
-        .with_filter(filter);
-    tracing_subscriber::registry().with(stderr_layer).init();
-
     // Handrolled to avoid the large clap dependency
     let mut args = env::args_os();
     // Skip the name of the binary

@@ -44,7 +22,6 @@ fn main() -> Result<()> {
         &env::current_dir()?,
         &sdist_directory,
         uv_version::version(),
-        false,
     )?;
     // Tell the build frontend about the name of the artifact we built
     writeln!(&mut std::io::stdout(), "{filename}").context("stdout is closed")?;

@@ -57,7 +34,6 @@ fn main() -> Result<()> {
         &wheel_directory,
         metadata_directory.as_deref(),
         uv_version::version(),
-        false,
     )?;
     // Tell the build frontend about the name of the artifact we built
     writeln!(&mut std::io::stdout(), "{filename}").context("stdout is closed")?;

@@ -70,7 +46,6 @@ fn main() -> Result<()> {
         &wheel_directory,
         metadata_directory.as_deref(),
         uv_version::version(),
-        false,
     )?;
     // Tell the build frontend about the name of the artifact we built
     writeln!(&mut std::io::stdout(), "{filename}").context("stdout is closed")?;

@@ -1,10 +1,10 @@
 [package]
 name = "uv-cache-info"
-version = "0.0.8"
+version = "0.0.1"
-description = "This is an internal component crate of uv"
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
+documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }

@@ -16,8 +16,6 @@ doctest = false
 workspace = true
 
 [dependencies]
-uv-fs = { workspace = true }
-
 fs-err = { workspace = true }
 globwalk = { workspace = true }
 schemars = { workspace = true, optional = true }

@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-cache-info
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-cache-info).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.

@@ -1,12 +1,11 @@
 use std::borrow::Cow;
+use std::cmp::max;
 use std::collections::BTreeMap;
 use std::path::{Path, PathBuf};
 
 use serde::Deserialize;
 use tracing::{debug, warn};
 
-use uv_fs::Simplified;
-
 use crate::git_info::{Commit, Tags};
 use crate::glob::cluster_globs;
 use crate::timestamp::Timestamp;

@@ -64,7 +63,7 @@ impl CacheInfo {
     pub fn from_directory(directory: &Path) -> Result<Self, CacheInfoError> {
         let mut commit = None;
         let mut tags = None;
-        let mut last_changed: Option<(PathBuf, Timestamp)> = None;
+        let mut timestamp = None;
         let mut directories = BTreeMap::new();
         let mut env = BTreeMap::new();
 

@@ -129,12 +128,7 @@ impl CacheInfo {
                         );
                         continue;
                     }
-                    let timestamp = Timestamp::from_metadata(&metadata);
+                    timestamp = max(timestamp, Some(Timestamp::from_metadata(&metadata)));
-                    if last_changed.as_ref().is_none_or(|(_, prev_timestamp)| {
-                        *prev_timestamp < Timestamp::from_metadata(&metadata)
-                    }) {
-                        last_changed = Some((path, timestamp));
-                    }
                 }
                 CacheKey::Directory { dir } => {
                     // Treat the path as a directory.

@@ -264,25 +258,14 @@ impl CacheInfo {
                             }
                             continue;
                         }
-                        let timestamp = Timestamp::from_metadata(&metadata);
+                        timestamp = max(timestamp, Some(Timestamp::from_metadata(&metadata)));
-                        if last_changed.as_ref().is_none_or(|(_, prev_timestamp)| {
-                            *prev_timestamp < Timestamp::from_metadata(&metadata)
-                        }) {
-                            last_changed = Some((entry.into_path(), timestamp));
-                        }
                     }
                 }
             }
 
-        let timestamp = if let Some((path, timestamp)) = last_changed {
+        debug!(
-            debug!(
+            "Computed cache info: {timestamp:?}, {commit:?}, {tags:?}, {env:?}, {directories:?}"
-                "Computed cache info: {timestamp:?}, {commit:?}, {tags:?}, {env:?}, {directories:?}. Most recently modified: {}",
+        );
-                path.user_display()
-            );
-            Some(timestamp)
-        } else {
-            None
-        };
 
         Ok(Self {
             timestamp,
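The pair of hunks above replaces a bare running maximum over file timestamps (the `+` side) with a variant that also remembers which path produced it (the `-` side), so the debug line can name the most recently modified file. A standalone sketch of that pattern, assuming plain `std::time::SystemTime` in place of uv's `Timestamp`:

```rust
use std::path::PathBuf;
use std::time::SystemTime;

/// Return the newest entry along with the path that produced it, mirroring the
/// `last_changed: Option<(PathBuf, Timestamp)>` bookkeeping on the `-` side.
fn newest(entries: &[(PathBuf, SystemTime)]) -> Option<(&PathBuf, SystemTime)> {
    let mut last_changed: Option<(&PathBuf, SystemTime)> = None;
    for (path, timestamp) in entries {
        // Keep the pair whenever this entry is strictly newer than the best so far.
        if last_changed
            .as_ref()
            .is_none_or(|(_, prev)| *prev < *timestamp)
        {
            last_changed = Some((path, *timestamp));
        }
    }
    last_changed
}
```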
@@ -1,10 +1,11 @@
 [package]
 name = "uv-cache-key"
-version = "0.0.8"
+version = "0.0.1"
-description = "This is an internal component crate of uv"
+description = "Generic functionality for caching paths, URLs, and other resources across platforms."
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
+documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }

@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-cache-key
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-cache-key).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.

@@ -4,7 +4,7 @@ use std::hash::{Hash, Hasher};
 use std::ops::Deref;
 
 use url::Url;
-use uv_redacted::{DisplaySafeUrl, DisplaySafeUrlError};
+use uv_redacted::DisplaySafeUrl;
 
 use crate::cache_key::{CacheKey, CacheKeyHasher};
 

@@ -98,7 +98,7 @@ impl CanonicalUrl {
         Self(url)
     }
 
-    pub fn parse(url: &str) -> Result<Self, DisplaySafeUrlError> {
+    pub fn parse(url: &str) -> Result<Self, url::ParseError> {
         Ok(Self::new(&DisplaySafeUrl::parse(url)?))
     }
 }

@@ -139,18 +139,8 @@ impl std::fmt::Display for CanonicalUrl {
 /// `https://github.com/pypa/package.git#subdirectory=pkg_b` would map to different
 /// [`CanonicalUrl`] values, but the same [`RepositoryUrl`], since they map to the same
 /// resource.
-///
-/// The additional information it holds should only be used to discriminate between
-/// sources that hold the exact same commit in their canonical representation,
-/// but may differ in the contents such as when Git LFS is enabled.
-///
-/// A different cache key will be computed when Git LFS is enabled.
-/// When Git LFS is `false` or `None`, the cache key remains unchanged.
 #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
-pub struct RepositoryUrl {
+pub struct RepositoryUrl(DisplaySafeUrl);
-    repo_url: DisplaySafeUrl,
-    with_lfs: Option<bool>,
-}
 
 impl RepositoryUrl {
     pub fn new(url: &DisplaySafeUrl) -> Self {

@@ -171,31 +161,19 @@ impl RepositoryUrl {
         url.set_fragment(None);
         url.set_query(None);
 
-        Self {
+        Self(url)
-            repo_url: url,
-            with_lfs: None,
-        }
     }
 
-    pub fn parse(url: &str) -> Result<Self, DisplaySafeUrlError> {
+    pub fn parse(url: &str) -> Result<Self, url::ParseError> {
         Ok(Self::new(&DisplaySafeUrl::parse(url)?))
     }
 
-    #[must_use]
-    pub fn with_lfs(mut self, lfs: Option<bool>) -> Self {
-        self.with_lfs = lfs;
-        self
-    }
 }
 
 impl CacheKey for RepositoryUrl {
     fn cache_key(&self, state: &mut CacheKeyHasher) {
         // `as_str` gives the serialisation of a url (which has a spec) and so insulates against
         // possible changes in how the URL crate does hashing.
-        self.repo_url.as_str().cache_key(state);
+        self.0.as_str().cache_key(state);
-        if let Some(true) = self.with_lfs {
-            1u8.cache_key(state);
-        }
     }
 }
|
|
||||||
|
|
@ -203,10 +181,7 @@ impl Hash for RepositoryUrl {
|
||||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||||
// `as_str` gives the serialisation of a url (which has a spec) and so insulates against
|
// `as_str` gives the serialisation of a url (which has a spec) and so insulates against
|
||||||
// possible changes in how the URL crate does hashing.
|
// possible changes in how the URL crate does hashing.
|
||||||
self.repo_url.as_str().hash(state);
|
self.0.as_str().hash(state);
|
||||||
if let Some(true) = self.with_lfs {
|
|
||||||
1u8.hash(state);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -214,13 +189,13 @@ impl Deref for RepositoryUrl {
|
||||||
type Target = Url;
|
type Target = Url;
|
||||||
|
|
||||||
fn deref(&self) -> &Self::Target {
|
fn deref(&self) -> &Self::Target {
|
||||||
&self.repo_url
|
&self.0
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl std::fmt::Display for RepositoryUrl {
|
impl std::fmt::Display for RepositoryUrl {
|
||||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||||
std::fmt::Display::fmt(&self.repo_url, f)
|
std::fmt::Display::fmt(&self.0, f)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -229,7 +204,7 @@ mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn user_credential_does_not_affect_cache_key() -> Result<(), DisplaySafeUrlError> {
|
fn user_credential_does_not_affect_cache_key() -> Result<(), url::ParseError> {
|
||||||
let mut hasher = CacheKeyHasher::new();
|
let mut hasher = CacheKeyHasher::new();
|
||||||
CanonicalUrl::parse("https://example.com/pypa/sample-namespace-packages.git@2.0.0")?
|
CanonicalUrl::parse("https://example.com/pypa/sample-namespace-packages.git@2.0.0")?
|
||||||
.cache_key(&mut hasher);
|
.cache_key(&mut hasher);
|
||||||
|
|
@ -279,7 +254,7 @@ mod tests {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn canonical_url() -> Result<(), DisplaySafeUrlError> {
|
fn canonical_url() -> Result<(), url::ParseError> {
|
||||||
// Two URLs should be considered equal regardless of the `.git` suffix.
|
// Two URLs should be considered equal regardless of the `.git` suffix.
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
|
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
|
||||||
|
|
@ -308,14 +283,6 @@ mod tests {
|
||||||
)?,
|
)?,
|
||||||
);
|
);
|
||||||
|
|
||||||
// Two URLs should _not_ be considered equal if they differ in Git LFS enablement.
|
|
||||||
assert_ne!(
|
|
||||||
CanonicalUrl::parse(
|
|
||||||
"git+https://github.com/pypa/sample-namespace-packages.git#lfs=true"
|
|
||||||
)?,
|
|
||||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
|
|
||||||
);
|
|
||||||
|
|
||||||
// Two URLs should _not_ be considered equal if they request different commit tags.
|
// Two URLs should _not_ be considered equal if they request different commit tags.
|
||||||
assert_ne!(
|
assert_ne!(
|
||||||
CanonicalUrl::parse(
|
CanonicalUrl::parse(
|
||||||
|
|
@ -368,7 +335,7 @@ mod tests {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn repository_url() -> Result<(), DisplaySafeUrlError> {
|
fn repository_url() -> Result<(), url::ParseError> {
|
||||||
// Two URLs should be considered equal regardless of the `.git` suffix.
|
// Two URLs should be considered equal regardless of the `.git` suffix.
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
|
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
|
||||||
|
|
@ -411,76 +378,6 @@ mod tests {
|
||||||
)?,
|
)?,
|
||||||
);
|
);
|
||||||
|
|
||||||
// Two URLs should be considered equal if they map to the same repository, even if they
|
|
||||||
// differ in Git LFS enablement.
|
|
||||||
assert_eq!(
|
|
||||||
RepositoryUrl::parse(
|
|
||||||
"git+https://github.com/pypa/sample-namespace-packages.git#lfs=true"
|
|
||||||
)?,
|
|
||||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn repository_url_with_lfs() -> Result<(), DisplaySafeUrlError> {
|
|
||||||
let mut hasher = CacheKeyHasher::new();
|
|
||||||
RepositoryUrl::parse("https://example.com/pypa/sample-namespace-packages.git@2.0.0")?
|
|
||||||
.cache_key(&mut hasher);
|
|
||||||
let repo_url_basic = hasher.finish();
|
|
||||||
|
|
||||||
let mut hasher = CacheKeyHasher::new();
|
|
||||||
RepositoryUrl::parse(
|
|
||||||
"https://user:foo@example.com/pypa/sample-namespace-packages.git@2.0.0#foo=bar",
|
|
||||||
)?
|
|
||||||
.cache_key(&mut hasher);
|
|
||||||
let repo_url_with_fragments = hasher.finish();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
repo_url_basic, repo_url_with_fragments,
|
|
||||||
"repository urls should have the exact cache keys as fragments are removed",
|
|
||||||
);
|
|
||||||
|
|
||||||
let mut hasher = CacheKeyHasher::new();
|
|
||||||
RepositoryUrl::parse(
|
|
||||||
"https://user:foo@example.com/pypa/sample-namespace-packages.git@2.0.0#foo=bar",
|
|
||||||
)?
|
|
||||||
.with_lfs(None)
|
|
||||||
.cache_key(&mut hasher);
|
|
||||||
let git_url_with_fragments = hasher.finish();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
repo_url_with_fragments, git_url_with_fragments,
|
|
||||||
"both structs should have the exact cache keys as fragments are still removed",
|
|
||||||
);
|
|
||||||
|
|
||||||
let mut hasher = CacheKeyHasher::new();
|
|
||||||
RepositoryUrl::parse(
|
|
||||||
"https://user:foo@example.com/pypa/sample-namespace-packages.git@2.0.0#foo=bar",
|
|
||||||
)?
|
|
||||||
.with_lfs(Some(false))
|
|
||||||
.cache_key(&mut hasher);
|
|
||||||
let git_url_with_fragments_and_lfs_false = hasher.finish();
|
|
||||||
|
|
||||||
assert_eq!(
|
|
||||||
git_url_with_fragments, git_url_with_fragments_and_lfs_false,
|
|
||||||
"both structs should have the exact cache keys as lfs false should not influence them",
|
|
||||||
);
|
|
||||||
|
|
||||||
let mut hasher = CacheKeyHasher::new();
|
|
||||||
RepositoryUrl::parse(
|
|
||||||
"https://user:foo@example.com/pypa/sample-namespace-packages.git@2.0.0#foo=bar",
|
|
||||||
)?
|
|
||||||
.with_lfs(Some(true))
|
|
||||||
.cache_key(&mut hasher);
|
|
||||||
let git_url_with_fragments_and_lfs_true = hasher.finish();
|
|
||||||
|
|
||||||
assert_ne!(
|
|
||||||
git_url_with_fragments, git_url_with_fragments_and_lfs_true,
|
|
||||||
"both structs should have different cache keys as one has Git LFS enabled",
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
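Both sides of the hunks above key repository caches off the canonicalized URL string rather than the `Url` value itself. A minimal standalone sketch of that idea, using only the `url` crate and std's `DefaultHasher` (uv's `CacheKeyHasher` and `DisplaySafeUrl` are not used here, so this is an illustration of the technique, not uv's implementation):

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    // Sketch: hash the canonical *string* form of a URL. The serialization is
    // specified by the WHATWG URL standard, so the digest stays stable even if
    // the `url` crate changes how `Url` itself implements `Hash`.
    fn repository_cache_key(url: &url::Url) -> u64 {
        let mut canonical = url.clone();
        // Fragments and query strings don't identify the repository resource.
        canonical.set_fragment(None);
        canonical.set_query(None);
        let mut hasher = DefaultHasher::new();
        canonical.as_str().hash(&mut hasher);
        hasher.finish()
    }

    fn main() {
        let a = url::Url::parse("https://example.com/repo.git#foo=bar").unwrap();
        let b = url::Url::parse("https://example.com/repo.git").unwrap();
        // The fragment is stripped, so both URLs produce the same key.
        assert_eq!(repository_cache_key(&a), repository_cache_key(&b));
    }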
@@ -1,10 +1,11 @@
 [package]
 name = "uv-cache"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.1"
+description = "Generate stable hash digests across versions and platforms."
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
+documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }

@@ -34,6 +35,5 @@ rustc-hash = { workspace = true }
 same-file = { workspace = true }
 serde = { workspace = true, features = ["derive"] }
 tempfile = { workspace = true }
-thiserror = { workspace = true }
 tracing = { workspace = true }
 walkdir = { workspace = true }
@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-cache
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-cache).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.
@@ -3,7 +3,7 @@ use std::path::{Path, PathBuf};
 use uv_static::EnvVars;

 use crate::Cache;
-use clap::{Parser, ValueHint};
+use clap::Parser;
 use tracing::{debug, warn};

 #[derive(Parser, Debug, Clone)]

@@ -27,7 +27,7 @@ pub struct CacheArgs {
     /// `%LOCALAPPDATA%\uv\cache` on Windows.
     ///
     /// To view the location of the cache directory, run `uv cache dir`.
-    #[arg(global = true, long, env = EnvVars::UV_CACHE_DIR, value_hint = ValueHint::DirPath)]
+    #[arg(global = true, long, env = EnvVars::UV_CACHE_DIR)]
     pub cache_dir: Option<PathBuf>,
 }
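The only substantive change above is the `value_hint` annotation on the `-` side, which tells shells generating completions that the argument is a directory path. A minimal clap sketch of the same annotation (the struct and the literal env var string are hypothetical stand-ins; the `env` attribute requires clap's `env` feature):

    use clap::{Parser, ValueHint};
    use std::path::PathBuf;

    // Sketch only: a standalone argument struct, not uv's actual CacheArgs.
    #[derive(Parser, Debug)]
    struct CacheArgs {
        /// Path to the cache directory.
        #[arg(long, env = "UV_CACHE_DIR", value_hint = ValueHint::DirPath)]
        cache_dir: Option<PathBuf>,
    }

    fn main() {
        let args = CacheArgs::parse();
        println!("{args:?}");
    }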
@@ -7,10 +7,11 @@ use std::str::FromStr;
 use std::sync::Arc;

 use rustc_hash::FxHashMap;
-use tracing::{debug, trace, warn};
+use tracing::debug;

+pub use archive::ArchiveId;
 use uv_cache_info::Timestamp;
-use uv_fs::{LockedFile, LockedFileError, LockedFileMode, Simplified, cachedir, directories};
+use uv_fs::{LockedFile, cachedir, directories};
 use uv_normalize::PackageName;
 use uv_pypi_types::ResolutionMetadata;

@@ -21,7 +22,6 @@ use crate::removal::Remover;
 pub use crate::removal::{Removal, rm_rf};
 pub use crate::wheel::WheelCache;
 use crate::wheel::WheelCacheKind;
-pub use archive::ArchiveId;

 mod archive;
 mod by_timestamp;

@@ -35,17 +35,6 @@ mod wheel;
 /// Must be kept in-sync with the version in [`CacheBucket::to_str`].
 pub const ARCHIVE_VERSION: u8 = 0;

-/// Error locking a cache entry or shard
-#[derive(Debug, thiserror::Error)]
-pub enum Error {
-    #[error(transparent)]
-    Io(#[from] io::Error),
-    #[error("Could not make the path absolute")]
-    Absolute(#[source] io::Error),
-    #[error("Could not acquire lock")]
-    Acquire(#[from] LockedFileError),
-}
-
 /// A [`CacheEntry`] which may or may not exist yet.
 #[derive(Debug, Clone)]
 pub struct CacheEntry(PathBuf);

@@ -91,14 +80,9 @@ impl CacheEntry {
     }

     /// Acquire the [`CacheEntry`] as an exclusive lock.
-    pub async fn lock(&self) -> Result<LockedFile, Error> {
+    pub async fn lock(&self) -> Result<LockedFile, io::Error> {
         fs_err::create_dir_all(self.dir())?;
-        Ok(LockedFile::acquire(
-            self.path(),
-            LockedFileMode::Exclusive,
-            self.path().display(),
-        )
-        .await?)
+        LockedFile::acquire(self.path(), self.path().display()).await
     }
 }

@@ -125,14 +109,9 @@ impl CacheShard {
     }

     /// Acquire the cache entry as an exclusive lock.
-    pub async fn lock(&self) -> Result<LockedFile, Error> {
+    pub async fn lock(&self) -> Result<LockedFile, io::Error> {
         fs_err::create_dir_all(self.as_ref())?;
-        Ok(LockedFile::acquire(
-            self.join(".lock"),
-            LockedFileMode::Exclusive,
-            self.display(),
-        )
-        .await?)
+        LockedFile::acquire(self.join(".lock"), self.display()).await
     }

     /// Return the [`CacheShard`] as a [`PathBuf`].
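Both `lock` methods follow the same pattern: create the parent directory, then block on an advisory file lock (the `-` side additionally passes an explicit lock mode and wraps failures in a dedicated error type). A standalone sketch of that pattern, assuming the `fs2` crate rather than uv's `LockedFile`:

    use std::fs::{self, File};
    use std::io;
    use std::path::Path;

    use fs2::FileExt;

    // Sketch only (fs2-based): take an exclusive advisory lock on a `.lock`
    // file inside `dir`, creating the directory first. Dropping the returned
    // handle releases the lock.
    fn lock_entry(dir: &Path) -> io::Result<File> {
        fs::create_dir_all(dir)?;
        let file = File::create(dir.join(".lock"))?;
        file.lock_exclusive()?; // blocks until no other process holds the lock
        Ok(file)
    }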
@@ -156,8 +135,6 @@ impl Deref for CacheShard {
 }

 /// The main cache abstraction.
-///
-/// While the cache is active, it holds a read (shared) lock that prevents cache cleaning
 #[derive(Debug, Clone)]
 pub struct Cache {
     /// The cache directory.

@@ -169,9 +146,6 @@ pub struct Cache {
     /// Included to ensure that the temporary directory exists for the length of the operation, but
     /// is dropped at the end as appropriate.
     temp_dir: Option<Arc<tempfile::TempDir>>,
-    /// Ensure that `uv cache` operations don't remove items from the cache that are used by another
-    /// uv process.
-    lock_file: Option<Arc<LockedFile>>,
 }

 impl Cache {

@@ -181,7 +155,6 @@ impl Cache {
             root: root.into(),
             refresh: Refresh::None(Timestamp::now()),
             temp_dir: None,
-            lock_file: None,
         }
     }

@@ -192,7 +165,6 @@ impl Cache {
             root: temp_dir.path().to_path_buf(),
             refresh: Refresh::None(Timestamp::now()),
             temp_dir: Some(Arc::new(temp_dir)),
-            lock_file: None,
         })
     }

@@ -202,69 +174,6 @@ impl Cache {
         Self { refresh, ..self }
     }

-    /// Acquire a lock that allows removing entries from the cache.
-    pub async fn with_exclusive_lock(self) -> Result<Self, LockedFileError> {
-        let Self {
-            root,
-            refresh,
-            temp_dir,
-            lock_file,
-        } = self;
-
-        // Release the existing lock, avoid deadlocks from a cloned cache.
-        if let Some(lock_file) = lock_file {
-            drop(
-                Arc::try_unwrap(lock_file).expect(
-                    "cloning the cache before acquiring an exclusive lock causes a deadlock",
-                ),
-            );
-        }
-        let lock_file = LockedFile::acquire(
-            root.join(".lock"),
-            LockedFileMode::Exclusive,
-            root.simplified_display(),
-        )
-        .await?;
-
-        Ok(Self {
-            root,
-            refresh,
-            temp_dir,
-            lock_file: Some(Arc::new(lock_file)),
-        })
-    }
-
-    /// Acquire a lock that allows removing entries from the cache, if available.
-    ///
-    /// If the lock is not immediately available, returns [`Err`] with self.
-    pub fn with_exclusive_lock_no_wait(self) -> Result<Self, Self> {
-        let Self {
-            root,
-            refresh,
-            temp_dir,
-            lock_file,
-        } = self;
-
-        match LockedFile::acquire_no_wait(
-            root.join(".lock"),
-            LockedFileMode::Exclusive,
-            root.simplified_display(),
-        ) {
-            Some(lock_file) => Ok(Self {
-                root,
-                refresh,
-                temp_dir,
-                lock_file: Some(Arc::new(lock_file)),
-            }),
-            None => Err(Self {
-                root,
-                refresh,
-                temp_dir,
-                lock_file,
-            }),
-        }
-    }
-
     /// Return the root of the cache.
     pub fn root(&self) -> &Path {
         &self.root
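The fields and methods removed here implement the scheme the deleted doc comment describes: a running uv process holds a shared lock on `<cache>/.lock`, and cache-removal operations must take an exclusive lock on the same file before deleting entries. A sketch of that reader/cleaner protocol, again assuming the `fs2` crate rather than uv's `LockedFile`:

    use std::fs::File;
    use std::io;
    use std::path::Path;

    use fs2::FileExt;

    // Sketch only: many readers may hold the shared lock at once.
    fn open_cache_for_reading(root: &Path) -> io::Result<File> {
        let lock = File::create(root.join(".lock"))?;
        lock.lock_shared()?;
        Ok(lock)
    }

    // The cleaner waits until every shared holder has dropped its lock, so it
    // can never delete entries out from under an active process.
    fn open_cache_for_cleaning(root: &Path) -> io::Result<File> {
        let lock = File::create(root.join(".lock"))?;
        lock.lock_exclusive()?;
        Ok(lock)
    }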
@@ -401,8 +310,10 @@ impl Cache {
         self.temp_dir.is_some()
     }

-    /// Populate the cache scaffold.
-    fn create_base_files(root: &PathBuf) -> io::Result<()> {
+    /// Initialize the [`Cache`].
+    pub fn init(self) -> Result<Self, io::Error> {
+        let root = &self.root;
+
         // Create the cache directory, if it doesn't exist.
         fs_err::create_dir_all(root)?;

@@ -448,101 +359,21 @@ impl Cache {
                 .join(".git"),
         )?;

-        Ok(())
-    }
-
-    /// Initialize the [`Cache`].
-    pub async fn init(self) -> Result<Self, Error> {
-        let root = &self.root;
-
-        Self::create_base_files(root)?;
-
-        // Block cache removal operations from interfering.
-        let lock_file = match LockedFile::acquire(
-            root.join(".lock"),
-            LockedFileMode::Shared,
-            root.simplified_display(),
-        )
-        .await
-        {
-            Ok(lock_file) => Some(Arc::new(lock_file)),
-            Err(err)
-                if err
-                    .as_io_error()
-                    .is_some_and(|err| err.kind() == io::ErrorKind::Unsupported) =>
-            {
-                warn!(
-                    "Shared locking is not supported by the current platform or filesystem, \
-                    reduced parallel process safety with `uv cache clean` and `uv cache prune`."
-                );
-                None
-            }
-            Err(err) => return Err(err.into()),
-        };
-
         Ok(Self {
-            root: std::path::absolute(root).map_err(Error::Absolute)?,
-            lock_file,
+            root: std::path::absolute(root)?,
             ..self
         })
     }

-    /// Initialize the [`Cache`], assuming that there are no other uv processes running.
-    pub fn init_no_wait(self) -> Result<Option<Self>, Error> {
-        let root = &self.root;
-
-        Self::create_base_files(root)?;
-
-        // Block cache removal operations from interfering.
-        let Some(lock_file) = LockedFile::acquire_no_wait(
-            root.join(".lock"),
-            LockedFileMode::Shared,
-            root.simplified_display(),
-        ) else {
-            return Ok(None);
-        };
-        Ok(Some(Self {
-            root: std::path::absolute(root).map_err(Error::Absolute)?,
-            lock_file: Some(Arc::new(lock_file)),
-            ..self
-        }))
-    }
-
     /// Clear the cache, removing all entries.
-    pub fn clear(self, reporter: Box<dyn CleanReporter>) -> Result<Removal, io::Error> {
-        // Remove everything but `.lock`, Windows does not allow removal of a locked file
-        let mut removal = Remover::new(reporter).rm_rf(&self.root, true)?;
-        let Self {
-            root, lock_file, ..
-        } = self;
-
-        // Remove the `.lock` file, unlocking it first
-        if let Some(lock) = lock_file {
-            drop(lock);
-            fs_err::remove_file(root.join(".lock"))?;
-        }
-        removal.num_files += 1;
-
-        // Remove the root directory
-        match fs_err::remove_dir(root) {
-            Ok(()) => {
-                removal.num_dirs += 1;
-            }
-            // On Windows, when `--force` is used, the `.lock` file can exist and be unremovable,
-            // so we make this non-fatal
-            Err(err) if err.kind() == io::ErrorKind::DirectoryNotEmpty => {
-                trace!("Failed to remove root cache directory: not empty");
-            }
-            Err(err) => return Err(err),
-        }
-
-        Ok(removal)
+    pub fn clear(&self, reporter: Box<dyn CleanReporter>) -> Result<Removal, io::Error> {
+        Remover::new(reporter).rm_rf(&self.root)
     }

     /// Remove a package from the cache.
     ///
     /// Returns the number of entries removed from the cache.
-    pub fn remove(&self, name: &PackageName) -> io::Result<Removal> {
+    pub fn remove(&self, name: &PackageName) -> Result<Removal, io::Error> {
         // Collect the set of referenced archives.
         let references = self.find_archive_references()?;

@@ -576,7 +407,6 @@ impl Cache {
                 if entry.file_name() == "CACHEDIR.TAG"
                     || entry.file_name() == ".gitignore"
                     || entry.file_name() == ".git"
-                    || entry.file_name() == ".lock"
                 {
                     continue;
                 }
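On the `-` side, `clear` has to sequence deletions around the lock file: cache contents first, then release and delete `.lock`, then the root directory, tolerating a `.lock` that Windows refuses to delete. A simplified sketch of that ordering (content removal elided; plain std, not uv's `Remover`):

    use std::fs::{self, File};
    use std::io;
    use std::path::Path;

    // Sketch only: the lock handle is whatever advisory lock was taken at init.
    fn clear(root: &Path, lock: Option<File>) -> io::Result<()> {
        // 1. Remove everything under `root` except `.lock` (elided here).
        // 2. Release the advisory lock, then remove the lock file itself.
        if let Some(lock) = lock {
            drop(lock);
            fs::remove_file(root.join(".lock"))?;
        }
        // 3. Remove the now-empty root; non-fatal if `.lock` lingered (Windows).
        match fs::remove_dir(root) {
            Ok(()) => Ok(()),
            Err(err) if err.kind() == io::ErrorKind::DirectoryNotEmpty => Ok(()),
            Err(err) => Err(err),
        }
    }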
@@ -1157,8 +987,6 @@ pub enum CacheBucket {
     Environments,
     /// Cached Python downloads
     Python,
-    /// Downloaded tool binaries (e.g., Ruff).
-    Binaries,
 }

 impl CacheBucket {

@@ -1172,7 +1000,7 @@ impl CacheBucket {
             Self::Interpreter => "interpreter-v4",
             // Note that when bumping this, you'll also need to bump it
             // in `crates/uv/tests/it/cache_clean.rs`.
-            Self::Simple => "simple-v18",
+            Self::Simple => "simple-v16",
             // Note that when bumping this, you'll also need to bump it
             // in `crates/uv/tests/it/cache_prune.rs`.
             Self::Wheels => "wheels-v5",

@@ -1182,7 +1010,6 @@ impl CacheBucket {
             Self::Builds => "builds-v0",
             Self::Environments => "environments-v2",
             Self::Python => "python-v0",
-            Self::Binaries => "binaries-v0",
         }
     }

@@ -1289,8 +1116,7 @@ impl CacheBucket {
             | Self::Archive
             | Self::Builds
             | Self::Environments
-            | Self::Python
-            | Self::Binaries => {
+            | Self::Python => {
                 // Nothing to do.
             }
         }

@@ -1309,7 +1135,6 @@ impl CacheBucket {
             Self::Archive,
             Self::Builds,
             Self::Environments,
-            Self::Binaries,
         ]
         .iter()
         .copied()
@@ -1387,30 +1212,35 @@ impl Refresh {
     /// Combine two [`Refresh`] policies, taking the "max" of the two policies.
     #[must_use]
     pub fn combine(self, other: Self) -> Self {
+        /// Return the maximum of two timestamps.
+        fn max(a: Timestamp, b: Timestamp) -> Timestamp {
+            if a > b { a } else { b }
+        }
+
         match (self, other) {
             // If the policy is `None`, return the existing refresh policy.
             // Take the `max` of the two timestamps.
-            (Self::None(t1), Self::None(t2)) => Self::None(t1.max(t2)),
-            (Self::None(t1), Self::All(t2)) => Self::All(t1.max(t2)),
+            (Self::None(t1), Self::None(t2)) => Self::None(max(t1, t2)),
+            (Self::None(t1), Self::All(t2)) => Self::All(max(t1, t2)),
             (Self::None(t1), Self::Packages(packages, paths, t2)) => {
-                Self::Packages(packages, paths, t1.max(t2))
+                Self::Packages(packages, paths, max(t1, t2))
             }

             // If the policy is `All`, refresh all packages.
-            (Self::All(t1), Self::None(t2) | Self::All(t2) | Self::Packages(.., t2)) => {
-                Self::All(t1.max(t2))
-            }
+            (Self::All(t1), Self::None(t2)) => Self::All(max(t1, t2)),
+            (Self::All(t1), Self::All(t2)) => Self::All(max(t1, t2)),
+            (Self::All(t1), Self::Packages(.., t2)) => Self::All(max(t1, t2)),

             // If the policy is `Packages`, take the "max" of the two policies.
             (Self::Packages(packages, paths, t1), Self::None(t2)) => {
-                Self::Packages(packages, paths, t1.max(t2))
+                Self::Packages(packages, paths, max(t1, t2))
             }
-            (Self::Packages(.., t1), Self::All(t2)) => Self::All(t1.max(t2)),
+            (Self::Packages(.., t1), Self::All(t2)) => Self::All(max(t1, t2)),
             (Self::Packages(packages1, paths1, t1), Self::Packages(packages2, paths2, t2)) => {
                 Self::Packages(
                     packages1.into_iter().chain(packages2).collect(),
                     paths1.into_iter().chain(paths2).collect(),
-                    t1.max(t2),
+                    max(t1, t2),
                 )
             }
         }
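The semantic content of this hunk is the "take the max" rule: combining two policies keeps the stricter variant and the later timestamp (on the `-` side, `Timestamp` implements `Ord`, so `t1.max(t2)` replaces the local helper). A reduced sketch of that rule with plain `u64` timestamps standing in for `Timestamp` (the `Packages` variant is omitted for brevity):

    // Sketch only: a two-variant reduction of the policy-combining rule.
    #[derive(Debug, PartialEq)]
    enum Refresh {
        None(u64),
        All(u64),
    }

    fn combine(a: Refresh, b: Refresh) -> Refresh {
        match (a, b) {
            // Neither side refreshes: keep the later timestamp.
            (Refresh::None(t1), Refresh::None(t2)) => Refresh::None(t1.max(t2)),
            // `All` dominates: if either side wants a full refresh, the result does.
            (Refresh::All(t1), Refresh::None(t2))
            | (Refresh::None(t1), Refresh::All(t2))
            | (Refresh::All(t1), Refresh::All(t2)) => Refresh::All(t1.max(t2)),
        }
    }

    fn main() {
        assert_eq!(combine(Refresh::None(1), Refresh::All(5)), Refresh::All(5));
    }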
@@ -10,7 +10,7 @@ use crate::CleanReporter;
 /// Remove a file or directory and all its contents, returning a [`Removal`] with
 /// the number of files and directories removed, along with a total byte count.
 pub fn rm_rf(path: impl AsRef<Path>) -> io::Result<Removal> {
-    Remover::default().rm_rf(path, false)
+    Remover::default().rm_rf(path)
 }

 /// A builder for a [`Remover`] that can remove files and directories.

@@ -29,13 +29,9 @@ impl Remover {

     /// Remove a file or directory and all its contents, returning a [`Removal`] with
     /// the number of files and directories removed, along with a total byte count.
-    pub(crate) fn rm_rf(
-        &self,
-        path: impl AsRef<Path>,
-        skip_locked_file: bool,
-    ) -> io::Result<Removal> {
+    pub(crate) fn rm_rf(&self, path: impl AsRef<Path>) -> io::Result<Removal> {
         let mut removal = Removal::default();
-        removal.rm_rf(path.as_ref(), self.reporter.as_deref(), skip_locked_file)?;
+        removal.rm_rf(path.as_ref(), self.reporter.as_deref())?;
         Ok(removal)
     }
 }

@@ -56,12 +52,7 @@ pub struct Removal {

 impl Removal {
     /// Recursively remove a file or directory and all its contents.
-    fn rm_rf(
-        &mut self,
-        path: &Path,
-        reporter: Option<&dyn CleanReporter>,
-        skip_locked_file: bool,
-    ) -> io::Result<()> {
+    fn rm_rf(&mut self, path: &Path, reporter: Option<&dyn CleanReporter>) -> io::Result<()> {
         let metadata = match fs_err::symlink_metadata(path) {
             Ok(metadata) => metadata,
             Err(err) if err.kind() == io::ErrorKind::NotFound => return Ok(()),

@@ -73,22 +64,9 @@ impl Removal {

         // Remove the file.
         self.total_bytes += metadata.len();
-        if metadata.is_symlink() {
-            #[cfg(windows)]
-            {
-                use std::os::windows::fs::FileTypeExt;
-
-                if metadata.file_type().is_symlink_dir() {
-                    remove_dir(path)?;
-                } else {
-                    remove_file(path)?;
-                }
-            }
-
-            #[cfg(not(windows))]
-            {
-                remove_file(path)?;
-            }
+        if cfg!(windows) && metadata.is_symlink() {
+            // Remove the junction.
+            remove_dir(path)?;
         } else {
             remove_file(path)?;
         }

@@ -109,44 +87,18 @@ impl Removal {
                     if set_readable(dir).unwrap_or(false) {
                         // Retry the operation; if we _just_ `self.rm_rf(dir)` and continue,
                         // `walkdir` may give us duplicate entries for the directory.
-                        return self.rm_rf(path, reporter, skip_locked_file);
+                        return self.rm_rf(path, reporter);
                    }
                }
            }

             let entry = entry?;
-
-            // Remove the exclusive lock last.
-            if skip_locked_file
-                && entry.file_name() == ".lock"
-                && entry
-                    .path()
-                    .strip_prefix(path)
-                    .is_ok_and(|suffix| suffix == Path::new(".lock"))
-            {
-                continue;
-            }
-
-            if entry.file_type().is_symlink() && {
-                #[cfg(windows)]
-                {
-                    use std::os::windows::fs::FileTypeExt;
-                    entry.file_type().is_symlink_dir()
-                }
-                #[cfg(not(windows))]
-                {
-                    false
-                }
-            } {
+            if cfg!(windows) && entry.file_type().is_symlink() {
+                // Remove the junction.
                 self.num_files += 1;
                 remove_dir(entry.path())?;
             } else if entry.file_type().is_dir() {
-                // Remove the directory with the exclusive lock last.
-                if skip_locked_file && entry.path() == path {
-                    continue;
-                }
-
                 self.num_dirs += 1;

                 // The contents should have been removed by now, but sometimes a race condition is
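The `-` side refines the Windows handling above: a directory symlink (or junction) must be removed with `remove_dir`, while a file symlink is removed with `remove_file`; on Unix every symlink is just a file entry. A self-contained sketch of that distinction (the function name is a hypothetical stand-in):

    use std::fs;
    use std::io;
    use std::path::Path;

    // Sketch only: remove a symlink correctly on both platforms.
    fn remove_symlink(path: &Path) -> io::Result<()> {
        #[cfg(windows)]
        {
            use std::os::windows::fs::FileTypeExt;
            let file_type = fs::symlink_metadata(path)?.file_type();
            // Directory symlinks and junctions need `remove_dir` on Windows.
            if file_type.is_symlink_dir() {
                return fs::remove_dir(path);
            }
        }
        // File symlinks (and every symlink on Unix) are removed as files.
        fs::remove_file(path)
    }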
@@ -15,7 +15,7 @@ pub enum WheelCache<'a> {
     Path(&'a DisplaySafeUrl),
     /// An editable dependency, which we key by URL.
     Editable(&'a DisplaySafeUrl),
-    /// A Git dependency, which we key by URL (including LFS state), SHA.
+    /// A Git dependency, which we key by URL and SHA.
     ///
     /// Note that this variant only exists for source distributions; wheels can't be delivered
     /// through Git.
@@ -1,10 +1,11 @@
 [package]
 name = "uv-cli"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.1"
+description = "The command line interface for the uv binary."
 edition = { workspace = true }
 rust-version = { workspace = true }
 homepage = { workspace = true }
+documentation = { workspace = true }
 repository = { workspace = true }
 authors = { workspace = true }
 license = { workspace = true }

@@ -16,14 +17,12 @@ doctest = false
 workspace = true

 [dependencies]
-uv-auth = { workspace = true }
 uv-cache = { workspace = true, features = ["clap"] }
 uv-configuration = { workspace = true, features = ["clap"] }
 uv-distribution-types = { workspace = true }
 uv-install-wheel = { workspace = true, features = ["clap"], default-features = false }
 uv-normalize = { workspace = true }
 uv-pep508 = { workspace = true }
-uv-preview = { workspace = true }
 uv-pypi-types = { workspace = true }
 uv-python = { workspace = true, features = ["clap", "schemars"]}
 uv-redacted = { workspace = true }
@@ -1,13 +0,0 @@
-<!-- This file is generated. DO NOT EDIT -->
-
-# uv-cli
-
-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-cli).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.
File diff suppressed because it is too large
@@ -1,8 +1,7 @@
 use anstream::eprintln;

 use uv_cache::Refresh;
-use uv_configuration::{BuildIsolation, Reinstall, Upgrade};
-use uv_distribution_types::{ConfigSettings, PackageConfigSettings, Requirement};
+use uv_configuration::{ConfigSettings, PackageConfigSettings};
 use uv_resolver::{ExcludeNewer, ExcludeNewerPackage, PrereleaseMode};
 use uv_settings::{Combine, PipOptions, ResolverInstallerOptions, ResolverOptions};
 use uv_warnings::owo_colors::OwoColorize;

@@ -334,10 +333,8 @@ pub fn resolver_options(
                 .filter_map(Maybe::into_option)
                 .collect()
         }),
-        upgrade: Upgrade::from_args(
-            flag(upgrade, no_upgrade, "no-upgrade"),
-            upgrade_package.into_iter().map(Requirement::from).collect(),
-        ),
+        upgrade: flag(upgrade, no_upgrade, "no-upgrade"),
+        upgrade_package: Some(upgrade_package),
         index_strategy,
         keyring_provider,
         resolution,

@@ -355,10 +352,8 @@ pub fn resolver_options(
                 .into_iter()
                 .collect::<PackageConfigSettings>()
         }),
-        build_isolation: BuildIsolation::from_args(
-            flag(no_build_isolation, build_isolation, "build-isolation"),
-            no_build_isolation_package,
-        ),
+        no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
+        no_build_isolation_package: Some(no_build_isolation_package),
         extra_build_dependencies: None,
         extra_build_variables: None,
         exclude_newer: ExcludeNewer::from_args(

@@ -366,7 +361,6 @@ pub fn resolver_options(
             exclude_newer_package.unwrap_or_default(),
         ),
         link_mode,
-        torch_backend: None,
         no_build: flag(no_build, build, "build"),
         no_build_package: Some(no_build_package),
         no_binary: flag(no_binary, binary, "binary"),

@@ -448,14 +442,18 @@ pub fn resolver_installer_options(
                 .filter_map(Maybe::into_option)
                 .collect()
         }),
-        upgrade: Upgrade::from_args(
-            flag(upgrade, no_upgrade, "upgrade"),
-            upgrade_package.into_iter().map(Requirement::from).collect(),
-        ),
-        reinstall: Reinstall::from_args(
-            flag(reinstall, no_reinstall, "reinstall"),
-            reinstall_package,
-        ),
+        upgrade: flag(upgrade, no_upgrade, "upgrade"),
+        upgrade_package: if upgrade_package.is_empty() {
+            None
+        } else {
+            Some(upgrade_package)
+        },
+        reinstall: flag(reinstall, no_reinstall, "reinstall"),
+        reinstall_package: if reinstall_package.is_empty() {
+            None
+        } else {
+            Some(reinstall_package)
+        },
         index_strategy,
         keyring_provider,
         resolution,

@@ -473,10 +471,12 @@ pub fn resolver_installer_options(
                 .into_iter()
                 .collect::<PackageConfigSettings>()
         }),
-        build_isolation: BuildIsolation::from_args(
-            flag(no_build_isolation, build_isolation, "build-isolation"),
-            no_build_isolation_package,
-        ),
+        no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
+        no_build_isolation_package: if no_build_isolation_package.is_empty() {
+            None
+        } else {
+            Some(no_build_isolation_package)
+        },
         extra_build_dependencies: None,
         extra_build_variables: None,
         exclude_newer,

@@ -496,6 +496,5 @@ pub fn resolver_installer_options(
             Some(no_binary_package)
         },
         no_sources: if no_sources { Some(true) } else { None },
-        torch_backend: None,
     }
 }
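Both sides of these hunks lean on a `flag(yes, no, name)` helper that collapses a pair of boolean CLI switches into a tri-state setting. A simplified standalone sketch of that pattern (hypothetical: uv's real helper reports the conflict through its own error path rather than panicking):

    // Sketch only: two boolean switches become an Option<bool>, where passing
    // neither flag leaves the setting unset so lower-priority config can apply.
    fn flag(yes: bool, no: bool, name: &str) -> Option<bool> {
        match (yes, no) {
            (true, false) => Some(true),
            (false, true) => Some(false),
            (false, false) => None,
            (true, true) => panic!("`--{name}` and `--no-{name}` are mutually exclusive"),
        }
    }

    fn main() {
        assert_eq!(flag(true, false, "upgrade"), Some(true));
        assert_eq!(flag(false, false, "upgrade"), None);
    }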
@@ -3,9 +3,7 @@
 use std::fmt;

 use serde::Serialize;
-use uv_normalize::PackageName;
-use uv_pep508::uv_pep440::Version;
+use uv_pep508::{PackageName, uv_pep440::Version};

 /// Information about the git repository where uv was built from.
 #[derive(Serialize)]
@@ -1,13 +1,7 @@
 [package]
 name = "uv-client"
-version = "0.0.8"
-description = "This is an internal component crate of uv"
+version = "0.0.1"
 edition = { workspace = true }
-rust-version = { workspace = true }
-homepage = { workspace = true }
-repository = { workspace = true }
-authors = { workspace = true }
-license = { workspace = true }

 [lib]
 doctest = false

@@ -28,7 +22,6 @@ uv-normalize = { workspace = true }
 uv-pep440 = { workspace = true }
 uv-pep508 = { workspace = true }
 uv-platform-tags = { workspace = true }
-uv-preview = { workspace = true }
 uv-pypi-types = { workspace = true }
 uv-small-str = { workspace = true }
 uv-redacted = { workspace = true }

@@ -38,14 +31,12 @@ uv-version = { workspace = true }
 uv-warnings = { workspace = true }

 anyhow = { workspace = true }
-astral-tl = { workspace = true }
 async-trait = { workspace = true }
 async_http_range_reader = { workspace = true }
 async_zip = { workspace = true }
 bytecheck = { workspace = true }
 fs-err = { workspace = true, features = ["tokio"] }
 futures = { workspace = true }
-h2 = { workspace = true }
 html-escape = { workspace = true }
 http = { workspace = true }
 itertools = { workspace = true }

@@ -61,6 +52,7 @@ serde = { workspace = true }
 serde_json = { workspace = true }
 sys-info = { workspace = true }
 thiserror = { workspace = true }
+tl = { workspace = true }
 tokio = { workspace = true }
 tokio-util = { workspace = true }
 tracing = { workspace = true }

@@ -72,9 +64,5 @@ http-body-util = { workspace = true }
 hyper = { workspace = true }
 hyper-util = { workspace = true }
 insta = { workspace = true }
-rcgen = { workspace = true }
-rustls = { workspace = true }
 tokio = { workspace = true }
-tokio-rustls = { workspace = true }
 wiremock = { workspace = true }
-tempfile = { workspace = true }
@@ -1,13 +1,5 @@
-<!-- This file is generated. DO NOT EDIT -->
+# `pypi-client`

-# uv-client
+A general-use client for interacting with PyPI.

-This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
-is unstable and will have frequent breaking changes.
-
-This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
-can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-client).
-
-See uv's
-[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
-for details on versioning.
+Loosely modeled after Orogene's `oro-client`.
@@ -21,36 +21,35 @@ use reqwest_middleware::{ClientWithMiddleware, Middleware};
 use reqwest_retry::policies::ExponentialBackoff;
 use reqwest_retry::{
     DefaultRetryableStrategy, RetryTransientMiddleware, Retryable, RetryableStrategy,
-    default_on_request_error,
 };
 use thiserror::Error;
 use tracing::{debug, trace};
 use url::ParseError;
 use url::Url;

-use uv_auth::{AuthMiddleware, Credentials, CredentialsCache, Indexes, PyxTokenStore};
+use uv_auth::Credentials;
+use uv_auth::{AuthMiddleware, Indexes};
 use uv_configuration::{KeyringProviderType, TrustedHost};
 use uv_fs::Simplified;
 use uv_pep508::MarkerEnvironment;
 use uv_platform_tags::Platform;
-use uv_preview::Preview;
 use uv_redacted::DisplaySafeUrl;
-use uv_redacted::DisplaySafeUrlError;
 use uv_static::EnvVars;
 use uv_version::version;
 use uv_warnings::warn_user_once;

+use crate::Connectivity;
 use crate::linehaul::LineHaul;
 use crate::middleware::OfflineMiddleware;
 use crate::tls::read_identity;
-use crate::{Connectivity, WrappedReqwestError};

+/// Do not use this value directly outside tests, use [`retries_from_env`] instead.
 pub const DEFAULT_RETRIES: u32 = 3;

 /// Maximum number of redirects to follow before giving up.
 ///
 /// This is the default used by [`reqwest`].
-pub const DEFAULT_MAX_REDIRECTS: u32 = 10;
+const DEFAULT_MAX_REDIRECTS: u32 = 10;

 /// Selectively skip parts or the entire auth middleware.
 #[derive(Debug, Clone, Copy, Default)]
@@ -69,7 +68,6 @@ pub enum AuthIntegration {
 #[derive(Debug, Clone)]
 pub struct BaseClientBuilder<'a> {
     keyring: KeyringProviderType,
-    preview: Preview,
     allow_insecure_host: Vec<TrustedHost>,
     native_tls: bool,
     built_in_root_certs: bool,

@@ -78,10 +76,8 @@ pub struct BaseClientBuilder<'a> {
     markers: Option<&'a MarkerEnvironment>,
     platform: Option<&'a Platform>,
     auth_integration: AuthIntegration,
-    /// Global authentication cache for a uv invocation to share credentials across uv clients.
-    credentials_cache: Arc<CredentialsCache>,
     indexes: Indexes,
-    timeout: Duration,
+    default_timeout: Duration,
     extra_middleware: Option<ExtraMiddleware>,
     proxies: Vec<Proxy>,
     redirect_policy: RedirectPolicy,

@@ -89,10 +85,6 @@ pub struct BaseClientBuilder<'a> {
     ///
     /// A policy allowing propagation is insecure and should only be available for test code.
     cross_origin_credential_policy: CrossOriginCredentialsPolicy,
-    /// Optional custom reqwest client to use instead of creating a new one.
-    custom_client: Option<Client>,
-    /// uv subcommand in which this client is being used
-    subcommand: Option<Vec<String>>,
 }

 /// The policy for handling HTTP redirects.

@@ -104,8 +96,6 @@ pub enum RedirectPolicy {
     BypassMiddleware,
     /// Handle redirects manually, re-triggering our custom middleware for each request.
     RetriggerMiddleware,
-    /// No redirect for non-cloneable (e.g., streaming) requests with custom redirect logic.
-    NoRedirect,
 }

 impl RedirectPolicy {

@@ -113,7 +103,6 @@ impl RedirectPolicy {
         match self {
             Self::BypassMiddleware => reqwest::redirect::Policy::default(),
             Self::RetriggerMiddleware => reqwest::redirect::Policy::none(),
-            Self::NoRedirect => reqwest::redirect::Policy::none(),
         }
     }
 }
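The mapping in the last hunk is the crux of the redirect design: bypassing the middleware lets reqwest follow redirects itself, while re-triggering the middleware disables reqwest's automatic redirects so that each hop passes back through the middleware stack (auth, retries). A standalone sketch of that mapping, assuming only the `reqwest` crate:

    use reqwest::redirect::Policy;

    // Sketch only: a local enum mirroring the policy choice above.
    enum RedirectPolicy {
        BypassMiddleware,
        RetriggerMiddleware,
    }

    impl RedirectPolicy {
        fn reqwest_policy(&self) -> Policy {
            match self {
                // Let reqwest follow up to its default redirect limit.
                RedirectPolicy::BypassMiddleware => Policy::default(),
                // Disable automatic redirects; the caller replays each hop
                // through the middleware chain instead.
                RedirectPolicy::RetriggerMiddleware => Policy::none(),
            }
        }
    }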
@@ -132,9 +121,14 @@ impl Debug for ExtraMiddleware {
 impl Default for BaseClientBuilder<'_> {
     fn default() -> Self {
+        Self::new()
+    }
+}
+
+impl BaseClientBuilder<'_> {
+    pub fn new() -> Self {
         Self {
             keyring: KeyringProviderType::default(),
-            preview: Preview::default(),
             allow_insecure_host: vec![],
             native_tls: false,
             built_in_root_certs: false,

@@ -143,50 +137,17 @@ impl Default for BaseClientBuilder<'_> {
             markers: None,
             platform: None,
             auth_integration: AuthIntegration::default(),
-            credentials_cache: Arc::new(CredentialsCache::default()),
             indexes: Indexes::new(),
-            timeout: Duration::from_secs(30),
+            default_timeout: Duration::from_secs(30),
             extra_middleware: None,
             proxies: vec![],
             redirect_policy: RedirectPolicy::default(),
             cross_origin_credential_policy: CrossOriginCredentialsPolicy::Secure,
-            custom_client: None,
-            subcommand: None,
         }
     }
 }

 impl<'a> BaseClientBuilder<'a> {
-    pub fn new(
-        connectivity: Connectivity,
-        native_tls: bool,
-        allow_insecure_host: Vec<TrustedHost>,
-        preview: Preview,
-        timeout: Duration,
-        retries: u32,
-    ) -> Self {
-        Self {
-            preview,
-            allow_insecure_host,
-            native_tls,
-            retries,
-            connectivity,
-            timeout,
-            ..Self::default()
-        }
-    }
-
-    /// Use a custom reqwest client instead of creating a new one.
-    ///
-    /// This allows you to provide your own reqwest client with custom configuration.
-    /// Note that some configuration options from this builder will still be applied
-    /// to the client via middleware.
-    #[must_use]
-    pub fn custom_client(mut self, client: Client) -> Self {
-        self.custom_client = Some(client);
-        self
-    }
-
     #[must_use]
     pub fn keyring(mut self, keyring_type: KeyringProviderType) -> Self {
         self.keyring = keyring_type;
@@ -211,6 +172,15 @@ impl<'a> BaseClientBuilder<'a> {
         self
     }

+    /// Read the retry count from [`EnvVars::UV_HTTP_RETRIES`] if set, otherwise use the default
+    /// retries.
+    ///
+    /// Errors when [`EnvVars::UV_HTTP_RETRIES`] is not a valid u32.
+    pub fn retries_from_env(mut self) -> Result<Self, RetryParsingError> {
+        self.retries = retries_from_env()?;
+        Ok(self)
+    }
+
     #[must_use]
     pub fn native_tls(mut self, native_tls: bool) -> Self {
         self.native_tls = native_tls;

@@ -248,8 +218,8 @@ impl<'a> BaseClientBuilder<'a> {
     }

     #[must_use]
-    pub fn timeout(mut self, timeout: Duration) -> Self {
-        self.timeout = timeout;
+    pub fn default_timeout(mut self, default_timeout: Duration) -> Self {
+        self.default_timeout = default_timeout;
         self
     }

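The `retries_from_env` method added above reads a retry count from the environment, falling back to the default when the variable is unset and surfacing a parse error otherwise. A standalone sketch of that behavior, using a literal env var name and `ParseIntError` in place of uv's error type (both are stand-ins):

    use std::env;
    use std::num::ParseIntError;

    const DEFAULT_RETRIES: u32 = 3;

    // Sketch only: unset means the default; a set but non-numeric value errors.
    fn retries_from_env() -> Result<u32, ParseIntError> {
        match env::var("UV_HTTP_RETRIES") {
            Ok(value) => value.parse::<u32>(),
            Err(_) => Ok(DEFAULT_RETRIES),
        }
    }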
@@ -283,36 +253,12 @@ impl<'a> BaseClientBuilder<'a> {
         self
     }

-    #[must_use]
-    pub fn subcommand(mut self, subcommand: Vec<String>) -> Self {
-        self.subcommand = Some(subcommand);
-        self
-    }
-
-    pub fn credentials_cache(&self) -> &CredentialsCache {
-        &self.credentials_cache
-    }
-
-    /// See [`CredentialsCache::store_credentials_from_url`].
-    pub fn store_credentials_from_url(&self, url: &DisplaySafeUrl) -> bool {
-        self.credentials_cache.store_credentials_from_url(url)
-    }
-
-    /// See [`CredentialsCache::store_credentials`].
-    pub fn store_credentials(&self, url: &DisplaySafeUrl, credentials: Credentials) {
-        self.credentials_cache.store_credentials(url, credentials);
-    }
-
-    pub fn is_native_tls(&self) -> bool {
-        self.native_tls
-    }
-
     pub fn is_offline(&self) -> bool {
         matches!(self.connectivity, Connectivity::Offline)
     }

     /// Create a [`RetryPolicy`] for the client.
-    pub fn retry_policy(&self) -> ExponentialBackoff {
+    fn retry_policy(&self) -> ExponentialBackoff {
         let mut builder = ExponentialBackoff::builder();
         if env::var_os(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY).is_some() {
             builder = builder.retry_bounds(Duration::from_millis(0), Duration::from_millis(0));
@ -321,14 +267,63 @@ impl<'a> BaseClientBuilder<'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn build(&self) -> BaseClient {
|
pub fn build(&self) -> BaseClient {
|
||||||
let timeout = self.timeout;
|
// Create user agent.
|
||||||
|
let mut user_agent_string = format!("uv/{}", version());
|
||||||
|
|
||||||
|
// Add linehaul metadata.
|
||||||
|
if let Some(markers) = self.markers {
|
||||||
|
let linehaul = LineHaul::new(markers, self.platform);
|
||||||
|
if let Ok(output) = serde_json::to_string(&linehaul) {
|
||||||
|
let _ = write!(user_agent_string, " {output}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for the presence of an `SSL_CERT_FILE`.
|
||||||
|
let ssl_cert_file_exists = env::var_os(EnvVars::SSL_CERT_FILE).is_some_and(|path| {
|
||||||
|
let path_exists = Path::new(&path).exists();
|
||||||
|
if !path_exists {
|
||||||
|
warn_user_once!(
|
||||||
|
"Ignoring invalid `SSL_CERT_FILE`. File does not exist: {}.",
|
||||||
|
path.simplified_display().cyan()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
path_exists
|
||||||
|
});
|
||||||
|
|
||||||
|
// Timeout options, matching https://doc.rust-lang.org/nightly/cargo/reference/config.html#httptimeout
|
||||||
|
// `UV_REQUEST_TIMEOUT` is provided for backwards compatibility with v0.1.6
|
||||||
|
let timeout = env::var(EnvVars::UV_HTTP_TIMEOUT)
|
||||||
|
.or_else(|_| env::var(EnvVars::UV_REQUEST_TIMEOUT))
|
||||||
|
.or_else(|_| env::var(EnvVars::HTTP_TIMEOUT))
|
||||||
|
.and_then(|value| {
|
||||||
|
value.parse::<u64>()
|
||||||
|
.map(Duration::from_secs)
|
||||||
|
.or_else(|_| {
|
||||||
|
// On parse error, warn and use the default timeout
|
||||||
|
warn_user_once!("Ignoring invalid value from environment for `UV_HTTP_TIMEOUT`. Expected an integer number of seconds, got \"{value}\".");
|
||||||
|
Ok(self.default_timeout)
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.unwrap_or(self.default_timeout);
|
||||||
debug!("Using request timeout of {}s", timeout.as_secs());
|
debug!("Using request timeout of {}s", timeout.as_secs());
|
||||||
|
|
||||||
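The timeout resolution above chains three environment variables, oldest-compatibility last, and falls back to a default when parsing fails. A compilable sketch of the same cascade (the 30-second default is an assumption for illustration; the warning on parse failure is omitted):

    use std::env;
    use std::time::Duration;

    fn resolve_timeout(default_timeout: Duration) -> Duration {
        env::var("UV_HTTP_TIMEOUT")
            .or_else(|_| env::var("UV_REQUEST_TIMEOUT"))
            .or_else(|_| env::var("HTTP_TIMEOUT"))
            .ok()
            // Values are whole seconds; anything unparsable falls back to the default.
            .and_then(|value| value.parse::<u64>().ok().map(Duration::from_secs))
            .unwrap_or(default_timeout)
    }

    fn main() {
        let timeout = resolve_timeout(Duration::from_secs(30));
        println!("request timeout: {}s", timeout.as_secs());
    }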
-        // Use the custom client if provided, otherwise create a new one
-        let (raw_client, raw_dangerous_client) = match &self.custom_client {
-            Some(client) => (client.clone(), client.clone()),
-            None => self.create_secure_and_insecure_clients(timeout),
-        };
+        // Create a secure client that validates certificates.
+        let raw_client = self.create_client(
+            &user_agent_string,
+            timeout,
+            ssl_cert_file_exists,
+            Security::Secure,
+            self.redirect_policy,
+        );
+
+        // Create an insecure client that accepts invalid certificates.
+        let raw_dangerous_client = self.create_client(
+            &user_agent_string,
+            timeout,
+            ssl_cert_file_exists,
+            Security::Insecure,
+            self.redirect_policy,
+        );

         // Wrap in any relevant middleware and handle connectivity.
         let client = RedirectClientWithMiddleware {

@@ -351,7 +346,6 @@ impl<'a> BaseClientBuilder<'a> {
             dangerous_client,
             raw_dangerous_client,
             timeout,
-            credentials_cache: self.credentials_cache.clone(),
         }
     }

@@ -378,112 +372,14 @@ impl<'a> BaseClientBuilder<'a> {
             raw_client: existing.raw_client.clone(),
             raw_dangerous_client: existing.raw_dangerous_client.clone(),
             timeout: existing.timeout,
-            credentials_cache: existing.credentials_cache.clone(),
         }
     }

-    fn create_secure_and_insecure_clients(&self, timeout: Duration) -> (Client, Client) {
-        // Create user agent.
-        let mut user_agent_string = format!("uv/{}", version());
-
-        // Add linehaul metadata.
-        let linehaul = LineHaul::new(self.markers, self.platform, self.subcommand.clone());
-        if let Ok(output) = serde_json::to_string(&linehaul) {
-            let _ = write!(user_agent_string, " {output}");
-        }
-
-        // Checks for the presence of `SSL_CERT_FILE`.
-        // Certificate loading support is delegated to `rustls-native-certs`.
-        // See https://github.com/rustls/rustls-native-certs/blob/813790a297ad4399efe70a8e5264ca1b420acbec/src/lib.rs#L118-L125
-        let ssl_cert_file_exists = env::var_os(EnvVars::SSL_CERT_FILE).is_some_and(|path| {
-            let path_exists = Path::new(&path).exists();
-            if !path_exists {
-                warn_user_once!(
-                    "Ignoring invalid `SSL_CERT_FILE`. File does not exist: {}.",
-                    path.simplified_display().cyan()
-                );
-            }
-            path_exists
-        });
-
-        // Checks for the presence of `SSL_CERT_DIR`.
-        // Certificate loading support is delegated to `rustls-native-certs`.
-        // See https://github.com/rustls/rustls-native-certs/blob/813790a297ad4399efe70a8e5264ca1b420acbec/src/lib.rs#L118-L125
-        let ssl_cert_dir_exists = env::var_os(EnvVars::SSL_CERT_DIR)
-            .filter(|v| !v.is_empty())
-            .is_some_and(|dirs| {
-                // Parse `SSL_CERT_DIR`, with support for multiple entries using
-                // a platform-specific delimiter (`:` on Unix, `;` on Windows)
-                let (existing, missing): (Vec<_>, Vec<_>) =
-                    env::split_paths(&dirs).partition(|p| p.exists());
-
-                if existing.is_empty() {
-                    let end_note = if missing.len() == 1 {
-                        "The directory does not exist."
-                    } else {
-                        "The entries do not exist."
-                    };
-                    warn_user_once!(
-                        "Ignoring invalid `SSL_CERT_DIR`. {end_note}: {}.",
-                        missing
-                            .iter()
-                            .map(Simplified::simplified_display)
-                            .join(", ")
-                            .cyan()
-                    );
-                    return false;
-                }
-
-                // Warn on any missing entries
-                if !missing.is_empty() {
-                    let end_note = if missing.len() == 1 {
-                        "The following directory does not exist:"
-                    } else {
-                        "The following entries do not exist:"
-                    };
-                    warn_user_once!(
-                        "Invalid entries in `SSL_CERT_DIR`. {end_note}: {}.",
-                        missing
-                            .iter()
-                            .map(Simplified::simplified_display)
-                            .join(", ")
-                            .cyan()
-                    );
-                }
-
-                // Proceed while ignoring missing entries
-                true
-            });
-
-        // Create a secure client that validates certificates.
-        let raw_client = self.create_client(
-            &user_agent_string,
-            timeout,
-            ssl_cert_file_exists,
-            ssl_cert_dir_exists,
-            Security::Secure,
-            self.redirect_policy,
-        );
-
-        // Create an insecure client that accepts invalid certificates.
-        let raw_dangerous_client = self.create_client(
-            &user_agent_string,
-            timeout,
-            ssl_cert_file_exists,
-            ssl_cert_dir_exists,
-            Security::Insecure,
-            self.redirect_policy,
-        );
-
-        (raw_client, raw_dangerous_client)
-    }
-
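The removed `SSL_CERT_DIR` handling rests on `std::env::split_paths`, which splits on the platform's path-list delimiter (`:` on Unix, `;` on Windows). A small sketch of the existing/missing partition it performs:

    use std::env;
    use std::path::PathBuf;

    fn main() {
        let Some(dirs) = env::var_os("SSL_CERT_DIR") else {
            println!("SSL_CERT_DIR is not set");
            return;
        };
        // Partition every entry by whether it exists on disk.
        let (existing, missing): (Vec<PathBuf>, Vec<PathBuf>) =
            env::split_paths(&dirs).partition(|path| path.exists());
        println!("existing entries: {existing:?}");
        println!("missing entries:  {missing:?}");
    }

The diff warns when some entries are missing and ignores the variable entirely when none exist.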
     fn create_client(
         &self,
         user_agent: &str,
         timeout: Duration,
         ssl_cert_file_exists: bool,
-        ssl_cert_dir_exists: bool,
         security: Security,
         redirect_policy: RedirectPolicy,
     ) -> Client {

@@ -502,7 +398,7 @@ impl<'a> BaseClientBuilder<'a> {
             Security::Insecure => client_builder.danger_accept_invalid_certs(true),
         };

-        let client_builder = if self.native_tls || ssl_cert_file_exists || ssl_cert_dir_exists {
+        let client_builder = if self.native_tls || ssl_cert_file_exists {
             client_builder.tls_built_in_native_certs(true)
         } else {
             client_builder.tls_built_in_webpki_certs(true)

@@ -536,30 +432,6 @@ impl<'a> BaseClientBuilder<'a> {
     fn apply_middleware(&self, client: Client) -> ClientWithMiddleware {
         match self.connectivity {
             Connectivity::Online => {
-                // Create a base client to using in the authentication middleware.
-                let base_client = {
-                    let mut client = reqwest_middleware::ClientBuilder::new(client.clone());
-
-                    // Avoid uncloneable errors with a streaming body during publish.
-                    if self.retries > 0 {
-                        // Initialize the retry strategy.
-                        let retry_strategy = RetryTransientMiddleware::new_with_policy_and_strategy(
-                            self.retry_policy(),
-                            UvRetryableStrategy,
-                        );
-                        client = client.with(retry_strategy);
-                    }
-
-                    // When supplied, add the extra middleware.
-                    if let Some(extra_middleware) = &self.extra_middleware {
-                        for middleware in &extra_middleware.0 {
-                            client = client.with_arc(middleware.clone());
-                        }
-                    }
-
-                    client.build()
-                };
-
                 let mut client = reqwest_middleware::ClientBuilder::new(client);

                 // Avoid uncloneable errors with a streaming body during publish.

@@ -572,38 +444,20 @@ impl<'a> BaseClientBuilder<'a> {
                     client = client.with(retry_strategy);
                 }

-                // When supplied, add the extra middleware.
-                if let Some(extra_middleware) = &self.extra_middleware {
-                    for middleware in &extra_middleware.0 {
-                        client = client.with_arc(middleware.clone());
-                    }
-                }
-
                 // Initialize the authentication middleware to set headers.
                 match self.auth_integration {
                     AuthIntegration::Default => {
-                        let mut auth_middleware = AuthMiddleware::new()
-                            .with_cache_arc(self.credentials_cache.clone())
-                            .with_base_client(base_client)
+                        let auth_middleware = AuthMiddleware::new()
                             .with_indexes(self.indexes.clone())
-                            .with_keyring(self.keyring.to_provider())
-                            .with_preview(self.preview);
-                        if let Ok(token_store) = PyxTokenStore::from_settings() {
-                            auth_middleware = auth_middleware.with_pyx_token_store(token_store);
-                        }
+                            .with_keyring(self.keyring.to_provider());
                         client = client.with(auth_middleware);
                     }
                     AuthIntegration::OnlyAuthenticated => {
-                        let mut auth_middleware = AuthMiddleware::new()
-                            .with_cache_arc(self.credentials_cache.clone())
-                            .with_base_client(base_client)
+                        let auth_middleware = AuthMiddleware::new()
                             .with_indexes(self.indexes.clone())
                             .with_keyring(self.keyring.to_provider())
-                            .with_preview(self.preview)
                             .with_only_authenticated(true);
-                        if let Ok(token_store) = PyxTokenStore::from_settings() {
-                            auth_middleware = auth_middleware.with_pyx_token_store(token_store);
-                        }
                         client = client.with(auth_middleware);
                     }
                     AuthIntegration::NoAuthMiddleware => {

@@ -611,6 +465,13 @@ impl<'a> BaseClientBuilder<'a> {
                     }
                 }

+                // When supplied add the extra middleware
+                if let Some(extra_middleware) = &self.extra_middleware {
+                    for middleware in &extra_middleware.0 {
+                        client = client.with_arc(middleware.clone());
+                    }
+                }
+
                 client.build()
             }
             Connectivity::Offline => reqwest_middleware::ClientBuilder::new(client)
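For orientation, the middleware stack assembled in `apply_middleware` follows the standard `reqwest-middleware` pattern: wrap a `reqwest::Client`, attach layers, build. A minimal sketch using the published `reqwest-retry` API (the retry count and the omission of auth/extra layers are simplifications, not uv's exact setup):

    use reqwest_middleware::{ClientBuilder, ClientWithMiddleware};
    use reqwest_retry::{RetryTransientMiddleware, policies::ExponentialBackoff};

    fn build_client(retries: u32) -> ClientWithMiddleware {
        let retry_policy = ExponentialBackoff::builder().build_with_max_retries(retries);
        ClientBuilder::new(reqwest::Client::new())
            // Retries transient failures (connect errors, 5xx, 408, 429) transparently.
            .with(RetryTransientMiddleware::new_with_policy(retry_policy))
            .build()
    }

Middleware runs in the order it is added, which is why the relocation of the extra middleware relative to the authentication middleware in the hunk above is a behavioral change, not just a tidy-up.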
@@ -639,8 +500,6 @@ pub struct BaseClient {
     allow_insecure_host: Vec<TrustedHost>,
     /// The number of retries to attempt on transient errors.
     retries: u32,
-    /// Global authentication cache for a uv invocation to share credentials across uv clients.
-    credentials_cache: Arc<CredentialsCache>,
 }

 #[derive(Debug, Clone, Copy)]

@@ -663,7 +522,7 @@ impl BaseClient {

     /// Executes a request, applying redirect policy.
     pub async fn execute(&self, req: Request) -> reqwest_middleware::Result<Response> {
-        let client = self.for_host(&DisplaySafeUrl::from_url(req.url().clone()));
+        let client = self.for_host(&DisplaySafeUrl::from(req.url().clone()));
         client.execute(req).await
     }

@@ -686,15 +545,7 @@ impl BaseClient {

     /// The [`RetryPolicy`] for the client.
     pub fn retry_policy(&self) -> ExponentialBackoff {
-        let mut builder = ExponentialBackoff::builder();
-        if env::var_os(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY).is_some() {
-            builder = builder.retry_bounds(Duration::from_millis(0), Duration::from_millis(0));
-        }
-        builder.build_with_max_retries(self.retries)
-    }
-
-    pub fn credentials_cache(&self) -> &CredentialsCache {
-        &self.credentials_cache
+        ExponentialBackoff::builder().build_with_max_retries(self.retries)
     }
 }

@@ -713,17 +564,17 @@ pub struct RedirectClientWithMiddleware {

 impl RedirectClientWithMiddleware {
     /// Convenience method to make a `GET` request to a URL.
-    pub fn get<U: IntoUrl>(&self, url: U) -> RequestBuilder<'_> {
+    pub fn get<U: IntoUrl>(&self, url: U) -> RequestBuilder {
         RequestBuilder::new(self.client.get(url), self)
     }

     /// Convenience method to make a `POST` request to a URL.
-    pub fn post<U: IntoUrl>(&self, url: U) -> RequestBuilder<'_> {
+    pub fn post<U: IntoUrl>(&self, url: U) -> RequestBuilder {
         RequestBuilder::new(self.client.post(url), self)
     }

     /// Convenience method to make a `HEAD` request to a URL.
-    pub fn head<U: IntoUrl>(&self, url: U) -> RequestBuilder<'_> {
+    pub fn head<U: IntoUrl>(&self, url: U) -> RequestBuilder {
         RequestBuilder::new(self.client.head(url), self)
     }

@@ -732,7 +583,6 @@ impl RedirectClientWithMiddleware {
         match self.redirect_policy {
             RedirectPolicy::BypassMiddleware => self.client.execute(req).await,
             RedirectPolicy::RetriggerMiddleware => self.execute_with_redirect_handling(req).await,
-            RedirectPolicy::NoRedirect => self.client.execute(req).await,
         }
     }

@@ -798,7 +648,7 @@ fn request_into_redirect(
     res: &Response,
     cross_origin_credentials_policy: CrossOriginCredentialsPolicy,
 ) -> reqwest_middleware::Result<Option<Request>> {
-    let original_req_url = DisplaySafeUrl::from_url(req.url().clone());
+    let original_req_url = DisplaySafeUrl::from(req.url().clone());
     let status = res.status();
     let should_redirect = match status {
         StatusCode::MOVED_PERMANENTLY

@@ -851,7 +701,7 @@ fn request_into_redirect(
     let mut redirect_url = match DisplaySafeUrl::parse(location) {
         Ok(url) => url,
         // Per RFC 7231, URLs should be resolved against the request URL.
-        Err(DisplaySafeUrlError::Url(ParseError::RelativeUrlWithoutBase)) => original_req_url.join(location).map_err(|err| {
+        Err(ParseError::RelativeUrlWithoutBase) => original_req_url.join(location).map_err(|err| {
            reqwest_middleware::Error::Middleware(anyhow!(
                "Invalid HTTP {status} 'Location' value `{location}` relative to `{original_req_url}`: {err}"
            ))
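The redirect hunk above resolves a relative `Location` against the request URL, as RFC 7231 requires; only the error type matched on differs between the two sides. A standalone illustration with the `url` crate (the URLs are hypothetical):

    use url::{ParseError, Url};

    fn resolve_location(base: &Url, location: &str) -> Result<Url, ParseError> {
        match Url::parse(location) {
            Ok(absolute) => Ok(absolute),
            // A bare path like `/new/path` must be resolved against the request URL.
            Err(ParseError::RelativeUrlWithoutBase) => base.join(location),
            Err(err) => Err(err),
        }
    }

    fn main() -> Result<(), ParseError> {
        let base = Url::parse("https://example.com/simple/pkg/")?;
        let redirect = resolve_location(&base, "/files/pkg-1.0.tar.gz")?;
        assert_eq!(redirect.as_str(), "https://example.com/files/pkg-1.0.tar.gz");
        Ok(())
    }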
@@ -1029,7 +879,7 @@ impl RetryableStrategy for UvRetryableStrategy {
             None | Some(Retryable::Fatal)
                 if res
                     .as_ref()
-                    .is_err_and(|err| is_transient_network_error(err)) =>
+                    .is_err_and(|err| is_extended_transient_error(err)) =>
             {
                 Some(Retryable::Transient)
             }

@@ -1057,15 +907,12 @@ impl RetryableStrategy for UvRetryableStrategy {
     }
 }

-/// Whether the error looks like a network error that should be retried.
+/// Check for additional transient error kinds not supported by the default retry strategy in `reqwest_retry`.
 ///
-/// There are two cases that the default retry strategy is missing:
-/// * Inside the reqwest or reqwest-middleware error is an `io::Error` such as a broken pipe
-/// * When streaming a response, a reqwest error may be hidden several layers behind errors
-///   of different crates processing the stream, including `io::Error` layers.
-pub fn is_transient_network_error(err: &(dyn Error + 'static)) -> bool {
+/// These cases should be safe to retry with [`Retryable::Transient`].
+pub fn is_extended_transient_error(err: &dyn Error) -> bool {
     // First, try to show a nice trace log
-    if let Some((Some(status), Some(url))) = find_source::<WrappedReqwestError>(&err)
+    if let Some((Some(status), Some(url))) = find_source::<crate::WrappedReqwestError>(&err)
         .map(|request_err| (request_err.status(), request_err.url()))
     {
         trace!("Considering retry of response HTTP {status} for {url}");

@@ -1073,88 +920,38 @@ pub fn is_transient_network_error(err: &(dyn Error + 'static)) -> bool {
         trace!("Considering retry of error: {err:?}");
     }

-    let mut has_known_error = false;
-    // IO Errors or reqwest errors may be nested through custom IO errors or stream processing
-    // crates
-    let mut current_source = Some(err);
-    while let Some(source) = current_source {
-        if let Some(reqwest_err) = source.downcast_ref::<WrappedReqwestError>() {
-            has_known_error = true;
-            if let reqwest_middleware::Error::Reqwest(reqwest_err) = &**reqwest_err {
-                if default_on_request_error(reqwest_err) == Some(Retryable::Transient) {
-                    trace!("Retrying nested reqwest middleware error");
-                    return true;
-                }
-                if is_retryable_status_error(reqwest_err) {
-                    trace!("Retrying nested reqwest middleware status code error");
-                    return true;
-                }
-            }
-
-            trace!("Cannot retry nested reqwest middleware error");
-        } else if let Some(reqwest_err) = source.downcast_ref::<reqwest::Error>() {
-            has_known_error = true;
-            if default_on_request_error(reqwest_err) == Some(Retryable::Transient) {
-                trace!("Retrying nested reqwest error");
-                return true;
-            }
-            if is_retryable_status_error(reqwest_err) {
-                trace!("Retrying nested reqwest status code error");
-                return true;
-            }
-
-            trace!("Cannot retry nested reqwest error");
-        } else if source.downcast_ref::<h2::Error>().is_some() {
-            // All h2 errors look like errors that should be retried
-            // https://github.com/astral-sh/uv/issues/15916
-            trace!("Retrying nested h2 error");
-            return true;
-        } else if let Some(io_err) = source.downcast_ref::<io::Error>() {
-            has_known_error = true;
-            let retryable_io_err_kinds = [
-                // https://github.com/astral-sh/uv/issues/12054
-                io::ErrorKind::BrokenPipe,
-                // From reqwest-middleware
-                io::ErrorKind::ConnectionAborted,
-                // https://github.com/astral-sh/uv/issues/3514
-                io::ErrorKind::ConnectionReset,
-                // https://github.com/astral-sh/uv/issues/14699
-                io::ErrorKind::InvalidData,
-                // https://github.com/astral-sh/uv/issues/9246
-                io::ErrorKind::UnexpectedEof,
-            ];
-            if retryable_io_err_kinds.contains(&io_err.kind()) {
-                trace!("Retrying error: `{}`", io_err.kind());
-                return true;
-            }
-
-            trace!(
-                "Cannot retry IO error `{}`, not a retryable IO error kind",
-                io_err.kind()
-            );
-        }
-
-        current_source = source.source();
-    }
-
-    if !has_known_error {
-        trace!("Cannot retry error: Neither an IO error nor a reqwest error");
-    }
+    // IO Errors may be nested through custom IO errors.
+    let mut has_io_error = false;
+    for io_err in find_sources::<io::Error>(&err) {
+        has_io_error = true;
+        let retryable_io_err_kinds = [
+            // https://github.com/astral-sh/uv/issues/12054
+            io::ErrorKind::BrokenPipe,
+            // From reqwest-middleware
+            io::ErrorKind::ConnectionAborted,
+            // https://github.com/astral-sh/uv/issues/3514
+            io::ErrorKind::ConnectionReset,
+            // https://github.com/astral-sh/uv/issues/14699
+            io::ErrorKind::InvalidData,
+            // https://github.com/astral-sh/uv/issues/9246
+            io::ErrorKind::UnexpectedEof,
+        ];
+        if retryable_io_err_kinds.contains(&io_err.kind()) {
+            trace!("Retrying error: `{}`", io_err.kind());
+            return true;
+        }
+        trace!(
+            "Cannot retry IO error `{}`, not a retryable IO error kind",
+            io_err.kind()
+        );
+    }
+
+    if !has_io_error {
+        trace!("Cannot retry error: not an extended IO error");
+    }
     false
 }

-/// Whether the error is a status code error that is retryable.
-///
-/// Port of `reqwest_retry::default_on_request_success`.
-fn is_retryable_status_error(reqwest_err: &reqwest::Error) -> bool {
-    let Some(status) = reqwest_err.status() else {
-        return false;
-    };
-    status.is_server_error()
-        || status == StatusCode::REQUEST_TIMEOUT
-        || status == StatusCode::TOO_MANY_REQUESTS
-}
-
 /// Find the first source error of a specific type.
 ///
 /// See <https://github.com/seanmonstar/reqwest/issues/1602#issuecomment-1220996681>

@@ -1169,6 +966,15 @@ fn find_source<E: Error + 'static>(orig: &dyn Error) -> Option<&E> {
     None
 }

+/// Return all errors in the chain of a specific type.
+///
+/// This handles cases such as nested `io::Error`s.
+///
+/// See <https://github.com/seanmonstar/reqwest/issues/1602#issuecomment-1220996681>
+fn find_sources<E: Error + 'static>(orig: &dyn Error) -> impl Iterator<Item = &E> {
+    iter::successors(find_source::<E>(orig), |&err| find_source(err))
+}
+
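Both versions of the retry check hinge on walking `Error::source` chains with downcasts. A sketch of the two helpers' core (note that `find_source` starts at `orig.source()` rather than `orig` itself, which is what lets `find_sources` step strictly deeper on each iteration instead of looping):

    use std::error::Error;
    use std::iter;

    /// Find the first error of type `E` strictly inside the source chain.
    fn find_source<E: Error + 'static>(orig: &dyn Error) -> Option<&E> {
        let mut cause = orig.source();
        while let Some(err) = cause {
            if let Some(found) = err.downcast_ref::<E>() {
                return Some(found);
            }
            cause = err.source();
        }
        None
    }

    /// Yield every error of type `E` in the chain, outermost first.
    fn find_sources<E: Error + 'static>(orig: &dyn Error) -> impl Iterator<Item = &E> {
        iter::successors(find_source::<E>(orig), |&err| find_source(err))
    }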
 // TODO(konsti): Remove once we find a native home for `retries_from_env`
 #[derive(Debug, Error)]
 pub enum RetryParsingError {

@@ -1176,14 +982,26 @@ pub enum RetryParsingError {
     ParseInt(#[from] ParseIntError),
 }

+/// Read the retry count from [`EnvVars::UV_HTTP_RETRIES`] if set, otherwise, make no change.
+///
+/// Errors when [`EnvVars::UV_HTTP_RETRIES`] is not a valid u32.
+pub fn retries_from_env() -> Result<u32, RetryParsingError> {
+    // TODO(zanieb): We should probably parse this in another layer, but there's not a natural
+    // fit for it right now
+    if let Some(value) = env::var_os(EnvVars::UV_HTTP_RETRIES) {
+        Ok(value.to_string_lossy().as_ref().parse::<u32>()?)
+    } else {
+        Ok(DEFAULT_RETRIES)
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;

     use anyhow::Result;
-    use insta::assert_debug_snapshot;
     use reqwest::{Client, Method};
-    use wiremock::matchers::{method, path};
+    use wiremock::matchers::method;
     use wiremock::{Mock, MockServer, ResponseTemplate};

     use crate::base_client::request_into_redirect;

@@ -1376,71 +1194,4 @@ mod tests {

         Ok(())
     }
-
-    /// Enumerate which status codes we are retrying.
-    #[tokio::test]
-    async fn retried_status_codes() -> Result<()> {
-        let server = MockServer::start().await;
-        let client = Client::default();
-        let middleware_client = ClientWithMiddleware::default();
-        let mut retried = Vec::new();
-        for status in 100..599 {
-            // Test all standard status codes and and example for a non-RFC code used in the wild.
-            if StatusCode::from_u16(status)?.canonical_reason().is_none() && status != 420 {
-                continue;
-            }
-
-            Mock::given(path(format!("/{status}")))
-                .respond_with(ResponseTemplate::new(status))
-                .mount(&server)
-                .await;
-
-            let response = middleware_client
-                .get(format!("{}/{}", server.uri(), status))
-                .send()
-                .await;
-
-            let middleware_retry =
-                DefaultRetryableStrategy.handle(&response) == Some(Retryable::Transient);
-
-            let response = client
-                .get(format!("{}/{}", server.uri(), status))
-                .send()
-                .await?;
-
-            let uv_retry = match response.error_for_status() {
-                Ok(_) => false,
-                Err(err) => is_transient_network_error(&err),
-            };
-
-            // Ensure we're retrying the same status code as the reqwest_retry crate. We may choose
-            // to deviate from this later.
-            assert_eq!(middleware_retry, uv_retry);
-            if uv_retry {
-                retried.push(status);
-            }
-        }
-
-        assert_debug_snapshot!(retried, @r"
-        [
-            100,
-            102,
-            408,
-            429,
-            500,
-            501,
-            502,
-            503,
-            504,
-            505,
-            506,
-            507,
-            508,
-            510,
-            511,
-        ]
-        ");
-
-        Ok(())
-    }
 }
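The removed snapshot test pins down exactly which status codes uv retried, matching `reqwest_retry`'s default strategy. The core classification from the removed `is_retryable_status_error` helper is compact enough to restate; a sketch (the snapshot additionally shows the middleware treating informational 1xx responses as transient):

    use reqwest::StatusCode;

    /// All 5xx responses plus 408 (Request Timeout) and 429 (Too Many Requests).
    fn is_retryable_status(status: StatusCode) -> bool {
        status.is_server_error()
            || status == StatusCode::REQUEST_TIMEOUT
            || status == StatusCode::TOO_MANY_REQUESTS
    }

    fn main() {
        assert!(is_retryable_status(StatusCode::BAD_GATEWAY)); // 502
        assert!(is_retryable_status(StatusCode::TOO_MANY_REQUESTS)); // 429
        assert!(!is_retryable_status(StatusCode::NOT_FOUND)); // 404
    }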
@@ -14,33 +14,13 @@ use uv_fs::write_atomic
 use uv_redacted::DisplaySafeUrl;

 use crate::BaseClient;
-use crate::base_client::is_transient_network_error;
-use crate::error::ProblemDetails;
+use crate::base_client::is_extended_transient_error;
 use crate::{
     Error, ErrorKind,
     httpcache::{AfterResponse, BeforeRequest, CachePolicy, CachePolicyBuilder},
     rkyvutil::OwnedArchive,
 };

-/// Extract problem details from an HTTP response if it has the correct content type
-///
-/// Note: This consumes the response body, so it should only be called when there's an error status.
-async fn extract_problem_details(response: Response) -> Option<ProblemDetails> {
-    match response.bytes().await {
-        Ok(bytes) => match serde_json::from_slice(&bytes) {
-            Ok(details) => Some(details),
-            Err(err) => {
-                warn!("Failed to parse problem details: {err}");
-                None
-            }
-        },
-        Err(err) => {
-            warn!("Failed to read response body for problem details: {err}");
-            None
-        }
-    }
-}
-
 /// A trait the generalizes (de)serialization at a high level.
 ///
 /// The main purpose of this trait is to make the `CachedClient` work for

@@ -161,7 +141,7 @@ impl<CallbackError: std::error::Error + 'static> CachedClientError<CallbackError
         }
     }

-    fn error(&self) -> &(dyn std::error::Error + 'static) {
+    fn error(&self) -> &dyn std::error::Error {
         match self {
             Self::Client { err, .. } => err,
             Self::Callback { err, .. } => err,

@@ -472,8 +452,7 @@ impl CachedClient {
         .await
     }

-    #[instrument(name = "read_and_parse_cache", skip_all, fields(file = %cache_entry.path().display()
-    ))]
+    #[instrument(name="read_and_parse_cache", skip_all, fields(file = %cache_entry.path().display()))]
     async fn read_cache(cache_entry: &CacheEntry) -> Option<DataWithCachePolicy> {
         match DataWithCachePolicy::from_path_async(cache_entry.path()).await {
             Ok(data) => Some(data),

@@ -557,36 +536,16 @@ impl CachedClient {
         cached: DataWithCachePolicy,
         new_cache_policy_builder: CachePolicyBuilder,
     ) -> Result<CachedResponse, Error> {
-        let url = DisplaySafeUrl::from_url(req.url().clone());
+        let url = DisplaySafeUrl::from(req.url().clone());
         debug!("Sending revalidation request for: {url}");
         let mut response = self
             .0
             .execute(req)
             .instrument(info_span!("revalidation_request", url = url.as_str()))
             .await
-            .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?;
-
-        // Check for HTTP error status and extract problem details if available
-        if let Err(status_error) = response.error_for_status_ref() {
-            // Clone the response to extract problem details before the error consumes it
-            let problem_details = if response
-                .headers()
-                .get("content-type")
-                .and_then(|ct| ct.to_str().ok())
-                .map(|ct| ct == "application/problem+json")
-                .unwrap_or(false)
-            {
-                extract_problem_details(response).await
-            } else {
-                None
-            };
-            return Err(ErrorKind::from_reqwest_with_problem_details(
-                url.clone(),
-                status_error,
-                problem_details,
-            )
-            .into());
-        }
+            .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?
+            .error_for_status()
+            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;

         // If the user set a custom `Cache-Control` header, override it.
         if let CacheControl::Override(header) = cache_control {

@@ -627,7 +586,7 @@ impl CachedClient {
         req: Request,
         cache_control: CacheControl<'_>,
     ) -> Result<(Response, Option<Box<CachePolicy>>), Error> {
-        let url = DisplaySafeUrl::from_url(req.url().clone());
+        let url = DisplaySafeUrl::from(req.url().clone());
         trace!("Sending fresh {} request for {}", req.method(), url);
         let cache_policy_builder = CachePolicyBuilder::new(&req);
         let mut response = self

@@ -651,25 +610,9 @@ impl CachedClient {
             .map(|retries| retries.value());

         if let Err(status_error) = response.error_for_status_ref() {
-            let problem_details = if response
-                .headers()
-                .get("content-type")
-                .and_then(|ct| ct.to_str().ok())
-                .map(|ct| ct.starts_with("application/problem+json"))
-                .unwrap_or(false)
-            {
-                extract_problem_details(response).await
-            } else {
-                None
-            };
             return Err(CachedClientError::<Error>::Client {
                 retries: retry_count,
-                err: ErrorKind::from_reqwest_with_problem_details(
-                    url,
-                    status_error,
-                    problem_details,
-                )
-                .into(),
+                err: ErrorKind::from_reqwest(url, status_error).into(),
             }
             .into());
         }

@@ -737,21 +680,19 @@ impl CachedClient {

         if result
             .as_ref()
-            .is_err_and(|err| is_transient_network_error(err.error()))
+            .is_err_and(|err| is_extended_transient_error(err.error()))
         {
             // If middleware already retried, consider that in our retry budget
             let total_retries = past_retries + middleware_retries;
             let retry_decision = retry_policy.should_retry(start_time, total_retries);
             if let reqwest_retry::RetryDecision::Retry { execute_after } = retry_decision {
+                debug!(
+                    "Transient failure while handling response from {}; retrying...",
+                    req.url(),
+                );
                 let duration = execute_after
                     .duration_since(SystemTime::now())
                     .unwrap_or_else(|_| Duration::default());
-
-                debug!(
-                    "Transient failure while handling response from {}; retrying after {:.1}s...",
-                    req.url(),
-                    duration.as_secs_f32(),
-                );
                 tokio::time::sleep(duration).await;
                 past_retries += 1;
                 continue;

@@ -798,19 +739,18 @@ impl CachedClient {
         if result
             .as_ref()
             .err()
-            .is_some_and(|err| is_transient_network_error(err.error()))
+            .is_some_and(|err| is_extended_transient_error(err.error()))
         {
             let total_retries = past_retries + middleware_retries;
             let retry_decision = retry_policy.should_retry(start_time, total_retries);
             if let reqwest_retry::RetryDecision::Retry { execute_after } = retry_decision {
+                debug!(
+                    "Transient failure while handling response from {}; retrying...",
+                    req.url(),
+                );
                 let duration = execute_after
                     .duration_since(SystemTime::now())
                     .unwrap_or_else(|_| Duration::default());
-                debug!(
-                    "Transient failure while handling response from {}; retrying after {}s...",
-                    req.url(),
-                    duration.as_secs(),
-                );
                 tokio::time::sleep(duration).await;
                 past_retries += 1;
                 continue;
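Both retry loops in this section share one skeleton: ask the policy for a decision given the start time and attempt count, sleep until `execute_after`, then loop. A condensed, runnable sketch of that shape (the always-successful operation is a stand-in; real code would classify the error first):

    use std::time::{Duration, SystemTime};

    use reqwest_retry::{RetryDecision, RetryPolicy, policies::ExponentialBackoff};

    #[tokio::main]
    async fn main() {
        let policy = ExponentialBackoff::builder().build_with_max_retries(3);
        let start_time = SystemTime::now();
        let mut past_retries = 0;

        loop {
            // Stand-in for executing the request and classifying the failure.
            let transient_failure = false;

            if transient_failure {
                if let RetryDecision::Retry { execute_after } =
                    policy.should_retry(start_time, past_retries)
                {
                    // Sleep until the backoff deadline, clamping to zero if it passed.
                    let duration = execute_after
                        .duration_since(SystemTime::now())
                        .unwrap_or_else(|_| Duration::default());
                    tokio::time::sleep(duration).await;
                    past_retries += 1;
                    continue;
                }
            }
            break;
        }
        println!("finished after {past_retries} retries");
    }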
@@ -1,11 +1,9 @@
-use async_http_range_reader::AsyncHttpRangeReaderError;
-use async_zip::error::ZipError;
-use serde::Deserialize;
 use std::fmt::{Display, Formatter};
 use std::ops::Deref;
-use std::path::PathBuf;

-use uv_cache::Error as CacheError;
+use async_http_range_reader::AsyncHttpRangeReaderError;
+use async_zip::error::ZipError;
+
 use uv_distribution_filename::{WheelFilename, WheelFilenameError};
 use uv_normalize::PackageName;
 use uv_redacted::DisplaySafeUrl;

@@ -13,61 +11,6 @@ use uv_redacted::DisplaySafeUrl;
 use crate::middleware::OfflineError;
 use crate::{FlatIndexError, html};

-/// RFC 9457 Problem Details for HTTP APIs
-///
-/// This structure represents the standard format for machine-readable details
-/// of errors in HTTP response bodies as defined in RFC 9457.
-#[derive(Debug, Clone, Deserialize)]
-pub struct ProblemDetails {
-    /// A URI reference that identifies the problem type.
-    /// When dereferenced, it SHOULD provide human-readable documentation for the problem type.
-    #[serde(rename = "type", default = "default_problem_type")]
-    pub problem_type: String,
-
-    /// A short, human-readable summary of the problem type.
-    pub title: Option<String>,
-
-    /// The HTTP status code generated by the origin server for this occurrence of the problem.
-    pub status: Option<u16>,
-
-    /// A human-readable explanation specific to this occurrence of the problem.
-    pub detail: Option<String>,
-
-    /// A URI reference that identifies the specific occurrence of the problem.
-    pub instance: Option<String>,
-}
-
-/// Default problem type URI as per RFC 9457
-#[inline]
-fn default_problem_type() -> String {
-    "about:blank".to_string()
-}
-
-impl ProblemDetails {
-    /// Get a human-readable description of the problem
-    pub fn description(&self) -> Option<String> {
-        match self {
-            Self {
-                title: Some(title),
-                detail: Some(detail),
-                ..
-            } => Some(format!("Server message: {title}, {detail}")),
-            Self {
-                title: Some(title), ..
-            } => Some(format!("Server message: {title}")),
-            Self {
-                detail: Some(detail),
-                ..
-            } => Some(format!("Server message: {detail}")),
-            Self {
-                status: Some(status),
-                ..
-            } => Some(format!("HTTP error {status}")),
-            _ => None,
-        }
-    }
-}
-
 #[derive(Debug)]
 pub struct Error {
     kind: Box<ErrorKind>,

@@ -79,9 +22,8 @@ impl Display for Error {
         if self.retries > 0 {
             write!(
                 f,
-                "Request failed after {retries} {subject}",
-                retries = self.retries,
-                subject = if self.retries > 1 { "retries" } else { "retry" }
+                "Request failed after {retries} retries",
+                retries = self.retries
             )
         } else {
             Display::fmt(&self.kind, f)

@@ -133,11 +75,6 @@ impl Error {
         ErrorKind::BadHtml { source: err, url }.into()
     }

-    /// Create a new error from a `MessagePack` parsing error.
-    pub(crate) fn from_msgpack_err(err: rmp_serde::decode::Error, url: DisplaySafeUrl) -> Self {
-        ErrorKind::BadMessagePack { source: err, url }.into()
-    }
-
     /// Returns `true` if this error corresponds to an offline error.
     pub(crate) fn is_offline(&self) -> bool {
         matches!(&*self.kind, ErrorKind::Offline(_))

@@ -273,15 +210,11 @@ pub enum ErrorKind {
     /// Make sure the package name is spelled correctly and that you've
     /// configured the right registry to fetch it from.
     #[error("Package `{0}` was not found in the registry")]
-    RemotePackageNotFound(PackageName),
+    PackageNotFound(String),

     /// The package was not found in the local (file-based) index.
     #[error("Package `{0}` was not found in the local index")]
-    LocalPackageNotFound(PackageName),
+    FileNotFound(String),

-    /// The root was not found in the local (file-based) index.
-    #[error("Local index not found at: `{}`", _0.display())]
-    LocalIndexNotFound(PathBuf),
-
     /// The metadata file could not be parsed.
     #[error("Couldn't parse metadata of {0} from {1}")]

@@ -291,12 +224,16 @@ pub enum ErrorKind {
         #[source] Box<uv_pypi_types::MetadataError>,
     ),

+    /// The metadata file was not found in the wheel.
+    #[error("Metadata file `{0}` was not found in {1}")]
+    MetadataNotFound(WheelFilename, String),
+
     /// An error that happened while making a request or in a reqwest middleware.
     #[error("Failed to fetch: `{0}`")]
     WrappedReqwestError(DisplaySafeUrl, #[source] WrappedReqwestError),

     /// Add the number of failed retries to the error.
-    #[error("Request failed after {retries} {subject}", subject = if *retries > 1 { "retries" } else { "retry" })]
+    #[error("Request failed after {retries} retries")]
     RequestWithRetries {
         source: Box<ErrorKind>,
         retries: u32,

@@ -314,12 +251,6 @@ pub enum ErrorKind {
         url: DisplaySafeUrl,
     },

-    #[error("Received some unexpected MessagePack from {}", url)]
-    BadMessagePack {
-        source: rmp_serde::decode::Error,
-        url: DisplaySafeUrl,
-    },
-
     #[error("Failed to read zip with range requests: `{0}`")]
     AsyncHttpRangeReader(DisplaySafeUrl, #[source] AsyncHttpRangeReaderError),

@@ -338,9 +269,6 @@ pub enum ErrorKind {
     #[error("Failed to write to the client cache")]
     CacheWrite(#[source] std::io::Error),

-    #[error("Failed to acquire lock on the client cache")]
-    CacheLock(#[source] CacheError),
-
     #[error(transparent)]
     Io(std::io::Error),

@@ -391,19 +319,7 @@ impl ErrorKind {
         }
     }

-        Self::WrappedReqwestError(url, WrappedReqwestError::from(err))
-    }
-
-    /// Create an [`ErrorKind`] from a [`reqwest::Error`] with problem details.
-    pub(crate) fn from_reqwest_with_problem_details(
-        url: DisplaySafeUrl,
-        error: reqwest::Error,
-        problem_details: Option<ProblemDetails>,
-    ) -> Self {
-        Self::WrappedReqwestError(
-            url,
-            WrappedReqwestError::with_problem_details(error.into(), problem_details),
-        )
+        Self::WrappedReqwestError(url, WrappedReqwestError(err))
     }
 }

@@ -413,26 +329,12 @@ impl ErrorKind {
 /// Wraps a [`reqwest_middleware::Error`] instead of an [`reqwest::Error`] since the actual reqwest
 /// error may be below some context in the [`anyhow::Error`].
 #[derive(Debug)]
-pub struct WrappedReqwestError {
-    error: reqwest_middleware::Error,
-    problem_details: Option<Box<ProblemDetails>>,
-}
+pub struct WrappedReqwestError(reqwest_middleware::Error);

 impl WrappedReqwestError {
-    /// Create a new `WrappedReqwestError` with optional problem details
-    pub fn with_problem_details(
-        error: reqwest_middleware::Error,
-        problem_details: Option<ProblemDetails>,
-    ) -> Self {
-        Self {
-            error,
-            problem_details: problem_details.map(Box::new),
-        }
-    }
-
     /// Return the inner [`reqwest::Error`] from the error chain, if it exists.
     fn inner(&self) -> Option<&reqwest::Error> {
-        match &self.error {
+        match &self.0 {
             reqwest_middleware::Error::Reqwest(err) => Some(err),
             reqwest_middleware::Error::Middleware(err) => err.chain().find_map(|err| {
                 if let Some(err) = err.downcast_ref::<reqwest::Error>() {

@@ -494,19 +396,13 @@ impl WrappedReqwestError {

 impl From<reqwest::Error> for WrappedReqwestError {
     fn from(error: reqwest::Error) -> Self {
-        Self {
-            error: error.into(),
-            problem_details: None,
-        }
+        Self(error.into())
     }
 }

 impl From<reqwest_middleware::Error> for WrappedReqwestError {
     fn from(error: reqwest_middleware::Error) -> Self {
-        Self {
-            error,
-            problem_details: None,
-        }
+        Self(error)
     }
 }

@@ -514,7 +410,7 @@ impl Deref for WrappedReqwestError {
     type Target = reqwest_middleware::Error;

     fn deref(&self) -> &Self::Target {
-        &self.error
+        &self.0
     }
 }

@@ -523,15 +419,9 @@ impl Display for WrappedReqwestError {
         if self.is_likely_offline() {
             // Insert an extra hint, we'll show the wrapped error through `source`
             f.write_str("Could not connect, are you offline?")
-        } else if let Some(problem_details) = &self.problem_details {
-            // Show problem details if available
-            match problem_details.description() {
-                None => Display::fmt(&self.error, f),
-                Some(message) => f.write_str(&message),
-            }
         } else {
             // Show the wrapped error
-            Display::fmt(&self.error, f)
+            Display::fmt(&self.0, f)
         }
     }
 }

@@ -540,117 +430,10 @@ impl std::error::Error for WrappedReqwestError {
     fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
         if self.is_likely_offline() {
             // `Display` is inserting an extra message, so we need to show the wrapped error
-            Some(&self.error)
-        } else if self.problem_details.is_some() {
-            // `Display` is showing problem details, so show the wrapped error as source
-            Some(&self.error)
+            Some(&self.0)
         } else {
             // `Display` is showing the wrapped error, continue with its source
-            self.error.source()
+            self.0.source()
         }
     }
 }

-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn test_problem_details_parsing() {
-        let json = r#"{
-            "type": "https://example.com/probs/out-of-credit",
-            "title": "You do not have enough credit.",
-            "detail": "Your current balance is 30, but that costs 50.",
-            "status": 403,
-            "instance": "/account/12345/msgs/abc"
-        }"#;
-
-        let problem_details: ProblemDetails = serde_json::from_slice(json.as_bytes()).unwrap();
-        assert_eq!(
-            problem_details.problem_type,
-            "https://example.com/probs/out-of-credit"
-        );
-        assert_eq!(
-            problem_details.title,
-            Some("You do not have enough credit.".to_string())
-        );
-        assert_eq!(
-            problem_details.detail,
-            Some("Your current balance is 30, but that costs 50.".to_string())
-        );
-        assert_eq!(problem_details.status, Some(403));
-        assert_eq!(
-            problem_details.instance,
-            Some("/account/12345/msgs/abc".to_string())
-        );
-    }
-
-    #[test]
-    fn test_problem_details_default_type() {
-        let json = r#"{
-            "detail": "Something went wrong",
-            "status": 500
-        }"#;
-
-        let problem_details: ProblemDetails = serde_json::from_slice(json.as_bytes()).unwrap();
-        assert_eq!(problem_details.problem_type, "about:blank");
-        assert_eq!(
-            problem_details.detail,
-            Some("Something went wrong".to_string())
-        );
-        assert_eq!(problem_details.status, Some(500));
-    }
-
-    #[test]
-    fn test_problem_details_description() {
-        let json = r#"{
-            "detail": "Detailed error message",
-            "title": "Error Title",
-            "status": 400
-        }"#;
-
-        let problem_details: ProblemDetails = serde_json::from_slice(json.as_bytes()).unwrap();
-        assert_eq!(
-            problem_details.description().unwrap(),
-            "Server message: Error Title, Detailed error message"
-        );
-
-        let json_no_detail = r#"{
-            "title": "Error Title",
-            "status": 400
-        }"#;
-
-        let problem_details: ProblemDetails =
-            serde_json::from_slice(json_no_detail.as_bytes()).unwrap();
-        assert_eq!(
-            problem_details.description().unwrap(),
-            "Server message: Error Title"
-        );
-
-        let json_minimal = r#"{
-            "status": 400
-        }"#;
-
-        let problem_details: ProblemDetails =
-            serde_json::from_slice(json_minimal.as_bytes()).unwrap();
-        assert_eq!(problem_details.description().unwrap(), "HTTP error 400");
-    }
-
-    #[test]
-    fn test_problem_details_with_extensions() {
-        let json = r#"{
-            "type": "https://example.com/probs/out-of-credit",
-            "title": "You do not have enough credit.",
-            "detail": "Your current balance is 30, but that costs 50.",
-            "status": 403,
-            "balance": 30,
-            "accounts": ["/account/12345", "/account/67890"]
-        }"#;
-
-        let problem_details: ProblemDetails = serde_json::from_slice(json.as_bytes()).unwrap();
-        assert_eq!(
-            problem_details.title,
-            Some("You do not have enough credit.".to_string())
-        );
-    }
-}
|
|
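Note: the tests removed above pin down the shape of a parser for RFC 9457 "problem details" responses. As a rough, self-contained sketch of a struct that would satisfy those tests (field and method names inferred from the assertions, not copied from uv's source):

```rust
use serde::Deserialize;

/// RFC 9457 "problem details" payload, as exercised by the tests above.
/// Unknown members ("balance", "accounts", ...) are simply ignored by serde.
#[derive(Debug, Deserialize)]
struct ProblemDetails {
    /// Defaults to "about:blank" when the server omits it.
    #[serde(rename = "type", default = "default_type")]
    problem_type: String,
    title: Option<String>,
    detail: Option<String>,
    status: Option<u16>,
    instance: Option<String>,
}

fn default_type() -> String {
    "about:blank".to_string()
}

impl ProblemDetails {
    /// Human-readable summary: prefer title/detail, fall back to the status code.
    fn description(&self) -> Option<String> {
        match (&self.title, &self.detail) {
            (Some(title), Some(detail)) => Some(format!("Server message: {title}, {detail}")),
            (Some(title), None) => Some(format!("Server message: {title}")),
            (None, Some(detail)) => Some(format!("Server message: {detail}")),
            (None, None) => self.status.map(|status| format!("HTTP error {status}")),
        }
    }
}
```

This sketch reproduces the behaviors the deleted tests asserted: a missing `type` falls back to `about:blank`, extension members are tolerated, and `description()` degrades from "title, detail" to "title" to "HTTP error NNN".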
@@ -14,7 +14,7 @@ use uv_redacted::DisplaySafeUrl;
 use uv_small_str::SmallString;
 
 use crate::cached_client::{CacheControl, CachedClientError};
-use crate::html::SimpleDetailHTML;
+use crate::html::SimpleHtml;
 use crate::{CachedClient, Connectivity, Error, ErrorKind, OwnedArchive};
 
 #[derive(Debug, thiserror::Error)]
@@ -189,13 +189,13 @@ impl<'a> FlatIndexClient<'a> {
         async {
             // Use the response URL, rather than the request URL, as the base for relative URLs.
             // This ensures that we handle redirects and other URL transformations correctly.
-            let url = DisplaySafeUrl::from_url(response.url().clone());
+            let url = DisplaySafeUrl::from(response.url().clone());
 
             let text = response
                 .text()
                 .await
                 .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-            let SimpleDetailHTML { base, files } = SimpleDetailHTML::parse(&text, &url)
+            let SimpleHtml { base, files } = SimpleHtml::parse(&text, &url)
                 .map_err(|err| Error::from_html_err(err, url.clone()))?;
 
             // Convert to a reference-counted string.
@@ -204,7 +204,7 @@ impl<'a> FlatIndexClient<'a> {
             let unarchived: Vec<File> = files
                 .into_iter()
                 .filter_map(|file| {
-                    match File::try_from_pypi(file, &base) {
+                    match File::try_from(file, &base) {
                         Ok(file) => Some(file),
                         Err(err) => {
                             // Ignore files with unparsable version specifiers.
@@ -305,7 +305,6 @@ impl<'a> FlatIndexClient<'a> {
                 upload_time_utc_ms: None,
                 url: FileLocation::AbsoluteUrl(UrlString::from(url)),
                 yanked: None,
-                zstd: None,
             };
 
             let Some(filename) = DistFilename::try_from_normalized_filename(filename) else {
@@ -321,63 +320,6 @@ impl<'a> FlatIndexClient<'a> {
                 index: flat_index.clone(),
             });
         }
-
-        dists.sort_by(|a, b| {
-            a.filename
-                .cmp(&b.filename)
-                .then_with(|| a.index.cmp(&b.index))
-        });
-
         Ok(FlatIndexEntries::from_entries(dists))
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use fs_err::File;
-    use std::io::Write;
-    use tempfile::tempdir;
-
-    #[test]
-    fn read_from_directory_sorts_distributions() {
-        let dir = tempdir().unwrap();
-
-        let filenames = [
-            "beta-2.0.0-py3-none-any.whl",
-            "alpha-1.0.0.tar.gz",
-            "alpha-1.0.0-py3-none-any.whl",
-        ];
-
-        for name in &filenames {
-            let mut file = File::create(dir.path().join(name)).unwrap();
-            file.write_all(b"").unwrap();
-        }
-
-        let entries = FlatIndexClient::read_from_directory(
-            dir.path(),
-            &IndexUrl::parse(&dir.path().to_string_lossy(), None).unwrap(),
-        )
-        .unwrap();
-
-        let actual = entries
-            .entries
-            .iter()
-            .map(|entry| entry.filename.to_string())
-            .collect::<Vec<_>>();
-
-        let mut expected = filenames
-            .iter()
-            .map(|name| DistFilename::try_from_normalized_filename(name).unwrap())
-            .collect::<Vec<_>>();
-
-        expected.sort();
-
-        let expected = expected
-            .into_iter()
-            .map(|filename| filename.to_string())
-            .collect::<Vec<_>>();
-
-        assert_eq!(actual, expected);
-    }
-}
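Note: the `sort_by` block and its companion test removed above exist to make flat-index resolution deterministic: entries are ordered by filename first, with the index URL as a tie-breaker. A minimal standalone sketch of the same idiom, using plain tuples rather than uv's types:

```rust
fn main() {
    // (filename, index) pairs, standing in for flat-index entries.
    let mut dists = vec![
        ("beta-2.0.0-py3-none-any.whl", "https://b.example"),
        ("alpha-1.0.0.tar.gz", "https://a.example"),
        ("alpha-1.0.0-py3-none-any.whl", "https://a.example"),
    ];

    // Primary key: filename; tie-breaker: index. `then_with` only runs
    // on ties, so the comparison stays cheap in the common case.
    dists.sort_by(|a, b| a.0.cmp(b.0).then_with(|| a.1.cmp(b.1)));

    // Plain lexicographic order: '-' sorts before '.', so the wheel wins.
    assert_eq!(dists[0].0, "alpha-1.0.0-py3-none-any.whl");
}
```

(The deleted test sorts `DistFilename` values, whose ordering may differ from plain string order; the sketch only illustrates the `cmp`/`then_with` pattern.)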
@@ -3,32 +3,32 @@ use std::str::FromStr;
 use jiff::Timestamp;
 use tl::HTMLTag;
 use tracing::{debug, instrument, warn};
+use url::Url;
 
-use uv_normalize::PackageName;
 use uv_pep440::VersionSpecifiers;
-use uv_pypi_types::{BaseUrl, CoreMetadata, Hashes, PypiFile, Yanked};
+use uv_pypi_types::{BaseUrl, CoreMetadata, File, Hashes, Yanked};
 use uv_pypi_types::{HashError, LenientVersionSpecifiers};
-use uv_redacted::{DisplaySafeUrl, DisplaySafeUrlError};
+use uv_redacted::DisplaySafeUrl;
 
 /// A parsed structure from PyPI "HTML" index format for a single package.
 #[derive(Debug, Clone)]
-pub(crate) struct SimpleDetailHTML {
+pub(crate) struct SimpleHtml {
     /// The [`BaseUrl`] to which all relative URLs should be resolved.
     pub(crate) base: BaseUrl,
-    /// The list of [`PypiFile`]s available for download sorted by filename.
-    pub(crate) files: Vec<PypiFile>,
+    /// The list of [`File`]s available for download sorted by filename.
+    pub(crate) files: Vec<File>,
 }
 
-impl SimpleDetailHTML {
-    /// Parse the list of [`PypiFile`]s from the simple HTML page returned by the given URL.
+impl SimpleHtml {
+    /// Parse the list of [`File`]s from the simple HTML page returned by the given URL.
     #[instrument(skip_all, fields(url = % url))]
-    pub(crate) fn parse(text: &str, url: &DisplaySafeUrl) -> Result<Self, Error> {
+    pub(crate) fn parse(text: &str, url: &Url) -> Result<Self, Error> {
         let dom = tl::parse(text, tl::ParserOptions::default())?;
 
         // Parse the first `<base>` tag, if any, to determine the base URL to which all
         // relative URLs should be resolved. The HTML spec requires that the `<base>` tag
         // appear before other tags with attribute values of URLs.
-        let base = BaseUrl::from(
+        let base = BaseUrl::from(DisplaySafeUrl::from(
             dom.nodes()
                 .iter()
                 .filter_map(|node| node.as_tag())
@@ -38,10 +38,10 @@ impl SimpleDetailHTML {
                 .transpose()?
                 .flatten()
                 .unwrap_or_else(|| url.clone()),
-        );
+        ));
 
         // Parse each `<a>` tag, to extract the filename, hash, and URL.
-        let mut files: Vec<PypiFile> = dom
+        let mut files: Vec<File> = dom
             .nodes()
             .iter()
             .filter_map(|node| node.as_tag())
@@ -67,20 +67,19 @@ impl SimpleDetailHTML {
     }
 
     /// Parse the `href` from a `<base>` tag.
-    fn parse_base(base: &HTMLTag) -> Result<Option<DisplaySafeUrl>, Error> {
+    fn parse_base(base: &HTMLTag) -> Result<Option<Url>, Error> {
         let Some(Some(href)) = base.attributes().get("href") else {
             return Ok(None);
         };
         let href = std::str::from_utf8(href.as_bytes())?;
-        let url =
-            DisplaySafeUrl::parse(href).map_err(|err| Error::UrlParse(href.to_string(), err))?;
+        let url = Url::parse(href).map_err(|err| Error::UrlParse(href.to_string(), err))?;
         Ok(Some(url))
     }
 
-    /// Parse a [`PypiFile`] from an `<a>` tag.
+    /// Parse a [`File`] from an `<a>` tag.
     ///
-    /// Returns `None` if the `<a>` doesn't have an `href` attribute.
-    fn parse_anchor(link: &HTMLTag) -> Result<Option<PypiFile>, Error> {
+    /// Returns `None` if the `<a>` don't doesn't have an `href` attribute.
+    fn parse_anchor(link: &HTMLTag) -> Result<Option<File>, Error> {
         // Extract the href.
         let Some(href) = link
             .attributes()
@@ -213,7 +212,7 @@ impl SimpleDetailHTML {
             .map(|upload_time| html_escape::decode_html_entities(upload_time))
             .and_then(|upload_time| Timestamp::from_str(&upload_time).ok());
 
-        Ok(Some(PypiFile {
+        Ok(Some(File {
             core_metadata,
             yanked,
             requires_python,
@@ -226,56 +225,6 @@ impl SimpleDetailHTML {
     }
 }
 
-/// A parsed structure from PyPI "HTML" index format listing all available packages.
-#[derive(Debug, Clone)]
-pub(crate) struct SimpleIndexHtml {
-    /// The list of project names available in the index.
-    pub(crate) projects: Vec<PackageName>,
-}
-
-impl SimpleIndexHtml {
-    /// Parse the list of project names from the Simple API index HTML page.
-    pub(crate) fn parse(text: &str) -> Result<Self, Error> {
-        let dom = tl::parse(text, tl::ParserOptions::default())?;
-
-        // Parse each `<a>` tag to extract the project name.
-        let parser = dom.parser();
-        let mut projects = dom
-            .nodes()
-            .iter()
-            .filter_map(|node| node.as_tag())
-            .filter(|link| link.name().as_bytes() == b"a")
-            .filter_map(|link| Self::parse_anchor_project_name(link, parser))
-            .collect::<Vec<_>>();
-
-        // Sort for deterministic ordering.
-        projects.sort_unstable();
-
-        Ok(Self { projects })
-    }
-
-    /// Parse a project name from an `<a>` tag.
-    ///
-    /// Returns `None` if the `<a>` doesn't have an `href` attribute or text content.
-    fn parse_anchor_project_name(link: &HTMLTag, parser: &tl::Parser) -> Option<PackageName> {
-        // Extract the href.
-        link.attributes()
-            .get("href")
-            .flatten()
-            .filter(|bytes| !bytes.as_bytes().is_empty())?;
-
-        // Extract the text content, which should be the project name.
-        let inner_text = link.inner_text(parser);
-        let project_name = inner_text.trim();
-
-        if project_name.is_empty() {
-            return None;
-        }
-
-        PackageName::from_str(project_name).ok()
-    }
-}
-
 #[derive(Debug, thiserror::Error)]
 pub enum Error {
     #[error(transparent)]
@@ -285,7 +234,7 @@ pub enum Error {
     FromUtf8(#[from] std::string::FromUtf8Error),
 
     #[error("Failed to parse URL: {0}")]
-    UrlParse(String, #[source] DisplaySafeUrlError),
+    UrlParse(String, #[source] url::ParseError),
 
     #[error(transparent)]
     HtmlParse(#[from] tl::ParseError),
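Before the snapshot tests below, a note on the `<base>` handling that `parse` implements: when an index page declares `<base href="...">`, every relative file link must be joined against that URL instead of the page's own URL. A minimal sketch with the `url` crate (a generic illustration under assumed example URLs, not uv's code):

```rust
use url::Url;

fn main() -> Result<(), url::ParseError> {
    // The URL the page was fetched from.
    let page = Url::parse("https://example.org/simple/flask/")?;
    // An optional `<base href="...">` found in the document head.
    let base_href: Option<&str> = Some("https://mirror.example.org/packages/");

    // Resolve relative anchors against the `<base>` URL when present,
    // falling back to the page URL otherwise.
    let effective = match base_href {
        Some(href) => Url::parse(href)?,
        None => page.clone(),
    };
    let file = effective.join("0.1/Flask-0.1.tar.gz")?;
    assert_eq!(
        file.as_str(),
        "https://mirror.example.org/packages/0.1/Flask-0.1.tar.gz"
    );
    Ok(())
}
```

This is why the tests below exercise relative hrefs such as `0.1/Flask-0.1.tar.gz` and absolute-path hrefs such as `/whl/Jinja2-3.1.2-py3-none-any.whl`: both are resolved through `Url::join` semantics against the effective base.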
@@ -325,10 +274,10 @@ mod tests {
 </html>
 <!--TIMESTAMP 1703347410-->
     "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -347,7 +296,7 @@ mod tests {
                 },
             ),
             files: [
-                PypiFile {
+                File {
                     core_metadata: None,
                     filename: "Jinja2-3.1.2-py3-none-any.whl",
                     hashes: Hashes {
@@ -382,10 +331,10 @@ mod tests {
 </html>
 <!--TIMESTAMP 1703347410-->
     "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -404,7 +353,7 @@ mod tests {
                 },
             ),
             files: [
-                PypiFile {
+                File {
                     core_metadata: None,
                     filename: "Jinja2-3.1.2-py3-none-any.whl",
                     hashes: Hashes {
@@ -442,10 +391,10 @@ mod tests {
 </html>
 <!--TIMESTAMP 1703347410-->
     "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -464,7 +413,7 @@ mod tests {
                 },
             ),
             files: [
-                PypiFile {
+                File {
                     core_metadata: None,
                     filename: "Jinja2-3.1.2+233fca715f49-py3-none-any.whl",
                     hashes: Hashes {
@@ -499,10 +448,10 @@ mod tests {
 </html>
 <!--TIMESTAMP 1703347410-->
     "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -521,7 +470,7 @@ mod tests {
                 },
             ),
             files: [
-                PypiFile {
+                File {
                     core_metadata: None,
                     filename: "Jinja2-3.1.2-py3-none-any.whl",
                     hashes: Hashes {
@@ -556,10 +505,10 @@ mod tests {
 </html>
 <!--TIMESTAMP 1703347410-->
     "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -578,7 +527,7 @@ mod tests {
                 },
             ),
             files: [
-                PypiFile {
+                File {
                     core_metadata: None,
                     filename: "torchtext-0.17.0+cpu-cp39-cp39-win_amd64.whl",
                     hashes: Hashes {
@@ -613,10 +562,10 @@ mod tests {
 </html>
 <!--TIMESTAMP 1703347410-->
     "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -635,7 +584,7 @@ mod tests {
                 },
             ),
             files: [
-                PypiFile {
+                File {
                     core_metadata: None,
                     filename: "Jinja2-3.1.2-py3-none-any.whl",
                     hashes: Hashes {
@@ -668,10 +617,10 @@ mod tests {
 </html>
 <!--TIMESTAMP 1703347410-->
     "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -690,7 +639,7 @@ mod tests {
                 },
             ),
             files: [
-                PypiFile {
+                File {
                     core_metadata: None,
                     filename: "Jinja2-3.1.2-py3-none-any.whl",
                     hashes: Hashes {
@@ -723,10 +672,10 @@ mod tests {
 </html>
 <!--TIMESTAMP 1703347410-->
     ";
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -761,10 +710,10 @@ mod tests {
 </html>
 <!--TIMESTAMP 1703347410-->
     "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -799,10 +748,10 @@ mod tests {
 </html>
 <!--TIMESTAMP 1703347410-->
     "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -821,7 +770,7 @@ mod tests {
                 },
             ),
             files: [
-                PypiFile {
+                File {
                     core_metadata: None,
                     filename: "Jinja2-3.1.2-py3-none-any.whl",
                     hashes: Hashes {
@@ -854,10 +803,10 @@ mod tests {
 </html>
 <!--TIMESTAMP 1703347410-->
     "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -876,7 +825,7 @@ mod tests {
                 },
             ),
             files: [
-                PypiFile {
+                File {
                     core_metadata: None,
                     filename: "Jinja2-3.1.2-py3-none-any.whl",
                     hashes: Hashes {
@@ -909,11 +858,11 @@ mod tests {
 </html>
 <!--TIMESTAMP 1703347410-->
     "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base);
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base);
         insta::assert_debug_snapshot!(result, @r#"
         Ok(
-            SimpleDetailHTML {
+            SimpleHtml {
                 base: BaseUrl(
                     DisplaySafeUrl {
                         scheme: "https",
@@ -932,7 +881,7 @@ mod tests {
                     },
                 ),
                 files: [
-                    PypiFile {
+                    File {
                         core_metadata: None,
                         filename: "Jinja2-3.1.2-py3-none-any.whl",
                         hashes: Hashes {
@@ -966,11 +915,11 @@ mod tests {
 </html>
 <!--TIMESTAMP 1703347410-->
     "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base);
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base);
         insta::assert_debug_snapshot!(result, @r#"
         Ok(
-            SimpleDetailHTML {
+            SimpleHtml {
                 base: BaseUrl(
                     DisplaySafeUrl {
                         scheme: "https",
@@ -989,7 +938,7 @@ mod tests {
                     },
                 ),
                 files: [
-                    PypiFile {
+                    File {
                         core_metadata: None,
                         filename: "Jinja2-3.1.2-py3-none-any.whl",
                         hashes: Hashes {
@@ -1023,8 +972,8 @@ mod tests {
 </html>
 <!--TIMESTAMP 1703347410-->
     "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap_err();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap_err();
         insta::assert_snapshot!(result, @"Unsupported hash algorithm (expected one of: `md5`, `sha256`, `sha384`, `sha512`, or `blake2b`) on: `blake2=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61`");
     }
 
@@ -1040,13 +989,11 @@ mod tests {
 </body>
 </html>
     "#;
-        let base = DisplaySafeUrl::parse(
-            "https://storage.googleapis.com/jax-releases/jax_cuda_releases.html",
-        )
-        .unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://storage.googleapis.com/jax-releases/jax_cuda_releases.html")
+            .unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -1065,7 +1012,7 @@ mod tests {
                 },
             ),
             files: [
-                PypiFile {
+                File {
                     core_metadata: None,
                     filename: "jaxlib-0.1.52+cuda100-cp36-none-manylinux2010_x86_64.whl",
                     hashes: Hashes {
@@ -1081,7 +1028,7 @@ mod tests {
                     url: "https://storage.googleapis.com/jax-releases/cuda100/jaxlib-0.1.52+cuda100-cp36-none-manylinux2010_x86_64.whl",
                     yanked: None,
                 },
-                PypiFile {
+                File {
                     core_metadata: None,
                     filename: "jaxlib-0.1.52+cuda100-cp37-none-manylinux2010_x86_64.whl",
                     hashes: Hashes {
@@ -1124,11 +1071,11 @@ mod tests {
 </body>
 </html>
     "#;
-        let base = DisplaySafeUrl::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask/")
+        let base = Url::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask/")
             .unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -1147,7 +1094,7 @@ mod tests {
                 },
             ),
             files: [
-                PypiFile {
+                File {
                     core_metadata: None,
                     filename: "Flask-0.1.tar.gz",
                     hashes: Hashes {
@@ -1165,7 +1112,7 @@ mod tests {
                     url: "0.1/Flask-0.1.tar.gz",
                     yanked: None,
                 },
-                PypiFile {
+                File {
                     core_metadata: None,
                     filename: "Flask-0.10.1.tar.gz",
                     hashes: Hashes {
@@ -1183,7 +1130,7 @@ mod tests {
                     url: "0.10.1/Flask-0.10.1.tar.gz",
                     yanked: None,
                 },
-                PypiFile {
+                File {
                     core_metadata: None,
                     filename: "flask-3.0.1.tar.gz",
                     hashes: Hashes {
@@ -1228,10 +1175,10 @@ mod tests {
 </body>
 </html>
     "#;
-        let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -1250,7 +1197,7 @@ mod tests {
                 },
             ),
             files: [
-                PypiFile {
+                File {
                     core_metadata: None,
                     filename: "Jinja2-3.1.2-py3-none-any.whl",
                     hashes: Hashes {
@@ -1300,11 +1247,11 @@ mod tests {
 </body>
 </html>
     "#;
-        let base = DisplaySafeUrl::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask/")
+        let base = Url::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask/")
             .unwrap();
-        let result = SimpleDetailHTML::parse(text, &base).unwrap();
+        let result = SimpleHtml::parse(text, &base).unwrap();
         insta::assert_debug_snapshot!(result, @r#"
-        SimpleDetailHTML {
+        SimpleHtml {
             base: BaseUrl(
                 DisplaySafeUrl {
                     scheme: "https",
@@ -1323,7 +1270,7 @@ mod tests {
                 },
             ),
             files: [
-                PypiFile {
+                File {
                     core_metadata: Some(
                         Bool(
                             true,
@@ -1343,7 +1290,7 @@ mod tests {
                     url: "/whl/Jinja2-3.1.2-py3-none-any.whl",
                     yanked: None,
                 },
-                PypiFile {
+                File {
                     core_metadata: Some(
                         Bool(
                             true,
@@ -1363,7 +1310,7 @@ mod tests {
                     url: "/whl/Jinja2-3.1.3-py3-none-any.whl",
                     yanked: None,
                 },
-                PypiFile {
+                File {
                     core_metadata: Some(
                         Bool(
                             false,
@@ -1383,7 +1330,7 @@ mod tests {
                     url: "/whl/Jinja2-3.1.4-py3-none-any.whl",
                     yanked: None,
                 },
-                PypiFile {
+                File {
                     core_metadata: Some(
                         Bool(
                             false,
@@ -1403,7 +1350,7 @@ mod tests {
                     url: "/whl/Jinja2-3.1.5-py3-none-any.whl",
                     yanked: None,
                 },
-                PypiFile {
+                File {
                     core_metadata: Some(
                         Bool(
                             true,
@@ -1427,180 +1374,4 @@ mod tests {
         }
         "#);
     }
-
-    /// Test parsing Simple API index (root) HTML.
-    #[test]
-    fn parse_simple_index() {
-        let text = r#"
-<!DOCTYPE html>
-<html>
-  <head>
-    <title>Simple Index</title>
-  </head>
-  <body>
-    <h1>Simple Index</h1>
-    <a href="/simple/flask/">flask</a><br/>
-    <a href="/simple/jinja2/">jinja2</a><br/>
-    <a href="/simple/requests/">requests</a><br/>
-  </body>
-</html>
-    "#;
-        let result = SimpleIndexHtml::parse(text).unwrap();
-        insta::assert_debug_snapshot!(result, @r#"
-        SimpleIndexHtml {
-            projects: [
-                PackageName(
-                    "flask",
-                ),
-                PackageName(
-                    "jinja2",
-                ),
-                PackageName(
-                    "requests",
-                ),
-            ],
-        }
-        "#);
-    }
-
-    /// Test that project names are sorted.
-    #[test]
-    fn parse_simple_index_sorted() {
-        let text = r#"
-<!DOCTYPE html>
-<html>
-  <body>
-    <a href="/simple/zebra/">zebra</a><br/>
-    <a href="/simple/apple/">apple</a><br/>
-    <a href="/simple/monkey/">monkey</a><br/>
-  </body>
-</html>
-    "#;
-        let result = SimpleIndexHtml::parse(text).unwrap();
-        insta::assert_debug_snapshot!(result, @r#"
-        SimpleIndexHtml {
-            projects: [
-                PackageName(
-                    "apple",
-                ),
-                PackageName(
-                    "monkey",
-                ),
-                PackageName(
-                    "zebra",
-                ),
-            ],
-        }
-        "#);
-    }
-
-    /// Test that links without `href` attributes are ignored.
-    #[test]
-    fn parse_simple_index_missing_href() {
-        let text = r#"
-<!DOCTYPE html>
-<html>
-  <body>
-    <h1>Simple Index</h1>
-    <a href="/simple/flask/">flask</a><br/>
-    <a>no-href-project</a><br/>
-    <a href="/simple/requests/">requests</a><br/>
-  </body>
-</html>
-    "#;
-        let result = SimpleIndexHtml::parse(text).unwrap();
-        insta::assert_debug_snapshot!(result, @r#"
-        SimpleIndexHtml {
-            projects: [
-                PackageName(
-                    "flask",
-                ),
-                PackageName(
-                    "requests",
-                ),
-            ],
-        }
-        "#);
-    }
-
-    /// Test that links with empty `href` attributes are ignored.
-    #[test]
-    fn parse_simple_index_empty_href() {
-        let text = r#"
-<!DOCTYPE html>
-<html>
-  <body>
-    <a href="">empty-href</a><br/>
-    <a href="/simple/flask/">flask</a><br/>
-  </body>
-</html>
-    "#;
-        let result = SimpleIndexHtml::parse(text).unwrap();
-        insta::assert_debug_snapshot!(result, @r#"
-        SimpleIndexHtml {
-            projects: [
-                PackageName(
-                    "flask",
-                ),
-            ],
-        }
-        "#);
-    }
-
-    /// Test that links with empty text content are ignored.
-    #[test]
-    fn parse_simple_index_empty_text() {
-        let text = r#"
-<!DOCTYPE html>
-<html>
-  <body>
-    <a href="/simple/empty/"></a><br/>
-    <a href="/simple/flask/">flask</a><br/>
-    <a href="/simple/whitespace/"> </a><br/>
-  </body>
-</html>
-    "#;
-        let result = SimpleIndexHtml::parse(text).unwrap();
-        insta::assert_debug_snapshot!(result, @r#"
-        SimpleIndexHtml {
-            projects: [
-                PackageName(
-                    "flask",
-                ),
-            ],
-        }
-        "#);
-    }
-
-    /// Test parsing with case variations and normalization.
-    #[test]
-    fn parse_simple_index_case_variations() {
-        let text = r#"
-<!DOCTYPE html>
-<html>
-  <body>
-    <a href="/simple/Flask/">Flask</a><br/>
-    <a href="/simple/django/">django</a><br/>
-    <a href="/simple/PyYAML/">PyYAML</a><br/>
-  </body>
-</html>
-    "#;
-        let result = SimpleIndexHtml::parse(text).unwrap();
-        // Note: We preserve the case as returned by the server
-        insta::assert_debug_snapshot!(result, @r#"
-        SimpleIndexHtml {
-            projects: [
-                PackageName(
-                    "django",
-                ),
-                PackageName(
-                    "flask",
-                ),
-                PackageName(
-                    "pyyaml",
-                ),
-            ],
-        }
-        "#);
-    }
 }
@@ -1,15 +1,15 @@
 pub use base_client::{
-    AuthIntegration, BaseClient, BaseClientBuilder, DEFAULT_MAX_REDIRECTS, DEFAULT_RETRIES,
-    ExtraMiddleware, RedirectClientWithMiddleware, RedirectPolicy, RequestBuilder,
-    RetryParsingError, UvRetryableStrategy, is_transient_network_error,
+    AuthIntegration, BaseClient, BaseClientBuilder, DEFAULT_RETRIES, ExtraMiddleware,
+    RedirectClientWithMiddleware, RequestBuilder, RetryParsingError, UvRetryableStrategy,
+    is_extended_transient_error, retries_from_env,
 };
 pub use cached_client::{CacheControl, CachedClient, CachedClientError, DataWithCachePolicy};
 pub use error::{Error, ErrorKind, WrappedReqwestError};
 pub use flat_index::{FlatIndexClient, FlatIndexEntries, FlatIndexEntry, FlatIndexError};
 pub use linehaul::LineHaul;
 pub use registry_client::{
-    Connectivity, MetadataFormat, RegistryClient, RegistryClientBuilder, SimpleDetailMetadata,
-    SimpleDetailMetadatum, SimpleIndexMetadata, VersionFiles,
+    Connectivity, MetadataFormat, RegistryClient, RegistryClientBuilder, SimpleMetadata,
+    SimpleMetadatum, VersionFiles,
 };
 pub use rkyvutil::{Deserializer, OwnedArchive, Serializer, Validator};
@@ -5,14 +5,12 @@ use tracing::instrument;
 
 use uv_pep508::MarkerEnvironment;
 use uv_platform_tags::{Os, Platform};
-use uv_static::EnvVars;
 use uv_version::version;
 
 #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
 pub struct Installer {
     pub name: Option<String>,
     pub version: Option<String>,
-    pub subcommand: Option<Vec<String>>,
 }
 
 #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
@@ -64,20 +62,11 @@ pub struct LineHaul {
 impl LineHaul {
     /// Initializes Linehaul information based on PEP 508 markers.
     #[instrument(name = "linehaul", skip_all)]
-    pub fn new(
-        markers: Option<&MarkerEnvironment>,
-        platform: Option<&Platform>,
-        subcommand: Option<Vec<String>>,
-    ) -> Self {
+    pub fn new(markers: &MarkerEnvironment, platform: Option<&Platform>) -> Self {
         // https://github.com/pypa/pip/blob/24.0/src/pip/_internal/network/session.py#L87
-        let looks_like_ci = [
-            EnvVars::BUILD_BUILDID,
-            EnvVars::BUILD_ID,
-            EnvVars::CI,
-            EnvVars::PIP_IS_CI,
-        ]
-        .iter()
-        .find_map(|&var_name| env::var(var_name).ok().map(|_| true));
+        let looks_like_ci = ["BUILD_BUILDID", "BUILD_ID", "CI", "PIP_IS_CI"]
+            .iter()
+            .find_map(|&var_name| env::var(var_name).ok().map(|_| true));
 
         let libc = match platform.map(Platform::os) {
             Some(Os::Manylinux { major, minor }) => Some(Libc {
@@ -128,19 +117,18 @@ impl LineHaul {
             installer: Option::from(Installer {
                 name: Some("uv".to_string()),
                 version: Some(version().to_string()),
-                subcommand,
             }),
-            python: markers.map(|markers| markers.python_full_version().version.to_string()),
+            python: Some(markers.python_full_version().version.to_string()),
             implementation: Option::from(Implementation {
-                name: markers.map(|markers| markers.platform_python_implementation().to_string()),
-                version: markers.map(|markers| markers.python_full_version().version.to_string()),
+                name: Some(markers.platform_python_implementation().to_string()),
+                version: Some(markers.python_full_version().version.to_string()),
             }),
             distro,
            system: Option::from(System {
-                name: markers.map(|markers| markers.platform_system().to_string()),
-                release: markers.map(|markers| markers.platform_release().to_string()),
+                name: Some(markers.platform_system().to_string()),
+                release: Some(markers.platform_release().to_string()),
            }),
-            cpu: markers.map(|markers| markers.platform_machine().to_string()),
+            cpu: Some(markers.platform_machine().to_string()),
             // Should probably always be None in uv.
             openssl_version: None,
             // Should probably always be None in uv.
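The `looks_like_ci` probe above only checks whether any of the well-known CI variables is set at all; the value itself is discarded. A standalone sketch of the same check, using plain env-var names rather than uv's `EnvVars` constants:

```rust
use std::env;

/// Returns `Some(true)` when any of the well-known CI environment variables
/// is set, mirroring pip's heuristic; `None` when none of them are present.
fn looks_like_ci() -> Option<bool> {
    ["BUILD_BUILDID", "BUILD_ID", "CI", "PIP_IS_CI"]
        .iter()
        .find_map(|&var_name| env::var(var_name).ok().map(|_| true))
}

fn main() {
    println!("running under CI: {}", looks_like_ci().unwrap_or(false));
}
```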
@@ -43,7 +43,7 @@ impl Middleware for OfflineMiddleware {
     ) -> reqwest_middleware::Result<Response> {
         Err(reqwest_middleware::Error::Middleware(
             OfflineError {
-                url: DisplaySafeUrl::from_url(req.url().clone()),
+                url: DisplaySafeUrl::from(req.url().clone()),
             }
             .into(),
         ))
@@ -15,10 +15,10 @@ use tokio::sync::{Mutex, Semaphore};
 use tracing::{Instrument, debug, info_span, instrument, trace, warn};
 use url::Url;
 
-use uv_auth::{CredentialsCache, Indexes, PyxTokenStore};
+use uv_auth::Indexes;
 use uv_cache::{Cache, CacheBucket, CacheEntry, WheelCache};
-use uv_configuration::IndexStrategy;
 use uv_configuration::KeyringProviderType;
+use uv_configuration::{IndexStrategy, TrustedHost};
 use uv_distribution_filename::{DistFilename, SourceDistFilename, WheelFilename};
 use uv_distribution_types::{
     BuiltDist, File, IndexCapabilities, IndexFormat, IndexLocations, IndexMetadataRef,
@@ -29,9 +29,7 @@ use uv_normalize::PackageName;
 use uv_pep440::Version;
 use uv_pep508::MarkerEnvironment;
 use uv_platform_tags::Platform;
-use uv_pypi_types::{
-    PypiSimpleDetail, PypiSimpleIndex, PyxSimpleDetail, PyxSimpleIndex, ResolutionMetadata,
-};
+use uv_pypi_types::{ResolutionMetadata, SimpleJson};
 use uv_redacted::DisplaySafeUrl;
 use uv_small_str::SmallString;
 use uv_torch::TorchStrategy;
@@ -39,7 +37,7 @@ use uv_torch::TorchStrategy;
 use crate::base_client::{BaseClientBuilder, ExtraMiddleware, RedirectPolicy};
 use crate::cached_client::CacheControl;
 use crate::flat_index::FlatIndexEntry;
-use crate::html::SimpleDetailHTML;
+use crate::html::SimpleHtml;
 use crate::remote_metadata::wheel_metadata_from_remote_zip;
 use crate::rkyvutil::OwnedArchive;
 use crate::{
@ -50,33 +48,32 @@ use crate::{
|
||||||
/// A builder for an [`RegistryClient`].
|
/// A builder for an [`RegistryClient`].
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct RegistryClientBuilder<'a> {
|
pub struct RegistryClientBuilder<'a> {
|
||||||
index_locations: IndexLocations,
|
index_urls: IndexUrls,
|
||||||
index_strategy: IndexStrategy,
|
index_strategy: IndexStrategy,
|
||||||
torch_backend: Option<TorchStrategy>,
|
torch_backend: Option<TorchStrategy>,
|
||||||
cache: Cache,
|
cache: Cache,
|
||||||
base_client_builder: BaseClientBuilder<'a>,
|
base_client_builder: BaseClientBuilder<'a>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> RegistryClientBuilder<'a> {
|
impl RegistryClientBuilder<'_> {
|
||||||
pub fn new(base_client_builder: BaseClientBuilder<'a>, cache: Cache) -> Self {
|
pub fn new(cache: Cache) -> Self {
|
||||||
Self {
|
Self {
|
||||||
index_locations: IndexLocations::default(),
|
index_urls: IndexUrls::default(),
|
||||||
index_strategy: IndexStrategy::default(),
|
index_strategy: IndexStrategy::default(),
|
||||||
torch_backend: None,
|
torch_backend: None,
|
||||||
cache,
|
cache,
|
||||||
base_client_builder,
|
base_client_builder: BaseClientBuilder::new(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> RegistryClientBuilder<'a> {
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn with_reqwest_client(mut self, client: reqwest::Client) -> Self {
|
pub fn index_locations(mut self, index_locations: &IndexLocations) -> Self {
|
||||||
self.base_client_builder = self.base_client_builder.custom_client(client);
|
self.index_urls = index_locations.index_urls();
|
||||||
self
|
self.base_client_builder = self
|
||||||
}
|
.base_client_builder
|
||||||
|
.indexes(Indexes::from(index_locations));
|
||||||
#[must_use]
|
|
||||||
pub fn index_locations(mut self, index_locations: IndexLocations) -> Self {
|
|
||||||
self.index_locations = index_locations;
|
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -98,6 +95,37 @@ impl<'a> RegistryClientBuilder<'a> {
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
pub fn allow_insecure_host(mut self, allow_insecure_host: Vec<TrustedHost>) -> Self {
|
||||||
|
self.base_client_builder = self
|
||||||
|
.base_client_builder
|
||||||
|
.allow_insecure_host(allow_insecure_host);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
pub fn connectivity(mut self, connectivity: Connectivity) -> Self {
|
||||||
|
self.base_client_builder = self.base_client_builder.connectivity(connectivity);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
pub fn retries(mut self, retries: u32) -> Self {
|
||||||
|
self.base_client_builder = self.base_client_builder.retries(retries);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn retries_from_env(mut self) -> anyhow::Result<Self> {
|
||||||
|
self.base_client_builder = self.base_client_builder.retries_from_env()?;
|
||||||
|
Ok(self)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[must_use]
|
||||||
|
pub fn native_tls(mut self, native_tls: bool) -> Self {
|
||||||
|
self.base_client_builder = self.base_client_builder.native_tls(native_tls);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
#[must_use]
|
#[must_use]
|
||||||
pub fn built_in_root_certs(mut self, built_in_root_certs: bool) -> Self {
|
pub fn built_in_root_certs(mut self, built_in_root_certs: bool) -> Self {
|
||||||
self.base_client_builder = self
|
self.base_client_builder = self
|
||||||
|
|
@ -148,36 +176,10 @@ impl<'a> RegistryClientBuilder<'a> {
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Add all authenticated sources to the cache.
|
pub fn build(self) -> RegistryClient {
|
||||||
pub fn cache_index_credentials(&mut self) {
|
|
||||||
for index in self.index_locations.known_indexes() {
|
|
||||||
if let Some(credentials) = index.credentials() {
|
|
||||||
trace!(
|
|
||||||
"Read credentials for index {}",
|
|
||||||
index
|
|
||||||
.name
|
|
||||||
.as_ref()
|
|
||||||
.map(ToString::to_string)
|
|
||||||
.unwrap_or_else(|| index.url.to_string())
|
|
||||||
);
|
|
||||||
if let Some(root_url) = index.root_url() {
|
|
||||||
self.base_client_builder
|
|
||||||
.store_credentials(&root_url, credentials.clone());
|
|
||||||
}
|
|
||||||
self.base_client_builder
|
|
||||||
.store_credentials(index.raw_url(), credentials);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn build(mut self) -> RegistryClient {
|
|
||||||
self.cache_index_credentials();
|
|
||||||
let index_urls = self.index_locations.index_urls();
|
|
||||||
|
|
||||||
// Build a base client
|
// Build a base client
|
||||||
let builder = self
|
let builder = self
|
||||||
.base_client_builder
|
.base_client_builder
|
||||||
.indexes(Indexes::from(&self.index_locations))
|
|
||||||
.redirect(RedirectPolicy::RetriggerMiddleware);
|
.redirect(RedirectPolicy::RetriggerMiddleware);
|
||||||
|
|
||||||
let client = builder.build();
|
let client = builder.build();
|
||||||
|
|
@ -189,7 +191,7 @@ impl<'a> RegistryClientBuilder<'a> {
|
||||||
let client = CachedClient::new(client);
|
let client = CachedClient::new(client);
|
||||||
|
|
||||||
RegistryClient {
|
RegistryClient {
|
||||||
index_urls,
|
index_urls: self.index_urls,
|
||||||
index_strategy: self.index_strategy,
|
index_strategy: self.index_strategy,
|
||||||
torch_backend: self.torch_backend,
|
torch_backend: self.torch_backend,
|
||||||
cache: self.cache,
|
cache: self.cache,
|
||||||
|
|
@ -197,20 +199,13 @@ impl<'a> RegistryClientBuilder<'a> {
|
||||||
client,
|
client,
|
||||||
timeout,
|
timeout,
|
||||||
flat_indexes: Arc::default(),
|
flat_indexes: Arc::default(),
|
||||||
pyx_token_store: PyxTokenStore::from_settings().ok(),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
     /// Share the underlying client between two different middleware configurations.
-    pub fn wrap_existing(mut self, existing: &BaseClient) -> RegistryClient {
-        self.cache_index_credentials();
-        let index_urls = self.index_locations.index_urls();
+    pub fn wrap_existing(self, existing: &BaseClient) -> RegistryClient {

         // Wrap in any relevant middleware and handle connectivity.
-        let client = self
-            .base_client_builder
-            .indexes(Indexes::from(&self.index_locations))
-            .wrap_existing(existing);
+        let client = self.base_client_builder.wrap_existing(existing);

         let timeout = client.timeout();
         let connectivity = client.connectivity();
@@ -219,7 +214,7 @@ impl<'a> RegistryClientBuilder<'a> {
         let client = CachedClient::new(client);

         RegistryClient {
-            index_urls,
+            index_urls: self.index_urls,
             index_strategy: self.index_strategy,
             torch_backend: self.torch_backend,
             cache: self.cache,
@@ -227,11 +222,24 @@ impl<'a> RegistryClientBuilder<'a> {
             client,
             timeout,
             flat_indexes: Arc::default(),
-            pyx_token_store: PyxTokenStore::from_settings().ok(),
         }
     }
 }

+impl<'a> TryFrom<BaseClientBuilder<'a>> for RegistryClientBuilder<'a> {
+    type Error = std::io::Error;
+
+    fn try_from(value: BaseClientBuilder<'a>) -> Result<Self, Self::Error> {
+        Ok(Self {
+            index_urls: IndexUrls::default(),
+            index_strategy: IndexStrategy::default(),
+            torch_backend: None,
+            cache: Cache::temp()?,
+            base_client_builder: value,
+        })
+    }
+}
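The `TryFrom` impl added on the `0.8.6` side is a fallible conversion from a plain `BaseClientBuilder`; the `?` on `Cache::temp()` is what makes it fallible. A minimal usage sketch (variable names are hypothetical):

    use uv_client::{BaseClientBuilder, RegistryClientBuilder};

    // Fails with `std::io::Error` if the temporary cache cannot be created.
    let builder = RegistryClientBuilder::try_from(BaseClientBuilder::default())?;
    let client = builder.build();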
 /// A client for fetching packages from a `PyPI`-compatible index.
 #[derive(Debug, Clone)]
 pub struct RegistryClient {
@@ -251,16 +259,13 @@ pub struct RegistryClient {
     timeout: Duration,
     /// The flat index entries for each `--find-links`-style index URL.
     flat_indexes: Arc<Mutex<FlatIndexCache>>,
-    /// The pyx token store to use for persistent credentials.
-    // TODO(charlie): The token store is only needed for `is_known_url`; can we avoid storing it here?
-    pyx_token_store: Option<PyxTokenStore>,
 }

 /// The format of the package metadata returned by querying an index.
 #[derive(Debug)]
 pub enum MetadataFormat {
     /// The metadata adheres to the Simple Repository API format.
-    Simple(OwnedArchive<SimpleDetailMetadata>),
+    Simple(OwnedArchive<SimpleMetadata>),
     /// The metadata consists of a list of distributions from a "flat" index.
     Flat(Vec<FlatIndexEntry>),
 }
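`MetadataFormat` distinguishes archived Simple API metadata from flat-index entries, so consumers branch on it once. A shape-only sketch (the bindings are hypothetical; no methods beyond the match itself are assumed):

    fn describe(format: &MetadataFormat) -> &'static str {
        match format {
            // rkyv-archived Simple Repository API response.
            MetadataFormat::Simple(_archive) => "simple index",
            // Distributions gathered from a --find-links style flat index.
            MetadataFormat::Flat(_entries) => "flat index",
        }
    }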
@@ -291,15 +296,8 @@ impl RegistryClient {
         self.timeout
     }

-    pub fn credentials_cache(&self) -> &CredentialsCache {
-        self.client.uncached().credentials_cache()
-    }

     /// Return the appropriate index URLs for the given [`PackageName`].
-    fn index_urls_for(
-        &self,
-        package_name: &PackageName,
-    ) -> impl Iterator<Item = IndexMetadataRef<'_>> {
+    fn index_urls_for(&self, package_name: &PackageName) -> impl Iterator<Item = IndexMetadataRef> {
         self.torch_backend
             .as_ref()
             .and_then(|torch_backend| {
@@ -332,7 +330,7 @@ impl RegistryClient {
     /// and [PEP 691 – JSON-based Simple API for Python Package Indexes](https://peps.python.org/pep-0691/),
     /// which the PyPI JSON API implements.
     #[instrument(skip_all, fields(package = %package_name))]
-    pub async fn simple_detail<'index>(
+    pub async fn package_metadata<'index>(
         &'index self,
         package_name: &PackageName,
         index: Option<IndexMetadataRef<'index>>,
@@ -363,7 +361,7 @@ impl RegistryClient {
                 let status_code_strategy =
                     self.index_urls.status_code_strategy_for(index.url);
                 match self
-                    .simple_detail_single_index(
+                    .simple_single_index(
                         package_name,
                         index.url,
                         capabilities,
@@ -409,7 +407,7 @@ impl RegistryClient {
                 let status_code_strategy =
                     IndexStatusCodeStrategy::ignore_authentication_error_codes();
                 let metadata = match self
-                    .simple_detail_single_index(
+                    .simple_single_index(
                         package_name,
                         index.url,
                         capabilities,
@@ -443,7 +441,7 @@ impl RegistryClient {
         if results.is_empty() {
             return match self.connectivity {
                 Connectivity::Online => {
-                    Err(ErrorKind::RemotePackageNotFound(package_name.clone()).into())
+                    Err(ErrorKind::PackageNotFound(package_name.to_string()).into())
                 }
                 Connectivity::Offline => Err(ErrorKind::Offline(package_name.to_string()).into()),
             };
@@ -492,11 +490,11 @@ impl RegistryClient {
         Ok(package_entries)
     }

-    /// Fetch the [`SimpleDetailMetadata`] from a single index for a given package.
+    /// Fetch the [`SimpleMetadata`] from a single index for a given package.
     ///
     /// The index can either be a PEP 503-compatible remote repository, or a local directory laid
     /// out in the same format.
-    async fn simple_detail_single_index(
+    async fn simple_single_index(
         &self,
         package_name: &PackageName,
         index: &IndexUrl,
@@ -539,13 +537,13 @@ impl RegistryClient {
         #[cfg(windows)]
         let _lock = {
             let lock_entry = cache_entry.with_file(format!("{package_name}.lock"));
-            lock_entry.lock().await.map_err(ErrorKind::CacheLock)?
+            lock_entry.lock().await.map_err(ErrorKind::CacheWrite)?
         };

         let result = if matches!(index, IndexUrl::Path(_)) {
-            self.fetch_local_simple_detail(package_name, &url).await
+            self.fetch_local_index(package_name, &url).await
         } else {
-            self.fetch_remote_simple_detail(package_name, &url, index, &cache_entry, cache_control)
+            self.fetch_remote_index(package_name, &url, &cache_entry, cache_control)
                 .await
         };

@@ -574,46 +572,33 @@ impl RegistryClient {
                 ErrorKind::Offline(_) => Ok(SimpleMetadataSearchOutcome::NotFound),

                 // The package could not be found in the local index.
-                ErrorKind::LocalPackageNotFound(_) => Ok(SimpleMetadataSearchOutcome::NotFound),
+                ErrorKind::FileNotFound(_) => Ok(SimpleMetadataSearchOutcome::NotFound),

                 _ => Err(err),
             },
         }
     }

-    /// Fetch the [`SimpleDetailMetadata`] from a remote URL, using the PEP 503 Simple Repository API.
-    async fn fetch_remote_simple_detail(
+    /// Fetch the [`SimpleMetadata`] from a remote URL, using the PEP 503 Simple Repository API.
+    async fn fetch_remote_index(
         &self,
         package_name: &PackageName,
         url: &DisplaySafeUrl,
-        index: &IndexUrl,
         cache_entry: &CacheEntry,
         cache_control: CacheControl<'_>,
-    ) -> Result<OwnedArchive<SimpleDetailMetadata>, Error> {
-        // In theory, we should be able to pass `MediaType::all()` to all registries, and as
-        // unsupported media types should be ignored by the server. For now, we implement this
-        // defensively to avoid issues with misconfigured servers.
-        let accept = if self
-            .pyx_token_store
-            .as_ref()
-            .is_some_and(|token_store| token_store.is_known_url(index.url()))
-        {
-            MediaType::all()
-        } else {
-            MediaType::pypi()
-        };
+    ) -> Result<OwnedArchive<SimpleMetadata>, Error> {
         let simple_request = self
             .uncached_client(url)
             .get(Url::from(url.clone()))
             .header("Accept-Encoding", "gzip, deflate, zstd")
-            .header("Accept", accept)
+            .header("Accept", MediaType::accepts())
             .build()
             .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
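Per PEP 691, the client advertises every representation it can parse and lets the server pick, with `q=` weights expressing preference; the `Accept` constants later in this diff spell the values out. A minimal sketch of issuing such a request with plain `reqwest` (hypothetical URL, independent of this codebase's wrappers):

    // JSON is preferred (implicit q=1.0); the HTML forms are fallbacks.
    const ACCEPT: &str = "application/vnd.pypi.simple.v1+json, \
        application/vnd.pypi.simple.v1+html;q=0.2, text/html;q=0.01";

    async fn fetch_simple_page(url: &str) -> Result<reqwest::Response, reqwest::Error> {
        reqwest::Client::new()
            .get(url)
            .header("Accept", ACCEPT)
            .send()
            .await
    }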
         let parse_simple_response = |response: Response| {
             async {
                 // Use the response URL, rather than the request URL, as the base for relative URLs.
                 // This ensures that we handle redirects and other URL transformations correctly.
-                let url = DisplaySafeUrl::from_url(response.url().clone());
+                let url = DisplaySafeUrl::from(response.url().clone());

                 let content_type = response
                     .headers()
@@ -631,53 +616,22 @@ impl RegistryClient {
                 })?;

                 let unarchived = match media_type {
-                    MediaType::PyxV1Msgpack => {
+                    MediaType::Json => {
                         let bytes = response
                             .bytes()
                             .await
                             .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-                        let data: PyxSimpleDetail = rmp_serde::from_slice(bytes.as_ref())
-                            .map_err(|err| Error::from_msgpack_err(err, url.clone()))?;
-
-                        SimpleDetailMetadata::from_pyx_files(
-                            data.files,
-                            data.core_metadata,
-                            package_name,
-                            &url,
-                        )
-                    }
-                    MediaType::PyxV1Json => {
-                        let bytes = response
-                            .bytes()
-                            .await
-                            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-                        let data: PyxSimpleDetail = serde_json::from_slice(bytes.as_ref())
+                        let data: SimpleJson = serde_json::from_slice(bytes.as_ref())
                             .map_err(|err| Error::from_json_err(err, url.clone()))?;

-                        SimpleDetailMetadata::from_pyx_files(
-                            data.files,
-                            data.core_metadata,
-                            package_name,
-                            &url,
-                        )
+                        SimpleMetadata::from_files(data.files, package_name, &url)
                     }
-                    MediaType::PypiV1Json => {
-                        let bytes = response
-                            .bytes()
-                            .await
-                            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-
-                        let data: PypiSimpleDetail = serde_json::from_slice(bytes.as_ref())
-                            .map_err(|err| Error::from_json_err(err, url.clone()))?;
-
-                        SimpleDetailMetadata::from_pypi_files(data.files, package_name, &url)
-                    }
-                    MediaType::PypiV1Html | MediaType::TextHtml => {
+                    MediaType::Html => {
                         let text = response
                             .text()
                             .await
                             .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-                        SimpleDetailMetadata::from_html(&text, package_name, &url)?
+                        SimpleMetadata::from_html(&text, package_name, &url)?
                     }
                 };
                 OwnedArchive::from_unarchived(&unarchived)
@@ -697,13 +651,13 @@ impl RegistryClient {
         Ok(simple)
     }

-    /// Fetch the [`SimpleDetailMetadata`] from a local file, using a PEP 503-compatible directory
+    /// Fetch the [`SimpleMetadata`] from a local file, using a PEP 503-compatible directory
     /// structure.
-    async fn fetch_local_simple_detail(
+    async fn fetch_local_index(
         &self,
         package_name: &PackageName,
         url: &DisplaySafeUrl,
-    ) -> Result<OwnedArchive<SimpleDetailMetadata>, Error> {
+    ) -> Result<OwnedArchive<SimpleMetadata>, Error> {
         let path = url
             .to_file_path()
             .map_err(|()| ErrorKind::NonFileUrl(url.clone()))?
@@ -711,185 +665,15 @@ impl RegistryClient {
         let text = match fs_err::tokio::read_to_string(&path).await {
             Ok(text) => text,
             Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
-                return Err(Error::from(ErrorKind::LocalPackageNotFound(
-                    package_name.clone(),
+                return Err(Error::from(ErrorKind::FileNotFound(
+                    package_name.to_string(),
                 )));
             }
             Err(err) => {
                 return Err(Error::from(ErrorKind::Io(err)));
             }
         };
-        let metadata = SimpleDetailMetadata::from_html(&text, package_name, url)?;
-        OwnedArchive::from_unarchived(&metadata)
-    }
-
-    /// Fetch the list of projects from a Simple API index at a remote URL.
-    ///
-    /// This fetches the root of a Simple API index (e.g., `https://pypi.org/simple/`)
-    /// which returns a list of all available projects.
-    pub async fn fetch_simple_index(
-        &self,
-        index_url: &IndexUrl,
-    ) -> Result<SimpleIndexMetadata, Error> {
-        // Format the URL for PyPI.
-        let mut url = index_url.url().clone();
-        url.path_segments_mut()
-            .map_err(|()| ErrorKind::CannotBeABase(index_url.url().clone()))?
-            .pop_if_empty()
-            // The URL *must* end in a trailing slash for proper relative path behavior
-            // ref https://github.com/servo/rust-url/issues/333
-            .push("");
-
-        if url.scheme() == "file" {
-            let archived = self.fetch_local_simple_index(&url).await?;
-            Ok(OwnedArchive::deserialize(&archived))
-        } else {
-            let archived = self.fetch_remote_simple_index(&url, index_url).await?;
-            Ok(OwnedArchive::deserialize(&archived))
-        }
-    }
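The trailing-slash comment in the removed `fetch_simple_index` above is load-bearing: `Url::join` resolves relative to the last slash, so a missing trailing slash silently drops the final path segment. A small self-contained demonstration with the `url` crate (values chosen for illustration):

    use url::Url;

    fn main() {
        // Without a trailing slash, "simple" is treated as a file and replaced.
        let no_slash = Url::parse("https://pypi.org/simple").unwrap();
        assert_eq!(no_slash.join("flask/").unwrap().as_str(), "https://pypi.org/flask/");

        // With the trailing slash, the join nests under /simple/ as intended.
        let slash = Url::parse("https://pypi.org/simple/").unwrap();
        assert_eq!(slash.join("flask/").unwrap().as_str(), "https://pypi.org/simple/flask/");
    }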
-    /// Fetch the list of projects from a remote Simple API index.
-    async fn fetch_remote_simple_index(
-        &self,
-        url: &DisplaySafeUrl,
-        index: &IndexUrl,
-    ) -> Result<OwnedArchive<SimpleIndexMetadata>, Error> {
-        // In theory, we should be able to pass `MediaType::all()` to all registries, and as
-        // unsupported media types should be ignored by the server. For now, we implement this
-        // defensively to avoid issues with misconfigured servers.
-        let accept = if self
-            .pyx_token_store
-            .as_ref()
-            .is_some_and(|token_store| token_store.is_known_url(index.url()))
-        {
-            MediaType::all()
-        } else {
-            MediaType::pypi()
-        };
-
-        let cache_entry = self.cache.entry(
-            CacheBucket::Simple,
-            WheelCache::Index(index).root(),
-            "index.html.rkyv",
-        );
-        let cache_control = match self.connectivity {
-            Connectivity::Online => {
-                if let Some(header) = self.index_urls.simple_api_cache_control_for(index) {
-                    CacheControl::Override(header)
-                } else {
-                    CacheControl::from(
-                        self.cache
-                            .freshness(&cache_entry, None, None)
-                            .map_err(ErrorKind::Io)?,
-                    )
-                }
-            }
-            Connectivity::Offline => CacheControl::AllowStale,
-        };
-
-        let parse_simple_response = |response: Response| {
-            async {
-                // Use the response URL, rather than the request URL, as the base for relative URLs.
-                // This ensures that we handle redirects and other URL transformations correctly.
-                let url = DisplaySafeUrl::from_url(response.url().clone());
-
-                let content_type = response
-                    .headers()
-                    .get("content-type")
-                    .ok_or_else(|| Error::from(ErrorKind::MissingContentType(url.clone())))?;
-                let content_type = content_type.to_str().map_err(|err| {
-                    Error::from(ErrorKind::InvalidContentTypeHeader(url.clone(), err))
-                })?;
-                let media_type = content_type.split(';').next().unwrap_or(content_type);
-                let media_type = MediaType::from_str(media_type).ok_or_else(|| {
-                    Error::from(ErrorKind::UnsupportedMediaType(
-                        url.clone(),
-                        media_type.to_string(),
-                    ))
-                })?;
-
-                let metadata = match media_type {
-                    MediaType::PyxV1Msgpack => {
-                        let bytes = response
-                            .bytes()
-                            .await
-                            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-                        let data: PyxSimpleIndex = rmp_serde::from_slice(bytes.as_ref())
-                            .map_err(|err| Error::from_msgpack_err(err, url.clone()))?;
-                        SimpleIndexMetadata::from_pyx_index(data)
-                    }
-                    MediaType::PyxV1Json => {
-                        let bytes = response
-                            .bytes()
-                            .await
-                            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-                        let data: PyxSimpleIndex = serde_json::from_slice(bytes.as_ref())
-                            .map_err(|err| Error::from_json_err(err, url.clone()))?;
-                        SimpleIndexMetadata::from_pyx_index(data)
-                    }
-                    MediaType::PypiV1Json => {
-                        let bytes = response
-                            .bytes()
-                            .await
-                            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-                        let data: PypiSimpleIndex = serde_json::from_slice(bytes.as_ref())
-                            .map_err(|err| Error::from_json_err(err, url.clone()))?;
-                        SimpleIndexMetadata::from_pypi_index(data)
-                    }
-                    MediaType::PypiV1Html | MediaType::TextHtml => {
-                        let text = response
-                            .text()
-                            .await
-                            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-                        SimpleIndexMetadata::from_html(&text, &url)?
-                    }
-                };
-
-                OwnedArchive::from_unarchived(&metadata)
-            }
-        };
-
-        let simple_request = self
-            .uncached_client(url)
-            .get(Url::from(url.clone()))
-            .header("Accept-Encoding", "gzip, deflate, zstd")
-            .header("Accept", accept)
-            .build()
-            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
-
-        let index = self
-            .cached_client()
-            .get_cacheable_with_retry(
-                simple_request,
-                &cache_entry,
-                cache_control,
-                parse_simple_response,
-            )
-            .await?;
-
-        Ok(index)
-    }
-
-    /// Fetch the list of projects from a local Simple API index.
-    async fn fetch_local_simple_index(
-        &self,
-        url: &DisplaySafeUrl,
-    ) -> Result<OwnedArchive<SimpleIndexMetadata>, Error> {
-        let path = url
-            .to_file_path()
-            .map_err(|()| ErrorKind::NonFileUrl(url.clone()))?
-            .join("index.html");
-        let text = match fs_err::tokio::read_to_string(&path).await {
-            Ok(text) => text,
-            Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
-                return Err(Error::from(ErrorKind::LocalIndexNotFound(path)));
-            }
-            Err(err) => {
-                return Err(Error::from(ErrorKind::Io(err)));
-            }
-        };
-        let metadata = SimpleIndexMetadata::from_html(&text, url)?;
+        let metadata = SimpleMetadata::from_html(&text, package_name, url)?;
         OwnedArchive::from_unarchived(&metadata)
     }
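Both the per-package reader and the removed index-root reader above assume the PEP 503 on-disk layout: the index root holds an `index.html` listing projects, and each project gets `<normalized-name>/index.html`. A self-contained sketch of resolving those paths (hypothetical root directory):

    use std::path::{Path, PathBuf};

    /// Path of the project list for a PEP 503-style directory index.
    fn index_page(root: &Path) -> PathBuf {
        root.join("index.html")
    }

    /// Path of a single project's file list.
    fn project_page(root: &Path, normalized_name: &str) -> PathBuf {
        root.join(normalized_name).join("index.html")
    }

    fn main() {
        let root = Path::new("/srv/simple");
        assert_eq!(index_page(root), Path::new("/srv/simple/index.html"));
        assert_eq!(project_page(root, "flask"), Path::new("/srv/simple/flask/index.html"));
    }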
@@ -1031,7 +815,7 @@ impl RegistryClient {
         #[cfg(windows)]
         let _lock = {
             let lock_entry = cache_entry.with_file(format!("{}.lock", filename.stem()));
-            lock_entry.lock().await.map_err(ErrorKind::CacheLock)?
+            lock_entry.lock().await.map_err(ErrorKind::CacheWrite)?
         };

         let response_callback = async |response: Response| {
@@ -1115,7 +899,7 @@ impl RegistryClient {
         #[cfg(windows)]
         let _lock = {
             let lock_entry = cache_entry.with_file(format!("{}.lock", filename.stem()));
-            lock_entry.lock().await.map_err(ErrorKind::CacheLock)?
+            lock_entry.lock().await.map_err(ErrorKind::CacheWrite)?
         };

         // Attempt to fetch via a range request.
@@ -1246,7 +1030,7 @@ impl RegistryClient {
 #[derive(Debug)]
 pub(crate) enum SimpleMetadataSearchOutcome {
     /// Simple metadata was found
-    Found(OwnedArchive<SimpleDetailMetadata>),
+    Found(OwnedArchive<SimpleMetadata>),
     /// Simple metadata was not found
     NotFound,
     /// A status code failure was encountered when searching for
@@ -1327,71 +1111,24 @@ pub struct VersionSourceDist {
     pub file: File,
 }

-/// The list of projects available in a Simple API index.
 #[derive(Default, Debug, rkyv::Archive, rkyv::Deserialize, rkyv::Serialize)]
 #[rkyv(derive(Debug))]
-pub struct SimpleIndexMetadata {
+pub struct SimpleMetadata(Vec<SimpleMetadatum>);
-    /// The list of project names available in the index.
-    projects: Vec<PackageName>,
-}
-
-impl SimpleIndexMetadata {
-    /// Iterate over the projects in the index.
-    pub fn iter(&self) -> impl Iterator<Item = &PackageName> {
-        self.projects.iter()
-    }
-
-    /// Create a [`SimpleIndexMetadata`] from a [`PypiSimpleIndex`].
-    fn from_pypi_index(index: PypiSimpleIndex) -> Self {
-        Self {
-            projects: index.projects.into_iter().map(|entry| entry.name).collect(),
-        }
-    }
-
-    /// Create a [`SimpleIndexMetadata`] from a [`PyxSimpleIndex`].
-    fn from_pyx_index(index: PyxSimpleIndex) -> Self {
-        Self {
-            projects: index.projects.into_iter().map(|entry| entry.name).collect(),
-        }
-    }
-
-    /// Create a [`SimpleIndexMetadata`] from HTML content.
-    fn from_html(text: &str, url: &DisplaySafeUrl) -> Result<Self, Error> {
-        let html = crate::html::SimpleIndexHtml::parse(text).map_err(|err| {
-            Error::from(ErrorKind::BadHtml {
-                source: err,
-                url: url.clone(),
-            })
-        })?;
-        Ok(Self {
-            projects: html.projects,
-        })
-    }
-}
-
-#[derive(Default, Debug, rkyv::Archive, rkyv::Deserialize, rkyv::Serialize)]
-#[rkyv(derive(Debug))]
-pub struct SimpleDetailMetadata(Vec<SimpleDetailMetadatum>);

 #[derive(Debug, rkyv::Archive, rkyv::Deserialize, rkyv::Serialize)]
 #[rkyv(derive(Debug))]
-pub struct SimpleDetailMetadatum {
+pub struct SimpleMetadatum {
     pub version: Version,
     pub files: VersionFiles,
-    pub metadata: Option<ResolutionMetadata>,
 }
-impl SimpleDetailMetadata {
+impl SimpleMetadata {
-    pub fn iter(&self) -> impl DoubleEndedIterator<Item = &SimpleDetailMetadatum> {
+    pub fn iter(&self) -> impl DoubleEndedIterator<Item = &SimpleMetadatum> {
         self.0.iter()
     }

-    fn from_pypi_files(
-        files: Vec<uv_pypi_types::PypiFile>,
-        package_name: &PackageName,
-        base: &Url,
-    ) -> Self {
-        let mut version_map: BTreeMap<Version, VersionFiles> = BTreeMap::default();
+    fn from_files(files: Vec<uv_pypi_types::File>, package_name: &PackageName, base: &Url) -> Self {
+        let mut map: BTreeMap<Version, VersionFiles> = BTreeMap::default();

         // Convert to a reference-counted string.
         let base = SmallString::from(base.as_str());
@@ -1403,7 +1140,11 @@ impl SimpleDetailMetadata {
                 warn!("Skipping file for {package_name}: {}", file.filename);
                 continue;
             };
-            let file = match File::try_from_pypi(file, &base) {
+            let version = match filename {
+                DistFilename::SourceDistFilename(ref inner) => &inner.version,
+                DistFilename::WheelFilename(ref inner) => &inner.version,
+            };
+            let file = match File::try_from(file, &base) {
                 Ok(file) => file,
                 Err(err) => {
                     // Ignore files with unparsable version specifiers.
@@ -1411,7 +1152,7 @@ impl SimpleDetailMetadata {
                     continue;
                 }
             };
-            match version_map.entry(filename.version().clone()) {
+            match map.entry(version.clone()) {
                 std::collections::btree_map::Entry::Occupied(mut entry) => {
                     entry.get_mut().push(filename, file);
                 }
@@ -1422,160 +1163,66 @@ impl SimpleDetailMetadata {
                 }
             }
         }

         Self(
-            version_map
-                .into_iter()
-                .map(|(version, files)| SimpleDetailMetadatum {
-                    version,
-                    files,
-                    metadata: None,
-                })
+            map.into_iter()
+                .map(|(version, files)| SimpleMetadatum { version, files })
                 .collect(),
         )
     }

-    fn from_pyx_files(
-        files: Vec<uv_pypi_types::PyxFile>,
-        mut core_metadata: FxHashMap<Version, uv_pypi_types::CoreMetadatum>,
-        package_name: &PackageName,
-        base: &Url,
-    ) -> Self {
-        let mut version_map: BTreeMap<Version, VersionFiles> = BTreeMap::default();
-
-        // Convert to a reference-counted string.
-        let base = SmallString::from(base.as_str());
-
-        // Group the distributions by version and kind
-        for file in files {
-            let file = match File::try_from_pyx(file, &base) {
-                Ok(file) => file,
-                Err(err) => {
-                    // Ignore files with unparsable version specifiers.
-                    warn!("Skipping file for {package_name}: {err}");
-                    continue;
-                }
-            };
-            let Some(filename) = DistFilename::try_from_filename(&file.filename, package_name)
-            else {
-                warn!("Skipping file for {package_name}: {}", file.filename);
-                continue;
-            };
-            match version_map.entry(filename.version().clone()) {
-                std::collections::btree_map::Entry::Occupied(mut entry) => {
-                    entry.get_mut().push(filename, file);
-                }
-                std::collections::btree_map::Entry::Vacant(entry) => {
-                    let mut files = VersionFiles::default();
-                    files.push(filename, file);
-                    entry.insert(files);
-                }
-            }
-        }
-
-        Self(
-            version_map
-                .into_iter()
-                .map(|(version, files)| {
-                    let metadata =
-                        core_metadata
-                            .remove(&version)
-                            .map(|metadata| ResolutionMetadata {
-                                name: package_name.clone(),
-                                version: version.clone(),
-                                requires_dist: metadata.requires_dist,
-                                requires_python: metadata.requires_python,
-                                provides_extra: metadata.provides_extra,
-                                dynamic: false,
-                            });
-                    SimpleDetailMetadatum {
-                        version,
-                        files,
-                        metadata,
-                    }
-                })
-                .collect(),
-        )
-    }
-
-    /// Read the [`SimpleDetailMetadata`] from an HTML index.
+    /// Read the [`SimpleMetadata`] from an HTML index.
     fn from_html(
         text: &str,
         package_name: &PackageName,
         url: &DisplaySafeUrl,
     ) -> Result<Self, Error> {
-        let SimpleDetailHTML { base, files } = SimpleDetailHTML::parse(text, url)
-            .map_err(|err| Error::from_html_err(err, url.clone()))?;
+        let SimpleHtml { base, files } =
+            SimpleHtml::parse(text, url).map_err(|err| Error::from_html_err(err, url.clone()))?;

-        Ok(Self::from_pypi_files(files, package_name, base.as_url()))
+        Ok(Self::from_files(files, package_name, base.as_url()))
     }
 }
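The grouping loop above is a standard map-entry pattern: keyed by `Version` in a `BTreeMap` so iteration comes out version-sorted. A self-contained miniature of the same idea (toy types; the real code pushes `(DistFilename, File)` pairs):

    use std::collections::BTreeMap;

    fn main() {
        let files = [
            ("1.0", "a-1.0.tar.gz"),
            ("1.0", "a-1.0-py3-none-any.whl"),
            ("2.0", "a-2.0.tar.gz"),
        ];

        // Group file names by version; BTreeMap keeps versions ordered.
        let mut by_version: BTreeMap<&str, Vec<&str>> = BTreeMap::new();
        for (version, name) in files {
            by_version.entry(version).or_default().push(name);
        }

        assert_eq!(by_version["1.0"].len(), 2);
        // Sorted iteration: "1.0" before "2.0".
        assert_eq!(by_version.keys().copied().collect::<Vec<_>>(), ["1.0", "2.0"]);
    }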
-impl IntoIterator for SimpleDetailMetadata {
-    type Item = SimpleDetailMetadatum;
-    type IntoIter = std::vec::IntoIter<SimpleDetailMetadatum>;
+impl IntoIterator for SimpleMetadata {
+    type Item = SimpleMetadatum;
+    type IntoIter = std::vec::IntoIter<SimpleMetadatum>;

     fn into_iter(self) -> Self::IntoIter {
         self.0.into_iter()
     }
 }

-impl ArchivedSimpleDetailMetadata {
+impl ArchivedSimpleMetadata {
-    pub fn iter(&self) -> impl DoubleEndedIterator<Item = &rkyv::Archived<SimpleDetailMetadatum>> {
+    pub fn iter(&self) -> impl DoubleEndedIterator<Item = &rkyv::Archived<SimpleMetadatum>> {
         self.0.iter()
     }

-    pub fn datum(&self, i: usize) -> Option<&rkyv::Archived<SimpleDetailMetadatum>> {
+    pub fn datum(&self, i: usize) -> Option<&rkyv::Archived<SimpleMetadatum>> {
         self.0.get(i)
     }
 }

 #[derive(Debug)]
 enum MediaType {
-    PyxV1Msgpack,
-    PyxV1Json,
-    PypiV1Json,
-    PypiV1Html,
-    TextHtml,
+    Json,
+    Html,
 }

 impl MediaType {
     /// Parse a media type from a string, returning `None` if the media type is not supported.
     fn from_str(s: &str) -> Option<Self> {
         match s {
-            "application/vnd.pyx.simple.v1+msgpack" => Some(Self::PyxV1Msgpack),
-            "application/vnd.pyx.simple.v1+json" => Some(Self::PyxV1Json),
-            "application/vnd.pypi.simple.v1+json" => Some(Self::PypiV1Json),
-            "application/vnd.pypi.simple.v1+html" => Some(Self::PypiV1Html),
-            "text/html" => Some(Self::TextHtml),
+            "application/vnd.pypi.simple.v1+json" => Some(Self::Json),
+            "application/vnd.pypi.simple.v1+html" | "text/html" => Some(Self::Html),
             _ => None,
         }
     }

-    /// Return the `Accept` header value for all PyPI media types.
-    #[inline]
-    const fn pypi() -> &'static str {
-        // See: https://peps.python.org/pep-0691/#version-format-selection
-        "application/vnd.pypi.simple.v1+json, application/vnd.pypi.simple.v1+html;q=0.2, text/html;q=0.01"
-    }

     /// Return the `Accept` header value for all supported media types.
     #[inline]
-    const fn all() -> &'static str {
+    const fn accepts() -> &'static str {
         // See: https://peps.python.org/pep-0691/#version-format-selection
-        "application/vnd.pyx.simple.v1+msgpack, application/vnd.pyx.simple.v1+json;q=0.9, application/vnd.pypi.simple.v1+json;q=0.8, application/vnd.pypi.simple.v1+html;q=0.2, text/html;q=0.01"
+        "application/vnd.pypi.simple.v1+json, application/vnd.pypi.simple.v1+html;q=0.2, text/html;q=0.01"
-    }
-}
-
-impl std::fmt::Display for MediaType {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            Self::PyxV1Msgpack => write!(f, "application/vnd.pyx.simple.v1+msgpack"),
-            Self::PyxV1Json => write!(f, "application/vnd.pyx.simple.v1+json"),
-            Self::PypiV1Json => write!(f, "application/vnd.pypi.simple.v1+json"),
-            Self::PypiV1Html => write!(f, "application/vnd.pypi.simple.v1+html"),
-            Self::TextHtml => write!(f, "text/html"),
-        }
     }
 }
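`MediaType::from_str` above expects a bare media type, so callers first strip any parameters (e.g. `; charset=utf-8`) by splitting on `';'`, as the response-parsing code earlier in this diff does. A compact, self-contained rendering of that dispatch:

    fn media_type(content_type: &str) -> Option<&'static str> {
        // Drop parameters such as "; charset=utf-8" before matching.
        let essence = content_type.split(';').next().unwrap_or(content_type).trim();
        match essence {
            "application/vnd.pypi.simple.v1+json" => Some("json"),
            "application/vnd.pypi.simple.v1+html" | "text/html" => Some("html"),
            _ => None,
        }
    }

    fn main() {
        assert_eq!(media_type("text/html; charset=utf-8"), Some("html"));
        assert_eq!(media_type("application/octet-stream"), None);
    }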
@@ -1605,12 +1252,10 @@ mod tests {

     use url::Url;
     use uv_normalize::PackageName;
-    use uv_pypi_types::PypiSimpleDetail;
+    use uv_pypi_types::SimpleJson;
     use uv_redacted::DisplaySafeUrl;

-    use crate::{
-        BaseClientBuilder, SimpleDetailMetadata, SimpleDetailMetadatum, html::SimpleDetailHTML,
-    };
+    use crate::{SimpleMetadata, SimpleMetadatum, html::SimpleHtml};

     use crate::RegistryClientBuilder;
     use uv_cache::Cache;
@@ -1659,7 +1304,7 @@ mod tests {
         let redirect_server_url = DisplaySafeUrl::parse(&redirect_server.uri())?;

         let cache = Cache::temp()?;
-        let registry_client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache)
+        let registry_client = RegistryClientBuilder::new(cache)
             .allow_cross_origin_credentials()
             .build();
         let client = registry_client.cached_client().uncached();
@@ -1719,7 +1364,7 @@ mod tests {
         let redirect_server_url = DisplaySafeUrl::parse(&redirect_server.uri())?.join("foo/")?;

         let cache = Cache::temp()?;
-        let registry_client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache)
+        let registry_client = RegistryClientBuilder::new(cache)
             .allow_cross_origin_credentials()
             .build();
         let client = registry_client.cached_client().uncached();
@@ -1767,7 +1412,7 @@ mod tests {
             .await;

         let cache = Cache::temp()?;
-        let registry_client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache)
+        let registry_client = RegistryClientBuilder::new(cache)
            .allow_cross_origin_credentials()
            .build();
         let client = registry_client.cached_client().uncached();
@@ -1826,16 +1471,16 @@ mod tests {
             ]
         }
         "#;
-        let data: PypiSimpleDetail = serde_json::from_str(response).unwrap();
+        let data: SimpleJson = serde_json::from_str(response).unwrap();
         let base = DisplaySafeUrl::parse("https://pypi.org/simple/pyflyby/").unwrap();
-        let simple_metadata = SimpleDetailMetadata::from_pypi_files(
+        let simple_metadata = SimpleMetadata::from_files(
             data.files,
             &PackageName::from_str("pyflyby").unwrap(),
             &base,
         );
         let versions: Vec<String> = simple_metadata
             .iter()
-            .map(|SimpleDetailMetadatum { version, .. }| version.to_string())
+            .map(|SimpleMetadatum { version, .. }| version.to_string())
             .collect();
         assert_eq!(versions, ["1.7.8".to_string()]);
     }
@@ -1866,7 +1511,7 @@ mod tests {
         // Note the lack of a trailing `/` here is important for coverage of url-join behavior
         let base = DisplaySafeUrl::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask")
             .unwrap();
-        let SimpleDetailHTML { base, files } = SimpleDetailHTML::parse(text, &base).unwrap();
+        let SimpleHtml { base, files } = SimpleHtml::parse(text, &base).unwrap();
         let base = SmallString::from(base.as_str());

         // Test parsing of the file urls
@@ -1,382 +0,0 @@
-use std::net::SocketAddr;
-use std::path::PathBuf;
-use std::sync::Arc;
-
-use anyhow::{Context, Result};
-use futures::future;
-use http_body_util::combinators::BoxBody;
-use http_body_util::{BodyExt, Full};
-use hyper::body::{Bytes, Incoming};
-use hyper::header::USER_AGENT;
-use hyper::service::service_fn;
-use hyper::{Request, Response};
-use hyper_util::rt::{TokioExecutor, TokioIo};
-use hyper_util::server::conn::auto::Builder;
-use rcgen::{
-    BasicConstraints, Certificate, CertificateParams, DnType, ExtendedKeyUsagePurpose, IsCa,
-    Issuer, KeyPair, KeyUsagePurpose, SanType, date_time_ymd,
-};
-use rustls::pki_types::{CertificateDer, PrivateKeyDer};
-use rustls::server::WebPkiClientVerifier;
-use rustls::{RootCertStore, ServerConfig};
-use tokio::net::TcpListener;
-use tokio::task::JoinHandle;
-use tokio_rustls::TlsAcceptor;
-
-use uv_fs::Simplified;
-
-/// An issued certificate, together with the subject keypair.
-#[derive(Debug)]
-pub(crate) struct SelfSigned {
-    /// An issued certificate.
-    pub public: Certificate,
-    /// The certificate's subject signing key.
-    pub private: KeyPair,
-}
-
-/// Defines the base location for temporary generated certs.
-///
-/// See [`TestContext::test_bucket_dir`] for implementation rationale.
-pub(crate) fn test_cert_dir() -> PathBuf {
-    std::env::temp_dir()
-        .simple_canonicalize()
-        .expect("failed to canonicalize temp dir")
-        .join("uv")
-        .join("tests")
-        .join("certs")
-}
-
-/// Generates a self-signed server certificate for `uv-test-server`, `localhost` and `127.0.0.1`.
-/// This certificate is standalone and not issued by a self-signed Root CA.
-///
-/// Use sparingly as generation of certs is a slow operation.
-pub(crate) fn generate_self_signed_certs() -> Result<SelfSigned> {
-    let mut params = CertificateParams::default();
-    params.is_ca = IsCa::NoCa;
-    params.not_before = date_time_ymd(1975, 1, 1);
-    params.not_after = date_time_ymd(4096, 1, 1);
-    params.key_usages.push(KeyUsagePurpose::DigitalSignature);
-    params.key_usages.push(KeyUsagePurpose::KeyEncipherment);
-    params
-        .extended_key_usages
-        .push(ExtendedKeyUsagePurpose::ServerAuth);
-    params
-        .distinguished_name
-        .push(DnType::OrganizationName, "Astral Software Inc.");
-    params
-        .distinguished_name
-        .push(DnType::CommonName, "uv-test-server");
-    params
-        .subject_alt_names
-        .push(SanType::DnsName("uv-test-server".try_into()?));
-    params
-        .subject_alt_names
-        .push(SanType::DnsName("localhost".try_into()?));
-    params
-        .subject_alt_names
-        .push(SanType::IpAddress("127.0.0.1".parse()?));
-    let private = KeyPair::generate()?;
-    let public = params.self_signed(&private)?;
-
-    Ok(SelfSigned { public, private })
-}
-
-/// Generates a self-signed root CA, server certificate, and client certificate.
-/// There are no intermediate certs generated as part of this function.
-/// The server certificate is for `uv-test-server`, `localhost` and `127.0.0.1` issued by this CA.
-/// The client certificate is for `uv-test-client` issued by this CA.
-///
-/// Use sparingly as generation of these certs is a very slow operation.
-pub(crate) fn generate_self_signed_certs_with_ca() -> Result<(SelfSigned, SelfSigned, SelfSigned)> {
-    // Generate the CA
-    let mut ca_params = CertificateParams::default();
-    ca_params.is_ca = IsCa::Ca(BasicConstraints::Unconstrained); // root cert
-    ca_params.not_before = date_time_ymd(1975, 1, 1);
-    ca_params.not_after = date_time_ymd(4096, 1, 1);
-    ca_params.key_usages.push(KeyUsagePurpose::DigitalSignature);
-    ca_params.key_usages.push(KeyUsagePurpose::KeyCertSign);
-    ca_params.key_usages.push(KeyUsagePurpose::CrlSign);
-    ca_params
-        .distinguished_name
-        .push(DnType::OrganizationName, "Astral Software Inc.");
-    ca_params
-        .distinguished_name
-        .push(DnType::CommonName, "uv-test-ca");
-    ca_params
-        .subject_alt_names
-        .push(SanType::DnsName("uv-test-ca".try_into()?));
-    let ca_private_key = KeyPair::generate()?;
-    let ca_public_cert = ca_params.self_signed(&ca_private_key)?;
-    let ca_cert_issuer = Issuer::new(ca_params, &ca_private_key);
-
-    // Generate server cert issued by this CA
-    let mut server_params = CertificateParams::default();
-    server_params.is_ca = IsCa::NoCa;
-    server_params.not_before = date_time_ymd(1975, 1, 1);
-    server_params.not_after = date_time_ymd(4096, 1, 1);
-    server_params.use_authority_key_identifier_extension = true;
-    server_params
-        .key_usages
-        .push(KeyUsagePurpose::DigitalSignature);
-    server_params
-        .key_usages
-        .push(KeyUsagePurpose::KeyEncipherment);
-    server_params
-        .extended_key_usages
-        .push(ExtendedKeyUsagePurpose::ServerAuth);
-    server_params
-        .distinguished_name
-        .push(DnType::OrganizationName, "Astral Software Inc.");
-    server_params
-        .distinguished_name
-        .push(DnType::CommonName, "uv-test-server");
-    server_params
-        .subject_alt_names
-        .push(SanType::DnsName("uv-test-server".try_into()?));
-    server_params
-        .subject_alt_names
-        .push(SanType::DnsName("localhost".try_into()?));
-    server_params
-        .subject_alt_names
-        .push(SanType::IpAddress("127.0.0.1".parse()?));
-    let server_private_key = KeyPair::generate()?;
-    let server_public_cert = server_params.signed_by(&server_private_key, &ca_cert_issuer)?;
-
-    // Generate client cert issued by this CA
-    let mut client_params = CertificateParams::default();
-    client_params.is_ca = IsCa::NoCa;
-    client_params.not_before = date_time_ymd(1975, 1, 1);
-    client_params.not_after = date_time_ymd(4096, 1, 1);
-    client_params.use_authority_key_identifier_extension = true;
-    client_params
-        .key_usages
-        .push(KeyUsagePurpose::DigitalSignature);
-    client_params
-        .extended_key_usages
-        .push(ExtendedKeyUsagePurpose::ClientAuth);
-    client_params
-        .distinguished_name
-        .push(DnType::OrganizationName, "Astral Software Inc.");
-    client_params
-        .distinguished_name
-        .push(DnType::CommonName, "uv-test-client");
-    client_params
-        .subject_alt_names
-        .push(SanType::DnsName("uv-test-client".try_into()?));
-    let client_private_key = KeyPair::generate()?;
-    let client_public_cert = client_params.signed_by(&client_private_key, &ca_cert_issuer)?;
-
-    let ca_self_signed = SelfSigned {
-        public: ca_public_cert,
-        private: ca_private_key,
-    };
-    let server_self_signed = SelfSigned {
-        public: server_public_cert,
-        private: server_private_key,
-    };
-    let client_self_signed = SelfSigned {
-        public: client_public_cert,
-        private: client_private_key,
-    };
-
-    Ok((ca_self_signed, server_self_signed, client_self_signed))
-}
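The two generators above are plain rcgen: fill in `CertificateParams`, generate a `KeyPair`, then either self-sign or sign with an issuer. A minimal sketch of the self-signed path (rcgen 0.13-style API, as used by the removed file; the function name is hypothetical):

    use rcgen::{CertificateParams, KeyPair};

    fn make_localhost_cert() -> Result<(), rcgen::Error> {
        // SANs can be passed directly; defaults cover the rest of the params.
        let params = CertificateParams::new(vec!["localhost".to_string()])?;
        let key = KeyPair::generate()?;
        let cert = params.self_signed(&key)?;
        // PEM output, ready to hand to a server config or a client trust store.
        let _pem = cert.pem();
        Ok(())
    }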
-// Plain is fine for now; Arc/Box could be used later if we need to support move.
-type ServerSvcFn =
-    fn(
-        Request<Incoming>,
-    ) -> future::Ready<Result<Response<BoxBody<Bytes, hyper::Error>>, hyper::Error>>;
-
-#[derive(Default)]
-pub(crate) struct TestServerBuilder<'a> {
-    // Custom server response function
-    svc_fn: Option<ServerSvcFn>,
-    // CA certificate
-    ca_cert: Option<&'a SelfSigned>,
-    // Server certificate
-    server_cert: Option<&'a SelfSigned>,
-    // Enable mTLS Verification
-    mutual_tls: bool,
-}
-
-impl<'a> TestServerBuilder<'a> {
-    pub(crate) fn new() -> Self {
-        Self {
-            svc_fn: None,
-            server_cert: None,
-            ca_cert: None,
-            mutual_tls: false,
-        }
-    }
-
-    #[expect(unused)]
-    /// Provide a custom server response function.
-    pub(crate) fn with_svc_fn(mut self, svc_fn: ServerSvcFn) -> Self {
-        self.svc_fn = Some(svc_fn);
-        self
-    }
-
-    /// Provide the server certificate. This will enable TLS (HTTPS).
-    pub(crate) fn with_server_cert(mut self, server_cert: &'a SelfSigned) -> Self {
-        self.server_cert = Some(server_cert);
-        self
-    }
-
-    /// CA certificate used to build the `RootCertStore` for client verification.
-    /// Requires `with_server_cert`.
-    pub(crate) fn with_ca_cert(mut self, ca_cert: &'a SelfSigned) -> Self {
-        self.ca_cert = Some(ca_cert);
-        self
-    }
-
-    /// Enforce mutual TLS (client cert auth).
-    /// Requires `with_server_cert` and `with_ca_cert`.
-    pub(crate) fn with_mutual_tls(mut self, mutual: bool) -> Self {
-        self.mutual_tls = mutual;
-        self
-    }
-
-    /// Starts the HTTP(S) server with optional mTLS enforcement.
-    pub(crate) async fn start(self) -> Result<(JoinHandle<Result<()>>, SocketAddr)> {
-        // Validate builder input combinations
-        if self.ca_cert.is_some() && self.server_cert.is_none() {
-            anyhow::bail!("server certificate is required when CA certificate is provided");
-        }
-        if self.mutual_tls && (self.ca_cert.is_none() || self.server_cert.is_none()) {
-            anyhow::bail!("ca certificate is required for mTLS");
-        }
-
-        // Set up the TCP listener on a random available port
-        let listener = TcpListener::bind("127.0.0.1:0").await?;
-        let addr = listener.local_addr()?;
-
-        // Setup TLS Config (if any)
-        let tls_acceptor = if let Some(server_cert) = self.server_cert {
-            // Prepare Server Cert and KeyPair
-            let server_key = PrivateKeyDer::try_from(server_cert.private.serialize_der()).unwrap();
-            let server_cert = vec![CertificateDer::from(server_cert.public.der().to_vec())];
-
-            // Setup CA Verifier
-            let client_verifier = if let Some(ca_cert) = self.ca_cert {
-                let mut root_store = RootCertStore::empty();
-                root_store
-                    .add(CertificateDer::from(ca_cert.public.der().to_vec()))
-                    .expect("failed to add CA cert");
-                if self.mutual_tls {
-                    // Setup mTLS CA config
-                    WebPkiClientVerifier::builder(root_store.into())
-                        .build()
-                        .expect("failed to setup client verifier")
-                } else {
-                    // Only load the CA roots
-                    WebPkiClientVerifier::builder(root_store.into())
-                        .allow_unauthenticated()
-                        .build()
-                        .expect("failed to setup client verifier")
-                }
-            } else {
-                WebPkiClientVerifier::no_client_auth()
-            };
-
-            let mut tls_config = ServerConfig::builder()
-                .with_client_cert_verifier(client_verifier)
-                .with_single_cert(server_cert, server_key)?;
-            tls_config.alpn_protocols = vec![b"http/1.1".to_vec(), b"http/1.0".to_vec()];
-
-            Some(TlsAcceptor::from(Arc::new(tls_config)))
-        } else {
-            None
-        };
-
-        // Setup Response Handler
-        let svc_fn = if let Some(custom_svc_fn) = self.svc_fn {
-            custom_svc_fn
-        } else {
-            |req: Request<Incoming>| {
-                // Get User Agent Header and send it back in the response
-                let user_agent = req
-                    .headers()
-                    .get(USER_AGENT)
-                    .and_then(|v| v.to_str().ok())
-                    .map(ToString::to_string)
-                    .unwrap_or_default(); // Empty Default
-                let response_content = Full::new(Bytes::from(user_agent))
-                    .map_err(|_| unreachable!())
-                    .boxed();
-                // If we ever want a true echo server, we can use instead
-                // let response_content = req.into_body().boxed();
-                // although uv-client doesn't expose post currently.
-                future::ok::<_, hyper::Error>(Response::new(response_content))
-            }
-        };
-
-        // Spawn the server loop in a background task
-        let server_task = tokio::spawn(async move {
-            let svc = service_fn(move |req: Request<Incoming>| svc_fn(req));
-
-            let (tcp_stream, _remote_addr) = listener
-                .accept()
-                .await
-                .context("Failed to accept TCP connection")?;
-
-            // Start Server (not wrapped in loop {} since we want a single response server)
-            // If we want server to accept multiple connections, we can wrap it in loop {}
-            // but we'll need to ensure to handle termination signals in the tests otherwise
-            // it may never stop.
-            if let Some(tls_acceptor) = tls_acceptor {
-                let tls_stream = tls_acceptor
-                    .accept(tcp_stream)
-                    .await
-                    .context("Failed to accept TLS connection")?;
-                let socket = TokioIo::new(tls_stream);
-                tokio::task::spawn(async move {
-                    Builder::new(TokioExecutor::new())
-                        .serve_connection(socket, svc)
-                        .await
-                        .expect("HTTPS Server Started");
-                });
-            } else {
-                let socket = TokioIo::new(tcp_stream);
-                tokio::task::spawn(async move {
-                    Builder::new(TokioExecutor::new())
-                        .serve_connection(socket, svc)
-                        .await
-                        .expect("HTTP Server Started");
-                });
-            }
-
-            Ok(())
-        });
-
-        Ok((server_task, addr))
-    }
-}
-
-/// Single Request HTTP server that echoes the User Agent Header.
-pub(crate) async fn start_http_user_agent_server() -> Result<(JoinHandle<Result<()>>, SocketAddr)> {
-    TestServerBuilder::new().start().await
-}
-
-/// Single Request HTTPS server that echoes the User Agent Header.
-pub(crate) async fn start_https_user_agent_server(
-    server_cert: &SelfSigned,
-) -> Result<(JoinHandle<Result<()>>, SocketAddr)> {
-    TestServerBuilder::new()
-        .with_server_cert(server_cert)
-        .start()
-        .await
-}
-
-/// Single Request HTTPS mTLS server that echoes the User Agent Header.
-pub(crate) async fn start_https_mtls_user_agent_server(
-    ca_cert: &SelfSigned,
-    server_cert: &SelfSigned,
-) -> Result<(JoinHandle<Result<()>>, SocketAddr)> {
-    TestServerBuilder::new()
-        .with_ca_cert(ca_cert)
-        .with_server_cert(server_cert)
-        .with_mutual_tls(true)
-        .start()
-        .await
-}
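On the client side, tests exercising these removed servers would need to trust the generated CA and, for mTLS, present the client certificate. A hedged sketch with plain reqwest (assuming PEM-encoded material and a TLS-enabled reqwest feature; these are not the helpers this crate actually uses):

    // `ca_pem` is the CA certificate; `identity_pem` is the client cert + key, both PEM.
    fn tls_client(ca_pem: &[u8], identity_pem: &[u8]) -> reqwest::Result<reqwest::Client> {
        reqwest::Client::builder()
            // Trust the test CA in addition to (or instead of) system roots.
            .add_root_certificate(reqwest::Certificate::from_pem(ca_pem)?)
            // Present a client identity when the server enforces mTLS.
            .identity(reqwest::Identity::from_pem(identity_pem)?)
            .build()
    }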
@@ -1,4 +1,2 @@
-mod http_util;
 mod remote_metadata;
-mod ssl_certs;
 mod user_agent_version;
@@ -3,7 +3,7 @@ use std::str::FromStr;
 use anyhow::Result;

 use uv_cache::Cache;
-use uv_client::{BaseClientBuilder, RegistryClientBuilder};
+use uv_client::RegistryClientBuilder;
 use uv_distribution_filename::WheelFilename;
 use uv_distribution_types::{BuiltDist, DirectUrlBuiltDist, IndexCapabilities};
 use uv_pep508::VerbatimUrl;
@@ -11,8 +11,8 @@ use uv_redacted::DisplaySafeUrl;

 #[tokio::test]
 async fn remote_metadata_with_and_without_cache() -> Result<()> {
-    let cache = Cache::temp()?.init().await?;
-    let client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache).build();
+    let cache = Cache::temp()?.init()?;
+    let client = RegistryClientBuilder::new(cache).build();

     // The first run is without cache (the tempdir is empty), the second has the cache from the
     // first run.
@@ -21,11 +21,11 @@ async fn remote_metadata_with_and_without_cache() -> Result<()> {
         let filename = WheelFilename::from_str(url.rsplit_once('/').unwrap().1)?;
         let dist = BuiltDist::DirectUrl(DirectUrlBuiltDist {
             filename,
-            location: Box::new(DisplaySafeUrl::parse(url)?),
-            url: VerbatimUrl::from_str(url)?,
+            location: Box::new(DisplaySafeUrl::parse(url).unwrap()),
+            url: VerbatimUrl::from_str(url).unwrap(),
         });
         let capabilities = IndexCapabilities::default();
-        let metadata = client.wheel_metadata(&dist, &capabilities).await?;
+        let metadata = client.wheel_metadata(&dist, &capabilities).await.unwrap();
         assert_eq!(metadata.version.to_string(), "4.66.1");
     }