mirror of https://github.com/astral-sh/uv
Compare commits
No commits in common. "main" and "0.7.22" have entirely different histories.
@@ -1,81 +0,0 @@
# /// script
# requires-python = ">=3.12"
# dependencies = []
# ///

"""Post-edit hook to auto-format files after Claude edits."""

import json
import subprocess
import sys
from pathlib import Path


def format_rust(file_path: str, cwd: str) -> None:
    """Format Rust files with cargo fmt."""
    try:
        subprocess.run(
            ["cargo", "fmt", "--", file_path],
            cwd=cwd,
            capture_output=True,
        )
    except FileNotFoundError:
        pass


def format_python(file_path: str, cwd: str) -> None:
    """Format Python files with ruff."""
    try:
        subprocess.run(
            ["uvx", "ruff", "format", file_path],
            cwd=cwd,
            capture_output=True,
        )
    except FileNotFoundError:
        pass


def format_prettier(file_path: str, cwd: str, prose_wrap: bool = False) -> None:
    """Format files with prettier."""
    args = ["npx", "prettier", "--write"]
    if prose_wrap:
        args.extend(["--prose-wrap", "always"])
    args.append(file_path)
    try:
        subprocess.run(args, cwd=cwd, capture_output=True)
    except FileNotFoundError:
        pass


def main() -> None:
    import os

    input_data = json.load(sys.stdin)

    tool_name = input_data.get("tool_name")
    tool_input = input_data.get("tool_input", {})
    file_path = tool_input.get("file_path")

    # Only process Write, Edit, and MultiEdit tools
    if tool_name not in ("Write", "Edit", "MultiEdit"):
        return

    if not file_path:
        return

    cwd = os.environ.get("CLAUDE_PROJECT_DIR", os.getcwd())
    path = Path(file_path)
    ext = path.suffix

    if ext == ".rs":
        format_rust(file_path, cwd)
    elif ext in (".py", ".pyi"):
        format_python(file_path, cwd)
    elif ext in (".json5", ".yaml", ".yml"):
        format_prettier(file_path, cwd)
    elif ext == ".md":
        format_prettier(file_path, cwd, prose_wrap=True)


if __name__ == "__main__":
    main()
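
As a quick local sanity check, the hook can be exercised with a synthetic payload. This is a minimal sketch, assuming the stdin schema the script reads (tool_name plus tool_input.file_path); the payload values are hypothetical:

# Minimal sketch (hypothetical payload): append to the script above and run it.
import io

payload = {"tool_name": "Edit", "tool_input": {"file_path": "README.md"}}
sys.stdin = io.StringIO(json.dumps(payload))
main()  # the .md suffix dispatches to format_prettier(..., prose_wrap=True)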
@@ -1,15 +0,0 @@
{
  "hooks": {
    "PostToolUse": [
      {
        "matcher": "Edit|Write|MultiEdit",
        "hooks": [
          {
            "type": "command",
            "command": "uv run .claude/hooks/post-edit-format.py"
          }
        ]
      }
    ]
  }
}
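
The same wiring can be smoke-tested outside Claude by piping an event into the configured command. A sketch, assuming it is run from the repository root with uv on PATH and a hypothetical event:

# Sketch: drive the configured hook command with a synthetic event.
import json
import subprocess

event = json.dumps({"tool_name": "Write", "tool_input": {"file_path": "src/lib.rs"}})
subprocess.run(
    ["uv", "run", ".claude/hooks/post-edit-format.py"],
    input=event,
    text=True,
    check=True,
)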
@@ -2,14 +2,3 @@
# Mark tests that take longer than 10s as slow.
# Terminate after 120s as a stop-gap measure to terminate on deadlock.
slow-timeout = { period = "10s", terminate-after = 12 }

[test-groups]
serial = { max-threads = 1 }

[[profile.default.overrides]]
filter = 'test(native_auth)'
test-group = 'serial'

[[profile.default.overrides]]
filter = 'package(uv-keyring)'
test-group = 'serial'
@@ -3,19 +3,20 @@
  dependencyDashboard: true,
  suppressNotifications: ["prEditedNotification"],
  extends: [
    "github>astral-sh/renovate-config",
    "config:recommended",
    // For tool versions defined in GitHub Actions:
    "customManagers:githubActionsVersions",
  ],
  labels: ["internal"],
  schedule: ["* 0-3 * * 1"],
  schedule: ["before 4am on Monday"],
  semanticCommits: "disabled",
  separateMajorMinor: false,
  prHourlyLimit: 10,
  enabledManagers: ["github-actions", "pre-commit", "cargo", "custom.regex"],
  cargo: {
    // See https://docs.renovatebot.com/configuration-options/#rangestrategy
    rangeStrategy: "update-lockfile",
    managerFilePatterns: ["/^Cargo\\.toml$/", "/^crates/.*Cargo\\.toml$/"],
    fileMatch: ["^crates/.*Cargo\\.toml$"],
  },
  "pre-commit": {
    enabled: true,
@@ -85,61 +86,18 @@
      description: "Weekly update of pyo3 dependencies",
      enabled: false,
    },
    {
      groupName: "pubgrub",
      matchManagers: ["cargo"],
      matchDepNames: ["pubgrub", "version-ranges"],
      description: "version-ranges and pubgrub are in the same Git repository",
    },
    {
      commitMessageTopic: "MSRV",
      matchManagers: ["custom.regex"],
      matchDepNames: ["msrv"],
      // We have a rolling support policy for the MSRV
      // 2 releases back * 6 weeks per release * 7 days per week + 1
      minimumReleaseAge: "85 days",
      internalChecksFilter: "strict",
      groupName: "MSRV",
    },
    {
      matchManagers: ["custom.regex"],
      matchDepNames: ["rust"],
      commitMessageTopic: "Rust",
    },
  ],
  customManagers: [
    // Update major GitHub actions references in documentation.
    {
      customType: "regex",
      managerFilePatterns: ["/^docs/.*\\.md$/"],
      fileMatch: ["^docs/.*\\.md$"],
      matchStrings: [
        "\\suses: (?<depName>[\\w-]+/[\\w-]+)(?<path>/.*)?@(?<currentValue>.+?)\\s",
      ],
      datasourceTemplate: "github-tags",
      versioningTemplate: "regex:^v(?<major>\\d+)$",
    },
    // Minimum supported Rust toolchain version
    {
      customType: "regex",
      managerFilePatterns: ["/(^|/)Cargo\\.toml?$/"],
      matchStrings: [
        'rust-version\\s*=\\s*"(?<currentValue>\\d+\\.\\d+(\\.\\d+)?)"',
      ],
      depNameTemplate: "msrv",
      packageNameTemplate: "rust-lang/rust",
      datasourceTemplate: "github-releases",
    },
    // Rust toolchain version
    {
      customType: "regex",
      managerFilePatterns: ["/(^|/)rust-toolchain\\.toml?$/"],
      matchStrings: [
        'channel\\s*=\\s*"(?<currentValue>\\d+\\.\\d+(\\.\\d+)?)"',
      ],
      depNameTemplate: "rust",
      packageNameTemplate: "rust-lang/rust",
      datasourceTemplate: "github-releases",
    },
  ],
  vulnerabilityAlerts: {
    commitMessageSuffix: "",
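
As an illustration (not part of the config), the MSRV matchString above can be checked with Python's re module. Note that Renovate's (?<name>) group syntax becomes (?P<name>) in Python, and the version string here is hypothetical:

# Illustration only: the MSRV matchString against a sample Cargo.toml line.
import re

pattern = re.compile(r'rust-version\s*=\s*"(?P<currentValue>\d+\.\d+(\.\d+)?)"')
match = pattern.search('rust-version = "1.85"')  # hypothetical version
assert match is not None and match.group("currentValue") == "1.85"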
@@ -40,17 +40,12 @@ env:
  CARGO_TERM_COLOR: always
  RUSTUP_MAX_RETRIES: 10

permissions: {}

jobs:
  sdist:
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false

      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}
@@ -59,18 +54,17 @@ jobs:
      - name: "Prep README.md"
        run: python scripts/transform_readme.py --target pypi
      - name: "Build sdist"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          command: sdist
          args: --out dist
      - name: "Test sdist"
        run: |
          # We can't use `--find-links` here, since we need maturin, which means no `--no-index`, and without that option
          # we run the risk that pip pulls uv from PyPI instead.
          pip install dist/${PACKAGE_NAME}-*.tar.gz --force-reinstall
          ${MODULE_NAME} --help
          python -m ${MODULE_NAME} --help
          pip install dist/${{ env.PACKAGE_NAME }}-*.tar.gz --force-reinstall
          ${{ env.MODULE_NAME }} --help
          python -m ${{ env.MODULE_NAME }} --help
          uvx --help
      - name: "Upload sdist"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
@@ -80,16 +74,15 @@ jobs:

      # uv-build
      - name: "Build sdist uv-build"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          command: sdist
          args: --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
      - name: "Test sdist uv-build"
        run: |
          pip install crates/uv-build/dist/${PACKAGE_NAME}_build-*.tar.gz --force-reinstall
          ${MODULE_NAME}-build --help
          python -m ${MODULE_NAME}_build --help
          pip install crates/uv-build/dist/${{ env.PACKAGE_NAME }}_build-*.tar.gz --force-reinstall
          ${{ env.MODULE_NAME }}-build --help
          python -m ${{ env.MODULE_NAME }}_build --help
      - name: "Upload sdist uv-build"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -98,12 +91,9 @@ jobs:

  macos-x86_64:
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
    runs-on: depot-macos-14
    runs-on: macos-14
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false

      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}
@@ -113,9 +103,8 @@ jobs:

      # uv
      - name: "Build wheels - x86_64"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          target: x86_64
          args: --release --locked --out dist --features self-update
      - name: "Upload wheels"
@@ -144,9 +133,8 @@ jobs:

      # uv-build
      - name: "Build wheels uv-build - x86_64"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          target: x86_64
          args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
      - name: "Upload wheels uv-build"
@@ -157,12 +145,9 @@ jobs:

  macos-aarch64:
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
    runs-on: depot-macos-14
    runs-on: macos-14
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false

      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}
@@ -172,16 +157,15 @@ jobs:

      # uv
      - name: "Build wheels - aarch64"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          target: aarch64
          args: --release --locked --out dist --features self-update
      - name: "Test wheel - aarch64"
        run: |
          pip install ${PACKAGE_NAME} --no-index --find-links dist/ --force-reinstall
          ${MODULE_NAME} --help
          python -m ${MODULE_NAME} --help
          pip install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
          ${{ env.MODULE_NAME }} --help
          python -m ${{ env.MODULE_NAME }} --help
          uvx --help
      - name: "Upload wheels"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
@@ -209,16 +193,15 @@ jobs:

      # uv-build
      - name: "Build wheels uv-build - aarch64"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          target: aarch64
          args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
      - name: "Test wheel - aarch64"
        run: |
          pip install ${PACKAGE_NAME}_build --no-index --find-links crates/uv-build/dist --force-reinstall
          ${MODULE_NAME}-build --help
          python -m ${MODULE_NAME}_build --help
          pip install ${{ env.PACKAGE_NAME }}_build --no-index --find-links crates/uv-build/dist --force-reinstall
          ${{ env.MODULE_NAME }}-build --help
          python -m ${{ env.MODULE_NAME }}_build --help
      - name: "Upload wheels uv-build"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -239,9 +222,6 @@ jobs:
          arch: x64 # not relevant here
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false

      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}
@@ -251,18 +231,17 @@ jobs:

      # uv
      - name: "Build wheels"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          target: ${{ matrix.platform.target }}
          args: --release --locked --out dist --features self-update,windows-gui-bin
      - name: "Test wheel"
        if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
        shell: bash
        run: |
          pip install ${PACKAGE_NAME} --no-index --find-links dist/ --force-reinstall
          ${MODULE_NAME} --help
          python -m ${MODULE_NAME} --help
          pip install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
          ${{ env.MODULE_NAME }} --help
          python -m ${{ env.MODULE_NAME }} --help
          uvx --help
          uvw --help
      - name: "Upload wheels"
@@ -273,13 +252,11 @@ jobs:
      - name: "Archive binary"
        shell: bash
        run: |
          ARCHIVE_FILE=uv-${PLATFORM_TARGET}.zip
          7z a $ARCHIVE_FILE ./target/${PLATFORM_TARGET}/release/uv.exe
          7z a $ARCHIVE_FILE ./target/${PLATFORM_TARGET}/release/uvx.exe
          7z a $ARCHIVE_FILE ./target/${PLATFORM_TARGET}/release/uvw.exe
          ARCHIVE_FILE=uv-${{ matrix.platform.target }}.zip
          7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/uv.exe
          7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/uvx.exe
          7z a $ARCHIVE_FILE ./target/${{ matrix.platform.target }}/release/uvw.exe
          sha256sum $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
        env:
          PLATFORM_TARGET: ${{ matrix.platform.target }}
      - name: "Upload binary"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -290,18 +267,17 @@ jobs:

      # uv-build
      - name: "Build wheels uv-build"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          target: ${{ matrix.platform.target }}
          args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
      - name: "Test wheel uv-build"
        if: ${{ !startsWith(matrix.platform.target, 'aarch64') }}
        shell: bash
        run: |
          pip install ${PACKAGE_NAME}_build --no-index --find-links crates/uv-build/dist --force-reinstall
          ${MODULE_NAME}-build --help
          python -m ${MODULE_NAME}_build --help
          pip install ${{ env.PACKAGE_NAME }}_build --no-index --find-links crates/uv-build/dist --force-reinstall
          ${{ env.MODULE_NAME }}-build --help
          python -m ${{ env.MODULE_NAME }}_build --help
      - name: "Upload wheels uv-build"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -318,9 +294,6 @@ jobs:
          - { target: "x86_64-unknown-linux-gnu", cc: "gcc" }
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false

      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}
@@ -330,9 +303,8 @@ jobs:

      # uv
      - name: "Build wheels"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          target: ${{ matrix.target }}
          # Generally, we try to build in a target docker container. In this case however, a
          # 32-bit compiler runs out of memory (4GB memory limit for 32-bit), so we cross compile
@@ -365,9 +337,9 @@ jobs:
      - name: "Test wheel"
        if: ${{ startsWith(matrix.target, 'x86_64') }}
        run: |
          pip install ${PACKAGE_NAME} --no-index --find-links dist/ --force-reinstall
          ${MODULE_NAME} --help
          python -m ${MODULE_NAME} --help
          pip install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
          ${{ env.MODULE_NAME }} --help
          python -m ${{ env.MODULE_NAME }} --help
          uvx --help
      - name: "Upload wheels"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
@@ -377,6 +349,7 @@ jobs:
      - name: "Archive binary"
        shell: bash
        run: |
          TARGET=${{ matrix.target }}
          ARCHIVE_NAME=uv-$TARGET
          ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz

@@ -385,8 +358,6 @@ jobs:
          cp target/$TARGET/release/uvx $ARCHIVE_NAME/uvx
          tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
          shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
        env:
          TARGET: ${{ matrix.target }}
      - name: "Upload binary"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -397,18 +368,17 @@ jobs:

      # uv-build
      - name: "Build wheels uv-build"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          target: ${{ matrix.target }}
          manylinux: auto
          args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
      - name: "Test wheel uv-build"
        if: ${{ startsWith(matrix.target, 'x86_64') }}
        run: |
          pip install ${PACKAGE_NAME}_build --no-index --find-links crates/uv-build/dist --force-reinstall
          ${MODULE_NAME}-build --help
          python -m ${MODULE_NAME}_build --help
          pip install ${{ env.PACKAGE_NAME }}_build --no-index --find-links crates/uv-build/dist --force-reinstall
          ${{ env.MODULE_NAME }}-build --help
          python -m ${{ env.MODULE_NAME }}_build --help
      - name: "Upload wheels uv-build"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -417,7 +387,7 @@ jobs:

  linux-arm:
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
    runs-on: depot-ubuntu-22.04-8
    runs-on: ubuntu-latest
    timeout-minutes: 30
    strategy:
      matrix:
@@ -434,9 +404,6 @@ jobs:

    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false

      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}
@@ -445,9 +412,8 @@ jobs:

      # uv
      - name: "Build wheels"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          target: ${{ matrix.platform.target }}
          # On `aarch64`, use `manylinux: 2_28`; otherwise, use `manylinux: auto`.
          manylinux: ${{ matrix.platform.arch == 'aarch64' && '2_28' || 'auto' }}
@@ -463,14 +429,11 @@ jobs:
            apt-get install -y --no-install-recommends python3 python3-pip python-is-python3
            pip3 install -U pip
          run: |
            pip install ${PACKAGE_NAME} --no-index --find-links dist/ --force-reinstall
            ${MODULE_NAME} --help
            pip install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
            ${{ env.MODULE_NAME }} --help
            # TODO(konsti): Enable this test on all platforms, currently `find_uv_bin` is failing to discover uv here.
            # python -m ${MODULE_NAME} --help
            # python -m ${{ env.MODULE_NAME }} --help
            uvx --help
          env: |
            PACKAGE_NAME: ${{ env.PACKAGE_NAME }}
            MODULE_NAME: ${{ env.MODULE_NAME }}
      - name: "Upload wheels"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -479,6 +442,7 @@ jobs:
      - name: "Archive binary"
        shell: bash
        run: |
          TARGET=${{ matrix.platform.target }}
          ARCHIVE_NAME=uv-$TARGET
          ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz

@@ -487,8 +451,6 @@ jobs:
          cp target/$TARGET/release/uvx $ARCHIVE_NAME/uvx
          tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
          shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
        env:
          TARGET: ${{ matrix.platform.target }}
      - name: "Upload binary"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -499,9 +461,8 @@ jobs:

      # uv-build
      - name: "Build wheels uv-build"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          target: ${{ matrix.platform.target }}
          # On `aarch64`, use `manylinux: 2_28`; otherwise, use `manylinux: auto`.
          manylinux: ${{ matrix.platform.arch == 'aarch64' && '2_28' || 'auto' }}
@@ -517,13 +478,10 @@ jobs:
            apt-get install -y --no-install-recommends python3 python3-pip python-is-python3
            pip3 install -U pip
          run: |
            pip install ${PACKAGE_NAME}_build --no-index --find-links crates/uv-build/dist --force-reinstall
            ${MODULE_NAME}-build --help
            pip install ${{ env.PACKAGE_NAME }}_build --no-index --find-links crates/uv-build/dist --force-reinstall
            ${{ env.MODULE_NAME }}-build --help
            # TODO(konsti): Enable this test on all platforms, currently `find_uv_bin` is failing to discover uv here.
            # python -m ${MODULE_NAME}_build --help
          env: |
            PACKAGE_NAME: ${{ env.PACKAGE_NAME }}
            MODULE_NAME: ${{ env.MODULE_NAME }}
            # python -m ${{ env.MODULE_NAME }}_build --help
      - name: "Upload wheels uv-build"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -543,9 +501,6 @@ jobs:

    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false

      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}
@@ -554,14 +509,15 @@ jobs:

      # uv
      - name: "Build wheels"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          target: ${{ matrix.platform.target }}
          manylinux: auto
          docker-options: ${{ matrix.platform.maturin_docker_options }}
          args: --release --locked --out dist --features self-update
          rust-toolchain: ${{ matrix.platform.toolchain || null }}
          # Until the llvm updates hit stable
          # https://github.com/rust-lang/rust/issues/141287
          rust-toolchain: nightly-2025-05-25
      - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
        if: matrix.platform.arch != 'ppc64'
        name: "Test wheel"
@@ -573,14 +529,11 @@ jobs:
            apt-get install -y --no-install-recommends python3 python3-pip python-is-python3
            pip3 install -U pip
          run: |
            pip install ${PACKAGE_NAME} --no-index --find-links dist/ --force-reinstall
            ${MODULE_NAME} --help
            pip install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
            ${{ env.MODULE_NAME }} --help
            # TODO(konsti): Enable this test on all platforms, currently `find_uv_bin` is failing to discover uv here.
            # python -m ${MODULE_NAME} --help
            # python -m ${{ env.MODULE_NAME }} --help
            uvx --help
          env: |
            PACKAGE_NAME: ${{ env.PACKAGE_NAME }}
            MODULE_NAME: ${{ env.MODULE_NAME }}
      - name: "Upload wheels"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -589,6 +542,7 @@ jobs:
      - name: "Archive binary"
        shell: bash
        run: |
          TARGET=${{ matrix.platform.target }}
          ARCHIVE_NAME=uv-$TARGET
          ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz

@@ -597,8 +551,6 @@ jobs:
          cp target/$TARGET/release/uvx $ARCHIVE_NAME/uvx
          tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
          shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
        env:
          TARGET: ${{ matrix.platform.target }}
      - name: "Upload binary"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -609,9 +561,8 @@ jobs:

      # uv-build
      - name: "Build wheels uv-build"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          target: ${{ matrix.platform.target }}
          manylinux: auto
          docker-options: ${{ matrix.platform.maturin_docker_options }}
@@ -627,13 +578,10 @@ jobs:
            apt-get install -y --no-install-recommends python3 python3-pip python-is-python3
            pip3 install -U pip
          run: |
            pip install ${PACKAGE_NAME}-build --no-index --find-links crates/uv-build/dist --force-reinstall
            ${MODULE_NAME}-build --help
            pip install ${{ env.PACKAGE_NAME }}-build --no-index --find-links crates/uv-build/dist --force-reinstall
            ${{ env.MODULE_NAME }}-build --help
            # TODO(konsti): Enable this test on all platforms, currently `find_uv_bin` is failing to discover uv here.
            # python -m ${MODULE_NAME}-build --help
          env: |
            PACKAGE_NAME: ${{ env.PACKAGE_NAME }}
            MODULE_NAME: ${{ env.MODULE_NAME }}
            # python -m ${{ env.MODULE_NAME }}-build --help
      - name: "Upload wheels uv-build"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -658,9 +606,6 @@ jobs:

    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false

      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}
@@ -669,9 +614,8 @@ jobs:

      # uv
      - name: "Build wheels"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          target: ${{ matrix.platform.target }}
          manylinux: auto
          docker-options: ${{ matrix.platform.maturin_docker_options }}
@@ -695,10 +639,10 @@ jobs:
      #     apt-get install -y --no-install-recommends python3 python3-pip python-is-python3
      #     pip3 install -U pip
      #   run: |
      #     pip install ${PACKAGE_NAME} --no-index --find-links dist/ --force-reinstall
      #     ${MODULE_NAME} --help
      #     pip install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
      #     ${{ env.MODULE_NAME }} --help
      #     # TODO(konsti): Enable this test on all platforms, currently `find_uv_bin` is failing to discover uv here.
      #     # python -m ${MODULE_NAME} --help
      #     # python -m ${{ env.MODULE_NAME }} --help
      #     uvx --help
      - name: "Upload wheels"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
@@ -708,6 +652,7 @@ jobs:
      - name: "Archive binary"
        shell: bash
        run: |
          TARGET=${{ matrix.platform.target }}
          ARCHIVE_NAME=uv-$TARGET
          ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz

@@ -716,8 +661,6 @@ jobs:
          cp target/$TARGET/release/uvx $ARCHIVE_NAME/uvx
          tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
          shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
        env:
          TARGET: ${{ matrix.platform.target }}
      - name: "Upload binary"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -728,9 +671,8 @@ jobs:

      # uv-build
      - name: "Build wheels uv-build"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          target: ${{ matrix.platform.target }}
          manylinux: auto
          docker-options: ${{ matrix.platform.maturin_docker_options }}
@@ -762,9 +704,6 @@ jobs:

    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false

      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}
@@ -773,14 +712,13 @@ jobs:

      # uv
      - name: "Build wheels"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          target: ${{ matrix.platform.target }}
          manylinux: auto
          docker-options: ${{ matrix.platform.maturin_docker_options }}
          args: --release --locked --out dist --features self-update
      - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
      - uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2
        name: "Test wheel"
        with:
          arch: ${{ matrix.platform.arch }}
@@ -791,14 +729,11 @@ jobs:
            apt-get install -y --no-install-recommends python3 python3-pip python-is-python3
            pip3 install -U pip
          run: |
            pip install ${PACKAGE_NAME} --no-index --find-links dist/ --force-reinstall
            ${MODULE_NAME} --help
            pip install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
            ${{ env.MODULE_NAME }} --help
            # TODO(konsti): Enable this test on all platforms, currently `find_uv_bin` is failing to discover uv here.
            # python -m ${MODULE_NAME} --help
            # python -m ${{ env.MODULE_NAME }} --help
            uvx --help
          env: |
            PACKAGE_NAME: ${{ env.PACKAGE_NAME }}
            MODULE_NAME: ${{ env.MODULE_NAME }}
      - name: "Upload wheels"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -807,6 +742,7 @@ jobs:
      - name: "Archive binary"
        shell: bash
        run: |
          TARGET=${{ matrix.platform.target }}
          ARCHIVE_NAME=uv-$TARGET
          ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz

@@ -815,8 +751,6 @@ jobs:
          cp target/$TARGET/release/uvx $ARCHIVE_NAME/uvx
          tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
          shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
        env:
          TARGET: ${{ matrix.platform.target }}
      - name: "Upload binary"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -827,14 +761,13 @@ jobs:

      # uv-build
      - name: "Build wheels uv-build"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          target: ${{ matrix.platform.target }}
          manylinux: auto
          docker-options: ${{ matrix.platform.maturin_docker_options }}
          args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
      - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
      - uses: uraimo/run-on-arch-action@ac33288c3728ca72563c97b8b88dda5a65a84448 # v2
        name: "Test wheel uv-build"
        with:
          arch: ${{ matrix.platform.arch }}
@@ -845,13 +778,10 @@ jobs:
            apt-get install -y --no-install-recommends python3 python3-pip python-is-python3
            pip3 install -U pip
          run: |
            pip install ${PACKAGE_NAME}-build --no-index --find-links crates/uv-build/dist --force-reinstall
            ${MODULE_NAME}-build --help
            pip install ${{ env.PACKAGE_NAME }}-build --no-index --find-links crates/uv-build/dist --force-reinstall
            ${{ env.MODULE_NAME }}-build --help
            # TODO(konsti): Enable this test on all platforms, currently `find_uv_bin` is failing to discover uv here.
            # python -m ${MODULE_NAME}-build --help
          env: |
            PACKAGE_NAME: ${{ env.PACKAGE_NAME }}
            MODULE_NAME: ${{ env.MODULE_NAME }}
            # python -m ${{ env.MODULE_NAME }}-build --help
      - name: "Upload wheels uv-build"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -868,9 +798,6 @@ jobs:
          - i686-unknown-linux-musl
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false

      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}
@@ -880,26 +807,25 @@ jobs:

      # uv
      - name: "Build wheels"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          target: ${{ matrix.target }}
          manylinux: musllinux_1_1
          args: --release --locked --out dist --features self-update
      - name: "Test wheel"
        if: matrix.target == 'x86_64-unknown-linux-musl'
        uses: addnab/docker-run-action@4f65fabd2431ebc8d299f8e5a018d79a769ae185 # v3
        uses: addnab/docker-run-action@v3
        with:
          image: alpine:3.12
          options: -v ${{ github.workspace }}:/io -w /io --env MODULE_NAME --env PACKAGE_NAME
          options: -v ${{ github.workspace }}:/io -w /io
          run: |
            apk add python3
            python3 -m venv .venv
            .venv/bin/pip install --upgrade pip
            .venv/bin/pip install ${PACKAGE_NAME} --no-index --find-links dist/ --force-reinstall
            .venv/bin/${MODULE_NAME} --help
            .venv/bin/pip install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
            .venv/bin/${{ env.MODULE_NAME }} --help
            # TODO(konsti): Enable this test on all platforms, currently `find_uv_bin` is failing to discover uv here.
            # .venv/bin/python -m ${MODULE_NAME} --help
            # .venv/bin/python -m ${{ env.MODULE_NAME }} --help
            .venv/bin/uvx --help
      - name: "Upload wheels"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
@@ -909,6 +835,7 @@ jobs:
      - name: "Archive binary"
        shell: bash
        run: |
          TARGET=${{ matrix.target }}
          ARCHIVE_NAME=uv-$TARGET
          ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz

@@ -917,8 +844,6 @@ jobs:
          cp target/$TARGET/release/uvx $ARCHIVE_NAME/uvx
          tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
          shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
        env:
          TARGET: ${{ matrix.target }}
      - name: "Upload binary"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -929,25 +854,24 @@ jobs:

      # uv-build
      - name: "Build wheels uv-build"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          target: ${{ matrix.target }}
          manylinux: musllinux_1_1
          args: --profile minimal-size --locked --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
      - name: "Test wheel uv-build"
        if: matrix.target == 'x86_64-unknown-linux-musl'
        uses: addnab/docker-run-action@4f65fabd2431ebc8d299f8e5a018d79a769ae185 # v3
        uses: addnab/docker-run-action@v3
        with:
          image: alpine:3.12
          options: -v ${{ github.workspace }}:/io -w /io --env MODULE_NAME --env PACKAGE_NAME
          options: -v ${{ github.workspace }}:/io -w /io
          run: |
            apk add python3
            python3 -m venv .venv
            .venv/bin/pip install --upgrade pip
            .venv/bin/pip install ${PACKAGE_NAME}-build --no-index --find-links crates/uv-build/dist --force-reinstall
            .venv/bin/${MODULE_NAME}-build --help
            .venv/bin/python -m ${MODULE_NAME}_build --help
            .venv/bin/pip install ${{ env.PACKAGE_NAME }}-build --no-index --find-links crates/uv-build/dist --force-reinstall
            .venv/bin/${{ env.MODULE_NAME }}-build --help
            .venv/bin/python -m ${{ env.MODULE_NAME }}_build --help
      - name: "Upload wheels uv-build"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -956,7 +880,7 @@ jobs:

  musllinux-cross:
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
    runs-on: depot-ubuntu-22.04-8
    runs-on: ubuntu-latest
    strategy:
      matrix:
        platform:
@@ -969,9 +893,6 @@ jobs:

    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false

      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: ${{ env.PYTHON_VERSION }}
@@ -980,9 +901,8 @@ jobs:

      # uv
      - name: "Build wheels"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          target: ${{ matrix.platform.target }}
          manylinux: musllinux_1_1
          args: --release --locked --out dist --features self-update ${{ matrix.platform.arch == 'aarch64' && '--compatibility 2_17' || ''}}
@@ -997,14 +917,11 @@ jobs:
            apk add python3
          run: |
            python -m venv .venv
            .venv/bin/pip install ${PACKAGE_NAME} --no-index --find-links dist/ --force-reinstall
            .venv/bin/${MODULE_NAME} --help
            .venv/bin/pip install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
            .venv/bin/${{ env.MODULE_NAME }} --help
            # TODO(konsti): Enable this test on all platforms, currently `find_uv_bin` is failing to discover uv here.
            # .venv/bin/python -m ${MODULE_NAME} --help
            # .venv/bin/python -m ${{ env.MODULE_NAME }} --help
            .venv/bin/uvx --help
          env: |
            PACKAGE_NAME: ${{ env.PACKAGE_NAME }}
            MODULE_NAME: ${{ env.MODULE_NAME }}
      - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
        name: "Test wheel (manylinux)"
        if: matrix.platform.arch == 'aarch64'
@@ -1016,14 +933,11 @@ jobs:
            apt-get install -y --no-install-recommends python3 python3-pip python-is-python3
            pip3 install -U pip
          run: |
            pip install ${PACKAGE_NAME} --no-index --find-links dist/ --force-reinstall
            ${MODULE_NAME} --help
            pip install ${{ env.PACKAGE_NAME }} --no-index --find-links dist/ --force-reinstall
            ${{ env.MODULE_NAME }} --help
            # TODO(konsti): Enable this test on all platforms, currently `find_uv_bin` is failing to discover uv here.
            # python -m ${MODULE_NAME} --help
            # python -m ${{ env.MODULE_NAME }} --help
            uvx --help
          env: |
            PACKAGE_NAME: ${{ env.PACKAGE_NAME }}
            MODULE_NAME: ${{ env.MODULE_NAME }}
      - name: "Upload wheels"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -1032,17 +946,16 @@ jobs:
      - name: "Archive binary"
        shell: bash
        run: |
          TARGET=${{ matrix.platform.target }}
          ARCHIVE_NAME=uv-$TARGET
          ARCHIVE_FILE=$ARCHIVE_NAME.tar.gz
          PROFILE="${{ matrix.platform.arch == 'ppc64le' && 'release-no-lto' || 'release' }}"

          mkdir -p $ARCHIVE_NAME
          cp target/$TARGET/$PROFILE/uv $ARCHIVE_NAME/uv
          cp target/$TARGET/$PROFILE/uvx $ARCHIVE_NAME/uvx
          tar czvf $ARCHIVE_FILE $ARCHIVE_NAME
          shasum -a 256 $ARCHIVE_FILE > $ARCHIVE_FILE.sha256
        env:
          TARGET: ${{ matrix.platform.target }}
          PROFILE: ${{ matrix.platform.arch == 'ppc64le' && 'release-no-lto' || 'release' }}
      - name: "Upload binary"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -1053,9 +966,8 @@ jobs:

      # uv-build
      - name: "Build wheels"
        uses: PyO3/maturin-action@86b9d133d34bc1b40018696f782949dac11bd380 # v1.49.4
        uses: PyO3/maturin-action@e10f6c464b90acceb5f640d31beda6d586ba7b4a # v1.49.3
        with:
          maturin-version: v1.9.6
          target: ${{ matrix.platform.target }}
          manylinux: musllinux_1_1
          args: --profile minimal-size --locked ${{ matrix.platform.arch == 'aarch64' && '--compatibility 2_17' || ''}} --out crates/uv-build/dist -m crates/uv-build/Cargo.toml
@@ -1070,13 +982,10 @@ jobs:
            apk add python3
          run: |
            python -m venv .venv
            .venv/bin/pip install ${PACKAGE_NAME}-build --no-index --find-links crates/uv-build/dist --force-reinstall
            .venv/bin/${MODULE_NAME}-build --help
            .venv/bin/pip install ${{ env.PACKAGE_NAME }}-build --no-index --find-links crates/uv-build/dist --force-reinstall
            .venv/bin/${{ env.MODULE_NAME }}-build --help
            # TODO(konsti): Enable this test on all platforms, currently `find_uv_bin` is failing to discover uv here.
            # .venv/bin/python -m ${MODULE_NAME}_build --help
          env: |
            PACKAGE_NAME: ${{ env.PACKAGE_NAME }}
            MODULE_NAME: ${{ env.MODULE_NAME }}
            # .venv/bin/python -m ${{ env.MODULE_NAME }}_build --help
      - uses: uraimo/run-on-arch-action@d94c13912ea685de38fccc1109385b83fd79427d # v3.0.1
        name: "Test wheel (manylinux)"
        if: matrix.platform.arch == 'aarch64'
@@ -1088,13 +997,10 @@ jobs:
            apt-get install -y --no-install-recommends python3 python3-pip python-is-python3
            pip3 install -U pip
          run: |
            pip install ${PACKAGE_NAME}-build --no-index --find-links crates/uv-build/dist --force-reinstall
            ${MODULE_NAME}-build --help
            pip install ${{ env.PACKAGE_NAME }}-build --no-index --find-links crates/uv-build/dist --force-reinstall
            ${{ env.MODULE_NAME }}-build --help
            # TODO(konsti): Enable this test on all platforms, currently `find_uv_bin` is failing to discover uv here.
            # python -m ${MODULE_NAME}_build --help
          env: |
            PACKAGE_NAME: ${{ env.PACKAGE_NAME }}
            MODULE_NAME: ${{ env.MODULE_NAME }}
            # python -m ${{ env.MODULE_NAME }}_build --help
      - name: "Upload wheels"
        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
@@ -40,8 +40,6 @@ env:
  UV_GHCR_IMAGE: ghcr.io/${{ github.repository_owner }}/uv
  UV_DOCKERHUB_IMAGE: docker.io/astral/uv

permissions: {}

jobs:
  docker-plan:
    name: plan
@@ -59,13 +57,13 @@ jobs:
          IS_LOCAL_PR: ${{ github.event.pull_request.head.repo.full_name == 'astral-sh/uv' }}
        id: plan
        run: |
          if [ "${DRY_RUN}" == "false" ]; then
          if [ "${{ env.DRY_RUN }}" == "false" ]; then
            echo "login=true" >> "$GITHUB_OUTPUT"
            echo "push=true" >> "$GITHUB_OUTPUT"
            echo "tag=${TAG}" >> "$GITHUB_OUTPUT"
            echo "tag=${{ env.TAG }}" >> "$GITHUB_OUTPUT"
            echo "action=build and publish" >> "$GITHUB_OUTPUT"
          else
            echo "login=${IS_LOCAL_PR}" >> "$GITHUB_OUTPUT"
            echo "login=${{ env.IS_LOCAL_PR }}" >> "$GITHUB_OUTPUT"
            echo "push=false" >> "$GITHUB_OUTPUT"
            echo "tag=dry-run" >> "$GITHUB_OUTPUT"
            echo "action=build" >> "$GITHUB_OUTPUT"
@ -93,16 +91,15 @@ jobs:
|
|||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
|
||||
# Login to DockerHub (when not pushing, it's to avoid rate-limiting)
|
||||
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
|
||||
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
if: ${{ needs.docker-plan.outputs.login == 'true' }}
|
||||
with:
|
||||
username: ${{ needs.docker-plan.outputs.push == 'true' && 'astral' || 'astralshbot' }}
|
||||
password: ${{ needs.docker-plan.outputs.push == 'true' && secrets.DOCKERHUB_TOKEN_RW || secrets.DOCKERHUB_TOKEN_RO }}
|
||||
|
||||
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
|
||||
- uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
|
|
@@ -114,20 +111,18 @@ jobs:
        if: ${{ needs.docker-plan.outputs.push == 'true' }}
        run: |
          version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
          if [ "${TAG}" != "${version}" ]; then
          if [ "${{ needs.docker-plan.outputs.tag }}" != "${version}" ]; then
            echo "The input tag does not match the version from pyproject.toml:" >&2
            echo "${TAG}" >&2
            echo "${{ needs.docker-plan.outputs.tag }}" >&2
            echo "${version}" >&2
            exit 1
          else
            echo "Releasing ${version}"
          fi
        env:
          TAG: ${{ needs.docker-plan.outputs.tag }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
        env:
          DOCKER_METADATA_ANNOTATIONS_LEVELS: index
        with:
@@ -142,7 +137,7 @@ jobs:

      - name: Build and push by digest
        id: build
        uses: depot/build-push-action@9785b135c3c76c33db102e45be96a25ab55cd507 # v1.16.2
        uses: depot/build-push-action@2583627a84956d07561420dcc1d0eb1f2af3fac0 # v1.15.0
        with:
          project: 7hd4vdzmw5 # astral-sh/uv
          context: .
@@ -178,39 +173,24 @@ jobs:
        # Mapping of base image followed by a comma followed by one or more base tags (comma separated)
        # Note, org.opencontainers.image.version label will use the first base tag (use the most specific tag first)
        image-mapping:
          - alpine:3.22,alpine3.22,alpine
          - alpine:3.21,alpine3.21
          - debian:trixie-slim,trixie-slim,debian-slim
          - buildpack-deps:trixie,trixie,debian
          - debian:bookworm-slim,bookworm-slim
          - buildpack-deps:bookworm,bookworm
          - python:3.14-alpine3.23,python3.14-alpine3.23,python3.14-alpine
          - python:3.13-alpine3.23,python3.13-alpine3.23,python3.13-alpine
          - python:3.12-alpine3.23,python3.12-alpine3.23,python3.12-alpine
          - python:3.11-alpine3.23,python3.11-alpine3.23,python3.11-alpine
          - python:3.10-alpine3.23,python3.10-alpine3.23,python3.10-alpine
          - python:3.9-alpine3.22,python3.9-alpine3.22,python3.9-alpine
          - python:3.8-alpine3.20,python3.8-alpine3.20,python3.8-alpine
          - python:3.14-trixie,python3.14-trixie
          - python:3.13-trixie,python3.13-trixie
          - python:3.12-trixie,python3.12-trixie
          - python:3.11-trixie,python3.11-trixie
          - python:3.10-trixie,python3.10-trixie
          - python:3.9-trixie,python3.9-trixie
          - python:3.14-slim-trixie,python3.14-trixie-slim
          - python:3.13-slim-trixie,python3.13-trixie-slim
          - python:3.12-slim-trixie,python3.12-trixie-slim
          - python:3.11-slim-trixie,python3.11-trixie-slim
          - python:3.10-slim-trixie,python3.10-trixie-slim
          - python:3.9-slim-trixie,python3.9-trixie-slim
          - python:3.14-bookworm,python3.14-bookworm
          - alpine:3.21,alpine3.21,alpine
          - debian:bookworm-slim,bookworm-slim,debian-slim
          - buildpack-deps:bookworm,bookworm,debian
          - python:3.14-rc-alpine,python3.14-rc-alpine
          - python:3.13-alpine,python3.13-alpine
          - python:3.12-alpine,python3.12-alpine
          - python:3.11-alpine,python3.11-alpine
          - python:3.10-alpine,python3.10-alpine
          - python:3.9-alpine,python3.9-alpine
          - python:3.8-alpine,python3.8-alpine
          - python:3.14-rc-bookworm,python3.14-rc-bookworm
          - python:3.13-bookworm,python3.13-bookworm
          - python:3.12-bookworm,python3.12-bookworm
          - python:3.11-bookworm,python3.11-bookworm
          - python:3.10-bookworm,python3.10-bookworm
          - python:3.9-bookworm,python3.9-bookworm
          - python:3.8-bookworm,python3.8-bookworm
          - python:3.14-slim-bookworm,python3.14-bookworm-slim
          - python:3.14-rc-slim-bookworm,python3.14-rc-bookworm-slim
          - python:3.13-slim-bookworm,python3.13-bookworm-slim
          - python:3.12-slim-bookworm,python3.12-bookworm-slim
          - python:3.11-slim-bookworm,python3.11-bookworm-slim
@@ -219,13 +199,13 @@ jobs:
          - python:3.8-slim-bookworm,python3.8-bookworm-slim
    steps:
      # Login to DockerHub (when not pushing, it's to avoid rate-limiting)
      - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
        if: ${{ needs.docker-plan.outputs.login == 'true' }}
        with:
          username: ${{ needs.docker-plan.outputs.push == 'true' && 'astral' || 'astralshbot' }}
          password: ${{ needs.docker-plan.outputs.push == 'true' && secrets.DOCKERHUB_TOKEN_RW || secrets.DOCKERHUB_TOKEN_RO }}

      - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
@@ -244,8 +224,7 @@ jobs:
          # Generate Dockerfile content
          cat <<EOF > Dockerfile
          FROM ${BASE_IMAGE}
          COPY --from=${UV_GHCR_IMAGE}:latest /uv /uvx /usr/local/bin/
          ENV UV_TOOL_BIN_DIR="/usr/local/bin"
          COPY --from=${{ env.UV_GHCR_IMAGE }}:latest /uv /uvx /usr/local/bin/
          ENTRYPOINT []
          CMD ["/usr/local/bin/uv"]
          EOF
@@ -256,8 +235,8 @@ jobs:
          # Loop through all base tags and append its docker metadata pattern to the list
          # Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
          IFS=','; for TAG in ${BASE_TAGS}; do
            TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${VERSION}\n"
            TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${VERSION}\n"
            TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${{ needs.docker-plan.outputs.tag }}\n"
            TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${{ needs.docker-plan.outputs.tag }}\n"
            TAG_PATTERNS="${TAG_PATTERNS}type=raw,value=${TAG}\n"
          done
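
For clarity, the same tag-pattern expansion as the shell loop above, sketched in Python for one mapping entry; the entry and version below are illustrative, not taken from the matrix:

# Illustration only: expand one image-mapping entry into metadata-action tag patterns.
mapping = "alpine:3.21,alpine3.21,alpine"  # base image, then one or more base tags
base_image, *base_tags = mapping.split(",")
version = "0.7.22"  # hypothetical release tag

patterns = [f"# patterns for {base_image}"]
for tag in base_tags:
    # Full version first, so org.opencontainers.image.version gets the most specific tag.
    patterns.append(f"type=pep440,pattern={{{{ version }}}},suffix=-{tag},value={version}")
    patterns.append(f"type=pep440,pattern={{{{ major }}}}.{{{{ minor }}}},suffix=-{tag},value={version}")
    patterns.append(f"type=raw,value={tag}")

print("\n".join(patterns))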
@@ -270,12 +249,10 @@ jobs:
            echo -e "${TAG_PATTERNS}"
            echo EOF
          } >> $GITHUB_ENV
        env:
          VERSION: ${{ needs.docker-plan.outputs.tag }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
        uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804 # v5.7.0
        # ghcr.io prefers index level annotations
        env:
          DOCKER_METADATA_ANNOTATIONS_LEVELS: index
@@ -290,7 +267,7 @@ jobs:

      - name: Build and push
        id: build-and-push
        uses: depot/build-push-action@9785b135c3c76c33db102e45be96a25ab55cd507 # v1.16.2
        uses: depot/build-push-action@2583627a84956d07561420dcc1d0eb1f2af3fac0 # v1.15.0
        with:
          context: .
          project: 7hd4vdzmw5 # astral-sh/uv
@@ -356,11 +333,6 @@ jobs:
  docker-annotate-base:
    name: annotate uv
    runs-on: ubuntu-latest
    permissions:
      contents: read
      id-token: write # for GHCR signing
      packages: write # for GHCR image pushes
      attestations: write # for GHCR attestations
    environment:
      name: ${{ needs.docker-plan.outputs.push == 'true' && 'release' || '' }}
    needs:
@@ -369,12 +341,12 @@ jobs:
      - docker-publish-extra
    if: ${{ needs.docker-plan.outputs.push == 'true' }}
    steps:
      - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
        with:
          username: astral
          password: ${{ secrets.DOCKERHUB_TOKEN_RW }}

      - uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
      - uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
File diff suppressed because it is too large
@@ -1,33 +0,0 @@
# Publish a release to crates.io.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish job
# within `cargo-dist`.
name: "Publish to crates.io"

on:
  workflow_call:
    inputs:
      plan:
        required: true
        type: string

jobs:
  crates-publish-uv:
    name: Upload uv to crates.io
    runs-on: ubuntu-latest
    environment:
      name: release
    permissions:
      contents: read
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
      # TODO(zanieb): Switch to trusted publishing once published
      # - uses: rust-lang/crates-io-auth-action@v1
      #   id: auth
      - name: Publish workspace crates
        # Note `--no-verify` is safe because we do a publish dry-run elsewhere in CI
        run: cargo publish --workspace --no-verify
        env:
          CARGO_REGISTRY_TOKEN: ${{ secrets.CRATES_TOKEN }}
@@ -17,36 +17,24 @@ on:
        required: true
        type: string

permissions: {}

jobs:
  mkdocs:
    runs-on: ubuntu-latest
    env:
      VERSION: ${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}
      MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          ref: ${{ inputs.ref }}
          fetch-depth: 0
          persist-credentials: false

      - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
        with:
          python-version: 3.12

      - uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0

      - name: "Generate reference documentation"
      - name: "Set docs version"
        run: |
          cargo dev generate-options-reference
          cargo dev generate-cli-reference
          cargo dev generate-env-vars-reference

      - name: "Set docs display name"
        run: |
          version="${VERSION}"
          version="${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}"
          # if version is missing, use 'latest'
          if [ -z "$version" ]; then
            echo "Using 'latest' as version"
@ -56,20 +44,21 @@ jobs:
|
|||
# Use version as display name for now
|
||||
display_name="$version"
|
||||
|
||||
echo "DISPLAY_NAME=$display_name" >> $GITHUB_ENV
|
||||
echo "version=$version" >> $GITHUB_ENV
|
||||
echo "display_name=$display_name" >> $GITHUB_ENV
|
||||
|
||||
- name: "Set branch name"
|
||||
run: |
|
||||
version="${VERSION}"
|
||||
display_name="${DISPLAY_NAME}"
|
||||
version="${{ env.version }}"
|
||||
display_name="${{ env.display_name }}"
|
||||
timestamp="$(date +%s)"
|
||||
|
||||
# create branch_display_name from display_name by replacing all
|
||||
# characters disallowed in git branch names with hyphens
|
||||
branch_display_name="$(echo "$display_name" | tr -c '[:alnum:]._' '-' | tr -s '-')"
|
||||
|
||||
echo "BRANCH_NAME=update-docs-$branch_display_name-$timestamp" >> $GITHUB_ENV
|
||||
echo "TIMESTAMP=$timestamp" >> $GITHUB_ENV
|
||||
echo "branch_name=update-docs-$branch_display_name-$timestamp" >> $GITHUB_ENV
|
||||
echo "timestamp=$timestamp" >> $GITHUB_ENV
|
||||
|
||||
- name: "Add SSH key"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
|
|
@ -95,10 +84,8 @@ jobs:
|
|||
|
||||
- name: "Clone docs repo"
|
||||
run: |
|
||||
version="${VERSION}"
|
||||
git clone https://${ASTRAL_DOCS_PAT}@github.com/astral-sh/docs.git astral-docs
|
||||
env:
|
||||
ASTRAL_DOCS_PAT: ${{ secrets.ASTRAL_DOCS_PAT }}
|
||||
version="${{ env.version }}"
|
||||
git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs
|
||||
|
||||
- name: "Copy docs"
|
||||
run: rm -rf astral-docs/site/uv && mkdir -p astral-docs/site && cp -r site/uv astral-docs/site/
|
||||
|
|
@ -106,7 +93,7 @@ jobs:
|
|||
- name: "Commit docs"
|
||||
working-directory: astral-docs
|
||||
run: |
|
||||
branch_name="${BRANCH_NAME}"
|
||||
branch_name="${{ env.branch_name }}"
|
||||
|
||||
git config user.name "astral-docs-bot"
|
||||
git config user.email "176161322+astral-docs-bot@users.noreply.github.com"
|
||||
|
|
@ -120,9 +107,9 @@ jobs:
|
|||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
|
||||
run: |
|
||||
version="${VERSION}"
|
||||
display_name="${DISPLAY_NAME}"
|
||||
branch_name="${BRANCH_NAME}"
|
||||
version="${{ env.version }}"
|
||||
display_name="${{ env.display_name }}"
|
||||
branch_name="${{ env.branch_name }}"
|
||||
|
||||
# set the PR title
|
||||
pull_request_title="Update uv documentation for $display_name"
|
||||
|
|
@ -148,7 +135,7 @@ jobs:
|
|||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
|
||||
run: |
|
||||
branch_name="${BRANCH_NAME}"
|
||||
branch_name="${{ env.branch_name }}"
|
||||
|
||||
# auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
|
||||
# give the PR a few seconds to be created before trying to auto-merge it
|
||||
|
|
|
|||
|
|
@@ -18,15 +18,18 @@ jobs:
    environment:
      name: release
    permissions:
-     id-token: write # For PyPI's trusted publishing
+     # For PyPI's trusted publishing.
+     id-token: write
    steps:
      - name: "Install uv"
-       uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+       uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
      - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
        with:
          pattern: wheels_uv-*
          path: wheels_uv
          merge-multiple: true
+     - name: Remove wheels unsupported by PyPI
+       run: rm wheels_uv/*riscv*
      - name: Publish to PyPI
        run: uv publish -v wheels_uv/*

@@ -36,14 +39,17 @@ jobs:
    environment:
      name: release
    permissions:
-     id-token: write # For PyPI's trusted publishing
+     # For PyPI's trusted publishing.
+     id-token: write
    steps:
      - name: "Install uv"
-       uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+       uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
      - uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
        with:
          pattern: wheels_uv_build-*
          path: wheels_uv_build
          merge-multiple: true
+     - name: Remove wheels unsupported by PyPI
+       run: rm wheels_uv_build/*riscv*
      - name: Publish to PyPI
        run: uv publish -v wheels_uv_build/*
@@ -1,6 +1,7 @@
- # This file was autogenerated by dist: https://axodotdev.github.io/cargo-dist
+ # This file was autogenerated by dist: https://github.com/astral-sh/cargo-dist
#
# Copyright 2022-2024, axodotdev
# Copyright 2025 Astral Software Inc.
# SPDX-License-Identifier: MIT or Apache-2.0
#
# CI that:

@@ -68,7 +69,7 @@ jobs:
        # we specify bash to get pipefail; it guards against the `curl` command
        # failing. otherwise `sh` won't catch that `curl` returned non-0
        shell: bash
-       run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.2/cargo-dist-installer.sh | sh"
+       run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.7-prerelease.1/cargo-dist-installer.sh | sh"
      - name: Cache dist
        uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
        with:

@@ -168,8 +169,8 @@ jobs:
      - custom-build-binaries
      - custom-build-docker
      - build-global-artifacts
-     # Only run if we're "publishing", and only if plan, local and global didn't fail (skipped is fine)
-     if: ${{ always() && needs.plan.result == 'success' && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
+     # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
+     if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
    env:
      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    runs-on: "depot-ubuntu-latest-4"

@@ -222,36 +223,17 @@ jobs:
      "id-token": "write"
      "packages": "write"

- custom-publish-crates:
-   needs:
-     - plan
-     - host
-     - custom-publish-pypi # DIRTY: see #16989
-   if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
-   uses: ./.github/workflows/publish-crates.yml
-   with:
-     plan: ${{ needs.plan.outputs.val }}
-   secrets: inherit
-   # publish jobs get escalated permissions
-   permissions:
-     "contents": "read"

  # Create a GitHub Release while uploading all files to it
  announce:
    needs:
      - plan
      - host
      - custom-publish-pypi
-     - custom-publish-crates
    # use "always() && ..." to allow us to wait for all publish jobs while
    # still allowing individual publish jobs to skip themselves (for prereleases).
    # "host" however must run to completion, no skipping allowed!
-   if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') && (needs.custom-publish-crates.result == 'skipped' || needs.custom-publish-crates.result == 'success') }}
+   if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') }}
    runs-on: "depot-ubuntu-latest-4"
    permissions:
      "attestations": "write"
      "contents": "write"
      "id-token": "write"
    env:
      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    steps:

@@ -270,15 +252,6 @@ jobs:
        run: |
          # Remove the granular manifests
          rm -f artifacts/*-dist-manifest.json
-     - name: Attest
-       uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2
-       with:
-         subject-path: |
-           artifacts/*.json
-           artifacts/*.sh
-           artifacts/*.ps1
-           artifacts/*.zip
-           artifacts/*.tar.gz
      - name: Create GitHub Release
        env:
          PRERELEASE_FLAG: "${{ fromJson(needs.host.outputs.val).announcement_is_prerelease && '--prerelease' || '' }}"
@@ -24,7 +24,7 @@ if ($env:DEPOT_RUNNER -eq "1") {
    # Create VHD and configure drive using diskpart
    $vhdPath = "C:\uv_dev_drive.vhdx"
    @"
-   create vdisk file="$vhdPath" maximum=25600 type=expandable
+   create vdisk file="$vhdPath" maximum=20480 type=expandable
    attach vdisk
    create partition primary
    active

@@ -41,9 +41,9 @@ assign letter=V
    Write-Output "Using existing drive at D:"
    $Drive = "D:"
} else {
-   # The size (25 GB) is chosen empirically to be large enough for our
+   # The size (20 GB) is chosen empirically to be large enough for our
    # workflows; larger drives can take longer to set up.
-   $Volume = New-VHD -Path C:/uv_dev_drive.vhdx -SizeBytes 25GB |
+   $Volume = New-VHD -Path C:/uv_dev_drive.vhdx -SizeBytes 20GB |
        Mount-VHD -Passthru |
        Initialize-Disk -Passthru |
        New-Partition -AssignDriveLetter -UseMaximumSize |
@@ -7,20 +7,17 @@ on:
  schedule:
    - cron: "0 0 * * *"

- permissions: {}
+ permissions:
+   contents: write
+   pull-requests: write

jobs:
  sync:
    if: github.repository == 'astral-sh/uv'
    runs-on: ubuntu-latest
-   permissions:
-     contents: write
-     pull-requests: write
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
-     - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
+     - uses: astral-sh/setup-uv@bd01e18f51369d5a26f1651c3cb451d3417e3bba # v6.3.1
        with:
          version: "latest"
          enable-cache: true

@@ -32,7 +29,7 @@ jobs:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Sync Sysconfig Targets
-       run: ${GITHUB_WORKSPACE}/crates/uv-dev/sync_sysconfig_targets.sh
+       run: ${{ github.workspace }}/crates/uv-dev/sync_sysconfig_targets.sh
        working-directory: ./crates/uv-dev
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

@@ -49,4 +46,3 @@ jobs:
          title: "Sync latest Python releases"
          body: "Automated update for Python releases."
          base: "main"
-         draft: true
@@ -1,24 +0,0 @@
name: zizmor

on:
  push:
    branches: ["main"]
  pull_request:
    branches: ["**"]

permissions: {}

jobs:
  zizmor:
    name: Run zizmor
    runs-on: ubuntu-latest
    permissions:
      security-events: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false

      - name: Run zizmor
        uses: zizmorcore/zizmor-action@5ca5fc7a4779c5263a3ffa0e1f693009994446d1 # v0.1.2
@@ -37,11 +37,6 @@ profile.json.gz
# MkDocs
/site

- # Generated reference docs (use `cargo dev generate-all` to regenerate)
- /docs/reference/cli.md
- /docs/reference/environment.md
- /docs/reference/settings.md

# macOS
**/.DS_Store
@@ -12,7 +12,7 @@ repos:
      - id: validate-pyproject

  - repo: https://github.com/crate-ci/typos
-   rev: v1.37.2
+   rev: v1.34.0
    hooks:
      - id: typos

@@ -42,7 +42,7 @@ repos:
        types_or: [yaml, json5]

  - repo: https://github.com/astral-sh/ruff-pre-commit
-   rev: v0.13.3
+   rev: v0.12.3
    hooks:
      - id: ruff-format
      - id: ruff
@@ -4,5 +4,5 @@ PREVIEW-CHANGELOG.md
docs/reference/cli.md
docs/reference/settings.md
docs/reference/environment.md
- test/ecosystem/home-assistant-core/LICENSE.md
+ ecosystem/home-assistant-core/LICENSE.md
docs/guides/integration/gitlab.md
@@ -1,14 +1,3 @@
- # These are versions of Python required for running uv's own test suite. You can add or remove
- # versions here as needed for tests; this doesn't impact uv's own functionality. They can be
- # installed through any means you like, e.g. `uv python install` if you already have a build of uv,
- # `cargo run python install`, or through some other installer.
- #
- # In uv's CI in GitHub Actions, they are bootstrapped by an existing released version of uv,
- # installed by the astral-sh/setup-uv action. If you need a newer or different version, you will
- # first need to complete a uv release capable of installing that version, get it picked up by
- # astral-sh/setup-uv, and update its hash in .github/workflows.
-
- 3.14.0
3.13.2
3.12.9
3.11.11

@@ -17,8 +6,7 @@
3.8.20
# The following are required for packse scenarios
- 3.9.20
+ 3.9.18
3.9.12
# The following is needed for `==3.13` request tests
3.13.0
- # A pre-release version required for testing
- 3.14.0rc2
CHANGELOG.md (1096 changes; file diff suppressed because it is too large)
@@ -1,125 +0,0 @@
# Contributor Covenant Code of Conduct

- [Our Pledge](#our-pledge)
- [Our Standards](#our-standards)
- [Enforcement Responsibilities](#enforcement-responsibilities)
- [Scope](#scope)
- [Enforcement](#enforcement)
- [Enforcement Guidelines](#enforcement-guidelines)
  - [1. Correction](#1-correction)
  - [2. Warning](#2-warning)
  - [3. Temporary Ban](#3-temporary-ban)
  - [4. Permanent Ban](#4-permanent-ban)
- [Attribution](#attribution)

## Our Pledge

We as members, contributors, and leaders pledge to make participation in our community a
harassment-free experience for everyone, regardless of age, body size, visible or invisible
disability, ethnicity, sex characteristics, gender identity and expression, level of experience,
education, socio-economic status, nationality, personal appearance, race, religion, or sexual
identity and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and
healthy community.

## Our Standards

Examples of behavior that contributes to a positive environment for our community include:

- Demonstrating empathy and kindness toward other people
- Being respectful of differing opinions, viewpoints, and experiences
- Giving and gracefully accepting constructive feedback
- Accepting responsibility and apologizing to those affected by our mistakes, and learning from the
  experience
- Focusing on what is best not just for us as individuals, but for the overall community

Examples of unacceptable behavior include:

- The use of sexualized language or imagery, and sexual attention or advances of any kind
- Trolling, insulting or derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or email address, without their
  explicit permission
- Other conduct which could reasonably be considered inappropriate in a professional setting

## Enforcement Responsibilities

Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior
and will take appropriate and fair corrective action in response to any behavior that they deem
inappropriate, threatening, offensive, or harmful.

Community leaders have the right and responsibility to remove, edit, or reject comments, commits,
code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and
will communicate reasons for moderation decisions when appropriate.

## Scope

This Code of Conduct applies within all community spaces, and also applies when an individual is
officially representing the community in public spaces. Examples of representing our community
include using an official e-mail address, posting via an official social media account, or acting as
an appointed representative at an online or offline event.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community
leaders responsible for enforcement at <hey@astral.sh>. All complaints will be reviewed and
investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the reporter of any
incident.

## Enforcement Guidelines

Community leaders will follow these Community Impact Guidelines in determining the consequences for
any action they deem in violation of this Code of Conduct:

### 1. Correction

**Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or
unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing clarity around the
nature of the violation and an explanation of why the behavior was inappropriate. A public apology
may be requested.

### 2. Warning

**Community Impact**: A violation through a single incident or series of actions.

**Consequence**: A warning with consequences for continued behavior. No interaction with the people
involved, including unsolicited interaction with those enforcing the Code of Conduct, for a
specified period of time. This includes avoiding interactions in community spaces as well as
external channels like social media. Violating these terms may lead to a temporary or permanent ban.

### 3. Temporary Ban

**Community Impact**: A serious violation of community standards, including sustained inappropriate
behavior.

**Consequence**: A temporary ban from any sort of interaction or public communication with the
community for a specified period of time. No public or private interaction with the people involved,
including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this
period. Violating these terms may lead to a permanent ban.

### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community standards, including
sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement
of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within the community.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, available
[here](https://www.contributor-covenant.org/version/2/0/code_of_conduct.html).

Community Impact Guidelines were inspired by
[Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity).

For answers to common questions about this code of conduct, see the
[FAQ](https://www.contributor-covenant.org/faq). Translations are available
[here](https://www.contributor-covenant.org/translations).

[homepage]: https://www.contributor-covenant.org
@@ -1,34 +1,10 @@
# Contributing

- ## Finding ways to help
-
- We label issues that would be good for a first time contributor as
- [`good first issue`](https://github.com/astral-sh/uv/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22).
- These usually do not require significant experience with Rust or the uv code base.
-
- We label issues that we think are a good opportunity for subsequent contributions as
- [`help wanted`](https://github.com/astral-sh/uv/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22).
- These require varying levels of experience with Rust and uv. Often, we want to accomplish these
- tasks but do not have the resources to do so ourselves.
-
- You don't need our permission to start on an issue we have labeled as appropriate for community
- contribution as described above. However, it's a good idea to indicate that you are going to work on
- an issue to avoid concurrent attempts to solve the same problem.
-
- Please check in with us before starting work on an issue that has not been labeled as appropriate
- for community contribution. We're happy to receive contributions for other issues, but it's
- important to make sure we have consensus on the solution to the problem first.
-
- Outside of issues with the labels above, issues labeled as
- [`bug`](https://github.com/astral-sh/uv/issues?q=is%3Aopen+is%3Aissue+label%3A%22bug%22) are the
- best candidates for contribution. In contrast, issues labeled with `needs-decision` or
- `needs-design` are _not_ good candidates for contribution. Please do not open pull requests for
- issues with these labels.
-
- Please do not open pull requests for new features without prior discussion. While we appreciate
- exploration of new features, we will almost always close these pull requests immediately. Adding a
- new feature to uv creates a long-term maintenance burden and requires strong consensus from the uv
- team before it is appropriate to begin work on an implementation.
+ We have issues labeled as
+ [Good First Issue](https://github.com/astral-sh/uv/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22)
+ and
+ [Help Wanted](https://github.com/astral-sh/uv/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22)
+ which are good opportunities for new contributors.

## Setup

@@ -40,12 +16,6 @@ On Ubuntu and other Debian-based distributions, you can install a C compiler with:
sudo apt install build-essential
```

- On Fedora-based distributions, you can install a C compiler with:
-
- ```shell
- sudo dnf install gcc
- ```

## Testing

For running tests, we recommend [nextest](https://nexte.st/).
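A typical invocation, as a minimal sketch (assumes cargo-nextest is installed, e.g. via `cargo install cargo-nextest`; the crate name below is one of the workspace crates listed later in this diff):

```shell
# Run the full test suite through nextest
cargo nextest run
# Scope the run to a single workspace crate
cargo nextest run -p uv-resolver
```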
@@ -86,13 +56,6 @@ cargo test --package <package> --test <test> -- <test_name> -- --exact
cargo insta review
```

- ### Git and Git LFS
-
- A subset of uv tests require both [Git](https://git-scm.com) and [Git LFS](https://git-lfs.com/) to
- execute properly.
-
- These tests can be disabled by turning off either `git` or `git-lfs` uv features.

### Local testing

You can invoke your development version of uv with `cargo run -- <args>`. For example:

@@ -102,15 +65,6 @@ cargo run -- venv
cargo run -- pip install requests
```

- ## Crate structure
-
- Rust does not allow circular dependencies between crates. To visualize the crate hierarchy, install
- [cargo-depgraph](https://github.com/jplatte/cargo-depgraph) and graphviz, then run:
-
- ```shell
- cargo depgraph --dedup-transitive-deps --workspace-only | dot -Tpng > graph.png
- ```

## Running inside a Docker container

Source distributions can run arbitrary code on build and can make unwanted modifications to your

@@ -136,7 +90,7 @@ Please refer to Ruff's
it applies to uv, too.

We provide diverse sets of requirements for testing and benchmarking the resolver in
- `test/requirements` and for the installer in `test/requirements/compiled`.
+ `scripts/requirements` and for the installer in `scripts/requirements/compiled`.

You can use `scripts/benchmark` to benchmark predefined workloads between uv versions and with other
tools, e.g., from the `scripts/benchmark` directory:

@@ -147,7 +101,7 @@ uv run resolver \
  --poetry \
  --benchmark \
  resolve-cold \
- ../test/requirements/trio.in
+ ../scripts/requirements/trio.in
```

### Analyzing concurrency

@@ -157,7 +111,7 @@ visualize parallel requests and find any spots where uv is CPU-bound. Example usage for `uv` and
`uv-dev` respectively:

```shell
- RUST_LOG=uv=info TRACING_DURATIONS_FILE=target/traces/jupyter.ndjson cargo run --features tracing-durations-export --profile profiling -- pip compile test/requirements/jupyter.in
+ RUST_LOG=uv=info TRACING_DURATIONS_FILE=target/traces/jupyter.ndjson cargo run --features tracing-durations-export --profile profiling -- pip compile scripts/requirements/jupyter.in
```

```shell
(File diff suppressed because it is too large)

Cargo.toml (207 changes)
@@ -4,88 +4,84 @@ exclude = [
  "scripts",
  # Needs nightly
  "crates/uv-trampoline",
  # Only used to pull in features, allocators, etc. — we specifically don't want them
  # to be part of a workspace-wide cargo check, cargo clippy, etc.
  "crates/uv-performance-memory-allocator",
]
resolver = "2"

[workspace.package]
edition = "2024"
- rust-version = "1.89"
+ rust-version = "1.86"
homepage = "https://pypi.org/project/uv/"
documentation = "https://pypi.org/project/uv/"
repository = "https://github.com/astral-sh/uv"
authors = ["uv"]
license = "MIT OR Apache-2.0"

[workspace.dependencies]
- uv-auth = { version = "0.0.8", path = "crates/uv-auth" }
- uv-bin-install = { version = "0.0.8", path = "crates/uv-bin-install" }
- uv-build-backend = { version = "0.0.8", path = "crates/uv-build-backend" }
- uv-build-frontend = { version = "0.0.8", path = "crates/uv-build-frontend" }
- uv-cache = { version = "0.0.8", path = "crates/uv-cache" }
- uv-cache-info = { version = "0.0.8", path = "crates/uv-cache-info" }
- uv-cache-key = { version = "0.0.8", path = "crates/uv-cache-key" }
- uv-cli = { version = "0.0.8", path = "crates/uv-cli" }
- uv-client = { version = "0.0.8", path = "crates/uv-client" }
- uv-configuration = { version = "0.0.8", path = "crates/uv-configuration" }
- uv-console = { version = "0.0.8", path = "crates/uv-console" }
- uv-dirs = { version = "0.0.8", path = "crates/uv-dirs" }
- uv-dispatch = { version = "0.0.8", path = "crates/uv-dispatch" }
- uv-distribution = { version = "0.0.8", path = "crates/uv-distribution" }
- uv-distribution-filename = { version = "0.0.8", path = "crates/uv-distribution-filename" }
- uv-distribution-types = { version = "0.0.8", path = "crates/uv-distribution-types" }
- uv-extract = { version = "0.0.8", path = "crates/uv-extract" }
- uv-flags = { version = "0.0.8", path = "crates/uv-flags" }
- uv-fs = { version = "0.0.8", path = "crates/uv-fs", features = ["serde", "tokio"] }
- uv-git = { version = "0.0.8", path = "crates/uv-git" }
- uv-git-types = { version = "0.0.8", path = "crates/uv-git-types" }
- uv-globfilter = { version = "0.0.8", path = "crates/uv-globfilter" }
- uv-install-wheel = { version = "0.0.8", path = "crates/uv-install-wheel", default-features = false }
- uv-installer = { version = "0.0.8", path = "crates/uv-installer" }
- uv-keyring = { version = "0.0.8", path = "crates/uv-keyring" }
- uv-logging = { version = "0.0.8", path = "crates/uv-logging" }
- uv-macros = { version = "0.0.8", path = "crates/uv-macros" }
- uv-metadata = { version = "0.0.8", path = "crates/uv-metadata" }
- uv-normalize = { version = "0.0.8", path = "crates/uv-normalize" }
- uv-once-map = { version = "0.0.8", path = "crates/uv-once-map" }
- uv-options-metadata = { version = "0.0.8", path = "crates/uv-options-metadata" }
- uv-performance-memory-allocator = { version = "0.0.8", path = "crates/uv-performance-memory-allocator" }
- uv-pep440 = { version = "0.0.8", path = "crates/uv-pep440", features = ["tracing", "rkyv", "version-ranges"] }
- uv-pep508 = { version = "0.0.8", path = "crates/uv-pep508", features = ["non-pep508-extensions"] }
- uv-platform = { version = "0.0.8", path = "crates/uv-platform" }
- uv-platform-tags = { version = "0.0.8", path = "crates/uv-platform-tags" }
- uv-preview = { version = "0.0.8", path = "crates/uv-preview" }
- uv-publish = { version = "0.0.8", path = "crates/uv-publish" }
- uv-pypi-types = { version = "0.0.8", path = "crates/uv-pypi-types" }
- uv-python = { version = "0.0.8", path = "crates/uv-python" }
- uv-redacted = { version = "0.0.8", path = "crates/uv-redacted" }
- uv-requirements = { version = "0.0.8", path = "crates/uv-requirements" }
- uv-requirements-txt = { version = "0.0.8", path = "crates/uv-requirements-txt" }
- uv-resolver = { version = "0.0.8", path = "crates/uv-resolver" }
- uv-scripts = { version = "0.0.8", path = "crates/uv-scripts" }
- uv-settings = { version = "0.0.8", path = "crates/uv-settings" }
- uv-shell = { version = "0.0.8", path = "crates/uv-shell" }
- uv-small-str = { version = "0.0.8", path = "crates/uv-small-str" }
- uv-state = { version = "0.0.8", path = "crates/uv-state" }
- uv-static = { version = "0.0.8", path = "crates/uv-static" }
- uv-tool = { version = "0.0.8", path = "crates/uv-tool" }
- uv-torch = { version = "0.0.8", path = "crates/uv-torch" }
- uv-trampoline-builder = { version = "0.0.8", path = "crates/uv-trampoline-builder" }
- uv-types = { version = "0.0.8", path = "crates/uv-types" }
- uv-version = { version = "0.9.18", path = "crates/uv-version" }
- uv-virtualenv = { version = "0.0.8", path = "crates/uv-virtualenv" }
- uv-warnings = { version = "0.0.8", path = "crates/uv-warnings" }
- uv-workspace = { version = "0.0.8", path = "crates/uv-workspace" }
+ uv-auth = { path = "crates/uv-auth" }
+ uv-build-backend = { path = "crates/uv-build-backend" }
+ uv-build-frontend = { path = "crates/uv-build-frontend" }
+ uv-cache = { path = "crates/uv-cache" }
+ uv-cache-info = { path = "crates/uv-cache-info" }
+ uv-cache-key = { path = "crates/uv-cache-key" }
+ uv-cli = { path = "crates/uv-cli" }
+ uv-client = { path = "crates/uv-client" }
+ uv-configuration = { path = "crates/uv-configuration" }
+ uv-console = { path = "crates/uv-console" }
+ uv-dirs = { path = "crates/uv-dirs" }
+ uv-dispatch = { path = "crates/uv-dispatch" }
+ uv-distribution = { path = "crates/uv-distribution" }
+ uv-distribution-filename = { path = "crates/uv-distribution-filename" }
+ uv-distribution-types = { path = "crates/uv-distribution-types" }
+ uv-extract = { path = "crates/uv-extract" }
+ uv-fs = { path = "crates/uv-fs", features = ["serde", "tokio"] }
+ uv-git = { path = "crates/uv-git" }
+ uv-git-types = { path = "crates/uv-git-types" }
+ uv-globfilter = { path = "crates/uv-globfilter" }
+ uv-install-wheel = { path = "crates/uv-install-wheel", default-features = false }
+ uv-installer = { path = "crates/uv-installer" }
+ uv-macros = { path = "crates/uv-macros" }
+ uv-metadata = { path = "crates/uv-metadata" }
+ uv-normalize = { path = "crates/uv-normalize" }
+ uv-once-map = { path = "crates/uv-once-map" }
+ uv-options-metadata = { path = "crates/uv-options-metadata" }
+ uv-pep440 = { path = "crates/uv-pep440", features = ["tracing", "rkyv", "version-ranges"] }
+ uv-pep508 = { path = "crates/uv-pep508", features = ["non-pep508-extensions"] }
+ uv-platform-tags = { path = "crates/uv-platform-tags" }
+ uv-publish = { path = "crates/uv-publish" }
+ uv-pypi-types = { path = "crates/uv-pypi-types" }
+ uv-python = { path = "crates/uv-python" }
+ uv-redacted = { path = "crates/uv-redacted" }
+ uv-requirements = { path = "crates/uv-requirements" }
+ uv-requirements-txt = { path = "crates/uv-requirements-txt" }
+ uv-resolver = { path = "crates/uv-resolver" }
+ uv-scripts = { path = "crates/uv-scripts" }
+ uv-settings = { path = "crates/uv-settings" }
+ uv-shell = { path = "crates/uv-shell" }
+ uv-small-str = { path = "crates/uv-small-str" }
+ uv-state = { path = "crates/uv-state" }
+ uv-static = { path = "crates/uv-static" }
+ uv-tool = { path = "crates/uv-tool" }
+ uv-torch = { path = "crates/uv-torch" }
+ uv-trampoline-builder = { path = "crates/uv-trampoline-builder" }
+ uv-types = { path = "crates/uv-types" }
+ uv-version = { path = "crates/uv-version" }
+ uv-virtualenv = { path = "crates/uv-virtualenv" }
+ uv-warnings = { path = "crates/uv-warnings" }
+ uv-workspace = { path = "crates/uv-workspace" }

ambient-id = { version = "0.0.7", default-features = false, features = ["astral-reqwest-middleware"] }
anstream = { version = "0.6.15" }
anyhow = { version = "1.0.89" }
arcstr = { version = "1.2.0" }
arrayvec = { version = "0.7.6" }
- astral-tokio-tar = { version = "0.5.6" }
+ astral-tokio-tar = { version = "0.5.1" }
async-channel = { version = "2.3.1" }
async-compression = { version = "0.4.12", features = ["bzip2", "gzip", "xz", "zstd"] }
async-trait = { version = "0.1.82" }
- async_http_range_reader = { version = "0.9.1", package = "astral_async_http_range_reader" }
- async_zip = { version = "0.0.17", package = "astral_async_zip", features = ["bzip2", "deflate", "lzma", "tokio", "xz", "zstd"] }
+ async_http_range_reader = { version = "0.9.1" }
+ async_zip = { git = "https://github.com/astral-sh/rs-async-zip", rev = "c909fda63fcafe4af496a07bfda28a5aae97e58d", features = ["bzip2", "deflate", "lzma", "tokio", "xz", "zstd"] }
axoupdater = { version = "0.9.0", default-features = false }
backon = { version = "1.3.0" }
base64 = { version = "0.22.1" }

@@ -97,30 +93,27 @@ cargo-util = { version = "0.2.14" }
clap = { version = "4.5.17", features = ["derive", "env", "string", "wrap_help"] }
clap_complete_command = { version = "0.6.1" }
configparser = { version = "3.1.0" }
- console = { version = "0.16.0", default-features = false, features = ["std"] }
+ console = { version = "0.15.11", default-features = false }
csv = { version = "1.3.0" }
ctrlc = { version = "3.4.5" }
cyclonedx-bom = { version = "0.8.0" }
dashmap = { version = "6.1.0" }
data-encoding = { version = "2.6.0" }
diskus = { version = "0.9.0", default-features = false }
dotenvy = { version = "0.15.7" }
dunce = { version = "1.0.5" }
either = { version = "1.13.0" }
encoding_rs_io = { version = "0.1.7" }
embed-manifest = { version = "1.5.0" }
- etcetera = { version = "0.11.0" }
- fastrand = { version = "2.3.0" }
+ etcetera = { version = "0.10.0" }
flate2 = { version = "1.0.33", default-features = false, features = ["zlib-rs"] }
fs-err = { version = "3.0.0", features = ["tokio"] }
fs2 = { version = "0.4.3" }
futures = { version = "0.3.30" }
glob = { version = "0.3.1" }
globset = { version = "0.4.15" }
globwalk = { version = "0.9.1" }
goblin = { version = "0.10.0", default-features = false, features = ["std", "elf32", "elf64", "endian_fd"] }
h2 = { version = "0.4.7" }
- hashbrown = { version = "0.16.0" }
+ hashbrown = { version = "0.15.1" }
hex = { version = "0.4.3" }
home = { version = "0.5.9" }
html-escape = { version = "0.2.13" }
http = { version = "1.1.0" }
indexmap = { version = "2.5.0" }

@@ -135,7 +128,7 @@ memchr = { version = "2.7.4" }
miette = { version = "7.2.0", features = ["fancy-no-backtrace"] }
nanoid = { version = "0.4.0" }
nix = { version = "0.30.0", features = ["signal"] }
- open = { version = "5.3.2" }
+ once_cell = { version = "1.20.2" }
owo-colors = { version = "4.1.0" }
path-slash = { version = "0.2.1" }
pathdiff = { version = "0.2.1" }

@@ -143,17 +136,16 @@ percent-encoding = { version = "2.3.1" }
petgraph = { version = "0.8.0" }
proc-macro2 = { version = "1.0.86" }
procfs = { version = "0.17.0", default-features = false, features = ["flate2"] }
- pubgrub = { version = "0.3.3", package = "astral-pubgrub" }
+ pubgrub = { git = "https://github.com/astral-sh/pubgrub", rev = "06ec5a5f59ffaeb6cf5079c6cb184467da06c9db" }
quote = { version = "1.0.37" }
rayon = { version = "1.10.0" }
ref-cast = { version = "1.0.24" }
reflink-copy = { version = "0.1.19" }
regex = { version = "1.10.6" }
regex-automata = { version = "0.4.8", default-features = false, features = ["dfa-build", "dfa-search", "perf", "std", "syntax"] }
- reqsign = { version = "0.18.0", features = ["aws", "default-context"], default-features = false }
- reqwest = { version = "0.12.22", default-features = false, features = ["json", "gzip", "deflate", "zstd", "stream", "system-proxy", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart", "http2", "blocking"] }
- reqwest-middleware = { version = "0.4.2", package = "astral-reqwest-middleware", features = ["multipart"] }
- reqwest-retry = { version = "0.7.0", package = "astral-reqwest-retry" }
+ reqwest = { version = "0.12.22", default-features = false, features = ["json", "gzip", "deflate", "zstd", "stream", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart", "http2", "blocking"] }
+ reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8", features = ["multipart"] }
+ reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8" }
rkyv = { version = "0.8.8", features = ["bytecheck"] }
rmp-serde = { version = "1.3.0" }
rust-netrc = { version = "0.1.2" }

@@ -162,15 +154,13 @@ rustix = { version = "1.0.0", default-features = false, features = ["fs", "std"] }
same-file = { version = "1.0.6" }
schemars = { version = "1.0.0", features = ["url2"] }
seahash = { version = "4.1.0" }
- secret-service = { version = "5.0.0", features = ["rt-tokio-crypto-rust"] }
- security-framework = { version = "3" }
self-replace = { version = "1.5.0" }
serde = { version = "1.0.210", features = ["derive", "rc"] }
serde-untagged = { version = "0.1.6" }
serde_json = { version = "1.0.128" }
sha2 = { version = "0.10.8" }
smallvec = { version = "1.13.2" }
- spdx = { version = "0.13.0" }
+ spdx = { version = "0.10.6" }
syn = { version = "2.0.77" }
sys-info = { version = "0.9.1" }
tar = { version = "0.4.43" }

@@ -178,52 +168,34 @@ target-lexicon = { version = "0.13.0" }
tempfile = { version = "3.14.0" }
textwrap = { version = "0.16.1" }
thiserror = { version = "2.0.0" }
- astral-tl = { version = "0.7.11" }
- tokio = { version = "1.40.0", features = ["fs", "io-util", "macros", "process", "rt", "signal", "sync", "time"] }
+ tl = { git = "https://github.com/astral-sh/tl.git", rev = "6e25b2ee2513d75385101a8ff9f591ef51f314ec" }
+ tokio = { version = "1.40.0", features = ["fs", "io-util", "macros", "process", "rt", "signal", "sync"] }
tokio-stream = { version = "0.1.16" }
tokio-util = { version = "0.7.12", features = ["compat", "io"] }
- toml = { version = "0.9.2", features = ["fast_hash"] }
- toml_edit = { version = "0.23.2", features = ["serde"] }
+ toml = { version = "0.8.19" }
+ toml_edit = { version = "0.22.21", features = ["serde"] }
tracing = { version = "0.1.40" }
tracing-durations-export = { version = "0.3.0", features = ["plot"] }
- tracing-subscriber = { version = "0.3.18" } # Default feature set for uv_build, uv activates extra features
+ tracing-subscriber = { version = "0.3.18", features = ["env-filter", "json", "registry"] }
tracing-test = { version = "0.2.5" }
tracing-tree = { version = "0.4.0" }
unicode-width = { version = "0.2.0" }
unscanny = { version = "0.1.0" }
url = { version = "2.5.2", features = ["serde"] }
uuid = { version = "1.16.0" }
- version-ranges = { version = "0.1.3", package = "astral-version-ranges" }
+ version-ranges = { git = "https://github.com/astral-sh/pubgrub", rev = "06ec5a5f59ffaeb6cf5079c6cb184467da06c9db" }
walkdir = { version = "2.5.0" }
which = { version = "8.0.0", features = ["regex"] }
- windows = { version = "0.59.0", features = ["std", "Win32_Globalization", "Win32_System_LibraryLoader", "Win32_System_Console", "Win32_System_Kernel", "Win32_System_Diagnostics_Debug", "Win32_Storage_FileSystem", "Win32_Security", "Win32_System_Registry", "Win32_System_IO", "Win32_System_Ioctl"] }
+ windows = { version = "0.59.0", features = ["Win32_Globalization", "Win32_System_Console", "Win32_System_Kernel", "Win32_System_Diagnostics_Debug", "Win32_Storage_FileSystem"] }
windows-core = { version = "0.59.0" }
windows-registry = { version = "0.5.0" }
windows-result = { version = "0.3.0" }
windows-sys = { version = "0.59.0", features = ["Win32_Foundation", "Win32_Security", "Win32_Storage_FileSystem", "Win32_System_Ioctl", "Win32_System_IO", "Win32_System_Registry"] }
wiremock = { version = "0.6.4" }
wmi = { version = "0.16.0", default-features = false }
xz2 = { version = "0.1.7" }
zeroize = { version = "1.8.1" }
zip = { version = "2.2.3", default-features = false, features = ["deflate", "zstd", "bzip2", "lzma", "xz"] }
zstd = { version = "0.13.3" }

# dev-dependencies
assert_cmd = { version = "2.0.16" }
assert_fs = { version = "1.1.2" }
byteorder = { version = "1.5.0" }
filetime = { version = "0.2.25" }
http-body-util = { version = "0.1.2" }
hyper = { version = "1.4.1", features = ["server", "http1"] }
hyper-util = { version = "0.1.8", features = ["tokio", "server", "http1"] }
ignore = { version = "0.4.23" }
insta = { version = "1.40.0", features = ["json", "filters", "redactions"] }
predicates = { version = "3.1.2" }
rcgen = { version = "0.14.5", features = ["crypto", "pem", "ring"], default-features = false }
rustls = { version = "0.23.29", default-features = false }
similar = { version = "2.6.0" }
temp-env = { version = "0.3.6" }
test-case = { version = "3.3.1" }
test-log = { version = "0.2.16", features = ["trace"], default-features = false }
tokio-rustls = { version = "0.26.2", default-features = false }
whoami = { version = "1.6.0" }

[workspace.metadata.cargo-shear]
ignored = ["flate2", "xz2"]

[workspace.lints.rust]
unsafe_code = "warn"

@@ -259,7 +231,6 @@ rc_buffer = "warn"
rc_mutex = "warn"
rest_pat_in_fully_bound_structs = "warn"
- if_not_else = "allow"
use_self = "warn"

# Diagnostics are not actionable: Enable once https://github.com/rust-lang/rust-clippy/issues/13774 is resolved.
large_stack_arrays = "allow"

@@ -310,18 +281,8 @@ strip = false
debug = "full"
lto = false

- # Profile for fast test execution: Skip debug info generation, and
- # apply basic optimization, which speeds up building and running tests.
- [profile.fast-build]
- inherits = "dev"
- opt-level = 1
- debug = 0
- strip = "debuginfo"

# Profile for faster builds: Skip debug info generation, for faster
# builds of smaller binaries.
[profile.no-debug]
inherits = "dev"
debug = 0
strip = "debuginfo"
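These dev-derived profiles are opt-in from the command line. A minimal sketch (profile names taken from the manifest above; `fast-build` exists only on the main side of this diff, and the nextest flag assumes cargo-nextest is installed):

```shell
# Faster builds of smaller binaries: no debug info
cargo build --profile no-debug
# Faster test runs: basic optimization, no debug info
cargo nextest run --cargo-profile fast-build
```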
|
|
@ -336,3 +297,7 @@ codegen-units = 1
|
|||
# The profile that 'cargo dist' will build with.
|
||||
[profile.dist]
|
||||
inherits = "release"
|
||||
|
||||
[patch.crates-io]
|
||||
reqwest-middleware = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8" }
|
||||
reqwest-retry = { git = "https://github.com/astral-sh/reqwest-middleware", rev = "ad8b9d332d1773fde8b4cd008486de5973e0a3f8" }
|
||||
|
|
|
|||
Dockerfile (10 changes)
@@ -7,6 +7,7 @@ RUN apt update \
    build-essential \
    curl \
+   python3-venv \
    cmake \
  && apt clean \
  && rm -rf /var/lib/apt/lists/*

@@ -23,15 +24,8 @@ RUN case "$TARGETPLATFORM" in \
    *) exit 1 ;; \
  esac

- # Temporarily using nightly-2025-11-02 for bundled musl v1.2.5
- # Ref: https://github.com/rust-lang/rust/pull/142682
- # TODO(samypr100): Remove when toolchain updates to 1.93
- COPY <<EOF rust-toolchain.toml
- [toolchain]
- channel = "nightly-2025-11-02"
- EOF
# Update rustup whenever we bump the rust version
- # COPY rust-toolchain.toml rust-toolchain.toml
+ COPY rust-toolchain.toml rust-toolchain.toml
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --target $(cat rust_target.txt) --profile minimal --default-toolchain none
ENV PATH="$HOME/.cargo/bin:$PATH"
# Install the toolchain then the musl target
README.md (33 changes)
@@ -42,7 +42,7 @@ An extremely fast Python package and project manager, written in Rust.
- 🖥️ Supports macOS, Linux, and Windows.

uv is backed by [Astral](https://astral.sh), the creators of
- [Ruff](https://github.com/astral-sh/ruff) and [ty](https://github.com/astral-sh/ty).
+ [Ruff](https://github.com/astral-sh/ruff).

## Installation

@@ -192,12 +192,14 @@ uv installs Python and allows quickly switching between versions.
Install multiple Python versions:

```console
- $ uv python install 3.12 3.13 3.14
- Installed 3 versions in 972ms
-  + cpython-3.12.12-macos-aarch64-none (python3.12)
-  + cpython-3.13.9-macos-aarch64-none (python3.13)
-  + cpython-3.14.0-macos-aarch64-none (python3.14)
+ $ uv python install 3.10 3.11 3.12
+ Searching for Python versions matching: Python 3.10
+ Searching for Python versions matching: Python 3.11
+ Searching for Python versions matching: Python 3.12
+ Installed 3 versions in 3.42s
+  + cpython-3.10.14-macos-aarch64-none
+  + cpython-3.11.9-macos-aarch64-none
+  + cpython-3.12.4-macos-aarch64-none
```

Download Python versions as needed:

@@ -268,6 +270,14 @@ Installed 43 packages in 208ms

See the [pip interface documentation](https://docs.astral.sh/uv/pip/index/) to get started.

+ ## Platform support
+
+ See uv's [platform support](https://docs.astral.sh/uv/reference/platforms/) document.
+
+ ## Versioning policy
+
+ See uv's [versioning policy](https://docs.astral.sh/uv/reference/versioning/) document.

## Contributing

We are passionate about supporting contributors of all levels of experience and would love to see

@@ -284,15 +294,6 @@ It's pronounced as "you - vee" ([`/juː viː/`](https://en.wikipedia.org/wiki/He

Just "uv", please. See the [style guide](./STYLE.md#styling-uv) for details.

- #### What platforms does uv support?
-
- See uv's [platform support](https://docs.astral.sh/uv/reference/platforms/) document.
-
- #### Is uv ready for production?
-
- Yes, uv is stable and widely used in production. See uv's
- [versioning policy](https://docs.astral.sh/uv/reference/versioning/) document for details.

## Acknowledgements

uv's dependency resolver uses [PubGrub](https://github.com/pubgrub-rs/pubgrub) under the hood. We're
STYLE.md (2 changes)
@@ -16,7 +16,7 @@ documentation_.
1. If a message ends with a single relevant value, precede it with a colon, e.g.,
   `This is the value: value`. If the value is a literal, wrap it in backticks.
1. Markdown files should be wrapped at 100 characters.
- 1. Use a space, not an equals sign, for command-line arguments with a value, e.g.
+ 1. Use a space, not an equals sign, for command line arguments with a value, e.g.
   `--resolution lowest`, not `--resolution=lowest`.

## Styling uv
@@ -1,9 +1,8 @@
[files]
extend-exclude = [
  "**/snapshots/",
- "test/ecosystem/**",
- "test/requirements/**/*.in",
- "crates/uv-build-frontend/src/pipreqs/mapping",
+ "ecosystem/**",
+ "scripts/**/*.in",
]
ignore-hidden = false
@@ -982,7 +982,7 @@ for more details.
  ([#9135](https://github.com/astral-sh/uv/pull/9135))
- Tweak script `--no-project` comment ([#10331](https://github.com/astral-sh/uv/pull/10331))
- Update copyright year ([#10297](https://github.com/astral-sh/uv/pull/10297))
- - Add instructions for installing with Scoop ([#10332](https://github.com/astral-sh/uv/pull/10332))
+ - Add instructinos for installing with Scoop ([#10332](https://github.com/astral-sh/uv/pull/10332))

## 0.5.16
@@ -1,5 +1,3 @@
- # Changelog 0.6.x
-
## 0.6.0

There have been 31 releases and 1135 pull requests since
@@ -1,995 +0,0 @@
# Changelog 0.7.x

## 0.7.0

This release contains various changes that improve correctness and user experience, but could break
some workflows; many changes have been marked as breaking out of an abundance of caution. We expect
most users to be able to upgrade without making changes.

### Breaking changes

- **Update `uv version` to display and update project versions
  ([#12349](https://github.com/astral-sh/uv/pull/12349))**

  Previously, `uv version` displayed uv's version. Now, `uv version` will display or update the
  project's version. This interface was
  [heavily requested](https://github.com/astral-sh/uv/issues/6298) and, after much consideration, we
  decided that transitioning the top-level command was the best option.

  Here's a brief example:

  ```console
  $ uv init example
  Initialized project `example` at `./example`
  $ cd example
  $ uv version
  example 0.1.0
  $ uv version --bump major
  example 0.1.0 => 1.0.0
  $ uv version --short
  1.0.0
  ```

  If used outside of a project, uv will still fall back to showing its own version:

  ```console
  $ uv version
  warning: failed to read project: No `pyproject.toml` found in current directory or any parent directory
           running `uv self version` for compatibility with old `uv version` command.
           this fallback will be removed soon, pass `--preview` to make this an error.

  uv 0.7.0 (4433f41c9 2025-04-29)
  ```

  As described in the warning, `--preview` can be used to error instead:

  ```console
  $ uv version --preview
  error: No `pyproject.toml` found in current directory or any parent directory
  ```

  The previous functionality of `uv version` was moved to `uv self version`.
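  To make the split concrete, a minimal sketch (the version string follows the fallback output above; the hash and date will differ per build):

  ```console
  $ uv self version   # the old `uv version` behavior lives here now
  uv 0.7.0 (4433f41c9 2025-04-29)
  $ uv version        # reports the project's version instead
  example 1.0.0
  ```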
- **Avoid fallback to subsequent indexes on authentication failure
  ([#12805](https://github.com/astral-sh/uv/pull/12805))**

  When using the `first-index` strategy (the default), uv will stop searching indexes for a package
  once it is found on a single index. Previously, uv considered a package as "missing" from an index
  during authentication failures, such as an HTTP 401 or HTTP 403 (normally, missing packages are
  represented by an HTTP 404). This behavior was motivated by unusual responses from some package
  indexes, but reduces the safety of uv's index strategy when authentication fails. Now, uv will
  consider an authentication failure as a stop-point when searching for a package across indexes.
  The `index.ignore-error-codes` option can be used to recover the existing behavior, e.g.:

  ```toml
  [[tool.uv.index]]
  name = "pytorch"
  url = "https://download.pytorch.org/whl/cpu"
  ignore-error-codes = [401, 403]
  ```

  Since PyTorch's indexes always return an HTTP 403 for missing packages, uv special-cases indexes on
  the `pytorch.org` domain to ignore that error code by default.

- **Require the command in `uvx <name>` to be available in the Python environment
  ([#11603](https://github.com/astral-sh/uv/pull/11603))**

  Previously, `uvx` would attempt to execute a command even if it was not provided by a Python
  package. For example, if we presume `foo` is an empty Python package which provides no command,
  `uvx foo` would invoke the `foo` command on the `PATH` (if present). Now, uv will error early if
  the `foo` executable is not provided by the requested Python package. This check is not enforced
  when `--from` is used, so patterns like `uvx --from foo bash -c "..."` are still valid. uv also
  still allows `uvx foo` where the `foo` executable is provided by a dependency of `foo` instead of
  `foo` itself, as this is fairly common for packages which depend on a dedicated package for their
  command-line interface.
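  A sketch with the hypothetical empty package `foo` from above:

  ```console
  $ uvx foo                        # errors early: `foo` installs no `foo` executable
  $ uvx --from foo bash -c "..."   # still allowed: `--from` opts out of the check
  ```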
- **Use index URL instead of package URL for keyring credential lookups
  ([#12651](https://github.com/astral-sh/uv/pull/12651))**

  When determining credentials for querying a package URL, uv previously sent the full URL to the
  `keyring` command. However, some keyring plugins expect to receive the _index URL_ (which is
  usually a parent of the package URL). Now, uv requests credentials for the index URL instead. This
  behavior matches `pip`.
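  A minimal sketch of the difference, assuming a subprocess-style keyring provider that shells out to a `keyring` CLI, with a hypothetical index at `https://example.com/simple/`:

  ```console
  # before: the full package URL was passed to the keyring helper
  keyring get https://example.com/simple/foo/ user
  # now: the parent index URL is passed instead, matching pip
  keyring get https://example.com/simple/ user
  ```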
- **Remove `--version` from subcommands ([#13108](https://github.com/astral-sh/uv/pull/13108))**

  Previously, uv allowed the `--version` flag on arbitrary subcommands, e.g., `uv run --version`.
  However, the `--version` flag is useful for other operations since uv is a package manager.
  Consequently, we've removed the `--version` flag from subcommands — it is only available as
  `uv --version`.

- **Omit Python 3.7 downloads from managed versions
  ([#13022](https://github.com/astral-sh/uv/pull/13022))**

  Python 3.7 is EOL and not formally supported by uv; however, Python 3.7 was previously available
  for download on a subset of platforms.

- **Reject non-PEP 751 TOML files in install, compile, and export commands
  ([#13120](https://github.com/astral-sh/uv/pull/13120),
  [#13119](https://github.com/astral-sh/uv/pull/13119))**

  Previously, uv treated arbitrary `.toml` files passed to commands (e.g.,
  `uv pip install -r foo.toml` or `uv pip compile -o foo.toml`) as `requirements.txt`-formatted
  files. Now, uv will error instead. If using PEP 751 lockfiles, use the standardized format for
  custom names instead, e.g., `pylock.foo.toml`.
|
||||
|
||||
- **Ignore arbitrary Python requests in version files
|
||||
([#12909](https://github.com/astral-sh/uv/pull/12909))**
|
||||
|
||||
uv allows arbitrary strings to be used for Python version requests, in which they are treated as
|
||||
an executable name to search for in the `PATH`. However, using this form of request in
|
||||
`.python-version` files is non-standard and conflicts with `pyenv-virtualenv` which writes
|
||||
environment names to `.python-version` files. In this release, uv will now ignore requests that
|
||||
are arbitrary strings when found in `.python-version` files.
|
||||
|
||||
- **Error on unknown dependency object specifiers
|
||||
([12811](https://github.com/astral-sh/uv/pull/12811))**
|
||||
|
||||
The `[dependency-groups]` entries can include "object specifiers", e.g. `set-phasers-to = ...` in:
|
||||
|
||||
```toml
|
||||
[dependency-groups]
|
||||
foo = ["pyparsing"]
|
||||
bar = [{set-phasers-to = "stun"}]
|
||||
```
|
||||
|
||||
However, the only current spec-compliant object specifier is `include-group`. Previously, uv would
|
||||
ignore unknown object specifiers. Now, uv will error.
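
  For reference, the spec-compliant form, one group including another, looks like this (the group
  names are illustrative):

  ```toml
  [dependency-groups]
  test = ["pytest"]
  all = ["pyparsing", {include-group = "test"}]
  ```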

- **Make `--frozen` and `--no-sources` conflicting options
  ([#12671](https://github.com/astral-sh/uv/pull/12671))**

  Using `--no-sources` always requires a new resolution and `--frozen` will always fail when used
  with it. Now, this conflict is encoded in the CLI options for clarity.

- **Treat empty `UV_PYTHON_INSTALL_DIR` and `UV_TOOL_DIR` as unset
  ([#12907](https://github.com/astral-sh/uv/pull/12907),
  [#12905](https://github.com/astral-sh/uv/pull/12905))**

  Previously, these variables were treated as set to the current working directory when set to an
  empty string. Now, uv will ignore these variables when empty. This matches uv's behavior for other
  environment variables which configure directories.

### Enhancements

- Disallow mixing requirements across PyTorch indexes
  ([#13179](https://github.com/astral-sh/uv/pull/13179))
- Add optional managed Python archive download cache
  ([#12175](https://github.com/astral-sh/uv/pull/12175))
- Add `poetry-core` as a `uv init` build backend option
  ([#12781](https://github.com/astral-sh/uv/pull/12781))
- Show tag hints when failing to find a compatible wheel in `pylock.toml`
  ([#13136](https://github.com/astral-sh/uv/pull/13136))
- Report Python versions in `pyvenv.cfg` version mismatch
  ([#13027](https://github.com/astral-sh/uv/pull/13027))

### Bug fixes

- Avoid erroring on omitted wheel-only packages in `pylock.toml`
  ([#13132](https://github.com/astral-sh/uv/pull/13132))
- Fix display name for `uvx --version` ([#13109](https://github.com/astral-sh/uv/pull/13109))
- Restore handling of authentication when encountering redirects
  ([#13050](https://github.com/astral-sh/uv/pull/13050))
- Respect build options (`--no-binary` et al.) in `pylock.toml`
  ([#13134](https://github.com/astral-sh/uv/pull/13134))
- Use `upload-time` rather than `upload_time` in `uv.lock`
  ([#13176](https://github.com/astral-sh/uv/pull/13176))

### Documentation

- Change the `fish` completions command to overwrite (`>`) rather than append (`>>`)
  ([#13130](https://github.com/astral-sh/uv/pull/13130))
- Add `pylock.toml` mentions where relevant ([#13115](https://github.com/astral-sh/uv/pull/13115))
- Add ROCm example to the PyTorch guide ([#13200](https://github.com/astral-sh/uv/pull/13200))
- Upgrade PyTorch guide to CUDA 12.8 and PyTorch 2.7
  ([#13199](https://github.com/astral-sh/uv/pull/13199))

## 0.7.1

### Enhancements

- Add support for BLAKE2b-256 ([#13204](https://github.com/astral-sh/uv/pull/13204))

### Bug fixes

- Revert "Restore handling of authentication when encountering redirects"
  ([#13215](https://github.com/astral-sh/uv/pull/13215))

## 0.7.2

### Enhancements

- Improve trace log for retryable errors ([#13228](https://github.com/astral-sh/uv/pull/13228))
- Use "error" instead of "warning" for self-update message
  ([#13229](https://github.com/astral-sh/uv/pull/13229))
- Error when `uv version` is used with project-specific flags but no project is found
  ([#13203](https://github.com/astral-sh/uv/pull/13203))

### Bug fixes

- Fix incorrect virtual environment invalidation for pre-release Python versions
  ([#13234](https://github.com/astral-sh/uv/pull/13234))
- Fix patching of `clang` in managed Python sysconfig
  ([#13237](https://github.com/astral-sh/uv/pull/13237))
- Respect `--project` in `uv version` ([#13230](https://github.com/astral-sh/uv/pull/13230))

## 0.7.3

### Enhancements

- Add `--dry-run` support to `uv self update` ([#9829](https://github.com/astral-sh/uv/pull/9829))
- Add `--show-with` to `uv tool list` to list packages included by `--with`
  ([#13264](https://github.com/astral-sh/uv/pull/13264))
- De-duplicate fetched index URLs ([#13205](https://github.com/astral-sh/uv/pull/13205))
- Support more zip compression formats: bzip2, lzma, xz, zstd
  ([#13285](https://github.com/astral-sh/uv/pull/13285))
- Add support for downloading GraalPy ([#13172](https://github.com/astral-sh/uv/pull/13172))
- Improve error message when a virtual environment Python symlink is broken
  ([#12168](https://github.com/astral-sh/uv/pull/12168))
- Use `fs_err` for paths in symlinking errors ([#13303](https://github.com/astral-sh/uv/pull/13303))
- Minify and embed managed Python JSON at compile time
  ([#12967](https://github.com/astral-sh/uv/pull/12967))

### Preview features

- Build backend: Make preview default and add configuration docs
  ([#12804](https://github.com/astral-sh/uv/pull/12804))
- Build backend: Allow escaping in globs ([#13313](https://github.com/astral-sh/uv/pull/13313))
- Build backend: Make builds reproducible across operating systems
  ([#13171](https://github.com/astral-sh/uv/pull/13171))

### Configuration

- Add `python-downloads-json-url` option for `uv.toml` to configure custom Python installations via
  a JSON URL ([#12974](https://github.com/astral-sh/uv/pull/12974))
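
  A minimal sketch of the option in `uv.toml` (the URL is an illustrative placeholder for a
  self-hosted mirror's metadata file):

  ```toml
  python-downloads-json-url = "https://example.com/python-downloads.json"
  ```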

### Bug fixes

- Check nested IO errors for retries ([#13260](https://github.com/astral-sh/uv/pull/13260))
- Accept `musllinux_1_0` as a valid platform tag
  ([#13289](https://github.com/astral-sh/uv/pull/13289))
- Fix discovery of pre-release managed Python versions in range requests
  ([#13330](https://github.com/astral-sh/uv/pull/13330))
- Respect locked script preferences in `uv run --with`
  ([#13283](https://github.com/astral-sh/uv/pull/13283))
- Retry streaming downloads on broken pipe errors
  ([#13281](https://github.com/astral-sh/uv/pull/13281))
- Treat already-installed base environment packages as preferences in `uv run --with`
  ([#13284](https://github.com/astral-sh/uv/pull/13284))
- Avoid enumerating sources in errors for path Python requests
  ([#13335](https://github.com/astral-sh/uv/pull/13335))
- Avoid re-creating virtual environment with `--no-sync`
  ([#13287](https://github.com/astral-sh/uv/pull/13287))

### Documentation

- Remove outdated description of index strategy
  ([#13326](https://github.com/astral-sh/uv/pull/13326))
- Update "Viewing the version" docs ([#13241](https://github.com/astral-sh/uv/pull/13241))

## 0.7.4

### Enhancements

- Add more context to external errors ([#13351](https://github.com/astral-sh/uv/pull/13351))
- Align indentation of long arguments ([#13394](https://github.com/astral-sh/uv/pull/13394))
- Preserve order of dependencies which are sorted naively
  ([#13334](https://github.com/astral-sh/uv/pull/13334))
- Align progress bars by largest name length ([#13266](https://github.com/astral-sh/uv/pull/13266))
- Reinstall local packages in `uv add` ([#13462](https://github.com/astral-sh/uv/pull/13462))
- Rename `--raw-sources` to `--raw` ([#13348](https://github.com/astral-sh/uv/pull/13348))
- Show 'Downgraded' when `self update` is used to install an older version
  ([#13340](https://github.com/astral-sh/uv/pull/13340))
- Suggest `uv self update` if the required uv version is newer
  ([#13305](https://github.com/astral-sh/uv/pull/13305))
- Add 3.14 beta images to uv Docker images ([#13390](https://github.com/astral-sh/uv/pull/13390))
- Add comma after "i.e." in Conda environment error
  ([#13423](https://github.com/astral-sh/uv/pull/13423))
- Be more precise in unpinned packages warning
  ([#13426](https://github.com/astral-sh/uv/pull/13426))
- Fix detection of sorted dependencies when `include-group` is used
  ([#13354](https://github.com/astral-sh/uv/pull/13354))
- Fix display of HTTP responses in trace logs for retried errors
  ([#13339](https://github.com/astral-sh/uv/pull/13339))
- Log skip reasons during Python installation key interpreter match checks
  ([#13472](https://github.com/astral-sh/uv/pull/13472))
- Redact credentials when displaying URLs ([#13333](https://github.com/astral-sh/uv/pull/13333))

### Bug fixes

- Avoid erroring on `pylock.toml` dependency entries
  ([#13384](https://github.com/astral-sh/uv/pull/13384))
- Avoid panics for cannot-be-a-base URLs ([#13406](https://github.com/astral-sh/uv/pull/13406))
- Ensure cached realm credentials are applied if no password is found for the index URL
  ([#13463](https://github.com/astral-sh/uv/pull/13463))
- Fix `.tgz` parsing to respect the true extension
  ([#13382](https://github.com/astral-sh/uv/pull/13382))
- Fix double self-dependency ([#13366](https://github.com/astral-sh/uv/pull/13366))
- Reject `pylock.toml` in `uv add -r` ([#13421](https://github.com/astral-sh/uv/pull/13421))
- Retain dot-separated wheel tags during cache prune
  ([#13379](https://github.com/astral-sh/uv/pull/13379))
- Retain trailing comments after PEP 723 metadata block
  ([#13460](https://github.com/astral-sh/uv/pull/13460))

### Documentation

- Use "export" instead of "install" in `uv export` arguments
  ([#13430](https://github.com/astral-sh/uv/pull/13430))
- Remove extra newline ([#13461](https://github.com/astral-sh/uv/pull/13461))

### Preview features

- Build backend: Normalize glob paths ([#13465](https://github.com/astral-sh/uv/pull/13465))

## 0.7.5

### Bug fixes

- Support case-sensitive module discovery in the build backend
  ([#13468](https://github.com/astral-sh/uv/pull/13468))
- Bump Simple cache bucket to v16 ([#13498](https://github.com/astral-sh/uv/pull/13498))
- Don't error when the script is too short for the buffer
  ([#13488](https://github.com/astral-sh/uv/pull/13488))
- Add missing word in "script not supported" error
  ([#13483](https://github.com/astral-sh/uv/pull/13483))

## 0.7.6

### Python

- Add Python 3.14 on musl
- Add free-threaded Python on musl
- Add Python 3.14.0a7
- Statically link `libpython` into the interpreter on Linux for a significant performance boost

See the
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250517)
for more details.

### Enhancements

- Improve compatibility of `VIRTUAL_ENV_PROMPT` value
  ([#13501](https://github.com/astral-sh/uv/pull/13501))
- Bump MSRV to 1.85 and Edition 2024 ([#13516](https://github.com/astral-sh/uv/pull/13516))

### Bug fixes

- Respect default extras in `uv remove` ([#13380](https://github.com/astral-sh/uv/pull/13380))

### Documentation

- Fix PowerShell code blocks ([#13511](https://github.com/astral-sh/uv/pull/13511))

## 0.7.7

### Python

- Work around third-party packages that (incorrectly) assume the interpreter is dynamically linking
  `libpython`
- Allow the experimental JIT to be enabled at runtime on Python 3.13 and 3.14 on macOS on aarch64
  (aka Apple Silicon)

See the
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250521)
for more details.

### Bug fixes

- Make `uv version` lock and sync ([#13317](https://github.com/astral-sh/uv/pull/13317))
- Fix references to `ldd` in diagnostics to correctly refer to `ld.so`
  ([#13552](https://github.com/astral-sh/uv/pull/13552))

### Documentation

- Clarify adding SSH Git dependencies ([#13534](https://github.com/astral-sh/uv/pull/13534))

## 0.7.8

### Python

We are reverting most of our Python changes from `uv 0.7.6` and `uv 0.7.7` due to a miscompilation
that makes the Python interpreter behave incorrectly, resulting in spurious type errors involving
`str`. This issue seems to be isolated to x86_64 Linux, and affected at least Python 3.12, 3.13, and
3.14.

The following changes that were introduced in those versions of uv are temporarily being reverted
while we test and deploy a proper fix for the miscompilation:

- Add Python 3.14 on musl
- Add free-threaded Python on musl
- Add Python 3.14.0a7
- Statically link `libpython` into the interpreter on Linux for a significant performance boost

See [the issue for details](https://github.com/astral-sh/uv/issues/13610).

### Documentation

- Remove misleading line in pin documentation ([#13611](https://github.com/astral-sh/uv/pull/13611))

## 0.7.9

### Python

The changes reverted in [0.7.8](#078) have been restored.

See the
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250529)
for more details.

### Enhancements

- Improve obfuscation of credentials in URLs ([#13560](https://github.com/astral-sh/uv/pull/13560))
- Allow running non-default Python implementations via `uvx`
  ([#13583](https://github.com/astral-sh/uv/pull/13583))
- Add `uvw` as alias for `uv` without console window on Windows
  ([#11786](https://github.com/astral-sh/uv/pull/11786))
- Allow discovery of x86-64 managed Python builds on macOS
  ([#13722](https://github.com/astral-sh/uv/pull/13722))
- Differentiate between implicit vs explicit architecture requests
  ([#13723](https://github.com/astral-sh/uv/pull/13723))
- Implement ordering for Python architectures to prefer native installations
  ([#13709](https://github.com/astral-sh/uv/pull/13709))
- Only show the first match per platform (and architecture) by default in `uv python list`
  ([#13721](https://github.com/astral-sh/uv/pull/13721))
- Write the path of the parent environment to an `extends-environment` key in the `pyvenv.cfg` file
  of an ephemeral environment ([#13598](https://github.com/astral-sh/uv/pull/13598))
- Improve the error message when libc cannot be found, e.g., when using distroless containers
  ([#13549](https://github.com/astral-sh/uv/pull/13549))

### Performance

- Avoid rendering info log level ([#13642](https://github.com/astral-sh/uv/pull/13642))
- Improve performance of the `uv-python` crate's manylinux submodule
  ([#11131](https://github.com/astral-sh/uv/pull/11131))
- Optimize `Version` display ([#13643](https://github.com/astral-sh/uv/pull/13643))
- Reduce number of reference-checks for `uv cache clean`
  ([#13669](https://github.com/astral-sh/uv/pull/13669))

### Bug fixes

- Avoid reinstalling dependency group members with `--all-packages`
  ([#13678](https://github.com/astral-sh/uv/pull/13678))
- Don't fail direct URL hash checking with dependency metadata
  ([#13736](https://github.com/astral-sh/uv/pull/13736))
- Exit early on `self update` if global `--offline` is set
  ([#13663](https://github.com/astral-sh/uv/pull/13663))
- Fix cases where `uv.lock` is incorrectly marked as out of date
  ([#13635](https://github.com/astral-sh/uv/pull/13635))
- Include pre-release versions in `uv python install --reinstall`
  ([#13645](https://github.com/astral-sh/uv/pull/13645))
- Set `LC_ALL=C` for git when checking the git worktree
  ([#13637](https://github.com/astral-sh/uv/pull/13637))
- Avoid rejecting Windows paths for remote Python download JSON targets
  ([#13625](https://github.com/astral-sh/uv/pull/13625))

### Preview features

- Add `uv add --bounds` to configure version constraints
  ([#12946](https://github.com/astral-sh/uv/pull/12946))

### Documentation

- Add documentation about Python versions to the Tools concept page
  ([#7673](https://github.com/astral-sh/uv/pull/7673))
- Add example of enabling Dependabot ([#13692](https://github.com/astral-sh/uv/pull/13692))
- Fix `exclude-newer` date format for persistent configuration files
  ([#13706](https://github.com/astral-sh/uv/pull/13706))
- Quote version variables in GitLab documentation
  ([#13679](https://github.com/astral-sh/uv/pull/13679))
- Update Dependabot support status ([#13690](https://github.com/astral-sh/uv/pull/13690))
- Clarify that a new repo entry should be added to the `repos` list in `.pre-commit-config.yaml`
  ([#10243](https://github.com/astral-sh/uv/pull/10243))
- Add marimo integration guide ([#13691](https://github.com/astral-sh/uv/pull/13691))
- Add pronunciation to README ([#5336](https://github.com/astral-sh/uv/pull/5336))

## 0.7.10

### Enhancements

- Add `--show-extras` to `uv tool list` ([#13783](https://github.com/astral-sh/uv/pull/13783))
- Add dynamically generated sysconfig replacement mappings
  ([#13441](https://github.com/astral-sh/uv/pull/13441))
- Add data locations to install wheel logs ([#13797](https://github.com/astral-sh/uv/pull/13797))

### Bug fixes

- Avoid redaction of placeholder `git` username when using SSH authentication
  ([#13799](https://github.com/astral-sh/uv/pull/13799))
- Propagate credentials to files on devpi indexes ending in `/+simple`
  ([#13743](https://github.com/astral-sh/uv/pull/13743))
- Restore retention of credentials for direct URLs in `uv export`
  ([#13809](https://github.com/astral-sh/uv/pull/13809))

## 0.7.11

### Python

- Add Python 3.14.0b1
- Add Python 3.13.4
- Add Python 3.12.11
- Add Python 3.11.13
- Add Python 3.10.18
- Add Python 3.9.23

### Enhancements

- Add Pyodide support ([#12731](https://github.com/astral-sh/uv/pull/12731))
- Better error message for version specifier with missing operator
  ([#13803](https://github.com/astral-sh/uv/pull/13803))

### Bug fixes

- Downgrade `reqwest` and `hyper-util` to resolve connection reset errors over IPv6
  ([#13835](https://github.com/astral-sh/uv/pull/13835))
- Prefer the `uv` binary's version when checking if it's up to date
  ([#13840](https://github.com/astral-sh/uv/pull/13840))

### Documentation

- Use "terminal driver" instead of "shell" in `SIGINT` docs
  ([#13787](https://github.com/astral-sh/uv/pull/13787))

## 0.7.12

### Enhancements

- Add `uv python pin --rm` to remove `.python-version` pins
  ([#13860](https://github.com/astral-sh/uv/pull/13860))
- Don't hint at versions removed by `exclude-newer`
  ([#13884](https://github.com/astral-sh/uv/pull/13884))
- Add hint to use `tool.uv.environments` on resolution error
  ([#13455](https://github.com/astral-sh/uv/pull/13455))
- Add hint to use `tool.uv.required-environments` on resolution error
  ([#13575](https://github.com/astral-sh/uv/pull/13575)); see the sketch after this list
- Improve `python pin` error messages ([#13862](https://github.com/astral-sh/uv/pull/13862))
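
A hedged sketch of the settings those hints point to (the marker expressions are illustrative):

```toml
[tool.uv]
# Resolve only for the environments this project actually supports.
environments = ["sys_platform == 'linux'", "sys_platform == 'darwin'"]
# Additionally require that compatible wheels exist for this environment.
required-environments = ["sys_platform == 'linux' and platform_machine == 'x86_64'"]
```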

### Bug fixes

- Lock environments during `uv sync`, `uv add`, and `uv remove` to prevent race conditions
  ([#13869](https://github.com/astral-sh/uv/pull/13869))
- Add `--no-editable` to `uv export` for `pylock.toml`
  ([#13852](https://github.com/astral-sh/uv/pull/13852))

### Documentation

- List `.gitignore` in project init files ([#13855](https://github.com/astral-sh/uv/pull/13855))
- Move the pip interface documentation into the concepts section
  ([#13841](https://github.com/astral-sh/uv/pull/13841))
- Remove the configuration section in favor of concepts / reference
  ([#13842](https://github.com/astral-sh/uv/pull/13842))
- Update Git and GitHub Actions docs to mention `gh auth login`
  ([#13850](https://github.com/astral-sh/uv/pull/13850))

### Preview features

- Fix directory glob traversal fallback preventing exclusion of all files
  ([#13882](https://github.com/astral-sh/uv/pull/13882))

## 0.7.13

### Python

- Add Python 3.14.0b2
- Add Python 3.13.5
- Fix stability of `uuid.getnode` on 3.13

See the
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250612)
for more details.

### Enhancements

- Download versions in `uv python pin` if not found
  ([#13946](https://github.com/astral-sh/uv/pull/13946))
- Use TTY detection to determine if SIGINT forwarding is enabled
  ([#13925](https://github.com/astral-sh/uv/pull/13925))
- Avoid fetching an exact, cached Git commit, even if it isn't locked
  ([#13748](https://github.com/astral-sh/uv/pull/13748))
- Add `zstd` and `deflate` to `Accept-Encoding`
  ([#13982](https://github.com/astral-sh/uv/pull/13982))
- Build binaries for riscv64 ([#12688](https://github.com/astral-sh/uv/pull/12688))

### Bug fixes

- Check if a relative URL is a valid directory before treating it as an index
  ([#13917](https://github.com/astral-sh/uv/pull/13917))
- Ignore Python discovery errors during `uv python pin`
  ([#13944](https://github.com/astral-sh/uv/pull/13944))
- Do not allow `uv add --group ... --script` ([#13997](https://github.com/astral-sh/uv/pull/13997))

### Preview features

- Build backend: Support namespace packages ([#13833](https://github.com/astral-sh/uv/pull/13833))

### Documentation

- Add 3.14 to the supported platform reference
  ([#13990](https://github.com/astral-sh/uv/pull/13990))
- Add an `llms.txt` to uv ([#13929](https://github.com/astral-sh/uv/pull/13929))
- Add supported macOS version to the platform reference
  ([#13993](https://github.com/astral-sh/uv/pull/13993))
- Update platform support reference to include Python implementation list
  ([#13991](https://github.com/astral-sh/uv/pull/13991))
- Update pytorch.md ([#13899](https://github.com/astral-sh/uv/pull/13899))
- Update the CLI help and reference to include references to the Python bin directory
  ([#13978](https://github.com/astral-sh/uv/pull/13978))

## 0.7.14

### Enhancements

- Add XPU to `--torch-backend` ([#14172](https://github.com/astral-sh/uv/pull/14172))
- Add ROCm backends to `--torch-backend` ([#14120](https://github.com/astral-sh/uv/pull/14120))
- Remove preview label from `--torch-backend` ([#14119](https://github.com/astral-sh/uv/pull/14119))
- Add `[tool.uv.dependency-groups].mygroup.requires-python`
  ([#13735](https://github.com/astral-sh/uv/pull/13735)); see the sketch after this list
- Add auto-detection for AMD GPUs ([#14176](https://github.com/astral-sh/uv/pull/14176))
- Show retries for HTTP status code errors ([#13897](https://github.com/astral-sh/uv/pull/13897))
- Support transparent Python patch version upgrades
  ([#13954](https://github.com/astral-sh/uv/pull/13954))
- Warn on empty index directory ([#13940](https://github.com/astral-sh/uv/pull/13940))
- Publish to DockerHub ([#14088](https://github.com/astral-sh/uv/pull/14088))
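
A hedged sketch of the new per-group Python requirement (the group name and bound are illustrative):

```toml
[dependency-groups]
ml = ["torch"]

[tool.uv.dependency-groups]
ml = {requires-python = ">=3.12"}
```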

### Performance

- Make cold resolves about 10% faster ([#14035](https://github.com/astral-sh/uv/pull/14035))

### Bug fixes

- Don't use walrus operator in interpreter query script
  ([#14108](https://github.com/astral-sh/uv/pull/14108))
- Fix handling of changes to `requires-python`
  ([#14076](https://github.com/astral-sh/uv/pull/14076))
- Fix implied `platform_machine` marker for `win_amd64` platform tag
  ([#14041](https://github.com/astral-sh/uv/pull/14041))
- Only update existing symlink directories on preview uninstall
  ([#14179](https://github.com/astral-sh/uv/pull/14179))
- Serialize Python requests for tools as canonicalized strings
  ([#14109](https://github.com/astral-sh/uv/pull/14109))
- Support netrc and same-origin credential propagation on index redirects
  ([#14126](https://github.com/astral-sh/uv/pull/14126))
- Support reading `dependency-groups` from `pyproject.toml` files with no `[project]`
  ([#13742](https://github.com/astral-sh/uv/pull/13742))
- Handle an existing shebang in `uv init --script`
  ([#14141](https://github.com/astral-sh/uv/pull/14141))
- Prevent concurrent updates of the environment in `uv run`
  ([#14153](https://github.com/astral-sh/uv/pull/14153))
- Filter managed Python distributions by platform before querying when included in the request
  ([#13936](https://github.com/astral-sh/uv/pull/13936))

### Documentation

- Replace `cuda124` with `cuda128` ([#14168](https://github.com/astral-sh/uv/pull/14168))
- Document the way member sources shadow workspace sources
  ([#14136](https://github.com/astral-sh/uv/pull/14136))
- Sync documented PyTorch integration index for CUDA and ROCm versions from the PyTorch website
  ([#14100](https://github.com/astral-sh/uv/pull/14100))

## 0.7.15

### Enhancements

- Consistently use `Ordering::Relaxed` for standalone atomic use cases
  ([#14190](https://github.com/astral-sh/uv/pull/14190))
- Warn on ambiguous relative paths for `--index`
  ([#14152](https://github.com/astral-sh/uv/pull/14152))
- Skip GitHub fast path when rate-limited ([#13033](https://github.com/astral-sh/uv/pull/13033))
- Preserve newlines in `schema.json` descriptions
  ([#13693](https://github.com/astral-sh/uv/pull/13693))

### Bug fixes

- Add check for using minor version link when creating a venv on Windows
  ([#14252](https://github.com/astral-sh/uv/pull/14252))
- Strip query parameters when parsing source URL
  ([#14224](https://github.com/astral-sh/uv/pull/14224))

### Documentation

- Add a link to the PyPI FAQ to clarify what a per-project token is
  ([#14242](https://github.com/astral-sh/uv/pull/14242))

### Preview features

- Allow symlinks in the build backend ([#14212](https://github.com/astral-sh/uv/pull/14212))

## 0.7.16

### Python

- Add Python 3.14.0b3

See the
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250626)
for more details.

### Enhancements

- Include path or URL when failing to convert in lockfile
  ([#14292](https://github.com/astral-sh/uv/pull/14292))
- Warn when `~=` is used as a Python version specifier without a patch version
  ([#14008](https://github.com/astral-sh/uv/pull/14008))

### Preview features

- Ensure preview default Python installs are upgradeable
  ([#14261](https://github.com/astral-sh/uv/pull/14261))

### Performance

- Share workspace cache between lock and sync operations
  ([#14321](https://github.com/astral-sh/uv/pull/14321))

### Bug fixes

- Allow local indexes to reference remote files
  ([#14294](https://github.com/astral-sh/uv/pull/14294))
- Avoid rendering desugared prefix matches in error messages
  ([#14195](https://github.com/astral-sh/uv/pull/14195))
- Avoid using path URL for workspace Git dependencies in `requirements.txt`
  ([#14288](https://github.com/astral-sh/uv/pull/14288))
- Normalize index URLs to remove trailing slash
  ([#14245](https://github.com/astral-sh/uv/pull/14245))
- Respect URL-encoded credentials in redirect location
  ([#14315](https://github.com/astral-sh/uv/pull/14315))
- Lock the source tree when running setuptools to protect concurrent builds
  ([#14174](https://github.com/astral-sh/uv/pull/14174))

### Documentation

- Note that GCP Artifact Registry download URLs must have a `/simple` component
  ([#14251](https://github.com/astral-sh/uv/pull/14251))

## 0.7.17

### Bug fixes

- Apply build constraints when resolving `--with` dependencies
  ([#14340](https://github.com/astral-sh/uv/pull/14340))
- Drop trailing slashes when converting index URL from URL
  ([#14346](https://github.com/astral-sh/uv/pull/14346))
- Ignore `UV_PYTHON_CACHE_DIR` when empty ([#14336](https://github.com/astral-sh/uv/pull/14336))
- Fix error message ordering for `pyvenv.cfg` version conflict
  ([#14329](https://github.com/astral-sh/uv/pull/14329))

## 0.7.18

### Python

- Add arm64 Windows Python 3.11, 3.12, 3.13, and 3.14. These are not downloaded by default, since
  x86-64 Python has broader ecosystem support on Windows. However, they can be requested with
  `cpython-<version>-windows-aarch64`.

See the
[python-build-standalone release](https://github.com/astral-sh/python-build-standalone/releases/tag/20250630)
for more details.

### Enhancements

- Keep track of retries in `ManagedPythonDownload::fetch_with_retry`
  ([#14378](https://github.com/astral-sh/uv/pull/14378))
- Reuse build (virtual) environments across resolution and installation
  ([#14338](https://github.com/astral-sh/uv/pull/14338))
- Improve trace message for cached Python interpreter query
  ([#14328](https://github.com/astral-sh/uv/pull/14328))
- Use parsed URLs for conflicting URL error message
  ([#14380](https://github.com/astral-sh/uv/pull/14380))

### Preview features

- Ignore invalid build backend settings when not building
  ([#14372](https://github.com/astral-sh/uv/pull/14372))

### Bug fixes

- Fix equals-star and tilde-equals with `python_version` and `python_full_version`
  ([#14271](https://github.com/astral-sh/uv/pull/14271))
- Include the canonical path in the interpreter query cache key
  ([#14331](https://github.com/astral-sh/uv/pull/14331))
- Only drop build directories on program exit ([#14304](https://github.com/astral-sh/uv/pull/14304))
- Error instead of panic on conflict between global and subcommand flags
  ([#14368](https://github.com/astral-sh/uv/pull/14368))
- Consistently normalize trailing slashes on URLs with no path segments
  ([#14349](https://github.com/astral-sh/uv/pull/14349))

### Documentation

- Add instructions for publishing to JFrog's Artifactory
  ([#14253](https://github.com/astral-sh/uv/pull/14253))
- Edits to the build backend documentation ([#14376](https://github.com/astral-sh/uv/pull/14376))

## 0.7.19

The **[uv build backend](https://docs.astral.sh/uv/concepts/build-backend/) is now stable**, and
considered ready for production use.

The uv build backend is a great choice for pure Python projects. It has reasonable defaults, with
the goal of requiring zero configuration for most users, but provides flexible configuration to
accommodate most Python project structures. It integrates tightly with uv to improve messaging and
user experience. It validates project metadata and structures, preventing common mistakes. And,
finally, it's very fast — `uv sync` on a new project (from `uv init`) is 10-30x faster than with
other build backends.

To use uv as a build backend in an existing project, add `uv_build` to the `[build-system]` section
in your `pyproject.toml`:

```toml
[build-system]
requires = ["uv_build>=0.7.19,<0.8.0"]
build-backend = "uv_build"
```

In a future release, it will replace `hatchling` as the default in `uv init`. As before, uv will
remain compatible with all standards-compliant build backends.
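
The backend's settings live under `[tool.uv.build-backend]`. A minimal sketch for a flat project
layout rather than the default `src` layout (the module name is illustrative):

```toml
[tool.uv.build-backend]
module-name = "my_module"
module-root = ""
```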

### Python

- Add PGO distributions of Python for aarch64 Linux, which offer improved performance

See the
[python-build-standalone release](https://github.com/astral-sh/python-build-standalone/releases/tag/20250702)
for more details.

### Enhancements

- Ignore Python patch version for `--universal` pip compile
  ([#14405](https://github.com/astral-sh/uv/pull/14405))
- Update the tilde version specifier warning to include more context
  ([#14335](https://github.com/astral-sh/uv/pull/14335))
- Clarify behavior and hint on tool install when no executables are available
  ([#14423](https://github.com/astral-sh/uv/pull/14423))

### Bug fixes

- Make project and interpreter lock acquisition non-fatal
  ([#14404](https://github.com/astral-sh/uv/pull/14404))
- Include `sys.prefix` in cached environment keys to avoid `--with` collisions across projects
  ([#14403](https://github.com/astral-sh/uv/pull/14403))

### Documentation

- Add a migration guide from pip to uv projects
  ([#12382](https://github.com/astral-sh/uv/pull/12382))

## 0.7.20

### Python

- Add Python 3.14.0b4
- Add zstd support to Python 3.14 on Unix (it was already available on Windows)
- Add PyPy 7.3.20 (for Python 3.11.13)

See the [PyPy](https://pypy.org/posts/2025/07/pypy-v7320-release.html) and
[`python-build-standalone`](https://github.com/astral-sh/python-build-standalone/releases/tag/20250708)
release notes for more details.

### Enhancements

- Add `--workspace` flag to `uv add` ([#14496](https://github.com/astral-sh/uv/pull/14496))
- Add auto-detection for Intel GPUs ([#14386](https://github.com/astral-sh/uv/pull/14386))
- Drop trailing arguments when writing shebangs
  ([#14519](https://github.com/astral-sh/uv/pull/14519))
- Add debug message when skipping Python downloads
  ([#14509](https://github.com/astral-sh/uv/pull/14509))
- Add support for declaring multiple modules in namespace packages
  ([#14460](https://github.com/astral-sh/uv/pull/14460))

### Bug fixes

- Revert normalization of trailing slashes on index URLs
  ([#14511](https://github.com/astral-sh/uv/pull/14511))
- Fix forced resolution with all extras in `uv version`
  ([#14434](https://github.com/astral-sh/uv/pull/14434))
- Fix handling of pre-releases in preferences ([#14498](https://github.com/astral-sh/uv/pull/14498))
- Remove transparent variants in `uv-extract` to enable retries
  ([#14450](https://github.com/astral-sh/uv/pull/14450))

### Rust API

- Add method to get packages involved in a `NoSolutionError`
  ([#14457](https://github.com/astral-sh/uv/pull/14457))
- Make `ErrorTree` for `NoSolutionError` public
  ([#14444](https://github.com/astral-sh/uv/pull/14444))

### Documentation

- Finish incomplete sentence in pip migration guide
  ([#14432](https://github.com/astral-sh/uv/pull/14432))
- Remove `cache-dependency-glob` examples for `setup-uv`
  ([#14493](https://github.com/astral-sh/uv/pull/14493))
- Remove `uv pip sync` suggestion with `pyproject.toml`
  ([#14510](https://github.com/astral-sh/uv/pull/14510))
- Update documentation for GitHub to use `setup-uv@v6`
  ([#14490](https://github.com/astral-sh/uv/pull/14490))

## 0.7.21

### Python

- Restore the SQLite `fts4`, `fts5`, `rtree`, and `geopoly` extensions on macOS and Linux

See the
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250712)
for more details.

### Enhancements

- Add `--python-platform` to `uv sync` ([#14320](https://github.com/astral-sh/uv/pull/14320))
- Support pre-releases in `uv version --bump` ([#13578](https://github.com/astral-sh/uv/pull/13578))
- Add `-w` shorthand for `--with` ([#14530](https://github.com/astral-sh/uv/pull/14530))
- Add an exception handler on Windows to display information on crash
  ([#14582](https://github.com/astral-sh/uv/pull/14582))
- Add hint when Python downloads are disabled ([#14522](https://github.com/astral-sh/uv/pull/14522))
- Add `UV_HTTP_RETRIES` to customize retry counts
  ([#14544](https://github.com/astral-sh/uv/pull/14544))
- Follow leaf symlinks matched by globs in `cache-key`
  ([#13438](https://github.com/astral-sh/uv/pull/13438))
- Support parent path components (`..`) in globs in `cache-key`
  ([#13469](https://github.com/astral-sh/uv/pull/13469)); see the sketch after this list
- Improve `cache-key` performance ([#13469](https://github.com/astral-sh/uv/pull/13469))
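
A hedged sketch of glob-based cache keys, including a parent path component (the paths are
illustrative):

```toml
[tool.uv]
cache-keys = [{ file = "pyproject.toml" }, { file = "../shared/**/*.toml" }]
```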

### Preview features

- Add `uv sync --output-format json` ([#13689](https://github.com/astral-sh/uv/pull/13689))

### Bug fixes

- Do not re-resolve with a new Python version in `uv tool` if it is incompatible with `--python`
  ([#14606](https://github.com/astral-sh/uv/pull/14606))

### Documentation

- Document how to nest dependency groups with `include-group`
  ([#14539](https://github.com/astral-sh/uv/pull/14539))
- Fix repeated word in Pyodide doc ([#14554](https://github.com/astral-sh/uv/pull/14554))
- Update CONTRIBUTING.md with instructions to format Markdown files via Docker
  ([#14246](https://github.com/astral-sh/uv/pull/14246))
- Fix version number for `setup-python` ([#14533](https://github.com/astral-sh/uv/pull/14533))

## 0.7.22

### Python

- Upgrade GraalPy to 24.2.2

See the [GraalPy release notes](https://github.com/oracle/graalpython/releases/tag/graal-24.2.2) for
more details.

### Configuration

- Add `UV_COMPILE_BYTECODE_TIMEOUT` environment variable
  ([#14369](https://github.com/astral-sh/uv/pull/14369))
- Allow users to override index `cache-control` headers
  ([#14620](https://github.com/astral-sh/uv/pull/14620))
- Add `UV_LIBC` to override libc selection in multi-libc environments
  ([#14646](https://github.com/astral-sh/uv/pull/14646))

### Bug fixes

- Fix `--all-arches` when paired with `--only-downloads`
  ([#14629](https://github.com/astral-sh/uv/pull/14629))
- Skip Windows Python interpreters that return a broken MSIX package code
  ([#14636](https://github.com/astral-sh/uv/pull/14636))
- Warn on invalid `uv.toml` when provided via direct path
  ([#14653](https://github.com/astral-sh/uv/pull/14653))
- Improve async signal safety in Windows exception handler
  ([#14619](https://github.com/astral-sh/uv/pull/14619))

### Documentation

- Mention the `revision` in the lockfile versioning doc
  ([#14634](https://github.com/astral-sh/uv/pull/14634))
- Move "Conflicting dependencies" to the "Resolution" page
  ([#14633](https://github.com/astral-sh/uv/pull/14633))
- Rename "Dependency specifiers" section to exclude PEP 508 reference
  ([#14631](https://github.com/astral-sh/uv/pull/14631))
- Suggest `uv cache clean` prior to `--reinstall`
  ([#14659](https://github.com/astral-sh/uv/pull/14659))

### Preview features

- Make preview Python registration on Windows non-fatal
  ([#14614](https://github.com/astral-sh/uv/pull/14614))
- Update preview installation of Python executables to be non-fatal
  ([#14612](https://github.com/astral-sh/uv/pull/14612))
- Add `uv python update-shell` ([#14627](https://github.com/astral-sh/uv/pull/14627))

changelogs/0.8.x.md (1108 lines changed; file diff suppressed because it is too large)

clippy.toml (27 lines changed)

@@ -8,7 +8,6 @@ doc-valid-idents = [
    "PyTorch",
    "ROCm",
    "XPU",
    "PowerShell",
    ".." # Include the defaults
]

@@ -17,11 +16,6 @@ disallowed-types = [
    "std::fs::File",
    "std::fs::OpenOptions",
    "std::fs::ReadDir",
    "tokio::fs::DirBuilder",
    "tokio::fs::DirEntry",
    "tokio::fs::File",
    "tokio::fs::OpenOptions",
    "tokio::fs::ReadDir",
]

disallowed-methods = [
@@ -43,28 +37,7 @@ disallowed-methods = [
    "std::fs::soft_link",
    "std::fs::symlink_metadata",
    "std::fs::write",
    "tokio::fs::canonicalize",
    "tokio::fs::copy",
    "tokio::fs::create_dir",
    "tokio::fs::create_dir_all",
    "tokio::fs::hard_link",
    "tokio::fs::metadata",
    "tokio::fs::read",
    "tokio::fs::read_dir",
    "tokio::fs::read_link",
    "tokio::fs::read_to_string",
    "tokio::fs::remove_dir",
    "tokio::fs::remove_dir_all",
    "tokio::fs::remove_file",
    "tokio::fs::rename",
    "tokio::fs::set_permissions",
    "tokio::fs::symlink_metadata",
    "tokio::fs::try_exists",
    "tokio::fs::write",
    { path = "std::os::unix::fs::symlink", allow-invalid = true },
    { path = "std::os::windows::fs::symlink_dir", allow-invalid = true },
    { path = "std::os::windows::fs::symlink_file", allow-invalid = true },
    { path = "tokio::fs::symlink", allow-invalid = true },
    { path = "tokio::fs::symlink_dir", allow-invalid = true },
    { path = "tokio::fs::symlink_file", allow-invalid = true },
]

@@ -1,13 +1,7 @@
[package]
name = "uv-auth"
version = "0.0.8"
description = "This is an internal component crate of uv"
version = "0.0.1"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

[lib]
doctest = false
@@ -16,44 +10,31 @@ doctest = false
workspace = true

[dependencies]
uv-cache-key = { workspace = true }
uv-fs = { workspace = true }
uv-keyring = { workspace = true, features = ["apple-native", "secret-service", "windows-native"] }
uv-once-map = { workspace = true }
uv-preview = { workspace = true }
uv-redacted = { workspace = true }
uv-small-str = { workspace = true }
uv-state = { workspace = true }
uv-static = { workspace = true }
uv-warnings = { workspace = true }

anyhow = { workspace = true }
arcstr = { workspace = true }
async-trait = { workspace = true }
base64 = { workspace = true }
etcetera = { workspace = true }
fs-err = { workspace = true, features = ["tokio"] }
futures = { workspace = true }
http = { workspace = true }
jiff = { workspace = true }
percent-encoding = { workspace = true }
reqsign = { workspace = true }
reqwest = { workspace = true }
reqwest-middleware = { workspace = true }
rust-netrc = { workspace = true }
rustc-hash = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
toml = { workspace = true }
tracing = { workspace = true }
url = { workspace = true }

[dev-dependencies]
insta = { workspace = true }
insta = { version = "1.40.0" }
tempfile = { workspace = true }
test-log = { workspace = true }
test-log = { version = "0.2.16", features = ["trace"], default-features = false }
tokio = { workspace = true }
wiremock = { workspace = true }
@ -1,13 +0,0 @@
|
|||
<!-- This file is generated. DO NOT EDIT -->
|
||||
|
||||
# uv-auth
|
||||
|
||||
This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
|
||||
is unstable and will have frequent breaking changes.
|
||||
|
||||
This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
|
||||
can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-auth).
|
||||
|
||||
See uv's
|
||||
[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
|
||||
for details on versioning.
|
||||
|
|

@@ -1,34 +0,0 @@
/// An encoded JWT access token.
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
#[serde(transparent)]
pub struct AccessToken(String);

impl AccessToken {
    /// Return the [`AccessToken`] as a vector of bytes.
    pub fn into_bytes(self) -> Vec<u8> {
        self.0.into_bytes()
    }

    /// Return the [`AccessToken`] as a string slice.
    pub fn as_str(&self) -> &str {
        &self.0
    }
}

impl From<String> for AccessToken {
    fn from(value: String) -> Self {
        Self(value)
    }
}

impl AsRef<[u8]> for AccessToken {
    fn as_ref(&self) -> &[u8] {
        self.0.as_bytes()
    }
}

impl std::fmt::Display for AccessToken {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "****")
    }
}

@@ -11,8 +11,8 @@ use url::Url;
use uv_once_map::OnceMap;
use uv_redacted::DisplaySafeUrl;

use crate::credentials::{Authentication, Username};
use crate::{Credentials, Realm};
use crate::Realm;
use crate::credentials::{Credentials, Username};

type FxOnceMap<K, V> = OnceMap<K, V, BuildHasherDefault<FxHasher>>;
@@ -33,14 +33,13 @@ impl Display for FetchUrl {
    }
}

#[derive(Debug)] // All internal types are redacted.
pub struct CredentialsCache {
    /// A cache per realm and username
    realms: RwLock<FxHashMap<(Realm, Username), Arc<Authentication>>>,
    realms: RwLock<FxHashMap<(Realm, Username), Arc<Credentials>>>,
    /// A cache tracking the result of realm or index URL fetches from external services
    pub(crate) fetches: FxOnceMap<(FetchUrl, Username), Option<Arc<Authentication>>>,
    pub(crate) fetches: FxOnceMap<(FetchUrl, Username), Option<Arc<Credentials>>>,
    /// A cache per URL, uses a trie for efficient prefix queries.
    urls: RwLock<UrlTrie<Arc<Authentication>>>,
    urls: RwLock<UrlTrie>,
}

impl Default for CredentialsCache {
@@ -59,33 +58,8 @@ impl CredentialsCache {
        }
    }

    /// Populate the global authentication store with credentials on a URL, if there are any.
    ///
    /// Returns `true` if the store was updated.
    pub fn store_credentials_from_url(&self, url: &DisplaySafeUrl) -> bool {
        if let Some(credentials) = Credentials::from_url(url) {
            trace!("Caching credentials for {url}");
            self.insert(url, Arc::new(Authentication::from(credentials)));
            true
        } else {
            false
        }
    }

    /// Populate the global authentication store with credentials on a URL, if there are any.
    ///
    /// Returns `true` if the store was updated.
    pub fn store_credentials(&self, url: &DisplaySafeUrl, credentials: Credentials) {
        trace!("Caching credentials for {url}");
        self.insert(url, Arc::new(Authentication::from(credentials)));
    }

    /// Return the credentials that should be used for a realm and username, if any.
    pub(crate) fn get_realm(
        &self,
        realm: Realm,
        username: Username,
    ) -> Option<Arc<Authentication>> {
    pub(crate) fn get_realm(&self, realm: Realm, username: Username) -> Option<Arc<Credentials>> {
        let realms = self.realms.read().unwrap();
        let given_username = username.is_some();
        let key = (realm, username);
@@ -119,7 +93,7 @@ impl CredentialsCache {
    /// Note we do not cache per username, but if a username is passed we will confirm that the
    /// cached credentials have a username equal to the provided one — otherwise `None` is returned.
    /// If multiple usernames are used per URL, the realm cache should be queried instead.
    pub(crate) fn get_url(&self, url: &Url, username: &Username) -> Option<Arc<Authentication>> {
    pub(crate) fn get_url(&self, url: &Url, username: &Username) -> Option<Arc<Credentials>> {
        let urls = self.urls.read().unwrap();
        let credentials = urls.get(url);
        if let Some(credentials) = credentials {
@@ -138,7 +112,7 @@ impl CredentialsCache {
    }

    /// Update the cache with the given credentials.
    pub(crate) fn insert(&self, url: &Url, credentials: Arc<Authentication>) {
    pub(crate) fn insert(&self, url: &Url, credentials: Arc<Credentials>) {
        // Do not cache empty credentials
        if credentials.is_empty() {
            return;
@@ -165,8 +139,8 @@ impl CredentialsCache {
    fn insert_realm(
        &self,
        key: (Realm, Username),
        credentials: &Arc<Authentication>,
    ) -> Option<Arc<Authentication>> {
        credentials: &Arc<Credentials>,
    ) -> Option<Arc<Credentials>> {
        // Do not cache empty credentials
        if credentials.is_empty() {
            return None;
@@ -174,8 +148,8 @@ impl CredentialsCache {

        let mut realms = self.realms.write().unwrap();

        // Always replace existing entries if we have a password or token
        if credentials.is_authenticated() {
        // Always replace existing entries if we have a password
        if credentials.password().is_some() {
            return realms.insert(key, credentials.clone());
        }

@@ -192,33 +166,24 @@ impl CredentialsCache {
}

#[derive(Debug)]
struct UrlTrie<T> {
    states: Vec<TrieState<T>>,
struct UrlTrie {
    states: Vec<TrieState>,
}

#[derive(Debug)]
struct TrieState<T> {
#[derive(Debug, Default)]
struct TrieState {
    children: Vec<(String, usize)>,
    value: Option<T>,
    value: Option<Arc<Credentials>>,
}

impl<T> Default for TrieState<T> {
    fn default() -> Self {
        Self {
            children: vec![],
            value: None,
        }
    }
}

impl<T> UrlTrie<T> {
    fn new() -> Self {
        let mut trie = Self { states: vec![] };
impl UrlTrie {
    fn new() -> UrlTrie {
        let mut trie = UrlTrie { states: vec![] };
        trie.alloc();
        trie
    }

    fn get(&self, url: &Url) -> Option<&T> {
    fn get(&self, url: &Url) -> Option<&Arc<Credentials>> {
        let mut state = 0;
        let realm = Realm::from(url).to_string();
        for component in [realm.as_str()]
@@ -233,7 +198,7 @@ impl<T> UrlTrie<T> {
        self.states[state].value.as_ref()
    }

    fn insert(&mut self, url: &Url, value: T) {
    fn insert(&mut self, url: &Url, value: Arc<Credentials>) {
        let mut state = 0;
        let realm = Realm::from(url).to_string();
        for component in [realm.as_str()]
@@ -261,7 +226,7 @@ impl<T> UrlTrie<T> {
    }
}

impl<T> TrieState<T> {
impl TrieState {
    fn get(&self, component: &str) -> Option<usize> {
        let i = self.index(component).ok()?;
        Some(self.children[i].1)
@@ -295,21 +260,28 @@ impl From<(Realm, Username)> for RealmUsername {

#[cfg(test)]
mod tests {
    use crate::Credentials;
    use crate::credentials::Password;

    use super::*;

    #[test]
    fn test_trie() {
        let credentials1 =
            Credentials::basic(Some("username1".to_string()), Some("password1".to_string()));
        let credentials2 =
            Credentials::basic(Some("username2".to_string()), Some("password2".to_string()));
        let credentials3 =
            Credentials::basic(Some("username3".to_string()), Some("password3".to_string()));
        let credentials4 =
            Credentials::basic(Some("username4".to_string()), Some("password4".to_string()));
        let credentials1 = Arc::new(Credentials::basic(
            Some("username1".to_string()),
            Some("password1".to_string()),
        ));
        let credentials2 = Arc::new(Credentials::basic(
            Some("username2".to_string()),
            Some("password2".to_string()),
        ));
        let credentials3 = Arc::new(Credentials::basic(
            Some("username3".to_string()),
            Some("password3".to_string()),
        ));
        let credentials4 = Arc::new(Credentials::basic(
            Some("username4".to_string()),
            Some("password4".to_string()),
        ));

        let mut trie = UrlTrie::new();
        trie.insert(
@@ -367,10 +339,10 @@ mod tests {
    fn test_url_with_credentials() {
        let username = Username::new(Some(String::from("username")));
        let password = Password::new(String::from("password"));
        let credentials = Arc::new(Authentication::from(Credentials::Basic {
        let credentials = Arc::new(Credentials::Basic {
            username: username.clone(),
            password: Some(password),
        }));
        });
        let cache = CredentialsCache::default();
        // Insert with URL with credentials and get with redacted URL.
        let url = Url::parse("https://username:password@example.com/foobar").unwrap();

@@ -1,41 +1,34 @@
use std::borrow::Cow;
use std::fmt;
use std::io::Read;
use std::io::Write;
use std::str::FromStr;

use base64::prelude::BASE64_STANDARD;
use base64::read::DecoderReader;
use base64::write::EncoderWriter;
use http::Uri;
use std::borrow::Cow;
use std::fmt;
use uv_redacted::DisplaySafeUrl;

use netrc::Netrc;
use reqsign::aws::DefaultSigner;
use reqwest::Request;
use reqwest::header::HeaderValue;
use serde::{Deserialize, Serialize};
use std::io::Read;
use std::io::Write;
use url::Url;

use uv_redacted::DisplaySafeUrl;
use uv_static::EnvVars;

#[derive(Clone, Debug, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq)]
pub enum Credentials {
    /// RFC 7617 HTTP Basic Authentication
    Basic {
        /// The username to use for authentication.
        username: Username,
        /// The password to use for authentication.
        password: Option<Password>,
    },
    /// RFC 6750 Bearer Token Authentication
    Bearer {
        /// The token to use for authentication.
        token: Token,
        token: Vec<u8>,
    },
}

#[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Hash, Default, Serialize, Deserialize)]
#[serde(transparent)]
#[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Hash, Default)]
pub struct Username(Option<String>);

impl Username {
@ -76,8 +69,7 @@ impl From<Option<String>> for Username {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Ord, PartialOrd, Hash, Default, Serialize, Deserialize)]
|
||||
#[serde(transparent)]
|
||||
#[derive(Clone, PartialEq, Eq, Ord, PartialOrd, Hash, Default)]
|
||||
pub struct Password(String);
|
||||
|
||||
impl Password {
|
||||
|
|
@ -85,15 +77,9 @@ impl Password {
|
|||
Self(password)
|
||||
}
|
||||
|
||||
/// Return the [`Password`] as a string slice.
|
||||
pub fn as_str(&self) -> &str {
|
||||
self.0.as_str()
|
||||
}
|
||||
|
||||
/// Convert the [`Password`] into its underlying [`String`].
|
||||
pub fn into_string(self) -> String {
|
||||
self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for Password {
|
||||
|
|
@ -102,36 +88,6 @@ impl fmt::Debug for Password {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Ord, PartialOrd, Hash, Default, Deserialize)]
|
||||
#[serde(transparent)]
|
||||
pub struct Token(Vec<u8>);
|
||||
|
||||
impl Token {
|
||||
pub fn new(token: Vec<u8>) -> Self {
|
||||
Self(token)
|
||||
}
|
||||
|
||||
/// Return the [`Token`] as a byte slice.
|
||||
pub fn as_slice(&self) -> &[u8] {
|
||||
self.0.as_slice()
|
||||
}
|
||||
|
||||
/// Convert the [`Token`] into its underlying [`Vec<u8>`].
|
||||
pub fn into_bytes(self) -> Vec<u8> {
|
||||
self.0
|
||||
}
|
||||
|
||||
/// Return whether the [`Token`] is empty.
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.0.is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for Token {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "****")
|
||||
}
|
||||
}
|
||||
impl Credentials {
|
||||
/// Create a set of HTTP Basic Authentication credentials.
|
||||
#[allow(dead_code)]
|
||||
|
|
@ -145,9 +101,7 @@ impl Credentials {
|
|||
/// Create a set of Bearer Authentication credentials.
|
||||
#[allow(dead_code)]
|
||||
pub fn bearer(token: Vec<u8>) -> Self {
|
||||
Self::Bearer {
|
||||
token: Token::new(token),
|
||||
}
|
||||
Self::Bearer { token }
|
||||
}
|
||||
|
||||
pub fn username(&self) -> Option<&str> {
|
||||
|
|
@ -178,16 +132,6 @@ impl Credentials {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn is_authenticated(&self) -> bool {
|
||||
match self {
|
||||
Self::Basic {
|
||||
username: _,
|
||||
password,
|
||||
} => password.is_some(),
|
||||
Self::Bearer { token } => !token.is_empty(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn is_empty(&self) -> bool {
|
||||
match self {
|
||||
Self::Basic { username, password } => username.is_none() && password.is_none(),
|
||||
|
|
@ -214,7 +158,7 @@ impl Credentials {
|
|||
return None;
|
||||
}
|
||||
|
||||
Some(Self::Basic {
|
||||
Some(Credentials::Basic {
|
||||
username: Username::new(Some(entry.login.clone())),
|
||||
password: Some(Password(entry.password.clone())),
|
||||
})
|
||||
|
|
@ -318,7 +262,7 @@ impl Credentials {
|
|||
// Parse a `Bearer` authentication header.
|
||||
if let Some(token) = header.as_bytes().strip_prefix(b"Bearer ") {
|
||||
return Some(Self::Bearer {
|
||||
token: Token::new(token.to_vec()),
|
||||
token: token.to_vec(),
|
||||
});
|
||||
}
|
||||
|
||||
|
|
@ -382,127 +326,6 @@ impl Credentials {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) enum Authentication {
|
||||
/// HTTP Basic or Bearer Authentication credentials.
|
||||
Credentials(Credentials),
|
||||
|
||||
/// AWS Signature Version 4 signing.
|
||||
Signer(DefaultSigner),
|
||||
}
|
||||
|
||||
impl PartialEq for Authentication {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
match (self, other) {
|
||||
(Self::Credentials(a), Self::Credentials(b)) => a == b,
|
||||
(Self::Signer(..), Self::Signer(..)) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for Authentication {}
|
||||
|
||||
impl From<Credentials> for Authentication {
|
||||
fn from(credentials: Credentials) -> Self {
|
||||
Self::Credentials(credentials)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<DefaultSigner> for Authentication {
|
||||
fn from(signer: DefaultSigner) -> Self {
|
||||
Self::Signer(signer)
|
||||
}
|
||||
}
|
||||
|
||||
impl Authentication {
|
||||
/// Return the password used for authentication, if any.
|
||||
pub(crate) fn password(&self) -> Option<&str> {
|
||||
match self {
|
||||
Self::Credentials(credentials) => credentials.password(),
|
||||
Self::Signer(..) => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the username used for authentication, if any.
|
||||
pub(crate) fn username(&self) -> Option<&str> {
|
||||
match self {
|
||||
Self::Credentials(credentials) => credentials.username(),
|
||||
Self::Signer(..) => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the username used for authentication, if any.
|
||||
pub(crate) fn as_username(&self) -> Cow<'_, Username> {
|
||||
match self {
|
||||
Self::Credentials(credentials) => credentials.as_username(),
|
||||
Self::Signer(..) => Cow::Owned(Username::none()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the username used for authentication, if any.
|
||||
pub(crate) fn to_username(&self) -> Username {
|
||||
match self {
|
||||
Self::Credentials(credentials) => credentials.to_username(),
|
||||
Self::Signer(..) => Username::none(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return `true` if the object contains a means of authenticating.
|
||||
pub(crate) fn is_authenticated(&self) -> bool {
|
||||
match self {
|
||||
Self::Credentials(credentials) => credentials.is_authenticated(),
|
||||
Self::Signer(..) => true,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return `true` if the object contains no credentials.
|
||||
pub(crate) fn is_empty(&self) -> bool {
|
||||
match self {
|
||||
Self::Credentials(credentials) => credentials.is_empty(),
|
||||
Self::Signer(..) => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Apply the authentication to the given request.
|
||||
///
|
||||
/// Any existing credentials will be overridden.
|
||||
#[must_use]
|
||||
pub(crate) async fn authenticate(&self, mut request: Request) -> Request {
|
||||
match self {
|
||||
Self::Credentials(credentials) => credentials.authenticate(request),
|
||||
Self::Signer(signer) => {
|
||||
// Build an `http::Request` from the `reqwest::Request`.
|
||||
// SAFETY: If we have a valid `reqwest::Request`, we expect (e.g.) the URL to be valid.
|
||||
let uri = Uri::from_str(request.url().as_str()).unwrap();
|
||||
let mut http_req = http::Request::builder()
|
||||
.method(request.method().clone())
|
||||
.uri(uri)
|
||||
.body(())
|
||||
.unwrap();
|
||||
*http_req.headers_mut() = request.headers().clone();
|
||||
|
||||
// Sign the parts.
|
||||
let (mut parts, ()) = http_req.into_parts();
|
||||
signer
|
||||
.sign(&mut parts, None)
|
||||
.await
|
||||
.expect("AWS signing should succeed");
|
||||
|
||||
// Copy over the signed headers.
|
||||
request.headers_mut().extend(parts.headers);
|
||||
|
||||
// Copy over the signed path and query, if any.
|
||||
if let Some(path_and_query) = parts.uri.path_and_query() {
|
||||
request.url_mut().set_path(path_and_query.path());
|
||||
request.url_mut().set_query(path_and_query.query());
|
||||
}
|
||||
request
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use insta::assert_debug_snapshot;
|
||||
|
|
@ -623,15 +446,4 @@ mod tests {
|
|||
"Basic { username: Username(Some(\"user\")), password: Some(****) }"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_bearer_token_obfuscation() {
|
||||
let token = "super_secret_token";
|
||||
let credentials = Credentials::bearer(token.into());
|
||||
let debugged = format!("{credentials:?}");
|
||||
assert!(
|
||||
!debugged.contains(token),
|
||||
"Token should be obfuscated in Debug impl: {debugged}"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
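
For Basic credentials, the `authenticate` path ultimately emits an RFC 7617 `Authorization` header; the base64 imports at the top of this file are used for that encoding. A minimal sketch of the header construction (illustrative; `basic_header_value` is a hypothetical helper, not uv's API):

use base64::prelude::{Engine as _, BASE64_STANDARD};

// Illustrative: RFC 7617 base64-encodes "username:password" and
// prepends the "Basic " scheme to form the Authorization header value.
fn basic_header_value(username: &str, password: &str) -> String {
    let encoded = BASE64_STANDARD.encode(format!("{username}:{password}"));
    format!("Basic {encoded}")
}

fn main() {
    assert_eq!(
        basic_header_value("user", "password"),
        "Basic dXNlcjpwYXNzd29yZA=="
    );
}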

@@ -42,9 +42,9 @@ pub enum AuthPolicy {
impl Display for AuthPolicy {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        match self {
            Self::Auto => write!(f, "auto"),
            Self::Always => write!(f, "always"),
            Self::Never => write!(f, "never"),
            AuthPolicy::Auto => write!(f, "auto"),
            AuthPolicy::Always => write!(f, "always"),
            AuthPolicy::Never => write!(f, "never"),
        }
    }
}

@@ -95,9 +95,9 @@ impl Indexes {
        index_urls
    }

    /// Get the index for a URL if one exists.
    pub fn index_for(&self, url: &Url) -> Option<&Index> {
        self.find_prefix_index(url)
    /// Get the index URL prefix for a URL if one exists.
    pub fn index_url_for(&self, url: &Url) -> Option<&DisplaySafeUrl> {
        self.find_prefix_index(url).map(|index| &index.url)
    }

    /// Get the [`AuthPolicy`] for a URL.
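
`index_url_for` maps a request URL back to the configured index whose URL is a prefix of it. A rough sketch of the prefix test that `find_prefix_index` implies (assumed logic for illustration, not the actual implementation):

// Illustrative prefix check: a URL belongs to an index if the index URL's
// scheme, host, port, and path prefix all match.
fn is_under(index_url: &url::Url, request_url: &url::Url) -> bool {
    index_url.scheme() == request_url.scheme()
        && index_url.host_str() == request_url.host_str()
        && index_url.port_or_known_default() == request_url.port_or_known_default()
        && request_url.path().starts_with(index_url.path())
}

fn main() {
    let index = url::Url::parse("https://example.com/simple/").unwrap();
    let request = url::Url::parse("https://example.com/simple/flask/").unwrap();
    assert!(is_under(&index, &request));
}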

@@ -1,14 +1,11 @@
use std::{io::Write, process::Stdio};
use tokio::process::Command;
use tracing::{debug, instrument, trace, warn};
use tracing::{instrument, trace, warn};
use uv_redacted::DisplaySafeUrl;
use uv_warnings::warn_user_once;

use crate::credentials::Credentials;

/// Service name prefix for storing credentials in a keyring.
static UV_SERVICE_PREFIX: &str = "uv:";

/// A backend for retrieving credentials from a keyring.
///
/// See pip's implementation for reference

@@ -18,47 +15,15 @@ pub struct KeyringProvider {
    backend: KeyringProviderBackend,
}

#[derive(thiserror::Error, Debug)]
pub enum Error {
    #[error(transparent)]
    Keyring(#[from] uv_keyring::Error),

    #[error("The '{0}' keyring provider does not support storing credentials")]
    StoreUnsupported(KeyringProviderBackend),

    #[error("The '{0}' keyring provider does not support removing credentials")]
    RemoveUnsupported(KeyringProviderBackend),
}

#[derive(Debug, Clone)]
pub enum KeyringProviderBackend {
    /// Use a native system keyring integration for credentials.
    Native,
    /// Use the external `keyring` command for credentials.
#[derive(Debug)]
pub(crate) enum KeyringProviderBackend {
    /// Use the `keyring` command to fetch credentials.
    Subprocess,
    #[cfg(test)]
    Dummy(Vec<(String, &'static str, &'static str)>),
}

impl std::fmt::Display for KeyringProviderBackend {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Native => write!(f, "native"),
            Self::Subprocess => write!(f, "subprocess"),
            #[cfg(test)]
            Self::Dummy(_) => write!(f, "dummy"),
        }
    }
}

impl KeyringProvider {
    /// Create a new [`KeyringProvider::Native`].
    pub fn native() -> Self {
        Self {
            backend: KeyringProviderBackend::Native,
        }
    }

    /// Create a new [`KeyringProvider::Subprocess`].
    pub fn subprocess() -> Self {
        Self {

@@ -66,124 +31,6 @@ impl KeyringProvider {
        }
    }

    /// Store credentials for the given [`DisplaySafeUrl`] to the keyring.
    ///
    /// Only [`KeyringProviderBackend::Native`] is supported at this time.
    #[instrument(skip_all, fields(url = %url.to_string(), username))]
    pub async fn store(
        &self,
        url: &DisplaySafeUrl,
        credentials: &Credentials,
    ) -> Result<bool, Error> {
        let Some(username) = credentials.username() else {
            trace!("Unable to store credentials in keyring for {url} due to missing username");
            return Ok(false);
        };
        let Some(password) = credentials.password() else {
            trace!("Unable to store credentials in keyring for {url} due to missing password");
            return Ok(false);
        };

        // Ensure we strip credentials from the URL before storing
        let url = url.without_credentials();

        // If there's no path, we'll perform a host-level login
        let target = if let Some(host) = url.host_str().filter(|_| !url.path().is_empty()) {
            let mut target = String::new();
            if url.scheme() != "https" {
                target.push_str(url.scheme());
                target.push_str("://");
            }
            target.push_str(host);
            if let Some(port) = url.port() {
                target.push(':');
                target.push_str(&port.to_string());
            }
            target
        } else {
            url.to_string()
        };

        match &self.backend {
            KeyringProviderBackend::Native => {
                self.store_native(&target, username, password).await?;
                Ok(true)
            }
            KeyringProviderBackend::Subprocess => {
                Err(Error::StoreUnsupported(self.backend.clone()))
            }
            #[cfg(test)]
            KeyringProviderBackend::Dummy(_) => Err(Error::StoreUnsupported(self.backend.clone())),
        }
    }

    /// Store credentials to the system keyring.
    #[instrument(skip(self))]
    async fn store_native(
        &self,
        service: &str,
        username: &str,
        password: &str,
    ) -> Result<(), Error> {
        let prefixed_service = format!("{UV_SERVICE_PREFIX}{service}");
        let entry = uv_keyring::Entry::new(&prefixed_service, username)?;
        entry.set_password(password).await?;
        Ok(())
    }

    /// Remove credentials for the given [`DisplaySafeUrl`] and username from the keyring.
    ///
    /// Only [`KeyringProviderBackend::Native`] is supported at this time.
    #[instrument(skip_all, fields(url = %url.to_string(), username))]
    pub async fn remove(&self, url: &DisplaySafeUrl, username: &str) -> Result<(), Error> {
        // Ensure we strip credentials from the URL before storing
        let url = url.without_credentials();

        // If there's no path, we'll perform a host-level login
        let target = if let Some(host) = url.host_str().filter(|_| !url.path().is_empty()) {
            let mut target = String::new();
            if url.scheme() != "https" {
                target.push_str(url.scheme());
                target.push_str("://");
            }
            target.push_str(host);
            if let Some(port) = url.port() {
                target.push(':');
                target.push_str(&port.to_string());
            }
            target
        } else {
            url.to_string()
        };

        match &self.backend {
            KeyringProviderBackend::Native => {
                self.remove_native(&target, username).await?;
                Ok(())
            }
            KeyringProviderBackend::Subprocess => {
                Err(Error::RemoveUnsupported(self.backend.clone()))
            }
            #[cfg(test)]
            KeyringProviderBackend::Dummy(_) => Err(Error::RemoveUnsupported(self.backend.clone())),
        }
    }

    /// Remove credentials from the system keyring for the given `service_name`/`username`
    /// pair.
    #[instrument(skip(self))]
    async fn remove_native(
        &self,
        service_name: &str,
        username: &str,
    ) -> Result<(), uv_keyring::Error> {
        let prefixed_service = format!("{UV_SERVICE_PREFIX}{service_name}");
        let entry = uv_keyring::Entry::new(&prefixed_service, username)?;
        entry.delete_credential().await?;
        trace!("Removed credentials for {username}@{service_name} from system keyring");
        Ok(())
    }

    /// Fetch credentials for the given [`Url`] from the keyring.
    ///
    /// Returns [`None`] if no password was found for the username or if any errors

@@ -193,11 +40,11 @@ impl KeyringProvider {
        // Validate the request
        debug_assert!(
            url.host_str().is_some(),
            "Should only use keyring for URLs with host"
            "Should only use keyring for urls with host"
        );
        debug_assert!(
            url.password().is_none(),
            "Should only use keyring for URLs without a password"
            "Should only use keyring for urls without a password"
        );
        debug_assert!(
            !username.map(str::is_empty).unwrap_or(false),

@@ -208,7 +55,6 @@ impl KeyringProvider {
        // <https://github.com/pypa/pip/blob/ae5fff36b0aad6e5e0037884927eaa29163c0611/src/pip/_internal/network/auth.py#L376C1-L379C14>
        trace!("Checking keyring for URL {url}");
        let mut credentials = match self.backend {
            KeyringProviderBackend::Native => self.fetch_native(url.as_str(), username).await,
            KeyringProviderBackend::Subprocess => {
                self.fetch_subprocess(url.as_str(), username).await
            }

@@ -226,7 +72,6 @@ impl KeyringProvider {
        };
        trace!("Checking keyring for host {host}");
        credentials = match self.backend {
            KeyringProviderBackend::Native => self.fetch_native(&host, username).await,
            KeyringProviderBackend::Subprocess => self.fetch_subprocess(&host, username).await,
            #[cfg(test)]
            KeyringProviderBackend::Dummy(ref store) => {

@@ -320,7 +165,7 @@ impl KeyringProvider {
            // N.B. We do not show the `service_name` here because we'll show the warning twice
            // otherwise, once for the URL and once for the realm.
            warn_user_once!(
                "Attempted to fetch credentials using the `keyring` command, but it does not support `--mode creds`; upgrade to `keyring>=v25.2.1` or provide a username"
                "Attempted to fetch credentials using the `keyring` command, but it does not support `--mode creds`; upgrade to `keyring>=v25.2.1` for support or provide a username"
            );
        } else if username.is_none() {
            // If we captured stderr, display it in case it's helpful to the user

@@ -330,31 +175,6 @@ impl KeyringProvider {
        }
    }

    #[instrument(skip(self))]
    async fn fetch_native(
        &self,
        service: &str,
        username: Option<&str>,
    ) -> Option<(String, String)> {
        let prefixed_service = format!("{UV_SERVICE_PREFIX}{service}");
        let username = username?;
        let Ok(entry) = uv_keyring::Entry::new(&prefixed_service, username) else {
            return None;
        };
        match entry.get_password().await {
            Ok(password) => return Some((username.to_string(), password)),
            Err(uv_keyring::Error::NoEntry) => {
                debug!("No entry found in system keyring for {service}");
            }
            Err(err) => {
                warn_user_once!(
                    "Unable to fetch credentials for {service} from system keyring: {err}"
                );
            }
        }
        None
    }

    #[cfg(test)]
    fn fetch_dummy(
        store: &Vec<(String, &'static str, &'static str)>,

@@ -404,13 +224,12 @@ mod tests {
        let url = Url::parse("file:/etc/bin/").unwrap();
        let keyring = KeyringProvider::empty();
        // Panics due to debug assertion; returns `None` in production
        let fetch = keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some("user"));
        if cfg!(debug_assertions) {
            let result = std::panic::AssertUnwindSafe(fetch).catch_unwind().await;
            assert!(result.is_err());
        } else {
            assert_eq!(fetch.await, None);
        }
        let result = std::panic::AssertUnwindSafe(
            keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some("user")),
        )
        .catch_unwind()
        .await;
        assert!(result.is_err());
    }

    #[tokio::test]

@@ -418,13 +237,12 @@ mod tests {
        let url = Url::parse("https://user:password@example.com").unwrap();
        let keyring = KeyringProvider::empty();
        // Panics due to debug assertion; returns `None` in production
        let fetch = keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some(url.username()));
        if cfg!(debug_assertions) {
            let result = std::panic::AssertUnwindSafe(fetch).catch_unwind().await;
            assert!(result.is_err());
        } else {
            assert_eq!(fetch.await, None);
        }
        let result = std::panic::AssertUnwindSafe(
            keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some(url.username())),
        )
        .catch_unwind()
        .await;
        assert!(result.is_err());
    }

    #[tokio::test]

@@ -432,13 +250,12 @@ mod tests {
        let url = Url::parse("https://example.com").unwrap();
        let keyring = KeyringProvider::empty();
        // Panics due to debug assertion; returns `None` in production
        let fetch = keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some(url.username()));
        if cfg!(debug_assertions) {
            let result = std::panic::AssertUnwindSafe(fetch).catch_unwind().await;
            assert!(result.is_err());
        } else {
            assert_eq!(fetch.await, None);
        }
        let result = std::panic::AssertUnwindSafe(
            keyring.fetch(DisplaySafeUrl::ref_cast(&url), Some(url.username())),
        )
        .catch_unwind()
        .await;
        assert!(result.is_err());
    }

    #[tokio::test]
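
Both `store` and `remove` derive the keyring service name the same way: keep the scheme only when it isn't `https`, then append the host and any explicit port, falling back to the full URL when there is no host. A standalone sketch of that derivation (assuming exactly the rules shown in the code above):

// Illustrative: mirror the target-derivation rules from `store`/`remove`.
fn keyring_target(url: &url::Url) -> String {
    match url.host_str().filter(|_| !url.path().is_empty()) {
        Some(host) => {
            let mut target = String::new();
            if url.scheme() != "https" {
                target.push_str(url.scheme());
                target.push_str("://");
            }
            target.push_str(host);
            if let Some(port) = url.port() {
                target.push(':');
                target.push_str(&port.to_string());
            }
            target
        }
        None => url.to_string(),
    }
}

fn main() {
    // Non-https schemes and explicit ports are kept in the service name.
    let url = url::Url::parse("http://example.com:8080/simple/").unwrap();
    assert_eq!(keyring_target(&url), "http://example.com:8080");

    // For https on the default port, only the host remains.
    let url = url::Url::parse("https://example.com/simple/").unwrap();
    assert_eq!(keyring_target(&url), "example.com");
}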

@@ -1,24 +1,47 @@
pub use access_token::AccessToken;
pub use cache::CredentialsCache;
pub use credentials::{Credentials, Username};
use std::sync::{Arc, LazyLock};

use tracing::trace;

use cache::CredentialsCache;
pub use credentials::Credentials;
pub use index::{AuthPolicy, Index, Indexes};
pub use keyring::KeyringProvider;
pub use middleware::AuthMiddleware;
pub use pyx::{
    DEFAULT_TOLERANCE_SECS, PyxJwt, PyxOAuthTokens, PyxTokenStore, PyxTokens, TokenStoreError,
};
pub use realm::{Realm, RealmRef};
pub use service::{Service, ServiceParseError};
pub use store::{AuthBackend, AuthScheme, TextCredentialStore, TomlCredentialError};
use realm::Realm;
use uv_redacted::DisplaySafeUrl;

mod access_token;
mod cache;
mod credentials;
mod index;
mod keyring;
mod middleware;
mod providers;
mod pyx;
mod realm;
mod service;
mod store;

// TODO(zanieb): Consider passing a cache explicitly throughout

/// Global authentication cache for a uv invocation
///
/// This is used to share credentials across uv clients.
pub(crate) static CREDENTIALS_CACHE: LazyLock<CredentialsCache> =
    LazyLock::new(CredentialsCache::default);

/// Populate the global authentication store with credentials on a URL, if there are any.
///
/// Returns `true` if the store was updated.
pub fn store_credentials_from_url(url: &DisplaySafeUrl) -> bool {
    if let Some(credentials) = Credentials::from_url(url) {
        trace!("Caching credentials for {url}");
        CREDENTIALS_CACHE.insert(url, Arc::new(credentials));
        true
    } else {
        false
    }
}

/// Populate the global authentication store with credentials on a URL, if there are any.
///
/// Returns `true` if the store was updated.
pub fn store_credentials(url: &DisplaySafeUrl, credentials: Arc<Credentials>) {
    trace!("Caching credentials for {url}");
    CREDENTIALS_CACHE.insert(url, credentials);
}
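
An illustrative call site for the two helpers above, seeding the process-wide cache (a sketch only; it assumes `Credentials::basic` and `DisplaySafeUrl::parse` are visible to the caller, and the URLs are hypothetical):

use std::sync::Arc;
use uv_auth::{store_credentials, store_credentials_from_url, Credentials};
use uv_redacted::DisplaySafeUrl;

fn seed() {
    // Credentials embedded in the URL are parsed out and cached globally;
    // returns false if the URL carries no credentials.
    let url = DisplaySafeUrl::parse("https://user:pass@example.com/simple/").unwrap();
    assert!(store_credentials_from_url(&url));

    // Or cache an explicit pair for an index URL.
    let index = DisplaySafeUrl::parse("https://example.com/private/").unwrap();
    let credentials = Credentials::basic(Some("user".into()), Some("pass".into()));
    store_credentials(&index, Arc::new(credentials));
}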

@@ -4,30 +4,17 @@ use anyhow::{anyhow, format_err};
use http::{Extensions, StatusCode};
use netrc::Netrc;
use reqwest::{Request, Response};
use reqwest_middleware::{ClientWithMiddleware, Error, Middleware, Next};
use tokio::sync::Mutex;
use reqwest_middleware::{Error, Middleware, Next};
use tracing::{debug, trace, warn};

use uv_preview::{Preview, PreviewFeatures};
use uv_redacted::DisplaySafeUrl;
use uv_static::EnvVars;
use uv_warnings::owo_colors::OwoColorize;

use crate::credentials::Authentication;
use crate::providers::{HuggingFaceProvider, S3EndpointProvider};
use crate::pyx::{DEFAULT_TOLERANCE_SECS, PyxTokenStore};
use crate::{
    AccessToken, CredentialsCache, KeyringProvider,
    CREDENTIALS_CACHE, CredentialsCache, KeyringProvider,
    cache::FetchUrl,
    credentials::{Credentials, Username},
    index::{AuthPolicy, Indexes},
    realm::Realm,
};
use crate::{Index, TextCredentialStore};

/// Cached check for whether we're running in Dependabot.
static IS_DEPENDABOT: LazyLock<bool> =
    LazyLock::new(|| std::env::var(EnvVars::DEPENDABOT).is_ok_and(|value| value == "true"));
use uv_redacted::DisplaySafeUrl;

/// Strategy for loading netrc files.
enum NetrcMode {

@@ -38,7 +25,7 @@ enum NetrcMode {

impl Default for NetrcMode {
    fn default() -> Self {
        Self::Automatic(LazyLock::new(|| match Netrc::new() {
        NetrcMode::Automatic(LazyLock::new(|| match Netrc::new() {
            Ok(netrc) => Some(netrc),
            Err(netrc::Error::Io(err)) if err.kind() == std::io::ErrorKind::NotFound => {
                debug!("No netrc file found");

@@ -56,135 +43,36 @@ impl NetrcMode {
    /// Get the parsed netrc file if enabled.
    fn get(&self) -> Option<&Netrc> {
        match self {
            Self::Automatic(lock) => lock.as_ref(),
            Self::Enabled(netrc) => Some(netrc),
            Self::Disabled => None,
            NetrcMode::Automatic(lock) => lock.as_ref(),
            NetrcMode::Enabled(netrc) => Some(netrc),
            NetrcMode::Disabled => None,
        }
    }
}

/// Strategy for loading text-based credential files.
enum TextStoreMode {
    Automatic(tokio::sync::OnceCell<Option<TextCredentialStore>>),
    Enabled(TextCredentialStore),
    Disabled,
}

impl Default for TextStoreMode {
    fn default() -> Self {
        Self::Automatic(tokio::sync::OnceCell::new())
    }
}

impl TextStoreMode {
    async fn load_default_store() -> Option<TextCredentialStore> {
        let path = TextCredentialStore::default_file()
            .inspect_err(|err| {
                warn!("Failed to determine credentials file path: {}", err);
            })
            .ok()?;

        match TextCredentialStore::read(&path).await {
            Ok((store, _lock)) => {
                debug!("Loaded credential file {}", path.display());
                Some(store)
            }
            Err(err)
                if err
                    .as_io_error()
                    .is_some_and(|err| err.kind() == std::io::ErrorKind::NotFound) =>
            {
                debug!("No credentials file found at {}", path.display());
                None
            }
            Err(err) => {
                warn!(
                    "Failed to load credentials from {}: {}",
                    path.display(),
                    err
                );
                None
            }
        }
    }

    /// Get the parsed credential store, if enabled.
    async fn get(&self) -> Option<&TextCredentialStore> {
        match self {
            // TODO(zanieb): Reconsider this pattern. We're just mirroring the [`NetrcMode`]
            // implementation for now.
            Self::Automatic(lock) => lock.get_or_init(Self::load_default_store).await.as_ref(),
            Self::Enabled(store) => Some(store),
            Self::Disabled => None,
        }
    }
}

#[derive(Debug, Clone)]
enum TokenState {
    /// The token state has not yet been initialized from the store.
    Uninitialized,
    /// The token state has been initialized, and the store either returned tokens or `None` if
    /// the user has not yet authenticated.
    Initialized(Option<AccessToken>),
}

#[derive(Clone)]
enum S3CredentialState {
    /// The S3 credential state has not yet been initialized.
    Uninitialized,
    /// The S3 credential state has been initialized, with either a signer or `None` if
    /// no S3 endpoint is configured.
    Initialized(Option<Arc<Authentication>>),
}

/// A middleware that adds basic authentication to requests.
///
/// Uses a cache to propagate credentials from previously seen requests and
/// fetches credentials from a netrc file, TOML file, and the keyring.
/// fetches credentials from a netrc file and the keyring.
pub struct AuthMiddleware {
    netrc: NetrcMode,
    text_store: TextStoreMode,
    keyring: Option<KeyringProvider>,
    /// Global authentication cache for a uv invocation to share credentials across uv clients.
    cache: Arc<CredentialsCache>,
    cache: Option<CredentialsCache>,
    /// Auth policies for specific URLs.
    indexes: Indexes,
    /// Set all endpoints as needing authentication. We never try to send an
    /// unauthenticated request, avoiding cloning an uncloneable request.
    only_authenticated: bool,
    /// The base client to use for requests within the middleware.
    base_client: Option<ClientWithMiddleware>,
    /// The pyx token store to use for persistent credentials.
    pyx_token_store: Option<PyxTokenStore>,
    /// Tokens to use for persistent credentials.
    pyx_token_state: Mutex<TokenState>,
    /// Cached S3 credentials to avoid running the credential helper multiple times.
    s3_credential_state: Mutex<S3CredentialState>,
    preview: Preview,
}

impl Default for AuthMiddleware {
    fn default() -> Self {
        Self::new()
    }
}

impl AuthMiddleware {
    pub fn new() -> Self {
        Self {
            netrc: NetrcMode::default(),
            text_store: TextStoreMode::default(),
            keyring: None,
            // TODO(konsti): There shouldn't be a credential cache without that in the initializer.
            cache: Arc::new(CredentialsCache::default()),
            cache: None,
            indexes: Indexes::new(),
            only_authenticated: false,
            base_client: None,
            pyx_token_store: None,
            pyx_token_state: Mutex::new(TokenState::Uninitialized),
            s3_credential_state: Mutex::new(S3CredentialState::Uninitialized),
            preview: Preview::default(),
        }
    }

@@ -201,19 +89,6 @@ impl AuthMiddleware {
        self
    }

    /// Configure the text credential store to use.
    ///
    /// `None` disables authentication via text store.
    #[must_use]
    pub fn with_text_store(mut self, store: Option<TextCredentialStore>) -> Self {
        self.text_store = if let Some(store) = store {
            TextStoreMode::Enabled(store)
        } else {
            TextStoreMode::Disabled
        };
        self
    }

    /// Configure the [`KeyringProvider`] to use.
    #[must_use]
    pub fn with_keyring(mut self, keyring: Option<KeyringProvider>) -> Self {

@@ -221,24 +96,10 @@ impl AuthMiddleware {
        self
    }

    /// Configure the [`Preview`] features to use.
    #[must_use]
    pub fn with_preview(mut self, preview: Preview) -> Self {
        self.preview = preview;
        self
    }

    /// Configure the [`CredentialsCache`] to use.
    #[must_use]
    pub fn with_cache(mut self, cache: CredentialsCache) -> Self {
        self.cache = Arc::new(cache);
        self
    }

    /// Configure the [`CredentialsCache`] to use from an existing [`Arc`].
    #[must_use]
    pub fn with_cache_arc(mut self, cache: Arc<CredentialsCache>) -> Self {
        self.cache = cache;
        self.cache = Some(cache);
        self
    }

@@ -257,23 +118,17 @@ impl AuthMiddleware {
        self
    }

    /// Configure the [`ClientWithMiddleware`] to use for requests within the middleware.
    #[must_use]
    pub fn with_base_client(mut self, client: ClientWithMiddleware) -> Self {
        self.base_client = Some(client);
        self
    }

    /// Configure the [`PyxTokenStore`] to use for persistent credentials.
    #[must_use]
    pub fn with_pyx_token_store(mut self, token_store: PyxTokenStore) -> Self {
        self.pyx_token_store = Some(token_store);
        self
    }

    /// Global authentication cache for a uv invocation to share credentials across uv clients.
    /// Get the configured authentication store.
    ///
    /// If not set, the global store is used.
    fn cache(&self) -> &CredentialsCache {
        &self.cache
        self.cache.as_ref().unwrap_or(&CREDENTIALS_CACHE)
    }
}

impl Default for AuthMiddleware {
    fn default() -> Self {
        AuthMiddleware::new()
    }
}
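
The builder methods above compose in the usual way before the middleware is attached to a client. A minimal sketch (it assumes the reqwest-middleware client builder and mirrors the pattern used in the tests further below):

use reqwest_middleware::ClientBuilder;
use uv_auth::{AuthMiddleware, KeyringProvider};

// Illustrative: attach the middleware to a reqwest client with the
// subprocess keyring backend enabled.
fn build_client() -> reqwest_middleware::ClientWithMiddleware {
    ClientBuilder::new(reqwest::Client::new())
        .with(
            AuthMiddleware::new()
                .with_keyring(Some(KeyringProvider::subprocess())),
        )
        .build()
}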

@@ -322,16 +177,16 @@ impl Middleware for AuthMiddleware {
        next: Next<'_>,
    ) -> reqwest_middleware::Result<Response> {
        // Check for credentials attached to the request already
        let request_credentials = Credentials::from_request(&request).map(Authentication::from);
        let request_credentials = Credentials::from_request(&request);

        // In the middleware, existing credentials are already moved from the URL
        // to the headers so for display purposes we restore some information
        let url = tracing_url(&request, request_credentials.as_ref());
        let index = self.indexes.index_for(request.url());
        let maybe_index_url = self.indexes.index_url_for(request.url());
        let auth_policy = self.indexes.auth_policy_for(request.url());
        trace!("Handling request for {url} with authentication policy {auth_policy}");

        let credentials: Option<Arc<Authentication>> = if matches!(auth_policy, AuthPolicy::Never) {
        let credentials: Option<Arc<Credentials>> = if matches!(auth_policy, AuthPolicy::Never) {
            None
        } else {
            if let Some(request_credentials) = request_credentials {

@@ -342,7 +197,7 @@ impl Middleware for AuthMiddleware {
                extensions,
                next,
                &url,
                index,
                maybe_index_url,
                auth_policy,
            )
            .await;

@@ -355,10 +210,10 @@ impl Middleware for AuthMiddleware {
        // making a failing request
        let credentials = self.cache().get_url(request.url(), &Username::none());
        if let Some(credentials) = credentials.as_ref() {
            request = credentials.authenticate(request).await;
            request = credentials.authenticate(request);

            // If it's fully authenticated, finish the request
            if credentials.is_authenticated() {
            if credentials.password().is_some() {
                trace!("Request for {url} is fully authenticated");
                return self
                    .complete_request(None, request, extensions, next, auth_policy)

@@ -375,24 +230,9 @@ impl Middleware for AuthMiddleware {
            .as_ref()
            .is_some_and(|credentials| credentials.username().is_some());

        // Determine whether this is a "known" URL.
        let is_known_url = self
            .pyx_token_store
            .as_ref()
            .is_some_and(|token_store| token_store.is_known_url(request.url()));

        let must_authenticate = self.only_authenticated
            || (match auth_policy {
                AuthPolicy::Auto => is_known_url,
                AuthPolicy::Always => true,
                AuthPolicy::Never => false,
            }
            // Dependabot intercepts HTTP requests and injects credentials, which means that we
            // cannot eagerly enforce an `AuthPolicy` as we don't know whether credentials will be
            // added outside of uv.
            && !*IS_DEPENDABOT);

        let (mut retry_request, response) = if !must_authenticate {
        let retry_unauthenticated =
            !self.only_authenticated && !matches!(auth_policy, AuthPolicy::Always);
        let (mut retry_request, response) = if retry_unauthenticated {
            let url = tracing_url(&request, credentials.as_deref());
            if credentials.is_none() {
                trace!("Attempting unauthenticated request for {url}");

@@ -440,8 +280,8 @@ impl Middleware for AuthMiddleware {
            .as_ref()
            .map(|credentials| credentials.to_username())
            .unwrap_or(Username::none());
        let credentials = if let Some(index) = index {
            self.cache().get_url(&index.url, &username).or_else(|| {
        let credentials = if let Some(index_url) = maybe_index_url {
            self.cache().get_url(index_url, &username).or_else(|| {
                self.cache()
                    .get_realm(Realm::from(&**retry_request_url), username)
            })

@@ -454,9 +294,9 @@ impl Middleware for AuthMiddleware {
            .or(credentials);

        if let Some(credentials) = credentials.as_ref() {
            if credentials.is_authenticated() {
            if credentials.password().is_some() {
                trace!("Retrying request for {url} with credentials from cache {credentials:?}");
                retry_request = credentials.authenticate(retry_request).await;
                retry_request = credentials.authenticate(retry_request);
                return self
                    .complete_request(None, retry_request, extensions, next, auth_policy)
                    .await;

@@ -469,12 +309,12 @@ impl Middleware for AuthMiddleware {
            .fetch_credentials(
                credentials.as_deref(),
                retry_request_url,
                index,
                maybe_index_url,
                auth_policy,
            )
            .await
        {
            retry_request = credentials.authenticate(retry_request).await;
            retry_request = credentials.authenticate(retry_request);
            trace!("Retrying request for {url} with {credentials:?}");
            return self
                .complete_request(

@@ -490,7 +330,7 @@ impl Middleware for AuthMiddleware {
        if let Some(credentials) = credentials.as_ref() {
            if !attempt_has_username {
                trace!("Retrying request for {url} with username from cache {credentials:?}");
                retry_request = credentials.authenticate(retry_request).await;
                retry_request = credentials.authenticate(retry_request);
                return self
                    .complete_request(None, retry_request, extensions, next, auth_policy)
                    .await;

@@ -499,19 +339,6 @@ impl Middleware for AuthMiddleware {

        if let Some(response) = response {
            Ok(response)
        } else if let Some(store) = is_known_url
            .then_some(self.pyx_token_store.as_ref())
            .flatten()
        {
            let domain = store
                .api()
                .domain()
                .unwrap_or("pyx.dev")
                .trim_start_matches("api.");
            Err(Error::Middleware(format_err!(
                "Run `{}` to authenticate uv with pyx",
                format!("uv auth login {domain}").green()
            )))
        } else {
            Err(Error::Middleware(format_err!(
                "Missing credentials for {url}"

@@ -526,7 +353,7 @@ impl AuthMiddleware {
    /// If credentials are present, insert them into the cache on success.
    async fn complete_request(
        &self,
        credentials: Option<Arc<Authentication>>,
        credentials: Option<Arc<Credentials>>,
        request: Request,
        extensions: &mut Extensions,
        next: Next<'_>,

@@ -536,7 +363,7 @@ impl AuthMiddleware {
            // Nothing to insert into the cache if we don't have credentials
            return next.run(request, extensions).await;
        };
        let url = DisplaySafeUrl::from_url(request.url().clone());
        let url = DisplaySafeUrl::from(request.url().clone());
        if matches!(auth_policy, AuthPolicy::Always) && credentials.password().is_none() {
            return Err(Error::Middleware(format_err!("Missing password for {url}")));
        }

@@ -547,7 +374,6 @@ impl AuthMiddleware {
            .as_ref()
            .is_ok_and(|response| response.error_for_status_ref().is_ok())
        {
            // TODO(zanieb): Consider also updating the system keyring after successful use
            trace!("Updating cached credentials for {url} to {credentials:?}");
            self.cache().insert(&url, credentials);
        }

@@ -558,18 +384,18 @@ impl AuthMiddleware {
    /// Use known request credentials to complete the request.
    async fn complete_request_with_request_credentials(
        &self,
        credentials: Authentication,
        credentials: Credentials,
        mut request: Request,
        extensions: &mut Extensions,
        next: Next<'_>,
        url: &DisplaySafeUrl,
        index: Option<&Index>,
        index_url: Option<&DisplaySafeUrl>,
        auth_policy: AuthPolicy,
    ) -> reqwest_middleware::Result<Response> {
        let credentials = Arc::new(credentials);

        // If there's a password, send the request and cache
        if credentials.is_authenticated() {
        if credentials.password().is_some() {
            trace!("Request for {url} already contains username and password");
            return self
                .complete_request(Some(credentials), request, extensions, next, auth_policy)

@@ -579,21 +405,17 @@ impl AuthMiddleware {
        trace!("Request for {url} is missing a password, looking for credentials");

        // There's just a username, try to find a password.
        // If we have an index, check the cache for that URL. Otherwise,
        // If we have an index URL, check the cache for that URL. Otherwise,
        // check for the realm.
        let maybe_cached_credentials = if let Some(index) = index {
        let maybe_cached_credentials = if let Some(index_url) = index_url {
            self.cache()
                .get_url(&index.url, credentials.as_username().as_ref())
                .or_else(|| {
                    self.cache()
                        .get_url(&index.root_url, credentials.as_username().as_ref())
                })
                .get_url(index_url, credentials.as_username().as_ref())
        } else {
            self.cache()
                .get_realm(Realm::from(request.url()), credentials.to_username())
        };
        if let Some(credentials) = maybe_cached_credentials {
            request = credentials.authenticate(request).await;
            request = credentials.authenticate(request);
            // Do not insert already-cached credentials
            let credentials = None;
            return self

@@ -605,27 +427,27 @@ impl AuthMiddleware {
            .cache()
            .get_url(request.url(), credentials.as_username().as_ref())
        {
            request = credentials.authenticate(request).await;
            request = credentials.authenticate(request);
            // Do not insert already-cached credentials
            None
        } else if let Some(credentials) = self
            .fetch_credentials(
                Some(&credentials),
                DisplaySafeUrl::ref_cast(request.url()),
                index,
                index_url,
                auth_policy,
            )
            .await
        {
            request = credentials.authenticate(request).await;
            request = credentials.authenticate(request);
            Some(credentials)
        } else if index.is_some() {
        } else if index_url.is_some() {
            // If this is a known index, we fall back to checking for the realm.
            if let Some(credentials) = self
                .cache()
                .get_realm(Realm::from(request.url()), credentials.to_username())
            {
                request = credentials.authenticate(request).await;
                request = credentials.authenticate(request);
                Some(credentials)
            } else {
                Some(credentials)

@@ -635,8 +457,9 @@ impl AuthMiddleware {
            Some(credentials)
        };

        self.complete_request(credentials, request, extensions, next, auth_policy)
            .await
        return self
            .complete_request(credentials, request, extensions, next, auth_policy)
            .await;
    }

    /// Fetch credentials for a URL.

@@ -644,19 +467,19 @@ impl AuthMiddleware {
    /// Supports netrc file and keyring lookups.
    async fn fetch_credentials(
        &self,
        credentials: Option<&Authentication>,
        credentials: Option<&Credentials>,
        url: &DisplaySafeUrl,
        index: Option<&Index>,
        maybe_index_url: Option<&DisplaySafeUrl>,
        auth_policy: AuthPolicy,
    ) -> Option<Arc<Authentication>> {
    ) -> Option<Arc<Credentials>> {
        let username = Username::from(
            credentials.map(|credentials| credentials.username().unwrap_or_default().to_string()),
        );

        // Fetches can be expensive, so we will only run them _once_ per realm or index URL and username combination
        // All other requests for the same realm or index URL will wait until the first one completes
        let key = if let Some(index) = index {
            (FetchUrl::Index(index.url.clone()), username)
        let key = if let Some(index_url) = maybe_index_url {
            (FetchUrl::Index(index_url.clone()), username)
        } else {
            (FetchUrl::Realm(Realm::from(&**url)), username)
        };
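
The key built above makes credential fetches single-flight: every request for the same (index URL or realm, username) pair awaits the first fetch rather than repeating it. A generic sketch of that idea (illustrative; uv's `FetchUrl` registry differs in its details):

use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::Mutex;

// Illustrative single-flight cache: the first caller for a key computes the
// value; concurrent callers for the same key await the shared slot instead.
struct SingleFlight<K, V> {
    slots: Mutex<HashMap<K, Arc<tokio::sync::OnceCell<V>>>>,
}

impl<K: std::hash::Hash + Eq, V: Clone> SingleFlight<K, V> {
    fn new() -> Self {
        Self { slots: Mutex::new(HashMap::new()) }
    }

    async fn get_or_fetch<F, Fut>(&self, key: K, fetch: F) -> V
    where
        F: FnOnce() -> Fut,
        Fut: std::future::Future<Output = V>,
    {
        // Grab (or create) the shared slot for this key, then release the map lock
        // so other keys can proceed while this fetch runs.
        let cell = {
            let mut slots = self.slots.lock().await;
            slots.entry(key).or_default().clone()
        };
        cell.get_or_init(fetch).await.clone()
    }
}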

@@ -680,78 +503,6 @@ impl AuthMiddleware {
            return credentials;
        }

        // Support for known providers, like Hugging Face and S3.
        if let Some(credentials) = HuggingFaceProvider::credentials_for(url)
            .map(Authentication::from)
            .map(Arc::new)
        {
            debug!("Found Hugging Face credentials for {url}");
            self.cache().fetches.done(key, Some(credentials.clone()));
            return Some(credentials);
        }

        if S3EndpointProvider::is_s3_endpoint(url, self.preview) {
            let mut s3_state = self.s3_credential_state.lock().await;

            // If the S3 credential state is uninitialized, initialize it.
            let credentials = match &*s3_state {
                S3CredentialState::Uninitialized => {
                    trace!("Initializing S3 credentials for {url}");
                    let signer = S3EndpointProvider::create_signer();
                    let credentials = Arc::new(Authentication::from(signer));
                    *s3_state = S3CredentialState::Initialized(Some(credentials.clone()));
                    Some(credentials)
                }
                S3CredentialState::Initialized(credentials) => credentials.clone(),
            };

            if let Some(credentials) = credentials {
                debug!("Found S3 credentials for {url}");
                self.cache().fetches.done(key, Some(credentials.clone()));
                return Some(credentials);
            }
        }

        // If this is a known URL, authenticate it via the token store.
        if let Some(base_client) = self.base_client.as_ref() {
            if let Some(token_store) = self.pyx_token_store.as_ref() {
                if token_store.is_known_url(url) {
                    let mut token_state = self.pyx_token_state.lock().await;

                    // If the token store is uninitialized, initialize it.
                    let token = match *token_state {
                        TokenState::Uninitialized => {
                            trace!("Initializing token store for {url}");
                            let generated = match token_store
                                .access_token(base_client, DEFAULT_TOLERANCE_SECS)
                                .await
                            {
                                Ok(Some(token)) => Some(token),
                                Ok(None) => None,
                                Err(err) => {
                                    warn!("Failed to generate access tokens: {err}");
                                    None
                                }
                            };
                            *token_state = TokenState::Initialized(generated.clone());
                            generated
                        }
                        TokenState::Initialized(ref tokens) => tokens.clone(),
                    };

                    let credentials = token.map(|token| {
                        trace!("Using credentials from token store for {url}");
                        Arc::new(Authentication::from(Credentials::from(token)))
                    });

                    // Register the fetch for this key
                    self.cache().fetches.done(key.clone(), credentials.clone());

                    return credentials;
                }
            }
        }

        // Netrc support based on: <https://github.com/gribouille/netrc>.
        let credentials = if let Some(credentials) = self.netrc.get().and_then(|netrc| {
            debug!("Checking netrc for credentials for {url}");

@@ -766,51 +517,6 @@ impl AuthMiddleware {
            debug!("Found credentials in netrc file for {url}");
            Some(credentials)

        // Text credential store support.
        } else if let Some(credentials) = self.text_store.get().await.and_then(|text_store| {
            debug!("Checking text store for credentials for {url}");
            text_store
                .get_credentials(
                    url,
                    credentials
                        .as_ref()
                        .and_then(|credentials| credentials.username()),
                )
                .cloned()
        }) {
            debug!("Found credentials in plaintext store for {url}");
            Some(credentials)
        } else if let Some(credentials) = {
            if self.preview.is_enabled(PreviewFeatures::NATIVE_AUTH) {
                let native_store = KeyringProvider::native();
                let username = credentials.and_then(|credentials| credentials.username());
                let display_username = if let Some(username) = username {
                    format!("{username}@")
                } else {
                    String::new()
                };
                if let Some(index) = index {
                    // N.B. The native store performs an exact look up right now, so we use the root
                    // URL of the index instead of relying on prefix-matching.
                    debug!(
                        "Checking native store for credentials for index URL {}{}",
                        display_username, index.root_url
                    );
                    native_store.fetch(&index.root_url, username).await
                } else {
                    debug!(
                        "Checking native store for credentials for URL {}{}",
                        display_username, url
                    );
                    native_store.fetch(url, username).await
                }
                // TODO(zanieb): We should have a realm fallback here too
            } else {
                None
            }
        } {
            debug!("Found credentials in native store for {url}");
            Some(credentials)
        // N.B. The keyring provider performs lookups for the exact URL then falls back to the host.
        // But, in the absence of an index URL, we cache the result per realm. So in that case,
        // if a keyring implementation returns different credentials for different URLs in the

@@ -821,37 +527,24 @@ impl AuthMiddleware {
            // URLs; instead, we fetch if there's a username or if the user has requested to
            // always authenticate.
            if let Some(username) = credentials.and_then(|credentials| credentials.username()) {
                if let Some(index) = index {
                    debug!(
                        "Checking keyring for credentials for index URL {}@{}",
                        username, index.url
                    );
                    keyring
                        .fetch(DisplaySafeUrl::ref_cast(&index.url), Some(username))
                        .await
                if let Some(index_url) = maybe_index_url {
                    debug!("Checking keyring for credentials for index URL {}@{}", username, index_url);
                    keyring.fetch(DisplaySafeUrl::ref_cast(index_url), Some(username)).await
                } else {
                    debug!(
                        "Checking keyring for credentials for full URL {}@{}",
                        username, url
                    );
                    debug!("Checking keyring for credentials for full URL {}@{}", username, url);
                    keyring.fetch(url, Some(username)).await
                }
            } else if matches!(auth_policy, AuthPolicy::Always) {
                if let Some(index) = index {
                if let Some(index_url) = maybe_index_url {
                    debug!(
                        "Checking keyring for credentials for index URL {} without username due to `authenticate = always`",
                        index.url
                        "Checking keyring for credentials for index URL {index_url} without username due to `authenticate = always`"
                    );
                    keyring
                        .fetch(DisplaySafeUrl::ref_cast(&index.url), None)
                        .await
                    keyring.fetch(DisplaySafeUrl::ref_cast(index_url), None).await
                } else {
                    None
                }
            } else {
                debug!(
                    "Skipping keyring fetch for {url} without username; use `authenticate = always` to force"
                );
                debug!("Skipping keyring fetch for {url} without username; use `authenticate = always` to force");
                None
            }
        }

@@ -861,9 +554,8 @@ impl AuthMiddleware {
            Some(credentials)
        } else {
            None
        };

        let credentials = credentials.map(Authentication::from).map(Arc::new);
        }
        .map(Arc::new);

        // Register the fetch for this key
        self.cache().fetches.done(key, credentials.clone());

@@ -872,9 +564,9 @@ impl AuthMiddleware {
    }
}

fn tracing_url(request: &Request, credentials: Option<&Authentication>) -> DisplaySafeUrl {
    let mut url = DisplaySafeUrl::from_url(request.url().clone());
    if let Some(Authentication::Credentials(creds)) = credentials {
fn tracing_url(request: &Request, credentials: Option<&Credentials>) -> DisplaySafeUrl {
    let mut url = DisplaySafeUrl::from(request.url().clone());
    if let Some(creds) = credentials {
        if let Some(username) = creds.username() {
            let _ = url.set_username(username);
        }

@@ -1015,10 +707,10 @@ mod tests {
        let cache = CredentialsCache::new();
        cache.insert(
            &base_url,
            Arc::new(Authentication::from(Credentials::basic(
            Arc::new(Credentials::basic(
                Some(username.to_string()),
                Some(password.to_string()),
            ))),
            )),
        );

        let client = test_client_builder()

@@ -1069,10 +761,7 @@ mod tests {
        let cache = CredentialsCache::new();
        cache.insert(
            &base_url,
            Arc::new(Authentication::from(Credentials::basic(
                Some(username.to_string()),
                None,
            ))),
            Arc::new(Credentials::basic(Some(username.to_string()), None)),
        );

        let client = test_client_builder()

@@ -1465,10 +1154,7 @@ mod tests {
        // URL.
        cache.insert(
            &base_url,
            Arc::new(Authentication::from(Credentials::basic(
                Some(username.to_string()),
                None,
            ))),
            Arc::new(Credentials::basic(Some(username.to_string()), None)),
        );
        let client = test_client_builder()
            .with(AuthMiddleware::new().with_cache(cache).with_keyring(Some(

@@ -1517,17 +1203,17 @@ mod tests {
        // Seed the cache with our credentials
        cache.insert(
            &base_url_1,
            Arc::new(Authentication::from(Credentials::basic(
            Arc::new(Credentials::basic(
                Some(username_1.to_string()),
                Some(password_1.to_string()),
            ))),
            )),
        );
        cache.insert(
            &base_url_2,
            Arc::new(Authentication::from(Credentials::basic(
            Arc::new(Credentials::basic(
                Some(username_2.to_string()),
                Some(password_2.to_string()),
            ))),
            )),
        );

        let client = test_client_builder()

@@ -1712,17 +1398,17 @@ mod tests {
        // Seed the cache with our credentials
        cache.insert(
            &base_url_1,
            Arc::new(Authentication::from(Credentials::basic(
            Arc::new(Credentials::basic(
                Some(username_1.to_string()),
                Some(password_1.to_string()),
            ))),
            )),
        );
        cache.insert(
            &base_url_2,
            Arc::new(Authentication::from(Credentials::basic(
            Arc::new(Credentials::basic(
                Some(username_2.to_string()),
                Some(password_2.to_string()),
            ))),
            )),
        );

        let client = test_client_builder()

@@ -2062,13 +1748,13 @@ mod tests {
        let base_url_2 = base_url.join("prefix_2")?;
        let indexes = Indexes::from_indexes(vec![
            Index {
                url: DisplaySafeUrl::from_url(base_url_1.clone()),
                root_url: DisplaySafeUrl::from_url(base_url_1.clone()),
                url: DisplaySafeUrl::from(base_url_1.clone()),
                root_url: DisplaySafeUrl::from(base_url_1.clone()),
                auth_policy: AuthPolicy::Auto,
            },
            Index {
                url: DisplaySafeUrl::from_url(base_url_2.clone()),
                root_url: DisplaySafeUrl::from_url(base_url_2.clone()),
                url: DisplaySafeUrl::from(base_url_2.clone()),
                root_url: DisplaySafeUrl::from(base_url_2.clone()),
                auth_policy: AuthPolicy::Auto,
            },
        ]);

@@ -2170,8 +1856,8 @@ mod tests {
        let base_url = Url::parse(&server.uri())?;
        let index_url = base_url.join("prefix_1")?;
        let indexes = Indexes::from_indexes(vec![Index {
            url: DisplaySafeUrl::from_url(index_url.clone()),
            root_url: DisplaySafeUrl::from_url(index_url.clone()),
            url: DisplaySafeUrl::from(index_url.clone()),
            root_url: DisplaySafeUrl::from(index_url.clone()),
            auth_policy: AuthPolicy::Auto,
        }]);

@@ -2225,7 +1911,7 @@ mod tests {
    }

    fn indexes_for(url: &Url, policy: AuthPolicy) -> Indexes {
        let mut url = DisplaySafeUrl::from_url(url.clone());
        let mut url = DisplaySafeUrl::from(url.clone());
        url.set_password(None).ok();
        url.set_username("").ok();
        Indexes::from_indexes(vec![Index {

@@ -2326,7 +2012,7 @@ mod tests {
        assert!(matches!(
            client.get(server.uri()).send().await,
            Err(reqwest_middleware::Error::Middleware(_))
        ));
        ),);

        Ok(())
    }

@@ -2425,20 +2111,20 @@ mod tests {
            DisplaySafeUrl::parse("https://pypi-proxy.fly.dev/basic-auth/simple").unwrap()
        );

        let creds = Authentication::from(Credentials::Basic {
        let creds = Credentials::Basic {
            username: Username::new(Some(String::from("user"))),
            password: None,
        });
        };
        let req = create_request("https://pypi-proxy.fly.dev/basic-auth/simple");
        assert_eq!(
            tracing_url(&req, Some(&creds)),
            DisplaySafeUrl::parse("https://user@pypi-proxy.fly.dev/basic-auth/simple").unwrap()
        );

        let creds = Authentication::from(Credentials::Basic {
        let creds = Credentials::Basic {
            username: Username::new(Some(String::from("user"))),
            password: Some(Password::new(String::from("password"))),
        });
        };
        let req = create_request("https://pypi-proxy.fly.dev/basic-auth/simple");
        assert_eq!(
            tracing_url(&req, Some(&creds)),

@@ -2447,132 +2133,6 @@ mod tests {
        );
    }

    #[test(tokio::test)]
    async fn test_text_store_basic_auth() -> Result<(), Error> {
        let username = "user";
        let password = "password";

        let server = start_test_server(username, password).await;
        let base_url = Url::parse(&server.uri())?;

        // Create a text credential store with matching credentials
        let mut store = TextCredentialStore::default();
        let service = crate::Service::try_from(base_url.to_string()).unwrap();
        let credentials =
            Credentials::basic(Some(username.to_string()), Some(password.to_string()));
        store.insert(service.clone(), credentials);

        let client = test_client_builder()
            .with(
                AuthMiddleware::new()
                    .with_cache(CredentialsCache::new())
                    .with_text_store(Some(store)),
            )
            .build();

        assert_eq!(
            client.get(server.uri()).send().await?.status(),
            200,
            "Credentials should be pulled from the text store"
        );

        Ok(())
    }

    #[test(tokio::test)]
    async fn test_text_store_disabled() -> Result<(), Error> {
        let username = "user";
        let password = "password";
        let server = start_test_server(username, password).await;

        let client = test_client_builder()
            .with(
                AuthMiddleware::new()
                    .with_cache(CredentialsCache::new())
                    .with_text_store(None), // Explicitly disable text store
            )
            .build();

        assert_eq!(
            client.get(server.uri()).send().await?.status(),
            401,
            "Credentials should not be found when text store is disabled"
        );

        Ok(())
    }

    #[test(tokio::test)]
    async fn test_text_store_by_username() -> Result<(), Error> {
|
||||
let username = "testuser";
|
||||
let password = "testpass";
|
||||
let wrong_username = "wronguser";
|
||||
|
||||
let server = start_test_server(username, password).await;
|
||||
let base_url = Url::parse(&server.uri())?;
|
||||
|
||||
let mut store = TextCredentialStore::default();
|
||||
let service = crate::Service::try_from(base_url.to_string()).unwrap();
|
||||
let credentials =
|
||||
crate::Credentials::basic(Some(username.to_string()), Some(password.to_string()));
|
||||
store.insert(service.clone(), credentials);
|
||||
|
||||
let client = test_client_builder()
|
||||
.with(
|
||||
AuthMiddleware::new()
|
||||
.with_cache(CredentialsCache::new())
|
||||
.with_text_store(Some(store)),
|
||||
)
|
||||
.build();
|
||||
|
||||
// Request with matching username should succeed
|
||||
let url_with_username = format!(
|
||||
"{}://{}@{}",
|
||||
base_url.scheme(),
|
||||
username,
|
||||
base_url.host_str().unwrap()
|
||||
);
|
||||
let url_with_port = if let Some(port) = base_url.port() {
|
||||
format!("{}:{}{}", url_with_username, port, base_url.path())
|
||||
} else {
|
||||
format!("{}{}", url_with_username, base_url.path())
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
client.get(&url_with_port).send().await?.status(),
|
||||
200,
|
||||
"Request with matching username should succeed"
|
||||
);
|
||||
|
||||
// Request with non-matching username should fail
|
||||
let url_with_wrong_username = format!(
|
||||
"{}://{}@{}",
|
||||
base_url.scheme(),
|
||||
wrong_username,
|
||||
base_url.host_str().unwrap()
|
||||
);
|
||||
let url_with_port = if let Some(port) = base_url.port() {
|
||||
format!("{}:{}{}", url_with_wrong_username, port, base_url.path())
|
||||
} else {
|
||||
format!("{}{}", url_with_wrong_username, base_url.path())
|
||||
};
|
||||
|
||||
assert_eq!(
|
||||
client.get(&url_with_port).send().await?.status(),
|
||||
401,
|
||||
"Request with non-matching username should fail"
|
||||
);
|
||||
|
||||
// Request without username should succeed
|
||||
assert_eq!(
|
||||
client.get(server.uri()).send().await?.status(),
|
||||
200,
|
||||
"Request with no username should succeed"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn create_request(url: &str) -> Request {
|
||||
Request::new(Method::GET, Url::parse(url).unwrap())
|
||||
}
|
||||
|
|
|
|||
|
|
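// Editor's note: an illustrative sketch, not part of the diff above. The hunks replace
// `Arc::new(Authentication::from(Credentials::basic(...)))` with `Arc::new(Credentials::basic(...))`
// when seeding the middleware's credential cache. A minimal sketch of that seeding pattern,
// assuming the `CredentialsCache`, `Credentials`, and `test_client_builder` APIs shown in the
// tests; the exact builder wiring is an assumption.
fn seed_cache_sketch(base_url: &Url, cache: &CredentialsCache) {
    // Insert basic credentials keyed by the index URL; the middleware consults this cache
    // before falling back to other credential sources.
    cache.insert(
        base_url,
        Arc::new(Credentials::basic(
            Some("user".to_string()),
            Some("password".to_string()),
        )),
    );
}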
@@ -1,104 +0,0 @@
-use std::borrow::Cow;
-use std::sync::LazyLock;
-
-use reqsign::aws::DefaultSigner;
-use tracing::debug;
-use url::Url;
-
-use uv_preview::{Preview, PreviewFeatures};
-use uv_static::EnvVars;
-use uv_warnings::warn_user_once;
-
-use crate::Credentials;
-use crate::credentials::Token;
-use crate::realm::{Realm, RealmRef};
-
-/// The [`Realm`] for the Hugging Face platform.
-static HUGGING_FACE_REALM: LazyLock<Realm> = LazyLock::new(|| {
-    let url = Url::parse("https://huggingface.co").expect("Failed to parse Hugging Face URL");
-    Realm::from(&url)
-});
-
-/// The authentication token for the Hugging Face platform, if set.
-static HUGGING_FACE_TOKEN: LazyLock<Option<Vec<u8>>> = LazyLock::new(|| {
-    // Extract the Hugging Face token from the environment variable, if it exists.
-    let hf_token = std::env::var(EnvVars::HF_TOKEN)
-        .ok()
-        .map(String::into_bytes)
-        .filter(|token| !token.is_empty())?;
-
-    if std::env::var_os(EnvVars::UV_NO_HF_TOKEN).is_some() {
-        debug!("Ignoring Hugging Face token from environment due to `UV_NO_HF_TOKEN`");
-        return None;
-    }
-
-    debug!("Found Hugging Face token in environment");
-    Some(hf_token)
-});
-
-/// A provider for authentication credentials for the Hugging Face platform.
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub(crate) struct HuggingFaceProvider;
-
-impl HuggingFaceProvider {
-    /// Returns the credentials for the Hugging Face platform, if available.
-    pub(crate) fn credentials_for(url: &Url) -> Option<Credentials> {
-        if RealmRef::from(url) == *HUGGING_FACE_REALM {
-            if let Some(token) = HUGGING_FACE_TOKEN.as_ref() {
-                return Some(Credentials::Bearer {
-                    token: Token::new(token.clone()),
-                });
-            }
-        }
-        None
-    }
-}
-
-/// The [`Url`] for the S3 endpoint, if set.
-static S3_ENDPOINT_REALM: LazyLock<Option<Realm>> = LazyLock::new(|| {
-    let s3_endpoint_url = std::env::var(EnvVars::UV_S3_ENDPOINT_URL).ok()?;
-    let url = Url::parse(&s3_endpoint_url).expect("Failed to parse S3 endpoint URL");
-    Some(Realm::from(&url))
-});
-
-/// A provider for authentication credentials for S3 endpoints.
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub(crate) struct S3EndpointProvider;
-
-impl S3EndpointProvider {
-    /// Returns `true` if the URL matches the configured S3 endpoint.
-    pub(crate) fn is_s3_endpoint(url: &Url, preview: Preview) -> bool {
-        if let Some(s3_endpoint_realm) = S3_ENDPOINT_REALM.as_ref().map(RealmRef::from) {
-            if !preview.is_enabled(PreviewFeatures::S3_ENDPOINT) {
-                warn_user_once!(
-                    "The `s3-endpoint` option is experimental and may change without warning. Pass `--preview-features {}` to disable this warning.",
-                    PreviewFeatures::S3_ENDPOINT
-                );
-            }
-
-            // Treat any URL on the same domain or subdomain as available for S3 signing.
-            let realm = RealmRef::from(url);
-            if realm == s3_endpoint_realm || realm.is_subdomain_of(s3_endpoint_realm) {
-                return true;
-            }
-        }
-        false
-    }
-
-    /// Creates a new S3 signer with the configured region.
-    ///
-    /// This is potentially expensive as it may invoke credential helpers, so the result
-    /// should be cached.
-    pub(crate) fn create_signer() -> DefaultSigner {
-        // TODO(charlie): Can `reqsign` infer the region for us? Profiles, for example,
-        // often have a region set already.
-        let region = std::env::var(EnvVars::AWS_REGION)
-            .map(Cow::Owned)
-            .unwrap_or_else(|_| {
-                std::env::var(EnvVars::AWS_DEFAULT_REGION)
-                    .map(Cow::Owned)
-                    .unwrap_or_else(|_| Cow::Borrowed("us-east-1"))
-            });
-        reqsign::aws::default_signer("s3", &region)
-    }
-}
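// Editor's note: an illustrative sketch, not part of the diff. It shows how a caller (for
// example, the auth middleware) might consult the two providers defined in the removed file
// above; the surrounding wiring is an assumption, but `HuggingFaceProvider::credentials_for`
// and `S3EndpointProvider::is_s3_endpoint` are the functions shown in the diff.
fn lookup_provider_credentials(url: &Url, preview: Preview) -> Option<Credentials> {
    // Hugging Face: a bearer token from `HF_TOKEN`, attached only within the huggingface.co realm.
    if let Some(credentials) = HuggingFaceProvider::credentials_for(url) {
        return Some(credentials);
    }
    // S3: no credentials are returned here; per the code above, matching URLs are instead
    // candidates for request signing via `S3EndpointProvider::create_signer`.
    if S3EndpointProvider::is_s3_endpoint(url, preview) {
        debug!("URL is eligible for S3 request signing");
    }
    None
}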
@@ -1,737 +0,0 @@
-use std::io;
-use std::path::{Path, PathBuf};
-use std::time::Duration;
-
-use base64::Engine;
-use base64::prelude::BASE64_URL_SAFE_NO_PAD;
-use etcetera::BaseStrategy;
-use reqwest_middleware::ClientWithMiddleware;
-use tracing::debug;
-use url::Url;
-
-use uv_cache_key::CanonicalUrl;
-use uv_redacted::{DisplaySafeUrl, DisplaySafeUrlError};
-use uv_small_str::SmallString;
-use uv_state::{StateBucket, StateStore};
-use uv_static::EnvVars;
-
-use crate::credentials::Token;
-use crate::{AccessToken, Credentials, Realm};
-
-/// Retrieve the pyx API key from the environment variable, or return `None`.
-fn read_pyx_api_key() -> Option<String> {
-    std::env::var(EnvVars::PYX_API_KEY)
-        .ok()
-        .or_else(|| std::env::var(EnvVars::UV_API_KEY).ok())
-}
-
-/// Retrieve the pyx authentication token (JWT) from the environment variable, or return `None`.
-fn read_pyx_auth_token() -> Option<AccessToken> {
-    std::env::var(EnvVars::PYX_AUTH_TOKEN)
-        .ok()
-        .or_else(|| std::env::var(EnvVars::UV_AUTH_TOKEN).ok())
-        .map(AccessToken::from)
-}
-
-/// An access token with an accompanying refresh token.
-///
-/// Refresh tokens are single-use tokens that can be exchanged for a renewed access token
-/// and a new refresh token.
-#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
-pub struct PyxOAuthTokens {
-    pub access_token: AccessToken,
-    pub refresh_token: String,
-}
-
-/// An access token with an accompanying API key.
-#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
-pub struct PyxApiKeyTokens {
-    pub access_token: AccessToken,
-    pub api_key: String,
-}
-
-#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
-pub enum PyxTokens {
-    /// An access token with an accompanying refresh token.
-    ///
-    /// Refresh tokens are single-use tokens that can be exchanged for a renewed access token
-    /// and a new refresh token.
-    OAuth(PyxOAuthTokens),
-    /// An access token with an accompanying API key.
-    ///
-    /// API keys are long-lived tokens that can be exchanged for an access token.
-    ApiKey(PyxApiKeyTokens),
-}
-
-impl From<PyxTokens> for AccessToken {
-    fn from(tokens: PyxTokens) -> Self {
-        match tokens {
-            PyxTokens::OAuth(PyxOAuthTokens { access_token, .. }) => access_token,
-            PyxTokens::ApiKey(PyxApiKeyTokens { access_token, .. }) => access_token,
-        }
-    }
-}
-
-impl From<PyxTokens> for Credentials {
-    fn from(tokens: PyxTokens) -> Self {
-        let access_token = match tokens {
-            PyxTokens::OAuth(PyxOAuthTokens { access_token, .. }) => access_token,
-            PyxTokens::ApiKey(PyxApiKeyTokens { access_token, .. }) => access_token,
-        };
-        Self::from(access_token)
-    }
-}
-
-impl From<AccessToken> for Credentials {
-    fn from(access_token: AccessToken) -> Self {
-        Self::Bearer {
-            token: Token::new(access_token.into_bytes()),
-        }
-    }
-}
-
-/// The default tolerance for the access token expiration.
-pub const DEFAULT_TOLERANCE_SECS: u64 = 60 * 5;
-
-#[derive(Debug, Clone)]
-struct PyxDirectories {
-    /// The root directory for the token store (e.g., `/Users/ferris/.local/share/pyx/credentials`).
-    root: PathBuf,
-    /// The subdirectory for the token store (e.g., `/Users/ferris/.local/share/uv/credentials/3859a629b26fda96`).
-    subdirectory: PathBuf,
-}
-
-impl PyxDirectories {
-    /// Detect the [`PyxDirectories`] for a given API URL.
-    fn from_api(api: &DisplaySafeUrl) -> Result<Self, io::Error> {
-        // Store credentials in a subdirectory based on the API URL.
-        let digest = uv_cache_key::cache_digest(&CanonicalUrl::new(api));
-
-        // If the user explicitly set `PYX_CREDENTIALS_DIR`, use that.
-        if let Some(root) = std::env::var_os(EnvVars::PYX_CREDENTIALS_DIR) {
-            let root = std::path::absolute(root)?;
-            let subdirectory = root.join(&digest);
-            return Ok(Self { root, subdirectory });
-        }
-
-        // If the user has pyx credentials in their uv credentials directory, read them for
-        // backwards compatibility.
-        let root = if let Some(tool_dir) = std::env::var_os(EnvVars::UV_CREDENTIALS_DIR) {
-            std::path::absolute(tool_dir)?
-        } else {
-            StateStore::from_settings(None)?.bucket(StateBucket::Credentials)
-        };
-        let subdirectory = root.join(&digest);
-        if subdirectory.exists() {
-            return Ok(Self { root, subdirectory });
-        }
-
-        // Otherwise, use (e.g.) `~/.local/share/pyx`.
-        let Ok(xdg) = etcetera::base_strategy::choose_base_strategy() else {
-            return Err(io::Error::new(
-                io::ErrorKind::NotFound,
-                "Could not determine user data directory",
-            ));
-        };
-
-        let root = xdg.data_dir().join("pyx").join("credentials");
-        let subdirectory = root.join(&digest);
-        Ok(Self { root, subdirectory })
-    }
-}
-
-#[derive(Debug, Clone)]
-pub struct PyxTokenStore {
-    /// The root directory for the token store (e.g., `/Users/ferris/.local/share/pyx/credentials`).
-    root: PathBuf,
-    /// The subdirectory for the token store (e.g., `/Users/ferris/.local/share/uv/credentials/3859a629b26fda96`).
-    subdirectory: PathBuf,
-    /// The API URL for the token store (e.g., `https://api.pyx.dev`).
-    api: DisplaySafeUrl,
-    /// The CDN domain for the token store (e.g., `astralhosted.com`).
-    cdn: SmallString,
-}
-
-impl PyxTokenStore {
-    /// Create a new [`PyxTokenStore`] from settings.
-    pub fn from_settings() -> Result<Self, TokenStoreError> {
-        // Read the API URL and CDN domain from the environment variables, or fallback to the
-        // defaults.
-        let api = if let Ok(api_url) = std::env::var(EnvVars::PYX_API_URL) {
-            DisplaySafeUrl::parse(&api_url)
-        } else {
-            DisplaySafeUrl::parse("https://api.pyx.dev")
-        }?;
-        let cdn = std::env::var(EnvVars::PYX_CDN_DOMAIN)
-            .ok()
-            .map(SmallString::from)
-            .unwrap_or_else(|| SmallString::from(arcstr::literal!("astralhosted.com")));
-
-        // Determine the root directory for the token store.
-        let PyxDirectories { root, subdirectory } = PyxDirectories::from_api(&api)?;
-
-        Ok(Self {
-            root,
-            subdirectory,
-            api,
-            cdn,
-        })
-    }
-
-    /// Return the root directory for the token store.
-    pub fn root(&self) -> &Path {
-        &self.root
-    }
-
-    /// Return the API URL for the token store.
-    pub fn api(&self) -> &DisplaySafeUrl {
-        &self.api
-    }
-
-    /// Get or initialize an [`AccessToken`] from the store.
-    ///
-    /// If an access token is set in the environment, it will be returned as-is.
-    ///
-    /// If an access token is present on-disk, it will be returned (and refreshed, if necessary).
-    ///
-    /// If no access token is found, but an API key is present, the API key will be used to
-    /// bootstrap an access token.
-    pub async fn access_token(
-        &self,
-        client: &ClientWithMiddleware,
-        tolerance_secs: u64,
-    ) -> Result<Option<AccessToken>, TokenStoreError> {
-        // If the access token is already set in the environment, return it.
-        if let Some(access_token) = read_pyx_auth_token() {
-            return Ok(Some(access_token));
-        }
-
-        // Initialize the tokens from the store.
-        let tokens = self.init(client, tolerance_secs).await?;
-
-        // Extract the access token from the OAuth tokens or API key.
-        Ok(tokens.map(AccessToken::from))
-    }
-
-    /// Initialize the [`PyxTokens`] from the store.
-    ///
-    /// If an access token is already present, it will be returned (and refreshed, if necessary).
-    ///
-    /// If no access token is found, but an API key is present, the API key will be used to
-    /// bootstrap an access token.
-    pub async fn init(
-        &self,
-        client: &ClientWithMiddleware,
-        tolerance_secs: u64,
-    ) -> Result<Option<PyxTokens>, TokenStoreError> {
-        match self.read().await? {
-            Some(tokens) => {
-                // Refresh the tokens if they are expired.
-                let tokens = self.refresh(tokens, client, tolerance_secs).await?;
-                Ok(Some(tokens))
-            }
-            None => {
-                // If no tokens are present, bootstrap them from an API key.
-                self.bootstrap(client).await
-            }
-        }
-    }
-
-    /// Write the tokens to the store.
-    pub async fn write(&self, tokens: &PyxTokens) -> Result<(), TokenStoreError> {
-        fs_err::tokio::create_dir_all(&self.subdirectory).await?;
-        match tokens {
-            PyxTokens::OAuth(tokens) => {
-                // Write OAuth tokens to a generic `tokens.json` file.
-                fs_err::tokio::write(
-                    self.subdirectory.join("tokens.json"),
-                    serde_json::to_vec(tokens)?,
-                )
-                .await?;
-            }
-            PyxTokens::ApiKey(tokens) => {
-                // Write API key tokens to a file based on the API key.
-                let digest = uv_cache_key::cache_digest(&tokens.api_key);
-                fs_err::tokio::write(
-                    self.subdirectory.join(format!("{digest}.json")),
-                    &tokens.access_token,
-                )
-                .await?;
-            }
-        }
-        Ok(())
-    }
-
-    /// Returns `true` if the user appears to have an authentication token set.
-    pub fn has_auth_token(&self) -> bool {
-        read_pyx_auth_token().is_some()
-    }
-
-    /// Returns `true` if the user appears to have an API key set.
-    pub fn has_api_key(&self) -> bool {
-        read_pyx_api_key().is_some()
-    }
-
-    /// Returns `true` if the user appears to have OAuth tokens stored on disk.
-    pub fn has_oauth_tokens(&self) -> bool {
-        self.subdirectory.join("tokens.json").is_file()
-    }
-
-    /// Returns `true` if the user appears to have credentials (which may be invalid).
-    pub fn has_credentials(&self) -> bool {
-        self.has_auth_token() || self.has_api_key() || self.has_oauth_tokens()
-    }
-
-    /// Read the tokens from the store.
-    pub async fn read(&self) -> Result<Option<PyxTokens>, TokenStoreError> {
-        if let Some(api_key) = read_pyx_api_key() {
-            // Read the API key tokens from a file based on the API key.
-            let digest = uv_cache_key::cache_digest(&api_key);
-            match fs_err::tokio::read(self.subdirectory.join(format!("{digest}.json"))).await {
-                Ok(data) => {
-                    let access_token =
-                        AccessToken::from(String::from_utf8(data).expect("Invalid UTF-8"));
-                    Ok(Some(PyxTokens::ApiKey(PyxApiKeyTokens {
-                        access_token,
-                        api_key,
-                    })))
-                }
-                Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(None),
-                Err(err) => Err(err.into()),
-            }
-        } else {
-            match fs_err::tokio::read(self.subdirectory.join("tokens.json")).await {
-                Ok(data) => {
-                    let tokens: PyxOAuthTokens = serde_json::from_slice(&data)?;
-                    Ok(Some(PyxTokens::OAuth(tokens)))
-                }
-                Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(None),
-                Err(err) => Err(err.into()),
-            }
-        }
-    }
-
-    /// Remove the tokens from the store.
-    pub async fn delete(&self) -> Result<(), io::Error> {
-        fs_err::tokio::remove_dir_all(&self.subdirectory).await?;
-        Ok(())
-    }
-
-    /// Bootstrap the tokens from the store.
-    async fn bootstrap(
-        &self,
-        client: &ClientWithMiddleware,
-    ) -> Result<Option<PyxTokens>, TokenStoreError> {
-        #[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
-        struct Payload {
-            access_token: AccessToken,
-        }
-
-        // Retrieve the API key from the environment variable, if set.
-        let Some(api_key) = read_pyx_api_key() else {
-            return Ok(None);
-        };
-
-        debug!("Bootstrapping access token from an API key");
-
-        // Parse the API URL.
-        let mut url = self.api.clone();
-        url.set_path("auth/cli/access-token");
-
-        let mut request = reqwest::Request::new(reqwest::Method::POST, Url::from(url));
-        request.headers_mut().insert(
-            "Authorization",
-            reqwest::header::HeaderValue::from_str(&format!("Bearer {api_key}"))?,
-        );
-
-        let response = client.execute(request).await?;
-        let Payload { access_token } = response.error_for_status()?.json::<Payload>().await?;
-        let tokens = PyxTokens::ApiKey(PyxApiKeyTokens {
-            access_token,
-            api_key,
-        });
-
-        // Write the tokens to disk.
-        self.write(&tokens).await?;
-
-        Ok(Some(tokens))
-    }
-
-    /// Refresh the tokens in the store, if they are expired.
-    ///
-    /// In theory, we should _also_ refresh if we hit a 401; but for now, we only refresh ahead of
-    /// time.
-    async fn refresh(
-        &self,
-        tokens: PyxTokens,
-        client: &ClientWithMiddleware,
-        tolerance_secs: u64,
-    ) -> Result<PyxTokens, TokenStoreError> {
-        // Decode the access token.
-        let jwt = PyxJwt::decode(match &tokens {
-            PyxTokens::OAuth(PyxOAuthTokens { access_token, .. }) => access_token,
-            PyxTokens::ApiKey(PyxApiKeyTokens { access_token, .. }) => access_token,
-        })?;
-
-        // If the access token is expired, refresh it.
-        let is_up_to_date = match jwt.exp {
-            None => {
-                debug!("Access token has no expiration; refreshing...");
-                false
-            }
-            Some(..) if tolerance_secs == 0 => {
-                debug!("Refreshing access token due to zero tolerance...");
-                false
-            }
-            Some(jwt) => {
-                let exp = jiff::Timestamp::from_second(jwt)?;
-                let now = jiff::Timestamp::now();
-                if exp < now {
-                    debug!("Access token is expired (`{exp}`); refreshing...");
-                    false
-                } else if exp < now + Duration::from_secs(tolerance_secs) {
-                    debug!(
-                        "Access token will expire within the tolerance (`{exp}`); refreshing..."
-                    );
-                    false
-                } else {
-                    debug!("Access token is up-to-date (`{exp}`)");
-                    true
-                }
-            }
-        };
-
-        if is_up_to_date {
-            return Ok(tokens);
-        }
-
-        let tokens = match tokens {
-            PyxTokens::OAuth(PyxOAuthTokens { refresh_token, .. }) => {
-                // Parse the API URL.
-                let mut url = self.api.clone();
-                url.set_path("auth/cli/refresh");
-
-                let mut request = reqwest::Request::new(reqwest::Method::POST, Url::from(url));
-                let body = serde_json::json!({
-                    "refresh_token": refresh_token
-                });
-                *request.body_mut() = Some(body.to_string().into());
-
-                let response = client.execute(request).await?;
-                let tokens = response
-                    .error_for_status()?
-                    .json::<PyxOAuthTokens>()
-                    .await?;
-                PyxTokens::OAuth(tokens)
-            }
-            PyxTokens::ApiKey(PyxApiKeyTokens { api_key, .. }) => {
-                #[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
-                struct Payload {
-                    access_token: AccessToken,
-                }
-
-                // Parse the API URL.
-                let mut url = self.api.clone();
-                url.set_path("auth/cli/access-token");
-
-                let mut request = reqwest::Request::new(reqwest::Method::POST, Url::from(url));
-                request.headers_mut().insert(
-                    "Authorization",
-                    reqwest::header::HeaderValue::from_str(&format!("Bearer {api_key}"))?,
-                );
-
-                let response = client.execute(request).await?;
-                let Payload { access_token } =
-                    response.error_for_status()?.json::<Payload>().await?;
-                PyxTokens::ApiKey(PyxApiKeyTokens {
-                    access_token,
-                    api_key,
-                })
-            }
-        };
-
-        // Write the new tokens to disk.
-        self.write(&tokens).await?;
-        Ok(tokens)
-    }
-
-    /// Returns `true` if the given URL is "known" to this token store (i.e., should be
-    /// authenticated using the store's tokens).
-    pub fn is_known_url(&self, url: &Url) -> bool {
-        is_known_url(url, &self.api, &self.cdn)
-    }
-
-    /// Returns `true` if the URL is on a "known" domain (i.e., the same domain as the API or CDN).
-    ///
-    /// Like [`is_known_url`](Self::is_known_url), but also returns `true` if the API is on the
-    /// subdomain of the URL (e.g., if the API is `api.pyx.dev` and the URL is `pyx.dev`).
-    pub fn is_known_domain(&self, url: &Url) -> bool {
-        is_known_domain(url, &self.api, &self.cdn)
-    }
-}
-
-#[derive(thiserror::Error, Debug)]
-pub enum TokenStoreError {
-    #[error(transparent)]
-    Url(#[from] DisplaySafeUrlError),
-    #[error(transparent)]
-    Io(#[from] io::Error),
-    #[error(transparent)]
-    Serialization(#[from] serde_json::Error),
-    #[error(transparent)]
-    Reqwest(#[from] reqwest::Error),
-    #[error(transparent)]
-    ReqwestMiddleware(#[from] reqwest_middleware::Error),
-    #[error(transparent)]
-    InvalidHeaderValue(#[from] reqwest::header::InvalidHeaderValue),
-    #[error(transparent)]
-    Jiff(#[from] jiff::Error),
-    #[error(transparent)]
-    Jwt(#[from] JwtError),
-}
-
-impl TokenStoreError {
-    /// Returns `true` if the error is a 401 (Unauthorized) error.
-    pub fn is_unauthorized(&self) -> bool {
-        match self {
-            Self::Reqwest(err) => err.status() == Some(reqwest::StatusCode::UNAUTHORIZED),
-            Self::ReqwestMiddleware(err) => err.status() == Some(reqwest::StatusCode::UNAUTHORIZED),
-            _ => false,
-        }
-    }
-}
-
-/// The payload of the JWT.
-#[derive(Debug, serde::Deserialize)]
-pub struct PyxJwt {
-    /// The expiration time of the JWT, as a Unix timestamp.
-    pub exp: Option<i64>,
-    /// The issuer of the JWT.
-    pub iss: Option<String>,
-    /// The name of the organization, if any.
-    #[serde(rename = "urn:pyx:org_name")]
-    pub name: Option<String>,
-}
-
-impl PyxJwt {
-    /// Decode the JWT from the access token.
-    pub fn decode(access_token: &AccessToken) -> Result<Self, JwtError> {
-        let mut token_segments = access_token.as_str().splitn(3, '.');
-
-        let _header = token_segments.next().ok_or(JwtError::MissingHeader)?;
-        let payload = token_segments.next().ok_or(JwtError::MissingPayload)?;
-        let _signature = token_segments.next().ok_or(JwtError::MissingSignature)?;
-        if token_segments.next().is_some() {
-            return Err(JwtError::TooManySegments);
-        }
-
-        let decoded = BASE64_URL_SAFE_NO_PAD.decode(payload)?;
-
-        let jwt = serde_json::from_slice::<Self>(&decoded)?;
-        Ok(jwt)
-    }
-}
-
-#[derive(thiserror::Error, Debug)]
-pub enum JwtError {
-    #[error("JWT is missing a header")]
-    MissingHeader,
-    #[error("JWT is missing a payload")]
-    MissingPayload,
-    #[error("JWT is missing a signature")]
-    MissingSignature,
-    #[error("JWT has too many segments")]
-    TooManySegments,
-    #[error(transparent)]
-    Base64(#[from] base64::DecodeError),
-    #[error(transparent)]
-    Serde(#[from] serde_json::Error),
-}
-
-fn is_known_url(url: &Url, api: &DisplaySafeUrl, cdn: &str) -> bool {
-    // Determine whether the URL matches the API realm.
-    if Realm::from(url) == Realm::from(&**api) {
-        return true;
-    }
-
-    // Determine whether the URL matches the CDN domain (or a subdomain of it).
-    //
-    // For example, if URL is on `files.astralhosted.com` and the CDN domain is
-    // `astralhosted.com`, consider it known.
-    if matches!(url.scheme(), "https") && matches_domain(url, cdn) {
-        return true;
-    }
-
-    false
-}
-
-fn is_known_domain(url: &Url, api: &DisplaySafeUrl, cdn: &str) -> bool {
-    // Determine whether the URL matches the API domain.
-    if let Some(domain) = url.domain() {
-        if matches_domain(api, domain) {
-            return true;
-        }
-    }
-    is_known_url(url, api, cdn)
-}
-
-/// Returns `true` if the target URL is on the given domain.
-fn matches_domain(url: &Url, domain: &str) -> bool {
-    url.domain().is_some_and(|subdomain| {
-        subdomain == domain
-            || subdomain
-                .strip_suffix(domain)
-                .is_some_and(|prefix| prefix.ends_with('.'))
-    })
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn test_is_known_url() {
-        let api_url = DisplaySafeUrl::parse("https://api.pyx.dev").unwrap();
-        let cdn_domain = "astralhosted.com";
-
-        // Same realm as API.
-        assert!(is_known_url(
-            &Url::parse("https://api.pyx.dev/simple/").unwrap(),
-            &api_url,
-            cdn_domain
-        ));
-
-        // Different path on same API domain
-        assert!(is_known_url(
-            &Url::parse("https://api.pyx.dev/v1/").unwrap(),
-            &api_url,
-            cdn_domain
-        ));
-
-        // CDN domain.
-        assert!(is_known_url(
-            &Url::parse("https://astralhosted.com/packages/").unwrap(),
-            &api_url,
-            cdn_domain
-        ));
-
-        // CDN subdomain.
-        assert!(is_known_url(
-            &Url::parse("https://files.astralhosted.com/packages/").unwrap(),
-            &api_url,
-            cdn_domain
-        ));
-
-        // CDN on HTTP.
-        assert!(!is_known_url(
-            &Url::parse("http://astralhosted.com/packages/").unwrap(),
-            &api_url,
-            cdn_domain
-        ));
-
-        // Unknown domain.
-        assert!(!is_known_url(
-            &Url::parse("https://pypi.org/simple/").unwrap(),
-            &api_url,
-            cdn_domain
-        ));
-
-        // Similar but not matching domain.
-        assert!(!is_known_url(
-            &Url::parse("https://badastralhosted.com/packages/").unwrap(),
-            &api_url,
-            cdn_domain
-        ));
-    }
-
-    #[test]
-    fn test_is_known_domain() {
-        let api_url = DisplaySafeUrl::parse("https://api.pyx.dev").unwrap();
-        let cdn_domain = "astralhosted.com";
-
-        // Same realm as API.
-        assert!(is_known_domain(
-            &Url::parse("https://api.pyx.dev/simple/").unwrap(),
-            &api_url,
-            cdn_domain
-        ));
-
-        // API super-domain.
-        assert!(is_known_domain(
-            &Url::parse("https://pyx.dev").unwrap(),
-            &api_url,
-            cdn_domain
-        ));
-
-        // API subdomain.
-        assert!(!is_known_domain(
-            &Url::parse("https://foo.api.pyx.dev").unwrap(),
-            &api_url,
-            cdn_domain
-        ));
-
-        // Different subdomain.
-        assert!(!is_known_domain(
-            &Url::parse("https://beta.pyx.dev/").unwrap(),
-            &api_url,
-            cdn_domain
-        ));
-
-        // CDN domain.
-        assert!(is_known_domain(
-            &Url::parse("https://astralhosted.com/packages/").unwrap(),
-            &api_url,
-            cdn_domain
-        ));
-
-        // CDN subdomain.
-        assert!(is_known_domain(
-            &Url::parse("https://files.astralhosted.com/packages/").unwrap(),
-            &api_url,
-            cdn_domain
-        ));
-
-        // Unknown domain.
-        assert!(!is_known_domain(
-            &Url::parse("https://pypi.org/simple/").unwrap(),
-            &api_url,
-            cdn_domain
-        ));
-
-        // Different TLD.
-        assert!(!is_known_domain(
-            &Url::parse("https://pyx.com/").unwrap(),
-            &api_url,
-            cdn_domain
-        ));
-    }
-
-    #[test]
-    fn test_matches_domain() {
-        assert!(matches_domain(
-            &Url::parse("https://example.com").unwrap(),
-            "example.com"
-        ));
-        assert!(matches_domain(
-            &Url::parse("https://foo.example.com").unwrap(),
-            "example.com"
-        ));
-        assert!(matches_domain(
-            &Url::parse("https://bar.foo.example.com").unwrap(),
-            "example.com"
-        ));
-
-        assert!(!matches_domain(
-            &Url::parse("https://example.com").unwrap(),
-            "other.com"
-        ));
-        assert!(!matches_domain(
-            &Url::parse("https://example.org").unwrap(),
-            "example.com"
-        ));
-        assert!(!matches_domain(
-            &Url::parse("https://badexample.com").unwrap(),
-            "example.com"
-        ));
-    }
-}
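// Editor's note: a standalone sketch, not part of the diff. It mirrors the unverified
// JWT-payload decoding that `PyxJwt::decode` performs above: split the compact form on `.`,
// base64url-decode the middle segment, and deserialize the claims. The signature is deliberately
// not verified; per the code above, the token is only inspected for its expiry.
fn decode_claims_sketch(token: &str) -> Result<PyxJwt, JwtError> {
    let mut segments = token.splitn(3, '.');
    let _header = segments.next().ok_or(JwtError::MissingHeader)?;
    let payload = segments.next().ok_or(JwtError::MissingPayload)?;
    let _signature = segments.next().ok_or(JwtError::MissingSignature)?;
    // Base64url without padding, as used for JWT segments.
    let decoded = BASE64_URL_SAFE_NO_PAD.decode(payload)?;
    Ok(serde_json::from_slice::<PyxJwt>(&decoded)?)
}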
@@ -1,7 +1,6 @@
-use std::hash::{Hash, Hasher};
 use std::{fmt::Display, fmt::Formatter};

 use url::Url;
 use uv_redacted::DisplaySafeUrl;
 use uv_small_str::SmallString;

 /// Used to determine if authentication information should be retained on a new URL.
@@ -23,19 +22,13 @@ use uv_small_str::SmallString;
 // The port is only allowed to differ if it matches the "default port" for the scheme.
 // However, `url` (and therefore `reqwest`) sets the `port` to `None` if it matches the default port
 // so we do not need any special handling here.
-#[derive(Debug, Clone)]
-pub struct Realm {
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub(crate) struct Realm {
     scheme: SmallString,
     host: Option<SmallString>,
     port: Option<u16>,
 }

-impl From<&DisplaySafeUrl> for Realm {
-    fn from(url: &DisplaySafeUrl) -> Self {
-        Self::from(&**url)
-    }
-}
-
 impl From<&Url> for Realm {
     fn from(url: &Url) -> Self {
         Self {
@@ -66,91 +59,6 @@ impl Display for Realm {
     }
 }

-impl PartialEq for Realm {
-    fn eq(&self, other: &Self) -> bool {
-        RealmRef::from(self) == RealmRef::from(other)
-    }
-}
-
-impl Eq for Realm {}
-
-impl Hash for Realm {
-    fn hash<H: Hasher>(&self, state: &mut H) {
-        RealmRef::from(self).hash(state);
-    }
-}
-
-/// A reference to a [`Realm`] that can be used for zero-allocation comparisons.
-#[derive(Debug, Copy, Clone)]
-pub struct RealmRef<'a> {
-    scheme: &'a str,
-    host: Option<&'a str>,
-    port: Option<u16>,
-}
-
-impl RealmRef<'_> {
-    /// Returns true if this realm is a subdomain of the other realm.
-    pub(crate) fn is_subdomain_of(&self, other: Self) -> bool {
-        other.scheme == self.scheme
-            && other.port == self.port
-            && other.host.is_some_and(|other_host| {
-                self.host.is_some_and(|self_host| {
-                    self_host
-                        .strip_suffix(other_host)
-                        .is_some_and(|prefix| prefix.ends_with('.'))
-                })
-            })
-    }
-}
-
-impl<'a> From<&'a Url> for RealmRef<'a> {
-    fn from(url: &'a Url) -> Self {
-        Self {
-            scheme: url.scheme(),
-            host: url.host_str(),
-            port: url.port(),
-        }
-    }
-}
-
-impl PartialEq for RealmRef<'_> {
-    fn eq(&self, other: &Self) -> bool {
-        self.scheme == other.scheme && self.host == other.host && self.port == other.port
-    }
-}
-
-impl Eq for RealmRef<'_> {}
-
-impl Hash for RealmRef<'_> {
-    fn hash<H: Hasher>(&self, state: &mut H) {
-        self.scheme.hash(state);
-        self.host.hash(state);
-        self.port.hash(state);
-    }
-}
-
-impl<'a> PartialEq<RealmRef<'a>> for Realm {
-    fn eq(&self, rhs: &RealmRef<'a>) -> bool {
-        RealmRef::from(self) == *rhs
-    }
-}
-
-impl PartialEq<Realm> for RealmRef<'_> {
-    fn eq(&self, rhs: &Realm) -> bool {
-        *self == RealmRef::from(rhs)
-    }
-}
-
-impl<'a> From<&'a Realm> for RealmRef<'a> {
-    fn from(realm: &'a Realm) -> Self {
-        Self {
-            scheme: &realm.scheme,
-            host: realm.host.as_deref(),
-            port: realm.port,
-        }
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use url::{ParseError, Url};
@@ -237,87 +145,4 @@ mod tests {

         Ok(())
     }
-
-    #[test]
-    fn test_is_subdomain_of() -> Result<(), ParseError> {
-        use crate::realm::RealmRef;
-
-        // Subdomain relationship: sub.example.com is a subdomain of example.com
-        let subdomain_url = Url::parse("https://sub.example.com")?;
-        let domain_url = Url::parse("https://example.com")?;
-        let subdomain = RealmRef::from(&subdomain_url);
-        let domain = RealmRef::from(&domain_url);
-        assert!(subdomain.is_subdomain_of(domain));
-
-        // Deeper subdomain: foo.bar.example.com is a subdomain of example.com
-        let deep_subdomain_url = Url::parse("https://foo.bar.example.com")?;
-        let deep_subdomain = RealmRef::from(&deep_subdomain_url);
-        assert!(deep_subdomain.is_subdomain_of(domain));
-
-        // Deeper subdomain: foo.bar.example.com is also a subdomain of bar.example.com
-        let parent_subdomain_url = Url::parse("https://bar.example.com")?;
-        let parent_subdomain = RealmRef::from(&parent_subdomain_url);
-        assert!(deep_subdomain.is_subdomain_of(parent_subdomain));
-
-        // Not a subdomain: example.com is not a subdomain of sub.example.com
-        assert!(!domain.is_subdomain_of(subdomain));
-
-        // Same domain is not a subdomain of itself
-        assert!(!domain.is_subdomain_of(domain));
-
-        // Different TLD: example.org is not a subdomain of example.com
-        let different_tld_url = Url::parse("https://example.org")?;
-        let different_tld = RealmRef::from(&different_tld_url);
-        assert!(!different_tld.is_subdomain_of(domain));
-
-        // Partial match but not a subdomain: notexample.com is not a subdomain of example.com
-        let partial_match_url = Url::parse("https://notexample.com")?;
-        let partial_match = RealmRef::from(&partial_match_url);
-        assert!(!partial_match.is_subdomain_of(domain));
-
-        // Different scheme: http subdomain is not a subdomain of https domain
-        let http_subdomain_url = Url::parse("http://sub.example.com")?;
-        let https_domain_url = Url::parse("https://example.com")?;
-        let http_subdomain = RealmRef::from(&http_subdomain_url);
-        let https_domain = RealmRef::from(&https_domain_url);
-        assert!(!http_subdomain.is_subdomain_of(https_domain));
-
-        // Different port: same subdomain with different port is not a subdomain
-        let subdomain_port_8080_url = Url::parse("https://sub.example.com:8080")?;
-        let domain_port_9090_url = Url::parse("https://example.com:9090")?;
-        let subdomain_port_8080 = RealmRef::from(&subdomain_port_8080_url);
-        let domain_port_9090 = RealmRef::from(&domain_port_9090_url);
-        assert!(!subdomain_port_8080.is_subdomain_of(domain_port_9090));
-
-        // Same port: subdomain with same explicit port is a subdomain
-        let subdomain_with_port_url = Url::parse("https://sub.example.com:8080")?;
-        let domain_with_port_url = Url::parse("https://example.com:8080")?;
-        let subdomain_with_port = RealmRef::from(&subdomain_with_port_url);
-        let domain_with_port = RealmRef::from(&domain_with_port_url);
-        assert!(subdomain_with_port.is_subdomain_of(domain_with_port));
-
-        // Default port handling: subdomain with implicit port is a subdomain
-        let subdomain_default_url = Url::parse("https://sub.example.com")?;
-        let domain_explicit_443_url = Url::parse("https://example.com:443")?;
-        let subdomain_default = RealmRef::from(&subdomain_default_url);
-        let domain_explicit_443 = RealmRef::from(&domain_explicit_443_url);
-        assert!(subdomain_default.is_subdomain_of(domain_explicit_443));
-
-        // Edge case: empty host (shouldn't happen with valid URLs but testing defensive code)
-        let file_url = Url::parse("file:///path/to/file")?;
-        let https_url = Url::parse("https://example.com")?;
-        let file_realm = RealmRef::from(&file_url);
-        let https_realm = RealmRef::from(&https_url);
-        assert!(!file_realm.is_subdomain_of(https_realm));
-        assert!(!https_realm.is_subdomain_of(file_realm));
-
-        // Subdomain with path (path should be ignored)
-        let subdomain_with_path_url = Url::parse("https://sub.example.com/path")?;
-        let domain_with_path_url = Url::parse("https://example.com/other")?;
-        let subdomain_with_path = RealmRef::from(&subdomain_with_path_url);
-        let domain_with_path = RealmRef::from(&domain_with_path_url);
-        assert!(subdomain_with_path.is_subdomain_of(domain_with_path));
-
-        Ok(())
-    }
 }
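// Editor's note: an illustrative sketch, not part of the diff, of the `Realm` semantics defined
// above: two URLs are in the same realm when scheme, host, and port agree, with default ports
// normalized to `None` by the `url` crate, so paths and userinfo are ignored.
fn same_realm_sketch() {
    let a = Url::parse("https://example.com:443/simple/").unwrap();
    let b = Url::parse("https://example.com/other/").unwrap();
    // Paths differ, but scheme/host/port agree (443 is the https default), so the realms match.
    assert_eq!(Realm::from(&a), Realm::from(&b));
}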
@@ -1,95 +0,0 @@
-use serde::{Deserialize, Serialize};
-use std::str::FromStr;
-use thiserror::Error;
-use url::Url;
-use uv_redacted::{DisplaySafeUrl, DisplaySafeUrlError};
-
-#[derive(Error, Debug)]
-pub enum ServiceParseError {
-    #[error(transparent)]
-    InvalidUrl(#[from] DisplaySafeUrlError),
-    #[error("Unsupported scheme: {0}")]
-    UnsupportedScheme(String),
-    #[error("HTTPS is required for non-local hosts")]
-    HttpsRequired,
-}
-
-/// A service URL that wraps [`DisplaySafeUrl`] for CLI usage.
-///
-/// This type provides automatic URL parsing and validation when used as a CLI argument,
-/// eliminating the need for manual parsing in command functions.
-#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash)]
-#[serde(transparent)]
-pub struct Service(DisplaySafeUrl);
-
-impl Service {
-    /// Get the underlying [`DisplaySafeUrl`].
-    pub fn url(&self) -> &DisplaySafeUrl {
-        &self.0
-    }
-
-    /// Convert into the underlying [`DisplaySafeUrl`].
-    pub fn into_url(self) -> DisplaySafeUrl {
-        self.0
-    }
-
-    /// Validate that the URL scheme is supported.
-    fn check_scheme(url: &Url) -> Result<(), ServiceParseError> {
-        match url.scheme() {
-            "https" => Ok(()),
-            "http" if matches!(url.host_str(), Some("localhost" | "127.0.0.1")) => Ok(()),
-            "http" => Err(ServiceParseError::HttpsRequired),
-            value => Err(ServiceParseError::UnsupportedScheme(value.to_string())),
-        }
-    }
-}
-
-impl FromStr for Service {
-    type Err = ServiceParseError;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        // First try parsing as-is
-        let url = match DisplaySafeUrl::parse(s) {
-            Ok(url) => url,
-            Err(DisplaySafeUrlError::Url(url::ParseError::RelativeUrlWithoutBase)) => {
-                // If it's a relative URL, try prepending https://
-                let with_https = format!("https://{s}");
-                DisplaySafeUrl::parse(&with_https)?
-            }
-            Err(err) => return Err(err.into()),
-        };
-
-        Self::check_scheme(&url)?;
-
-        Ok(Self(url))
-    }
-}
-
-impl std::fmt::Display for Service {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        self.0.fmt(f)
-    }
-}
-
-impl TryFrom<String> for Service {
-    type Error = ServiceParseError;
-
-    fn try_from(value: String) -> Result<Self, Self::Error> {
-        Self::from_str(&value)
-    }
-}
-
-impl From<Service> for String {
-    fn from(service: Service) -> Self {
-        service.to_string()
-    }
-}
-
-impl TryFrom<DisplaySafeUrl> for Service {
-    type Error = ServiceParseError;
-
-    fn try_from(value: DisplaySafeUrl) -> Result<Self, Self::Error> {
-        Self::check_scheme(&value)?;
-        Ok(Self(value))
-    }
-}
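// Editor's note: a usage sketch, not part of the diff, of the `Service` parsing rules defined
// above: bare hostnames get an implicit `https://` prefix, and plain `http` is only accepted for
// localhost and 127.0.0.1.
fn service_parse_sketch() {
    // A bare hostname parses by prepending `https://`.
    assert!(Service::from_str("example.com").is_ok());
    // Plain HTTP is rejected for non-local hosts...
    assert!(matches!(
        Service::from_str("http://example.com"),
        Err(ServiceParseError::HttpsRequired)
    ));
    // ...but allowed for localhost during development.
    assert!(Service::from_str("http://localhost:8000").is_ok());
}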
@ -1,688 +0,0 @@
|
|||
use std::ops::Deref;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use fs_err as fs;
|
||||
use rustc_hash::FxHashMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use thiserror::Error;
|
||||
use uv_fs::{LockedFile, LockedFileError, LockedFileMode, with_added_extension};
|
||||
use uv_preview::{Preview, PreviewFeatures};
|
||||
use uv_redacted::DisplaySafeUrl;
|
||||
|
||||
use uv_state::{StateBucket, StateStore};
|
||||
use uv_static::EnvVars;
|
||||
|
||||
use crate::credentials::{Password, Token, Username};
|
||||
use crate::realm::Realm;
|
||||
use crate::service::Service;
|
||||
use crate::{Credentials, KeyringProvider};
|
||||
|
||||
/// The storage backend to use in `uv auth` commands.
|
||||
#[derive(Debug)]
|
||||
pub enum AuthBackend {
|
||||
// TODO(zanieb): Right now, we're using a keyring provider for the system store but that's just
|
||||
// where the native implementation is living at the moment. We should consider refactoring these
|
||||
// into a shared API in the future.
|
||||
System(KeyringProvider),
|
||||
TextStore(TextCredentialStore, LockedFile),
|
||||
}
|
||||
|
||||
impl AuthBackend {
|
||||
pub async fn from_settings(preview: Preview) -> Result<Self, TomlCredentialError> {
|
||||
// If preview is enabled, we'll use the system-native store
|
||||
if preview.is_enabled(PreviewFeatures::NATIVE_AUTH) {
|
||||
return Ok(Self::System(KeyringProvider::native()));
|
||||
}
|
||||
|
||||
// Otherwise, we'll use the plaintext credential store
|
||||
let path = TextCredentialStore::default_file()?;
|
||||
match TextCredentialStore::read(&path).await {
|
||||
Ok((store, lock)) => Ok(Self::TextStore(store, lock)),
|
||||
Err(err)
|
||||
if err
|
||||
.as_io_error()
|
||||
.is_some_and(|err| err.kind() == std::io::ErrorKind::NotFound) =>
|
||||
{
|
||||
Ok(Self::TextStore(
|
||||
TextCredentialStore::default(),
|
||||
TextCredentialStore::lock(&path).await?,
|
||||
))
|
||||
}
|
||||
Err(err) => Err(err),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Authentication scheme to use.
|
||||
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum AuthScheme {
|
||||
/// HTTP Basic Authentication
|
||||
///
|
||||
/// Uses a username and password.
|
||||
#[default]
|
||||
Basic,
|
||||
/// Bearer token authentication.
|
||||
///
|
||||
/// Uses a token provided as `Bearer <token>` in the `Authorization` header.
|
||||
Bearer,
|
||||
}
|
||||
|
||||
/// Errors that can occur when working with TOML credential storage.
|
||||
#[derive(Debug, Error)]
|
||||
pub enum TomlCredentialError {
|
||||
#[error(transparent)]
|
||||
Io(#[from] std::io::Error),
|
||||
#[error(transparent)]
|
||||
LockedFile(#[from] LockedFileError),
|
||||
#[error("Failed to parse TOML credential file: {0}")]
|
||||
ParseError(#[from] toml::de::Error),
|
||||
#[error("Failed to serialize credentials to TOML")]
|
||||
SerializeError(#[from] toml::ser::Error),
|
||||
#[error(transparent)]
|
||||
BasicAuthError(#[from] BasicAuthError),
|
||||
#[error(transparent)]
|
||||
BearerAuthError(#[from] BearerAuthError),
|
||||
#[error("Failed to determine credentials directory")]
|
||||
CredentialsDirError,
|
||||
#[error("Token is not valid unicode")]
|
||||
TokenNotUnicode(#[from] std::string::FromUtf8Error),
|
||||
}
|
||||
|
||||
impl TomlCredentialError {
|
||||
pub fn as_io_error(&self) -> Option<&std::io::Error> {
|
||||
match self {
|
||||
Self::Io(err) => Some(err),
|
||||
Self::LockedFile(err) => err.as_io_error(),
|
||||
Self::ParseError(_)
|
||||
| Self::SerializeError(_)
|
||||
| Self::BasicAuthError(_)
|
||||
| Self::BearerAuthError(_)
|
||||
| Self::CredentialsDirError
|
||||
| Self::TokenNotUnicode(_) => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
pub enum BasicAuthError {
|
||||
#[error("`username` is required with `scheme = basic`")]
|
||||
MissingUsername,
|
||||
#[error("`token` cannot be provided with `scheme = basic`")]
|
||||
UnexpectedToken,
|
||||
}
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
pub enum BearerAuthError {
|
||||
#[error("`token` is required with `scheme = bearer`")]
|
||||
MissingToken,
|
||||
#[error("`username` cannot be provided with `scheme = bearer`")]
|
||||
UnexpectedUsername,
|
||||
#[error("`password` cannot be provided with `scheme = bearer`")]
|
||||
UnexpectedPassword,
|
||||
}
|
||||
|
||||
/// A single credential entry in a TOML credentials file.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(try_from = "TomlCredentialWire", into = "TomlCredentialWire")]
|
||||
struct TomlCredential {
|
||||
/// The service URL for this credential.
|
||||
service: Service,
|
||||
/// The credentials for this entry.
|
||||
credentials: Credentials,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
struct TomlCredentialWire {
|
||||
/// The service URL for this credential.
|
||||
service: Service,
|
||||
/// The username to use. Only allowed with [`AuthScheme::Basic`].
|
||||
username: Username,
|
||||
/// The authentication scheme.
|
||||
#[serde(default)]
|
||||
scheme: AuthScheme,
|
||||
/// The password to use. Only allowed with [`AuthScheme::Basic`].
|
||||
password: Option<Password>,
|
||||
/// The token to use. Only allowed with [`AuthScheme::Bearer`].
|
||||
token: Option<String>,
|
||||
}
|
||||
|
||||
impl From<TomlCredential> for TomlCredentialWire {
|
||||
fn from(value: TomlCredential) -> Self {
|
||||
match value.credentials {
|
||||
Credentials::Basic { username, password } => Self {
|
||||
service: value.service,
|
||||
username,
|
||||
scheme: AuthScheme::Basic,
|
||||
password,
|
||||
token: None,
|
||||
},
|
||||
Credentials::Bearer { token } => Self {
|
||||
service: value.service,
|
||||
username: Username::new(None),
|
||||
scheme: AuthScheme::Bearer,
|
||||
password: None,
|
||||
token: Some(String::from_utf8(token.into_bytes()).expect("Token is valid UTF-8")),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<TomlCredentialWire> for TomlCredential {
|
||||
type Error = TomlCredentialError;
|
||||
|
||||
fn try_from(value: TomlCredentialWire) -> Result<Self, Self::Error> {
|
||||
match value.scheme {
|
||||
AuthScheme::Basic => {
|
||||
if value.username.as_deref().is_none() {
|
||||
return Err(TomlCredentialError::BasicAuthError(
|
||||
BasicAuthError::MissingUsername,
|
||||
));
|
||||
}
|
||||
if value.token.is_some() {
|
||||
return Err(TomlCredentialError::BasicAuthError(
|
||||
BasicAuthError::UnexpectedToken,
|
||||
));
|
||||
}
|
||||
let credentials = Credentials::Basic {
|
||||
username: value.username,
|
||||
password: value.password,
|
||||
};
|
||||
Ok(Self {
|
||||
service: value.service,
|
||||
credentials,
|
||||
})
|
||||
}
|
||||
AuthScheme::Bearer => {
|
||||
if value.username.is_some() {
|
||||
return Err(TomlCredentialError::BearerAuthError(
|
||||
BearerAuthError::UnexpectedUsername,
|
||||
));
|
||||
}
|
||||
if value.password.is_some() {
|
||||
return Err(TomlCredentialError::BearerAuthError(
|
||||
BearerAuthError::UnexpectedPassword,
|
||||
));
|
||||
}
|
||||
if value.token.is_none() {
|
||||
return Err(TomlCredentialError::BearerAuthError(
|
||||
BearerAuthError::MissingToken,
|
||||
));
|
||||
}
|
||||
let credentials = Credentials::Bearer {
|
||||
                    token: Token::new(value.token.unwrap().into_bytes()),
                };
                Ok(Self {
                    service: value.service,
                    credentials,
                })
            }
        }
    }
}

#[derive(Debug, Clone, Serialize, Deserialize, Default)]
struct TomlCredentials {
    /// Array of credential entries.
    #[serde(rename = "credential")]
    credentials: Vec<TomlCredential>,
}

/// A credential store with a plain text storage backend.
#[derive(Debug, Default)]
pub struct TextCredentialStore {
    credentials: FxHashMap<(Service, Username), Credentials>,
}

impl TextCredentialStore {
    /// Return the directory for storing credentials.
    pub fn directory_path() -> Result<PathBuf, TomlCredentialError> {
        if let Some(dir) = std::env::var_os(EnvVars::UV_CREDENTIALS_DIR)
            .filter(|s| !s.is_empty())
            .map(PathBuf::from)
        {
            return Ok(dir);
        }

        Ok(StateStore::from_settings(None)?.bucket(StateBucket::Credentials))
    }

    /// Return the standard file path for storing credentials.
    pub fn default_file() -> Result<PathBuf, TomlCredentialError> {
        let dir = Self::directory_path()?;
        Ok(dir.join("credentials.toml"))
    }

    /// Acquire a lock on the credentials file at the given path.
    pub async fn lock(path: &Path) -> Result<LockedFile, TomlCredentialError> {
        if let Some(parent) = path.parent() {
            fs::create_dir_all(parent)?;
        }
        let lock = with_added_extension(path, ".lock");
        Ok(LockedFile::acquire(lock, LockedFileMode::Exclusive, "credentials store").await?)
    }

    /// Read credentials from a file.
    fn from_file<P: AsRef<Path>>(path: P) -> Result<Self, TomlCredentialError> {
        let content = fs::read_to_string(path)?;
        let credentials: TomlCredentials = toml::from_str(&content)?;

        let credentials: FxHashMap<(Service, Username), Credentials> = credentials
            .credentials
            .into_iter()
            .map(|credential| {
                let username = match &credential.credentials {
                    Credentials::Basic { username, .. } => username.clone(),
                    Credentials::Bearer { .. } => Username::none(),
                };
                (
                    (credential.service.clone(), username),
                    credential.credentials,
                )
            })
            .collect();

        Ok(Self { credentials })
    }

    /// Read credentials from a file.
    ///
    /// Returns [`TextCredentialStore`] and a [`LockedFile`] to hold if mutating the store.
    ///
    /// If the store will not be written to following the read, the lock can be dropped.
    pub async fn read<P: AsRef<Path>>(path: P) -> Result<(Self, LockedFile), TomlCredentialError> {
        let lock = Self::lock(path.as_ref()).await?;
        let store = Self::from_file(path)?;
        Ok((store, lock))
    }

    /// Persist credentials to a file.
    ///
    /// Requires a [`LockedFile`] from [`TextCredentialStore::lock`] or
    /// [`TextCredentialStore::read`] to ensure exclusive access.
    pub fn write<P: AsRef<Path>>(
        self,
        path: P,
        _lock: LockedFile,
    ) -> Result<(), TomlCredentialError> {
        let credentials = self
            .credentials
            .into_iter()
            .map(|((service, _username), credentials)| TomlCredential {
                service,
                credentials,
            })
            .collect::<Vec<_>>();

        let toml_creds = TomlCredentials { credentials };
        let content = toml::to_string_pretty(&toml_creds)?;
        fs::create_dir_all(
            path.as_ref()
                .parent()
                .ok_or(TomlCredentialError::CredentialsDirError)?,
        )?;

        // TODO(zanieb): We should use an atomic write here
        fs::write(path, content)?;
        Ok(())
    }

    /// Get credentials for a given URL and username.
    ///
    /// The most specific URL prefix match in the same [`Realm`] is returned, if any.
    pub fn get_credentials(
        &self,
        url: &DisplaySafeUrl,
        username: Option<&str>,
    ) -> Option<&Credentials> {
        let request_realm = Realm::from(url);

        // Perform an exact lookup first
        // TODO(zanieb): Consider adding `DisplaySafeUrlRef` so we can avoid this clone
        // TODO(zanieb): We could also return early here if we can't normalize to a `Service`
        if let Ok(url_service) = Service::try_from(url.clone()) {
            if let Some(credential) = self.credentials.get(&(
                url_service.clone(),
                Username::from(username.map(str::to_string)),
            )) {
                return Some(credential);
            }
        }

        // If that fails, iterate through to find a prefix match
        let mut best: Option<(usize, &Service, &Credentials)> = None;

        for ((service, stored_username), credential) in &self.credentials {
            let service_realm = Realm::from(service.url().deref());

            // Only consider services in the same realm
            if service_realm != request_realm {
                continue;
            }

            // Service path must be a prefix of request path
            if !url.path().starts_with(service.url().path()) {
                continue;
            }

            // If a username is provided, it must match
            if let Some(request_username) = username {
                if Some(request_username) != stored_username.as_deref() {
                    continue;
                }
            }

            // Update our best matching credential based on prefix length
            let specificity = service.url().path().len();
            if best.is_none_or(|(best_specificity, _, _)| specificity > best_specificity) {
                best = Some((specificity, service, credential));
            }
        }

        // Return the most specific match
        if let Some((_, _, credential)) = best {
            return Some(credential);
        }

        None
    }

    /// Store credentials for a given service.
    pub fn insert(&mut self, service: Service, credentials: Credentials) -> Option<Credentials> {
        let username = match &credentials {
            Credentials::Basic { username, .. } => username.clone(),
            Credentials::Bearer { .. } => Username::none(),
        };
        self.credentials.insert((service, username), credentials)
    }

    /// Remove credentials for a given service.
    pub fn remove(&mut self, service: &Service, username: Username) -> Option<Credentials> {
        // Remove the specific credential for this service and username
        self.credentials.remove(&(service.clone(), username))
    }
}
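
The lookup above falls back from an exact `(Service, Username)` hit to a linear scan for the longest matching path prefix in the same realm. A minimal sketch of that precedence, not part of the diff, using only the API defined in this file (the URLs are illustrative):

// Sketch: longest-prefix-within-realm wins.
let mut store = TextCredentialStore::default();
store.insert(
    Service::from_str("https://example.com/api").unwrap(),
    Credentials::basic(Some("general".to_string()), Some("p1".to_string())),
);
store.insert(
    Service::from_str("https://example.com/api/v1").unwrap(),
    Credentials::basic(Some("specific".to_string()), Some("p2".to_string())),
);
let url = DisplaySafeUrl::parse("https://example.com/api/v1/users").unwrap();
// Both services are in the same realm and both paths are prefixes of the
// request path, but "/api/v1" is longer, so its credentials are returned.
assert_eq!(
    store.get_credentials(&url, None).unwrap().username(),
    Some("specific")
);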

#[cfg(test)]
mod tests {
    use std::io::Write;
    use std::str::FromStr;

    use tempfile::NamedTempFile;

    use super::*;

    #[test]
    fn test_toml_serialization() {
        let credentials = TomlCredentials {
            credentials: vec![
                TomlCredential {
                    service: Service::from_str("https://example.com").unwrap(),
                    credentials: Credentials::Basic {
                        username: Username::new(Some("user1".to_string())),
                        password: Some(Password::new("pass1".to_string())),
                    },
                },
                TomlCredential {
                    service: Service::from_str("https://test.org").unwrap(),
                    credentials: Credentials::Basic {
                        username: Username::new(Some("user2".to_string())),
                        password: Some(Password::new("pass2".to_string())),
                    },
                },
            ],
        };

        let toml_str = toml::to_string_pretty(&credentials).unwrap();
        let parsed: TomlCredentials = toml::from_str(&toml_str).unwrap();

        assert_eq!(parsed.credentials.len(), 2);
        assert_eq!(
            parsed.credentials[0].service.to_string(),
            "https://example.com/"
        );
        assert_eq!(
            parsed.credentials[1].service.to_string(),
            "https://test.org/"
        );
    }

    #[test]
    fn test_credential_store_operations() {
        let mut store = TextCredentialStore::default();
        let credentials = Credentials::basic(Some("user".to_string()), Some("pass".to_string()));

        let service = Service::from_str("https://example.com").unwrap();
        store.insert(service.clone(), credentials.clone());
        let url = DisplaySafeUrl::parse("https://example.com/").unwrap();
        assert!(store.get_credentials(&url, None).is_some());

        let url = DisplaySafeUrl::parse("https://example.com/path").unwrap();
        let retrieved = store.get_credentials(&url, None).unwrap();
        assert_eq!(retrieved.username(), Some("user"));
        assert_eq!(retrieved.password(), Some("pass"));

        assert!(
            store
                .remove(&service, Username::from(Some("user".to_string())))
                .is_some()
        );
        let url = DisplaySafeUrl::parse("https://example.com/").unwrap();
        assert!(store.get_credentials(&url, None).is_none());
    }

    #[tokio::test]
    async fn test_file_operations() {
        let mut temp_file = NamedTempFile::new().unwrap();
        writeln!(
            temp_file,
            r#"
[[credential]]
service = "https://example.com"
username = "testuser"
scheme = "basic"
password = "testpass"

[[credential]]
service = "https://test.org"
username = "user2"
password = "pass2"
"#
        )
        .unwrap();

        let store = TextCredentialStore::from_file(temp_file.path()).unwrap();

        let url = DisplaySafeUrl::parse("https://example.com/").unwrap();
        assert!(store.get_credentials(&url, None).is_some());
        let url = DisplaySafeUrl::parse("https://test.org/").unwrap();
        assert!(store.get_credentials(&url, None).is_some());

        let url = DisplaySafeUrl::parse("https://example.com").unwrap();
        let cred = store.get_credentials(&url, None).unwrap();
        assert_eq!(cred.username(), Some("testuser"));
        assert_eq!(cred.password(), Some("testpass"));

        // Test saving
        let temp_output = NamedTempFile::new().unwrap();
        store
            .write(
                temp_output.path(),
                TextCredentialStore::lock(temp_file.path()).await.unwrap(),
            )
            .unwrap();

        let content = fs::read_to_string(temp_output.path()).unwrap();
        assert!(content.contains("example.com"));
        assert!(content.contains("testuser"));
    }

    #[test]
    fn test_prefix_matching() {
        let mut store = TextCredentialStore::default();
        let credentials = Credentials::basic(Some("user".to_string()), Some("pass".to_string()));

        // Store credentials for a specific path prefix
        let service = Service::from_str("https://example.com/api").unwrap();
        store.insert(service.clone(), credentials.clone());

        // Should match URLs that are prefixes of the stored service
        let matching_urls = [
            "https://example.com/api",
            "https://example.com/api/v1",
            "https://example.com/api/v1/users",
        ];

        for url_str in matching_urls {
            let url = DisplaySafeUrl::parse(url_str).unwrap();
            let cred = store.get_credentials(&url, None);
            assert!(cred.is_some(), "Failed to match URL with prefix: {url_str}");
        }

        // Should NOT match URLs that are not prefixes
        let non_matching_urls = [
            "https://example.com/different",
            "https://example.com/ap", // Not a complete path segment match
            "https://example.com",    // Shorter than the stored prefix
        ];

        for url_str in non_matching_urls {
            let url = DisplaySafeUrl::parse(url_str).unwrap();
            let cred = store.get_credentials(&url, None);
            assert!(cred.is_none(), "Should not match non-prefix URL: {url_str}");
        }
    }

    #[test]
    fn test_realm_based_matching() {
        let mut store = TextCredentialStore::default();
        let credentials = Credentials::basic(Some("user".to_string()), Some("pass".to_string()));

        // Store by full URL (realm)
        let service = Service::from_str("https://example.com").unwrap();
        store.insert(service.clone(), credentials.clone());

        // Should match URLs in the same realm
        let matching_urls = [
            "https://example.com",
            "https://example.com/path",
            "https://example.com/different/path",
            "https://example.com:443/path", // Default HTTPS port
        ];

        for url_str in matching_urls {
            let url = DisplaySafeUrl::parse(url_str).unwrap();
            let cred = store.get_credentials(&url, None);
            assert!(
                cred.is_some(),
                "Failed to match URL in same realm: {url_str}"
            );
        }

        // Should NOT match URLs in different realms
        let non_matching_urls = [
            "http://example.com",       // Different scheme
            "https://different.com",    // Different host
            "https://example.com:8080", // Different port
        ];

        for url_str in non_matching_urls {
            let url = DisplaySafeUrl::parse(url_str).unwrap();
            let cred = store.get_credentials(&url, None);
            assert!(
                cred.is_none(),
                "Should not match URL in different realm: {url_str}"
            );
        }
    }

    #[test]
    fn test_most_specific_prefix_matching() {
        let mut store = TextCredentialStore::default();
        let general_cred =
            Credentials::basic(Some("general".to_string()), Some("pass1".to_string()));
        let specific_cred =
            Credentials::basic(Some("specific".to_string()), Some("pass2".to_string()));

        // Store credentials with different prefix lengths
        let general_service = Service::from_str("https://example.com/api").unwrap();
        let specific_service = Service::from_str("https://example.com/api/v1").unwrap();
        store.insert(general_service.clone(), general_cred);
        store.insert(specific_service.clone(), specific_cred);

        // Should match the most specific prefix
        let url = DisplaySafeUrl::parse("https://example.com/api/v1/users").unwrap();
        let cred = store.get_credentials(&url, None).unwrap();
        assert_eq!(cred.username(), Some("specific"));

        // Should match the general prefix for non-specific paths
        let url = DisplaySafeUrl::parse("https://example.com/api/v2").unwrap();
        let cred = store.get_credentials(&url, None).unwrap();
        assert_eq!(cred.username(), Some("general"));
    }

    #[test]
    fn test_username_exact_url_match() {
        let mut store = TextCredentialStore::default();
        let url = DisplaySafeUrl::parse("https://example.com").unwrap();
        let service = Service::from_str("https://example.com").unwrap();
        let user1_creds = Credentials::basic(Some("user1".to_string()), Some("pass1".to_string()));
        store.insert(service.clone(), user1_creds.clone());

        // Should return credentials when username matches
        let result = store.get_credentials(&url, Some("user1"));
        assert!(result.is_some());
        assert_eq!(result.unwrap().username(), Some("user1"));
        assert_eq!(result.unwrap().password(), Some("pass1"));

        // Should not return credentials when username doesn't match
        let result = store.get_credentials(&url, Some("user2"));
        assert!(result.is_none());

        // Should return credentials when no username is specified
        let result = store.get_credentials(&url, None);
        assert!(result.is_some());
        assert_eq!(result.unwrap().username(), Some("user1"));
    }

    #[test]
    fn test_username_prefix_url_match() {
        let mut store = TextCredentialStore::default();

        // Add credentials with different usernames for overlapping URL prefixes
        let general_service = Service::from_str("https://example.com/api").unwrap();
        let specific_service = Service::from_str("https://example.com/api/v1").unwrap();

        let general_creds = Credentials::basic(
            Some("general_user".to_string()),
            Some("general_pass".to_string()),
        );
        let specific_creds = Credentials::basic(
            Some("specific_user".to_string()),
            Some("specific_pass".to_string()),
        );

        store.insert(general_service, general_creds);
        store.insert(specific_service, specific_creds);

        let url = DisplaySafeUrl::parse("https://example.com/api/v1/users").unwrap();

        // Should match specific credentials when username matches
        let result = store.get_credentials(&url, Some("specific_user"));
        assert!(result.is_some());
        assert_eq!(result.unwrap().username(), Some("specific_user"));

        // Should match the general credentials when requesting general_user (falls back to less specific prefix)
        let result = store.get_credentials(&url, Some("general_user"));
        assert!(
            result.is_some(),
            "Should match general_user from less specific prefix"
        );
        assert_eq!(result.unwrap().username(), Some("general_user"));

        // Should match most specific when no username specified
        let result = store.get_credentials(&url, None);
        assert!(result.is_some());
        assert_eq!(result.unwrap().username(), Some("specific_user"));
    }
}
@@ -1,12 +1,13 @@
[package]
name = "uv-bench"
version = "0.0.8"
description = "This is an internal component crate of uv"
version = "0.0.0"
description = "uv Micro-benchmarks"
publish = false
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }

@@ -22,19 +23,18 @@ name = "uv"
path = "benches/uv.rs"
harness = false

[dev-dependencies]
[dependencies]
uv-cache = { workspace = true }
uv-client = { workspace = true }
uv-configuration = { workspace = true }
uv-dispatch = { workspace = true }
uv-distribution = { workspace = true }
uv-distribution-types = { workspace = true }
uv-extract = { workspace = true }
uv-extract = { workspace = true, optional = true }
uv-install-wheel = { workspace = true }
uv-pep440 = { workspace = true }
uv-pep508 = { workspace = true }
uv-platform-tags = { workspace = true }
uv-preview = { workspace = true }
uv-pypi-types = { workspace = true }
uv-python = { workspace = true }
uv-resolver = { workspace = true }

@@ -42,7 +42,10 @@ uv-types = { workspace = true }
uv-workspace = { workspace = true }

anyhow = { workspace = true }
criterion = { version = "4.0.3", default-features = false, package = "codspeed-criterion-compat", features = ["async_tokio"] }
codspeed-criterion-compat = { version = "3.0.2", default-features = false, optional = true }
criterion = { version = "0.6.0", default-features = false, features = [
    "async_tokio",
] }
jiff = { workspace = true }
tokio = { workspace = true }

@@ -50,4 +53,5 @@ tokio = { workspace = true }
ignored = ["uv-extract"]

[features]
codspeed = ["codspeed-criterion-compat"]
static = ["uv-extract/static"]
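
With this split, a plain `cargo bench -p uv-bench` builds against vanilla criterion, while `cargo bench -p uv-bench --features codspeed` presumably selects the CodSpeed-compatible backend; the exact CI invocation is an assumption, not something this diff shows.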
@@ -1,13 +0,0 @@
<!-- This file is generated. DO NOT EDIT -->

# uv-bench

This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
is unstable and will have frequent breaking changes.

This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-bench).

See uv's
[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
for details on versioning.
@@ -1,9 +1,9 @@
use std::hint::black_box;
use std::str::FromStr;

use criterion::{Criterion, criterion_group, criterion_main, measurement::WallTime};
use std::hint::black_box;
use uv_bench::criterion::{Criterion, criterion_group, criterion_main, measurement::WallTime};
use uv_cache::Cache;
use uv_client::{BaseClientBuilder, RegistryClientBuilder};
use uv_client::RegistryClientBuilder;
use uv_distribution_types::Requirement;
use uv_python::PythonEnvironment;
use uv_resolver::Manifest;

@@ -59,14 +59,11 @@ fn setup(manifest: Manifest) -> impl Fn(bool) {
        .build()
        .unwrap();

    let cache = Cache::from_path("../../.cache")
        .init_no_wait()
        .expect("No cache contention when running benchmarks")
        .unwrap();
    let cache = Cache::from_path("../../.cache").init().unwrap();
    let interpreter = PythonEnvironment::from_root("../../.venv", &cache)
        .unwrap()
        .into_interpreter();
    let client = RegistryClientBuilder::new(BaseClientBuilder::default(), cache.clone()).build();
    let client = RegistryClientBuilder::new(cache.clone()).build();

    move |universal| {
        runtime

@@ -88,23 +85,22 @@ mod resolver {

    use uv_cache::Cache;
    use uv_client::RegistryClient;
    use uv_configuration::{BuildOptions, Concurrency, Constraints, IndexStrategy, SourceStrategy};
    use uv_configuration::{
        BuildOptions, Concurrency, ConfigSettings, Constraints, IndexStrategy, PreviewMode,
        SourceStrategy,
    };
    use uv_dispatch::{BuildDispatch, SharedState};
    use uv_distribution::DistributionDatabase;
    use uv_distribution_types::{
        ConfigSettings, DependencyMetadata, ExtraBuildRequires, ExtraBuildVariables,
        IndexLocations, PackageConfigSettings, RequiresPython,
    };
    use uv_distribution_types::{DependencyMetadata, IndexLocations, RequiresPython};
    use uv_install_wheel::LinkMode;
    use uv_pep440::Version;
    use uv_pep508::{MarkerEnvironment, MarkerEnvironmentBuilder};
    use uv_platform_tags::{Arch, Os, Platform, Tags};
    use uv_preview::Preview;
    use uv_pypi_types::{Conflicts, ResolverMarkerEnvironment};
    use uv_python::Interpreter;
    use uv_resolver::{
        ExcludeNewer, FlatIndex, InMemoryIndex, Manifest, OptionsBuilder, PythonRequirement,
        Resolver, ResolverEnvironment, ResolverOutput,
        FlatIndex, InMemoryIndex, Manifest, OptionsBuilder, PythonRequirement, Resolver,
        ResolverEnvironment, ResolverOutput,
    };
    use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy};
    use uv_workspace::WorkspaceCache;

@@ -134,7 +130,7 @@ mod resolver {
    );

    static TAGS: LazyLock<Tags> = LazyLock::new(|| {
        Tags::from_env(&PLATFORM, (3, 11), "cpython", (3, 11), false, false, false).unwrap()
        Tags::from_env(&PLATFORM, (3, 11), "cpython", (3, 11), false, false).unwrap()
    });

    pub(crate) async fn resolve(

@@ -145,13 +141,10 @@ mod resolver {
        universal: bool,
    ) -> Result<ResolverOutput> {
        let build_isolation = BuildIsolation::default();
        let extra_build_requires = ExtraBuildRequires::default();
        let extra_build_variables = ExtraBuildVariables::default();
        let build_options = BuildOptions::default();
        let concurrency = Concurrency::default();
        let config_settings = ConfigSettings::default();
        let config_settings_package = PackageConfigSettings::default();
        let exclude_newer = ExcludeNewer::global(
        let exclude_newer = Some(
            jiff::civil::date(2024, 9, 1)
                .to_zoned(jiff::tz::TimeZone::UTC)
                .unwrap()

@@ -165,9 +158,7 @@ mod resolver {
        let index = InMemoryIndex::default();
        let index_locations = IndexLocations::default();
        let installed_packages = EmptyInstalledPackages;
        let options = OptionsBuilder::new()
            .exclude_newer(exclude_newer.clone())
            .build();
        let options = OptionsBuilder::new().exclude_newer(exclude_newer).build();
        let sources = SourceStrategy::default();
        let dependency_metadata = DependencyMetadata::default();
        let conflicts = Conflicts::empty();

@@ -185,7 +176,7 @@ mod resolver {
        let build_context = BuildDispatch::new(
            client,
            &cache,
            &build_constraints,
            build_constraints,
            interpreter,
            &index_locations,
            &flat_index,

@@ -193,10 +184,7 @@ mod resolver {
            state,
            IndexStrategy::default(),
            &config_settings,
            &config_settings_package,
            build_isolation,
            &extra_build_requires,
            &extra_build_variables,
            LinkMode::default(),
            &build_options,
            &hashes,

@@ -204,7 +192,7 @@ mod resolver {
            sources,
            workspace_cache,
            concurrency,
            Preview::default(),
            PreviewMode::Enabled,
        );

        let markers = if universal {
@@ -1 +1,10 @@
pub mod criterion {
    //! This module re-exports the criterion API but picks the right backend depending on whether
    //! the benchmarks are built to run locally or with codspeed

    #[cfg(not(feature = "codspeed"))]
    pub use criterion::*;

    #[cfg(feature = "codspeed")]
    pub use codspeed_criterion_compat::*;
}
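
A benchmark consuming this facade never names the backend directly; a minimal sketch of a hypothetical consumer (the benchmark body is illustrative, not from this diff):

// Sketch: a consumer of the re-exported criterion API.
use uv_bench::criterion::{Criterion, criterion_group, criterion_main};

fn bench_noop(c: &mut Criterion) {
    // Resolves to vanilla `criterion` by default, or to
    // `codspeed_criterion_compat` when built with `--features codspeed`.
    c.bench_function("noop", |b| b.iter(|| 1 + 1));
}

criterion_group!(benches, bench_noop);
criterion_main!(benches);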
@@ -1,37 +0,0 @@
[package]
name = "uv-bin-install"
version = "0.0.8"
description = "This is an internal component crate of uv"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

[lib]
doctest = false

[lints]
workspace = true

[dependencies]
uv-cache = { workspace = true }
uv-client = { workspace = true }
uv-distribution-filename = { workspace = true }
uv-extract = { workspace = true }
uv-pep440 = { workspace = true }
uv-platform = { workspace = true }
uv-redacted = { workspace = true }

fs-err = { workspace = true, features = ["tokio"] }
futures = { workspace = true }
reqwest = { workspace = true }
reqwest-middleware = { workspace = true }
reqwest-retry = { workspace = true }
tempfile = { workspace = true }
thiserror = { workspace = true }
tokio = { workspace = true }
tokio-util = { workspace = true }
tracing = { workspace = true }
url = { workspace = true }
@@ -1,13 +0,0 @@
<!-- This file is generated. DO NOT EDIT -->

# uv-bin-install

This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
is unstable and will have frequent breaking changes.

This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-bin-install).

See uv's
[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
for details on versioning.
@@ -1,438 +0,0 @@
//! Binary download and installation utilities for uv.
//!
//! These utilities are specifically for consuming distributions that are _not_ Python packages,
//! e.g., `ruff` (which does have a Python package, but also has standalone binaries on GitHub).

use std::path::PathBuf;
use std::pin::Pin;
use std::task::{Context, Poll};
use std::time::{Duration, SystemTime};

use futures::TryStreamExt;
use reqwest_retry::RetryPolicy;
use reqwest_retry::policies::ExponentialBackoff;
use std::fmt;
use thiserror::Error;
use tokio::io::{AsyncRead, ReadBuf};
use tokio_util::compat::FuturesAsyncReadCompatExt;
use tracing::debug;
use url::Url;
use uv_distribution_filename::SourceDistExtension;

use uv_cache::{Cache, CacheBucket, CacheEntry, Error as CacheError};
use uv_client::{BaseClient, is_transient_network_error};
use uv_extract::{Error as ExtractError, stream};
use uv_pep440::Version;
use uv_platform::Platform;
use uv_redacted::DisplaySafeUrl;

/// Binary tools that can be installed.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Binary {
    Ruff,
}

impl Binary {
    /// Get the default version for this binary.
    pub fn default_version(&self) -> Version {
        match self {
            // TODO(zanieb): Figure out a nice way to automate updating this
            Self::Ruff => Version::new([0, 12, 5]),
        }
    }

    /// The name of the binary.
    ///
    /// See [`Binary::executable`] for the platform-specific executable name.
    pub fn name(&self) -> &'static str {
        match self {
            Self::Ruff => "ruff",
        }
    }

    /// Get the download URL for a specific version and platform.
    pub fn download_url(
        &self,
        version: &Version,
        platform: &str,
        format: ArchiveFormat,
    ) -> Result<Url, Error> {
        match self {
            Self::Ruff => {
                let url = format!(
                    "https://github.com/astral-sh/ruff/releases/download/{version}/ruff-{platform}.{}",
                    format.extension()
                );
                Url::parse(&url).map_err(|err| Error::UrlParse { url, source: err })
            }
        }
    }

    /// Get the executable name
    pub fn executable(&self) -> String {
        format!("{}{}", self.name(), std::env::consts::EXE_SUFFIX)
    }
}

impl fmt::Display for Binary {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.name())
    }
}

/// Archive formats for binary downloads.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ArchiveFormat {
    Zip,
    TarGz,
}

impl ArchiveFormat {
    /// Get the file extension for this archive format.
    pub fn extension(&self) -> &'static str {
        match self {
            Self::Zip => "zip",
            Self::TarGz => "tar.gz",
        }
    }
}

impl From<ArchiveFormat> for SourceDistExtension {
    fn from(val: ArchiveFormat) -> Self {
        match val {
            ArchiveFormat::Zip => Self::Zip,
            ArchiveFormat::TarGz => Self::TarGz,
        }
    }
}
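
Putting `Binary::download_url` and `ArchiveFormat::extension` together yields GitHub release-asset URLs; a sketch, not part of the diff, under the assumption of a Linux x86-64 target triple:

// Sketch: the URL shape produced for ruff.
let url = Binary::Ruff
    .download_url(
        &Version::new([0, 12, 5]),
        "x86_64-unknown-linux-gnu",
        ArchiveFormat::TarGz,
    )
    .unwrap();
assert_eq!(
    url.as_str(),
    "https://github.com/astral-sh/ruff/releases/download/0.12.5/ruff-x86_64-unknown-linux-gnu.tar.gz"
);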

/// Errors that can occur during binary download and installation.
#[derive(Debug, Error)]
pub enum Error {
    #[error("Failed to download from: {url}")]
    Download {
        url: Url,
        #[source]
        source: reqwest_middleware::Error,
    },

    #[error("Failed to parse URL: {url}")]
    UrlParse {
        url: String,
        #[source]
        source: url::ParseError,
    },

    #[error("Failed to extract archive")]
    Extract {
        #[source]
        source: ExtractError,
    },

    #[error("Binary not found in archive at expected location: {expected}")]
    BinaryNotFound { expected: PathBuf },

    #[error(transparent)]
    Io(#[from] std::io::Error),

    #[error(transparent)]
    Cache(#[from] CacheError),

    #[error("Failed to detect platform")]
    Platform(#[from] uv_platform::Error),

    #[error("Attempt failed after {retries} {subject}", subject = if *retries > 1 { "retries" } else { "retry" })]
    RetriedError {
        #[source]
        err: Box<Error>,
        retries: u32,
    },
}

impl Error {
    /// Return the number of attempts that were made to complete this request before this error was
    /// returned. Note that e.g. 3 retries equates to 4 attempts.
    fn attempts(&self) -> u32 {
        if let Self::RetriedError { retries, .. } = self {
            return retries + 1;
        }
        1
    }
}

/// Install the given binary.
pub async fn bin_install(
    binary: Binary,
    version: &Version,
    client: &BaseClient,
    retry_policy: &ExponentialBackoff,
    cache: &Cache,
    reporter: &dyn Reporter,
) -> Result<PathBuf, Error> {
    let platform = Platform::from_env()?;
    let platform_name = platform.as_cargo_dist_triple();
    let cache_entry = CacheEntry::new(
        cache
            .bucket(CacheBucket::Binaries)
            .join(binary.name())
            .join(version.to_string())
            .join(&platform_name),
        binary.executable(),
    );

    // Lock the directory to prevent racing installs
    let _lock = cache_entry.with_file(".lock").lock().await?;
    if cache_entry.path().exists() {
        return Ok(cache_entry.into_path_buf());
    }

    let format = if platform.os.is_windows() {
        ArchiveFormat::Zip
    } else {
        ArchiveFormat::TarGz
    };

    let download_url = binary.download_url(version, &platform_name, format)?;

    let cache_dir = cache_entry.dir();
    fs_err::tokio::create_dir_all(&cache_dir).await?;

    let path = download_and_unpack_with_retry(
        binary,
        version,
        client,
        retry_policy,
        cache,
        reporter,
        &platform_name,
        format,
        &download_url,
        &cache_entry,
    )
    .await?;

    // Add executable bit
    #[cfg(unix)]
    {
        use std::fs::Permissions;
        use std::os::unix::fs::PermissionsExt;
        let permissions = fs_err::tokio::metadata(&path).await?.permissions();
        if permissions.mode() & 0o111 != 0o111 {
            fs_err::tokio::set_permissions(
                &path,
                Permissions::from_mode(permissions.mode() | 0o111),
            )
            .await?;
        }
    }

    Ok(path)
}

/// Download and unpack a binary with retry on stream failures.
async fn download_and_unpack_with_retry(
    binary: Binary,
    version: &Version,
    client: &BaseClient,
    retry_policy: &ExponentialBackoff,
    cache: &Cache,
    reporter: &dyn Reporter,
    platform_name: &str,
    format: ArchiveFormat,
    download_url: &Url,
    cache_entry: &CacheEntry,
) -> Result<PathBuf, Error> {
    let mut total_attempts = 0;
    let mut retried_here = false;
    let start_time = SystemTime::now();

    loop {
        let result = download_and_unpack(
            binary,
            version,
            client,
            cache,
            reporter,
            platform_name,
            format,
            download_url,
            cache_entry,
        )
        .await;

        let result = match result {
            Ok(path) => Ok(path),
            Err(err) => {
                total_attempts += err.attempts();
                let past_retries = total_attempts - 1;

                if is_transient_network_error(&err) {
                    let retry_decision = retry_policy.should_retry(start_time, past_retries);
                    if let reqwest_retry::RetryDecision::Retry { execute_after } = retry_decision {
                        debug!(
                            "Transient failure while installing {} {}; retrying...",
                            binary.name(),
                            version
                        );
                        let duration = execute_after
                            .duration_since(SystemTime::now())
                            .unwrap_or_else(|_| Duration::default());
                        tokio::time::sleep(duration).await;
                        retried_here = true;
                        continue;
                    }
                }

                if retried_here {
                    Err(Error::RetriedError {
                        err: Box::new(err),
                        retries: past_retries,
                    })
                } else {
                    Err(err)
                }
            }
        };
        return result;
    }
}

/// Download and unpack a binary.
///
/// NOTE: [`download_and_unpack_with_retry`] should be used instead.
async fn download_and_unpack(
    binary: Binary,
    version: &Version,
    client: &BaseClient,
    cache: &Cache,
    reporter: &dyn Reporter,
    platform_name: &str,
    format: ArchiveFormat,
    download_url: &Url,
    cache_entry: &CacheEntry,
) -> Result<PathBuf, Error> {
    // Create a temporary directory for extraction
    let temp_dir = tempfile::tempdir_in(cache.bucket(CacheBucket::Binaries))?;

    let response = client
        .for_host(&DisplaySafeUrl::from_url(download_url.clone()))
        .get(download_url.clone())
        .send()
        .await
        .map_err(|err| Error::Download {
            url: download_url.clone(),
            source: err,
        })?;

    let inner_retries = response
        .extensions()
        .get::<reqwest_retry::RetryCount>()
        .map(|retries| retries.value());

    if let Err(status_error) = response.error_for_status_ref() {
        let err = Error::Download {
            url: download_url.clone(),
            source: reqwest_middleware::Error::from(status_error),
        };
        if let Some(retries) = inner_retries {
            return Err(Error::RetriedError {
                err: Box::new(err),
                retries,
            });
        }
        return Err(err);
    }

    // Get the download size from headers if available
    let size = response
        .headers()
        .get(reqwest::header::CONTENT_LENGTH)
        .and_then(|val| val.to_str().ok())
        .and_then(|val| val.parse::<u64>().ok());

    // Stream download directly to extraction
    let reader = response
        .bytes_stream()
        .map_err(std::io::Error::other)
        .into_async_read()
        .compat();

    let id = reporter.on_download_start(binary.name(), version, size);
    let mut progress_reader = ProgressReader::new(reader, id, reporter);
    stream::archive(&mut progress_reader, format.into(), temp_dir.path())
        .await
        .map_err(|e| Error::Extract { source: e })?;
    reporter.on_download_complete(id);

    // Find the binary in the extracted files
    let extracted_binary = match format {
        ArchiveFormat::Zip => {
            // Windows ZIP archives contain the binary directly in the root
            temp_dir.path().join(binary.executable())
        }
        ArchiveFormat::TarGz => {
            // tar.gz archives contain the binary in a subdirectory
            temp_dir
                .path()
                .join(format!("{}-{platform_name}", binary.name()))
                .join(binary.executable())
        }
    };

    if !extracted_binary.exists() {
        return Err(Error::BinaryNotFound {
            expected: extracted_binary,
        });
    }

    // Move the binary to its final location before the temp directory is dropped
    fs_err::tokio::rename(&extracted_binary, cache_entry.path()).await?;

    Ok(cache_entry.path().to_path_buf())
}

/// Progress reporter for binary downloads.
pub trait Reporter: Send + Sync {
    /// Called when a download starts.
    fn on_download_start(&self, name: &str, version: &Version, size: Option<u64>) -> usize;
    /// Called when download progress is made.
    fn on_download_progress(&self, id: usize, inc: u64);
    /// Called when a download completes.
    fn on_download_complete(&self, id: usize);
}
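
Callers that do not render progress can satisfy the trait with a no-op implementation; a minimal sketch (the type name is hypothetical, not from this file):

// Sketch: a no-op progress reporter.
struct SilentReporter;

impl Reporter for SilentReporter {
    fn on_download_start(&self, _name: &str, _version: &Version, _size: Option<u64>) -> usize {
        // A fixed id suffices when only one download is tracked at a time.
        0
    }
    fn on_download_progress(&self, _id: usize, _inc: u64) {}
    fn on_download_complete(&self, _id: usize) {}
}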

/// An asynchronous reader that reports progress as bytes are read.
struct ProgressReader<'a, R> {
    reader: R,
    index: usize,
    reporter: &'a dyn Reporter,
}

impl<'a, R> ProgressReader<'a, R> {
    /// Create a new [`ProgressReader`] that wraps another reader.
    fn new(reader: R, index: usize, reporter: &'a dyn Reporter) -> Self {
        Self {
            reader,
            index,
            reporter,
        }
    }
}

impl<R> AsyncRead for ProgressReader<'_, R>
where
    R: AsyncRead + Unpin,
{
    fn poll_read(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &mut ReadBuf<'_>,
    ) -> Poll<std::io::Result<()>> {
        Pin::new(&mut self.as_mut().reader)
            .poll_read(cx, buf)
            .map_ok(|()| {
                self.reporter
                    .on_download_progress(self.index, buf.filled().len() as u64);
            })
    }
}
@@ -1,13 +1,13 @@
[package]
name = "uv-build-backend"
version = "0.0.8"
description = "This is an internal component crate of uv"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }
version = "0.1.0"
edition.workspace = true
rust-version.workspace = true
homepage.workspace = true
documentation.workspace = true
repository.workspace = true
authors.workspace = true
license.workspace = true

[lib]
doctest = false

@@ -26,7 +26,6 @@ uv-pypi-types = { workspace = true }
uv-version = { workspace = true }
uv-warnings = { workspace = true }

base64 = { workspace = true }
csv = { workspace = true }
flate2 = { workspace = true, default-features = false }
fs-err = { workspace = true }

@@ -57,6 +56,5 @@ schemars = ["dep:schemars", "uv-pypi-types/schemars"]

[dev-dependencies]
indoc = { workspace = true }
insta = { workspace = true }
regex = { workspace = true }
insta = { version = "1.40.0", features = ["filters"] }
tempfile = { workspace = true }
@@ -1,13 +0,0 @@
<!-- This file is generated. DO NOT EDIT -->

# uv-build-backend

This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
is unstable and will have frequent breaking changes.

This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-build-backend).

See uv's
[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
for details on versioning.
@@ -1,4 +1,3 @@
use itertools::Itertools;
mod metadata;
mod serde_verbatim;
mod settings;

@@ -8,11 +7,8 @@ mod wheel;
pub use metadata::{PyProjectToml, check_direct_build};
pub use settings::{BuildBackendSettings, WheelDataIncludes};
pub use source_dist::{build_source_dist, list_source_dist};
use uv_warnings::warn_user_once;
pub use wheel::{build_editable, build_wheel, list_wheel, metadata};

use std::collections::HashSet;
use std::ffi::OsStr;
use std::io;
use std::path::{Path, PathBuf};
use std::str::FromStr;

@@ -32,20 +28,20 @@ use crate::settings::ModuleName;
pub enum Error {
    #[error(transparent)]
    Io(#[from] io::Error),
    #[error("Invalid metadata format in: {}", _0.user_display())]
    Toml(PathBuf, #[source] toml::de::Error),
    #[error("Invalid project metadata")]
    #[error("Invalid pyproject.toml")]
    Toml(#[from] toml::de::Error),
    #[error("Invalid pyproject.toml")]
    Validation(#[from] ValidationError),
    #[error("Invalid module name: {0}")]
    InvalidModuleName(String, #[source] IdentifierParseError),
    #[error("Unsupported glob expression in: {field}")]
    #[error("Unsupported glob expression in: `{field}`")]
    PortableGlob {
        field: String,
        #[source]
        source: PortableGlobError,
    },
    /// <https://github.com/BurntSushi/ripgrep/discussions/2927>
    #[error("Glob expressions caused too large a regex in: {field}")]
    #[error("Glob expressions caused too large a regex in: `{field}`")]
    GlobSetTooLarge {
        field: String,
        #[source]

@@ -53,7 +49,7 @@ pub enum Error {
    },
    #[error("`pyproject.toml` must not be excluded from source distribution build")]
    PyprojectTomlExcluded,
    #[error("Failed to walk source tree: {}", root.user_display())]
    #[error("Failed to walk source tree: `{}`", root.user_display())]
    WalkDir {
        root: PathBuf,
        #[source]

@@ -63,19 +59,14 @@ pub enum Error {
    Zip(#[from] zip::result::ZipError),
    #[error("Failed to write RECORD file")]
    Csv(#[from] csv::Error),
    #[error("Expected a Python module at: {}", _0.user_display())]
    #[error("Expected a Python module at: `{}`", _0.user_display())]
    MissingInitPy(PathBuf),
    #[error("For namespace packages, `__init__.py[i]` is not allowed in parent directory: {}", _0.user_display())]
    #[error("For namespace packages, `__init__.py[i]` is not allowed in parent directory: `{}`", _0.user_display())]
    NotANamespace(PathBuf),
    /// Either an absolute path or a parent path through `..`.
    #[error("Module root must be inside the project: {}", _0.user_display())]
    #[error("Module root must be inside the project: `{}`", _0.user_display())]
    InvalidModuleRoot(PathBuf),
    /// Either an absolute path or a parent path through `..`.
    #[error("The path for the data directory {} must be inside the project: {}", name, path.user_display())]
    InvalidDataRoot { name: String, path: PathBuf },
    #[error("Virtual environments must not be added to source distributions or wheels, remove the directory or exclude it from the build: {}", _0.user_display())]
    VenvInSourceTree(PathBuf),
    #[error("Inconsistent metadata between prepare and build step: {0}")]
    #[error("Inconsistent metadata between prepare and build step: `{0}`")]
    InconsistentSteps(&'static str),
    #[error("Failed to write to {}", _0.user_display())]
    TarWrite(PathBuf, #[source] io::Error),

@@ -194,60 +185,6 @@ fn check_metadata_directory(
    Ok(())
}

/// Returns the list of module names without names which would be included twice.
///
/// In normal cases it should do nothing:
///
/// * `["aaa"] -> ["aaa"]`
/// * `["aaa", "bbb"] -> ["aaa", "bbb"]`
///
/// Duplicate elements are removed:
///
/// * `["aaa", "aaa"] -> ["aaa"]`
/// * `["bbb", "aaa", "bbb"] -> ["aaa", "bbb"]`
///
/// Names with more specific paths are removed in favour of more general paths:
///
/// * `["aaa.foo", "aaa"] -> ["aaa"]`
/// * `["bbb", "aaa", "bbb.foo", "ccc.foo", "ccc.foo.bar", "aaa"] -> ["aaa", "bbb", "ccc.foo"]`
///
/// This does not preserve the order of the elements.
fn prune_redundant_modules(mut names: Vec<String>) -> Vec<String> {
    names.sort();
    let mut pruned = Vec::with_capacity(names.len());
    for name in names {
        if let Some(last) = pruned.last() {
            if name == *last {
                continue;
            }
            // This is a more specific (narrow) module name than what came before
            if name
                .strip_prefix(last)
                .is_some_and(|suffix| suffix.starts_with('.'))
            {
                continue;
            }
        }
        pruned.push(name);
    }
    pruned
}
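
The pruning rules are easy to pin down in a test; a sketch of the documented behaviour, not part of the diff:

// Sketch: duplicates and more specific names are pruned.
#[test]
fn prune_redundant_modules_examples() {
    let names: Vec<String> = ["bbb", "aaa", "bbb.foo", "ccc.foo", "ccc.foo.bar", "aaa"]
        .into_iter()
        .map(String::from)
        .collect();
    // "aaa" deduplicates, "bbb.foo" folds into "bbb", "ccc.foo.bar" into "ccc.foo".
    assert_eq!(
        prune_redundant_modules(names),
        vec!["aaa".to_string(), "bbb".to_string(), "ccc.foo".to_string()]
    );
}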
|
||||
|
||||
/// Wraps [`prune_redundant_modules`] with a conditional warning when modules are ignored
|
||||
fn prune_redundant_modules_warn(names: &[String], show_warnings: bool) -> Vec<String> {
|
||||
let pruned = prune_redundant_modules(names.to_vec());
|
||||
if show_warnings && names.len() != pruned.len() {
|
||||
let mut pruned: HashSet<_> = pruned.iter().collect();
|
||||
let ignored: Vec<_> = names.iter().filter(|name| !pruned.remove(name)).collect();
|
||||
let s = if ignored.len() == 1 { "" } else { "s" };
|
||||
warn_user_once!(
|
||||
"Ignoring redundant module name{s} in `tool.uv.build-backend.module-name`: `{}`",
|
||||
ignored.into_iter().join("`, `")
|
||||
);
|
||||
}
|
||||
pruned
|
||||
}
|
||||
|
||||
/// Returns the source root and the module path(s) with the `__init__.py[i]` below to it while
|
||||
/// checking the project layout and names.
|
||||
///
|
||||
|
|
@ -270,13 +207,10 @@ fn find_roots(
|
|||
relative_module_root: &Path,
|
||||
module_name: Option<&ModuleName>,
|
||||
namespace: bool,
|
||||
show_warnings: bool,
|
||||
) -> Result<(PathBuf, Vec<PathBuf>), Error> {
|
||||
let relative_module_root = uv_fs::normalize_path(relative_module_root);
|
||||
// Check that even if a path contains `..`, we only include files below the module root.
|
||||
if !uv_fs::normalize_path(&source_tree.join(&relative_module_root))
|
||||
.starts_with(uv_fs::normalize_path(source_tree))
|
||||
{
|
||||
let src_root = source_tree.join(&relative_module_root);
|
||||
if !src_root.starts_with(source_tree) {
|
||||
return Err(Error::InvalidModuleRoot(relative_module_root.to_path_buf()));
|
||||
}
|
||||
let src_root = source_tree.join(&relative_module_root);
|
||||
|
|
@ -289,8 +223,8 @@ fn find_roots(
|
|||
ModuleName::Name(name) => {
|
||||
vec![name.split('.').collect::<PathBuf>()]
|
||||
}
|
||||
ModuleName::Names(names) => prune_redundant_modules_warn(names, show_warnings)
|
||||
.into_iter()
|
||||
ModuleName::Names(names) => names
|
||||
.iter()
|
||||
.map(|name| name.split('.').collect::<PathBuf>())
|
||||
.collect(),
|
||||
}
|
||||
|
|
@ -308,9 +242,9 @@ fn find_roots(
|
|||
let modules_relative = if let Some(module_name) = module_name {
|
||||
match module_name {
|
||||
ModuleName::Name(name) => vec![module_path_from_module_name(&src_root, name)?],
|
||||
ModuleName::Names(names) => prune_redundant_modules_warn(names, show_warnings)
|
||||
.into_iter()
|
||||
.map(|name| module_path_from_module_name(&src_root, &name))
|
||||
ModuleName::Names(names) => names
|
||||
.iter()
|
||||
.map(|name| module_path_from_module_name(&src_root, name))
|
||||
.collect::<Result<_, _>>()?,
|
||||
}
|
||||
} else {
|
||||
|
|
@ -413,27 +347,6 @@ fn module_path_from_module_name(src_root: &Path, module_name: &str) -> Result<Pa
|
|||
Ok(module_relative)
|
||||
}
|
||||
|
||||
/// Error if we're adding a venv to a distribution.
|
||||
pub(crate) fn error_on_venv(file_name: &OsStr, path: &Path) -> Result<(), Error> {
|
||||
// On 64-bit Unix, `lib64` is a (compatibility) symlink to lib. If we traverse `lib64` before
|
||||
// `pyvenv.cfg`, we show a generic error for symlink directories instead.
|
||||
if !(file_name == "pyvenv.cfg" || file_name == "lib64") {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let Some(parent) = path.parent() else {
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
if parent.join("bin").join("python").is_symlink()
|
||||
|| parent.join("Scripts").join("python.exe").is_file()
|
||||
{
|
||||
return Err(Error::VenvInSourceTree(parent.to_path_buf()));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
|
@ -442,7 +355,6 @@ mod tests {
|
|||
use indoc::indoc;
|
||||
use insta::assert_snapshot;
|
||||
use itertools::Itertools;
|
||||
use regex::Regex;
|
||||
use sha2::Digest;
|
||||
use std::io::{BufReader, Read};
|
||||
use std::iter;
|
||||
|
|
@ -450,8 +362,6 @@ mod tests {
|
|||
use uv_distribution_filename::{SourceDistFilename, WheelFilename};
|
||||
use uv_fs::{copy_dir_all, relative_to};
|
||||
|
||||
const MOCK_UV_VERSION: &str = "1.0.0+test";
|
||||
|
||||
fn format_err(err: &Error) -> String {
|
||||
let context = iter::successors(std::error::Error::source(&err), |&err| err.source())
|
||||
.map(|err| format!(" Caused by: {err}"))
|
||||
|
|
@ -478,20 +388,19 @@ mod tests {
|
|||
fn build(source_root: &Path, dist: &Path) -> Result<BuildResults, Error> {
|
||||
// Build a direct wheel, capture all its properties to compare it with the indirect wheel
|
||||
// latest and remove it since it has the same filename as the indirect wheel.
|
||||
let (_name, direct_wheel_list_files) = list_wheel(source_root, MOCK_UV_VERSION, false)?;
|
||||
let direct_wheel_filename = build_wheel(source_root, dist, None, MOCK_UV_VERSION, false)?;
|
||||
let (_name, direct_wheel_list_files) = list_wheel(source_root, "1.0.0+test")?;
|
||||
let direct_wheel_filename = build_wheel(source_root, dist, None, "1.0.0+test")?;
|
||||
let direct_wheel_path = dist.join(direct_wheel_filename.to_string());
|
||||
let direct_wheel_contents = wheel_contents(&direct_wheel_path);
|
||||
let direct_wheel_hash = sha2::Sha256::digest(fs_err::read(&direct_wheel_path)?);
|
||||
fs_err::remove_file(&direct_wheel_path)?;
|
||||
|
||||
// Build a source distribution.
|
||||
let (_name, source_dist_list_files) =
|
||||
list_source_dist(source_root, MOCK_UV_VERSION, false)?;
|
||||
let (_name, source_dist_list_files) = list_source_dist(source_root, "1.0.0+test")?;
|
||||
// TODO(konsti): This should run in the unpacked source dist tempdir, but we need to
|
||||
// normalize the path.
|
||||
let (_name, wheel_list_files) = list_wheel(source_root, MOCK_UV_VERSION, false)?;
|
||||
let source_dist_filename = build_source_dist(source_root, dist, MOCK_UV_VERSION, false)?;
|
||||
let (_name, wheel_list_files) = list_wheel(source_root, "1.0.0+test")?;
|
||||
let source_dist_filename = build_source_dist(source_root, dist, "1.0.0+test")?;
|
||||
let source_dist_path = dist.join(source_dist_filename.to_string());
|
||||
let source_dist_contents = sdist_contents(&source_dist_path);
|
||||
|
||||
|
|
@ -505,13 +414,7 @@ mod tests {
|
|||
source_dist_filename.name.as_dist_info_name(),
|
||||
source_dist_filename.version
|
||||
));
|
||||
let wheel_filename = build_wheel(
|
||||
&sdist_top_level_directory,
|
||||
dist,
|
||||
None,
|
||||
MOCK_UV_VERSION,
|
||||
false,
|
||||
)?;
|
||||
let wheel_filename = build_wheel(&sdist_top_level_directory, dist, None, "1.0.0+test")?;
|
||||
let wheel_contents = wheel_contents(&dist.join(wheel_filename.to_string()));
|
||||
|
||||
// Check that direct and indirect wheels are identical.
|
||||
|
|
@ -599,7 +502,7 @@ mod tests {
|
|||
/// platform-independent deterministic builds.
|
||||
#[test]
|
||||
fn built_by_uv_building() {
|
||||
let built_by_uv = Path::new("../../test/packages/built-by-uv");
|
||||
let built_by_uv = Path::new("../../scripts/packages/built-by-uv");
|
||||
let src = TempDir::new().unwrap();
|
||||
for dir in [
|
||||
"src",
|
||||
|
|
@ -612,14 +515,14 @@ mod tests {
|
|||
] {
|
||||
copy_dir_all(built_by_uv.join(dir), src.path().join(dir)).unwrap();
|
||||
}
|
||||
for filename in [
|
||||
for dir in [
|
||||
"pyproject.toml",
|
||||
"README.md",
|
||||
"uv.lock",
|
||||
"LICENSE-APACHE",
|
||||
"LICENSE-MIT",
|
||||
] {
|
||||
fs_err::copy(built_by_uv.join(filename), src.path().join(filename)).unwrap();
|
||||
fs_err::copy(built_by_uv.join(dir), src.path().join(dir)).unwrap();
|
||||
}
|
||||
|
||||
// Clear executable bit on Unix to build the same archive between Unix and Windows.
|
||||
|
|
@ -636,14 +539,6 @@ mod tests {
|
|||
fs_err::set_permissions(&path, perms).unwrap();
|
||||
}
|
||||
|
||||
// Redact the uv_build version to keep the hash stable across releases
|
||||
let pyproject_toml = fs_err::read_to_string(src.path().join("pyproject.toml")).unwrap();
|
||||
let current_requires =
|
||||
Regex::new(r#"requires = \["uv_build>=[0-9.]+,<[0-9.]+"\]"#).unwrap();
|
||||
let mocked_requires = r#"requires = ["uv_build>=1,<2"]"#;
|
||||
let pyproject_toml = current_requires.replace(pyproject_toml.as_str(), mocked_requires);
|
||||
fs_err::write(src.path().join("pyproject.toml"), pyproject_toml.as_bytes()).unwrap();
|
||||
|
||||
// Add some files to be excluded
|
||||
let module_root = src.path().join("src").join("built_by_uv");
|
||||
fs_err::create_dir_all(module_root.join("__pycache__")).unwrap();
|
||||
|
|
@ -662,7 +557,7 @@ mod tests {
|
|||
// Check that the source dist is reproducible across platforms.
|
||||
assert_snapshot!(
|
||||
format!("{:x}", sha2::Sha256::digest(fs_err::read(&source_dist_path).unwrap())),
|
||||
@"bb74bff575b135bb39e5c9bce56349441fb0923bb8857e32a5eaf34ec1843967"
|
||||
@"dab46bcc4d66960a11cfdc19604512a8e1a3241a67536f7e962166760e9c575c"
|
||||
);
|
||||
// Check both the files we report and the actual files
|
||||
assert_snapshot!(format_file_list(build.source_dist_list_files, src.path()), @r"
|
||||
|
|
@ -716,7 +611,7 @@ mod tests {
|
|||
// Check that the wheel is reproducible across platforms.
|
||||
assert_snapshot!(
|
||||
format!("{:x}", sha2::Sha256::digest(fs_err::read(&wheel_path).unwrap())),
|
||||
@"319afb04e87caf894b1362b508ec745253c6d241423ea59021694d2015e821da"
|
||||
@"ac3f68ac448023bca26de689d80401bff57f764396ae802bf4666234740ffbe3"
|
||||
);
|
||||
assert_snapshot!(build.wheel_contents.join("\n"), @r"
|
||||
built_by_uv-0.1.0.data/data/
|
||||
|
|
@ -759,31 +654,6 @@ mod tests {
|
|||
built_by_uv-0.1.0.dist-info/entry_points.txt (generated)
|
||||
built_by_uv-0.1.0.dist-info/METADATA (generated)
|
||||
");
|
||||
|
||||
let mut wheel = zip::ZipArchive::new(File::open(wheel_path).unwrap()).unwrap();
|
||||
let mut record = String::new();
|
||||
wheel
|
||||
.by_name("built_by_uv-0.1.0.dist-info/RECORD")
|
||||
.unwrap()
|
||||
.read_to_string(&mut record)
|
||||
.unwrap();
|
||||
assert_snapshot!(record, @r###"
|
||||
built_by_uv/__init__.py,sha256=AJ7XpTNWxYktP97ydb81UpnNqoebH7K4sHRakAMQKG4,44
|
||||
built_by_uv/arithmetic/__init__.py,sha256=x2agwFbJAafc9Z6TdJ0K6b6bLMApQdvRSQjP4iy7IEI,67
|
||||
built_by_uv/arithmetic/circle.py,sha256=FYZkv6KwrF9nJcwGOKigjke1dm1Fkie7qW1lWJoh3AE,287
|
||||
built_by_uv/arithmetic/pi.txt,sha256=-4HqoLoIrSKGf0JdTrM8BTTiIz8rq-MSCDL6LeF0iuU,8
|
||||
built_by_uv/cli.py,sha256=Jcm3PxSb8wTAN3dGm5vKEDQwCgoUXkoeggZeF34QyKM,44
|
||||
built_by_uv-0.1.0.dist-info/licenses/LICENSE-APACHE,sha256=QwcOLU5TJoTeUhuIXzhdCEEDDvorGiC6-3YTOl4TecE,11356
|
||||
built_by_uv-0.1.0.dist-info/licenses/LICENSE-MIT,sha256=F5Z0Cpu8QWyblXwXhrSo0b9WmYXQxd1LwLjVLJZwbiI,1077
|
||||
built_by_uv-0.1.0.dist-info/licenses/third-party-licenses/PEP-401.txt,sha256=KN-KAx829G2saLjVmByc08RFFtIDWvHulqPyD0qEBZI,270
|
||||
built_by_uv-0.1.0.data/headers/built_by_uv.h,sha256=p5-HBunJ1dY-xd4dMn03PnRClmGyRosScIp8rT46kg4,144
|
||||
built_by_uv-0.1.0.data/scripts/whoami.sh,sha256=T2cmhuDFuX-dTkiSkuAmNyIzvv8AKopjnuTCcr9o-eE,20
|
||||
built_by_uv-0.1.0.data/data/data.csv,sha256=7z7u-wXu7Qr2eBZFVpBILlNUiGSngv_1vYqZHVWOU94,265
|
||||
built_by_uv-0.1.0.dist-info/WHEEL,sha256=PaG_oOj9G2zCRqoLK0SjWBVZbGAMtIXDmm-MEGw9Wo0,83
|
||||
built_by_uv-0.1.0.dist-info/entry_points.txt,sha256=-IO6yaq6x6HSl-zWH96rZmgYvfyHlH00L5WQoCpz-YI,50
|
||||
built_by_uv-0.1.0.dist-info/METADATA,sha256=m6EkVvKrGmqx43b_VR45LHD37IZxPYC0NI6Qx9_UXLE,474
|
||||
built_by_uv-0.1.0.dist-info/RECORD,,
|
||||
"###);
|
||||
}

/// Test that `license = { file = "LICENSE" }` is supported.
@@ -799,7 +669,7 @@ mod tests {
license = { file = "license.txt" }

[build-system]
requires = ["uv_build>=0.5.15,<0.6.0"]
requires = ["uv_build>=0.5.15,<0.6"]
build-backend = "uv_build"
"#
},
@@ -821,7 +691,7 @@ mod tests {

// Build a wheel from a source distribution
let output_dir = TempDir::new().unwrap();
build_source_dist(src.path(), output_dir.path(), "0.5.15", false).unwrap();
build_source_dist(src.path(), output_dir.path(), "0.5.15").unwrap();
let sdist_tree = TempDir::new().unwrap();
let source_dist_path = output_dir.path().join("pep_pep639_license-1.0.0.tar.gz");
let sdist_reader = BufReader::new(File::open(&source_dist_path).unwrap());
@@ -832,7 +702,6 @@ mod tests {
output_dir.path(),
None,
"0.5.15",
false,
)
.unwrap();
let wheel = output_dir
@@ -868,7 +737,7 @@ mod tests {
version = "1.0.0"

[build-system]
requires = ["uv_build>=0.5.15,<0.6.0"]
requires = ["uv_build>=0.5.15,<0.6"]
build-backend = "uv_build"
"#
},
@@ -897,7 +766,6 @@ mod tests {
output_dir.path(),
Some(&metadata_dir.path().join(&dist_info_dir)),
"0.5.15",
false,
)
.unwrap();
let wheel = output_dir
@@ -933,7 +801,7 @@ mod tests {
version = "1.0.0"

[build-system]
requires = ["uv_build>=0.5.15,<0.6.0"]
requires = ["uv_build>=0.5.15,<0.6"]
build-backend = "uv_build"

[tool.uv.build-backend]
@@ -975,7 +843,7 @@ mod tests {
version = "1.0.0"

[build-system]
requires = ["uv_build>=0.5.15,<0.6.0"]
requires = ["uv_build>=0.5.15,<0.6"]
build-backend = "uv_build"

[tool.uv.build-backend]
@@ -1000,7 +868,7 @@ mod tests {
version = "1.0.0"

[build-system]
requires = ["uv_build>=0.5.15,<0.6.0"]
requires = ["uv_build>=0.5.15,<0.6"]
build-backend = "uv_build"

[tool.uv.build-backend]
@@ -1036,7 +904,7 @@ mod tests {
.replace('\\', "/");
assert_snapshot!(
err_message,
@"Expected a Python module at: [TEMP_PATH]/src/camel_case/__init__.py"
@"Expected a Python module at: `[TEMP_PATH]/src/camel_case/__init__.py`"
);
}

@@ -1049,7 +917,7 @@ mod tests {
version = "1.0.0"

[build-system]
requires = ["uv_build>=0.5.15,<0.6.0"]
requires = ["uv_build>=0.5.15,<0.6"]
build-backend = "uv_build"

[tool.uv.build-backend]
@@ -1080,7 +948,7 @@ mod tests {
version = "1.0.0"

[build-system]
requires = ["uv_build>=0.5.15,<0.6.0"]
requires = ["uv_build>=0.5.15,<0.6"]
build-backend = "uv_build"
"#
};
@@ -1101,7 +969,7 @@ mod tests {
.replace('\\', "/");
assert_snapshot!(
err_message,
@"Expected a Python module at: [TEMP_PATH]/src/stuffed_bird-stubs/__init__.pyi"
@"Expected a Python module at: `[TEMP_PATH]/src/stuffed_bird-stubs/__init__.pyi`"
);

// Create the correct file
@@ -1131,7 +999,7 @@ mod tests {
version = "1.0.0"

[build-system]
requires = ["uv_build>=0.5.15,<0.6.0"]
requires = ["uv_build>=0.5.15,<0.6"]
build-backend = "uv_build"

[tool.uv.build-backend]
@@ -1157,7 +1025,7 @@ mod tests {
module-name = "simple_namespace.part"

[build-system]
requires = ["uv_build>=0.5.15,<0.6.0"]
requires = ["uv_build>=0.5.15,<0.6"]
build-backend = "uv_build"
"#
};
@@ -1167,7 +1035,7 @@ mod tests {

assert_snapshot!(
build_err(src.path()),
@"Expected a Python module at: [TEMP_PATH]/src/simple_namespace/part/__init__.py"
@"Expected a Python module at: `[TEMP_PATH]/src/simple_namespace/part/__init__.py`"
);

// Create the correct file
@@ -1189,7 +1057,7 @@ mod tests {
File::create(&bogus_init_py).unwrap();
assert_snapshot!(
build_err(src.path()),
@"For namespace packages, `__init__.py[i]` is not allowed in parent directory: [TEMP_PATH]/src/simple_namespace"
@"For namespace packages, `__init__.py[i]` is not allowed in parent directory: `[TEMP_PATH]/src/simple_namespace`"
);
fs_err::remove_file(bogus_init_py).unwrap();

@@ -1225,7 +1093,7 @@ mod tests {
namespace = true

[build-system]
requires = ["uv_build>=0.5.15,<0.6.0"]
requires = ["uv_build>=0.5.15,<0.6"]
build-backend = "uv_build"
"#
};
@@ -1248,7 +1116,7 @@ mod tests {
namespace = true

[build-system]
requires = ["uv_build>=0.5.15,<0.6.0"]
requires = ["uv_build>=0.5.15,<0.6"]
build-backend = "uv_build"
"#
};
@@ -1309,7 +1177,7 @@ mod tests {
namespace = true

[build-system]
requires = ["uv_build>=0.5.15,<0.6.0"]
requires = ["uv_build>=0.5.15,<0.6"]
build-backend = "uv_build"
"#
};
@@ -1332,7 +1200,7 @@ mod tests {
module-name = "cloud-stubs.db.schema"

[build-system]
requires = ["uv_build>=0.5.15,<0.6.0"]
requires = ["uv_build>=0.5.15,<0.6"]
build-backend = "uv_build"
"#
};
@@ -1382,7 +1250,7 @@ mod tests {
module-name = ["foo", "simple_namespace.part_a", "simple_namespace.part_b"]

[build-system]
requires = ["uv_build>=0.5.15,<0.6.0"]
requires = ["uv_build>=0.5.15,<0.6"]
build-backend = "uv_build"
"#
};
@@ -1409,7 +1277,7 @@ mod tests {
// The first module is missing an `__init__.py`.
assert_snapshot!(
build_err(src.path()),
@"Expected a Python module at: [TEMP_PATH]/src/foo/__init__.py"
@"Expected a Python module at: `[TEMP_PATH]/src/foo/__init__.py`"
);

// Create the first correct `__init__.py` file
@@ -1418,7 +1286,7 @@ mod tests {
// The second module, a namespace, is missing an `__init__.py`.
assert_snapshot!(
build_err(src.path()),
@"Expected a Python module at: [TEMP_PATH]/src/simple_namespace/part_a/__init__.py"
@"Expected a Python module at: `[TEMP_PATH]/src/simple_namespace/part_a/__init__.py`"
);

// Create the other two correct `__init__.py` files
@@ -1448,7 +1316,7 @@ mod tests {
File::create(&bogus_init_py).unwrap();
assert_snapshot!(
build_err(src.path()),
@"For namespace packages, `__init__.py[i]` is not allowed in parent directory: [TEMP_PATH]/src/simple_namespace"
@"For namespace packages, `__init__.py[i]` is not allowed in parent directory: `[TEMP_PATH]/src/simple_namespace`"
);
fs_err::remove_file(bogus_init_py).unwrap();

@@ -1481,114 +1349,4 @@ mod tests {
simple_namespace_part-1.0.0.dist-info/WHEEL
");
}

/// `prune_redundant_modules` should remove modules which are already
/// included (either directly or via their parent)
#[test]
fn test_prune_redundant_modules() {
fn check(input: &[&str], expect: &[&str]) {
let input = input.iter().map(|s| (*s).to_string()).collect();
let expect: Vec<_> = expect.iter().map(|s| (*s).to_string()).collect();
assert_eq!(prune_redundant_modules(input), expect);
}

// Basic cases
check(&[], &[]);
check(&["foo"], &["foo"]);
check(&["foo", "bar"], &["bar", "foo"]);

// Deshadowing
check(&["foo", "foo.bar"], &["foo"]);
check(&["foo.bar", "foo"], &["foo"]);
check(
&["foo.bar.a", "foo.bar.b", "foo.bar", "foo", "foo.bar.a.c"],
&["foo"],
);
check(
&["bar.one", "bar.two", "baz", "bar", "baz.one"],
&["bar", "baz"],
);

// Potential false positives
check(&["foo", "foobar"], &["foo", "foobar"]);
check(
&["foo", "foobar", "foo.bar", "foobar.baz"],
&["foo", "foobar"],
);
check(&["foo.bar", "foo.baz"], &["foo.bar", "foo.baz"]);
check(&["foo", "foo", "foo.bar", "foo.bar"], &["foo"]);

// Everything
check(
&[
"foo.inner",
"foo.inner.deeper",
"foo",
"bar",
"bar.sub",
"bar.sub.deep",
"foobar",
"baz.baz.bar",
"baz.baz",
"qux",
],
&["bar", "baz.baz", "foo", "foobar", "qux"],
);
}
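// One implementation consistent with the cases above (a sketch, not
// necessarily the function under test): sort the module names, then keep a
// name only if no already-kept name equals it or is a dotted prefix of it.
// Sorting guarantees a parent such as `foo` is seen before `foo.bar`, and the
// equality check deduplicates repeated names.
fn prune_redundant_modules_sketch(mut modules: Vec<String>) -> Vec<String> {
    modules.sort();
    let mut kept: Vec<String> = Vec::new();
    for module in modules {
        let shadowed = kept.iter().any(|parent| {
            module == *parent
                || (module.starts_with(parent.as_str())
                    && module.as_bytes().get(parent.len()) == Some(&b'.'))
        });
        if !shadowed {
            kept.push(module);
        }
    }
    kept
}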

/// A package with duplicate module names.
#[test]
fn duplicate_module_names() {
let src = TempDir::new().unwrap();
let pyproject_toml = indoc! {r#"
[project]
name = "duplicate"
version = "1.0.0"

[tool.uv.build-backend]
module-name = ["foo", "foo", "bar.baz", "bar.baz.submodule"]

[build-system]
requires = ["uv_build>=0.5.15,<0.6.0"]
build-backend = "uv_build"
"#
};
fs_err::write(src.path().join("pyproject.toml"), pyproject_toml).unwrap();
fs_err::create_dir_all(src.path().join("src").join("foo")).unwrap();
File::create(src.path().join("src").join("foo").join("__init__.py")).unwrap();
fs_err::create_dir_all(src.path().join("src").join("bar").join("baz")).unwrap();
File::create(
src.path()
.join("src")
.join("bar")
.join("baz")
.join("__init__.py"),
)
.unwrap();

let dist = TempDir::new().unwrap();
let build = build(src.path(), dist.path()).unwrap();
assert_snapshot!(build.source_dist_contents.join("\n"), @r"
duplicate-1.0.0/
duplicate-1.0.0/PKG-INFO
duplicate-1.0.0/pyproject.toml
duplicate-1.0.0/src
duplicate-1.0.0/src/bar
duplicate-1.0.0/src/bar/baz
duplicate-1.0.0/src/bar/baz/__init__.py
duplicate-1.0.0/src/foo
duplicate-1.0.0/src/foo/__init__.py
");
assert_snapshot!(build.wheel_contents.join("\n"), @r"
bar/
bar/baz/
bar/baz/__init__.py
duplicate-1.0.0.dist-info/
duplicate-1.0.0.dist-info/METADATA
duplicate-1.0.0.dist-info/RECORD
duplicate-1.0.0.dist-info/WHEEL
foo/
foo/__init__.py
");
}
}

@@ -3,11 +3,11 @@ use std::ffi::OsStr;
use std::fmt::Display;
use std::fmt::Write;
use std::path::{Path, PathBuf};
use std::str::{self, FromStr};
use std::str::FromStr;

use itertools::Itertools;
use serde::{Deserialize, Deserializer};
use tracing::{debug, trace, warn};
use serde::Deserialize;
use tracing::{debug, trace};
use version_ranges::Ranges;
use walkdir::WalkDir;
@@ -21,7 +21,7 @@ use uv_pep508::{
use uv_pypi_types::{Metadata23, VerbatimParsedUrl};

use crate::serde_verbatim::SerdeVerbatim;
use crate::{BuildBackendSettings, Error, error_on_venv};
use crate::{BuildBackendSettings, Error};

/// By default, we ignore generated python files.
pub(crate) const DEFAULT_EXCLUDES: &[&str] = &["__pycache__", "*.pyc", "*.pyo"];
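// What these defaults do downstream: each pattern is compiled into a glob and
// any matching path is skipped when walking the source tree. A sketch with
// `globset` (the crate behind this backend's matchers; the real
// `build_exclude_matcher` additionally anchors patterns so they apply at any
// directory depth):
fn default_exclude_matcher_sketch() -> Result<globset::GlobSet, globset::Error> {
    use globset::{Glob, GlobSetBuilder};
    let mut builder = GlobSetBuilder::new();
    for pattern in DEFAULT_EXCLUDES {
        builder.add(Glob::new(pattern)?);
    }
    builder.build()
}
// With the default patterns, `foo/bar.pyc` is excluded (in globset, `*` may
// cross `/` unless configured otherwise), while sources such as `foo/bar.py`
// are kept.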
@@ -40,7 +40,7 @@ pub enum ValidationError {
UnknownExtension(String),
#[error("Can't infer content type because `{}` does not have an extension. Please use a supported extension (`.md`, `.rst`, `.txt`) or set the content type manually.", _0.user_display())]
MissingExtension(PathBuf),
#[error("Unsupported content type: {0}")]
#[error("Unsupported content type: `{0}`")]
UnsupportedContentType(String),
#[error("`project.description` must be a single line")]
DescriptionNewlines,
@@ -51,29 +51,23 @@ pub enum ValidationError {
)]
MixedLicenseGenerations,
#[error(
"Entrypoint groups must consist of letters and numbers separated by dots, invalid group: {0}"
"Entrypoint groups must consist of letters and numbers separated by dots, invalid group: `{0}`"
)]
InvalidGroup(String),
#[error(
"Entrypoint names must consist of letters, numbers, dots, underscores and dashes; invalid name: `{0}`"
)]
InvalidName(String),
#[error("Use `project.scripts` instead of `project.entry-points.console_scripts`")]
ReservedScripts,
#[error("Use `project.gui-scripts` instead of `project.entry-points.gui_scripts`")]
ReservedGuiScripts,
#[error("`project.license` is not a valid SPDX expression: {0}")]
#[error("`project.license` is not a valid SPDX expression: `{0}`")]
InvalidSpdx(String, #[source] spdx::error::ParseError),
#[error("`{field}` glob `{glob}` did not match any files")]
LicenseGlobNoMatches { field: String, glob: String },
#[error("License file `{}` must be UTF-8 encoded", _0)]
LicenseFileNotUtf8(String),
}

/// Check if the build backend matches the currently running uv version.
pub fn check_direct_build(source_tree: &Path, name: impl Display) -> bool {
#[derive(Deserialize)]
#[serde(rename_all = "kebab-case")]
struct PyProjectToml {
build_system: BuildSystem,
}

let pyproject_toml: PyProjectToml =
match fs_err::read_to_string(source_tree.join("pyproject.toml"))
.map_err(|err| err.to_string())
@@ -83,14 +77,12 @@ pub fn check_direct_build(source_tree: &Path, name: impl Display) -> bool {
Ok(pyproject_toml) => pyproject_toml,
Err(err) => {
debug!(
"Not using uv build backend direct build for source tree `{name}`, \
failed to parse pyproject.toml: {err}"
"Not using uv build backend direct build of {name}, no pyproject.toml: {err}"
);
return false;
}
};
match pyproject_toml
.build_system
.check_build_system(uv_version::version())
.as_slice()
{
@@ -99,36 +91,16 @@ pub fn check_direct_build(source_tree: &Path, name: impl Display) -> bool {
// Any warning -> no match
[first, others @ ..] => {
debug!(
"Not using uv build backend direct build of `{name}`, pyproject.toml does not match: {first}"
"Not using uv build backend direct build of {name}, pyproject.toml does not match: {first}"
);
for other in others {
trace!("Further uv build backend direct build of `{name}` mismatch: {other}");
trace!("Further uv build backend direct build of {name} mismatch: {other}");
}
false
}
}
}

/// A package name as provided in a `pyproject.toml`.
#[derive(Debug, Clone)]
struct VerbatimPackageName {
/// The package name as given in the `pyproject.toml`.
given: String,
/// The normalized package name.
normalized: PackageName,
}

impl<'de> Deserialize<'de> for VerbatimPackageName {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let given = String::deserialize(deserializer)?;
let normalized = PackageName::from_str(&given).map_err(serde::de::Error::custom)?;
Ok(Self { given, normalized })
}
}
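// Why both spellings are kept: PEP 503 normalization lowercases the name and
// collapses runs of `-`, `_`, and `.` into a single `-`, while core metadata
// should echo the name exactly as written. A rough standalone sketch of that
// normalization rule (the real one lives in `PackageName::from_str`):
fn normalize_name_sketch(given: &str) -> String {
    let mut normalized = String::with_capacity(given.len());
    let mut pending_separator = false;
    for c in given.chars() {
        if matches!(c, '-' | '_' | '.') {
            pending_separator = true;
        } else {
            if pending_separator && !normalized.is_empty() {
                normalized.push('-');
            }
            pending_separator = false;
            normalized.push(c.to_ascii_lowercase());
        }
    }
    normalized
}
// `normalize_name_sketch("Hello-World")` yields `hello-world`, while `given`
// still carries `Hello-World` into the `Name:` field of the core metadata.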

/// A `pyproject.toml` as specified in PEP 517.
#[derive(Deserialize, Debug, Clone)]
#[serde(
@@ -147,18 +119,15 @@ pub struct PyProjectToml {

impl PyProjectToml {
pub(crate) fn name(&self) -> &PackageName {
&self.project.name.normalized
&self.project.name
}

pub(crate) fn version(&self) -> &Version {
&self.project.version
}

pub(crate) fn parse(path: &Path) -> Result<Self, Error> {
let contents = fs_err::read_to_string(path)?;
let pyproject_toml =
toml::from_str(&contents).map_err(|err| Error::Toml(path.to_path_buf(), err))?;
Ok(pyproject_toml)
pub(crate) fn parse(contents: &str) -> Result<Self, Error> {
Ok(toml::from_str(contents)?)
}

pub(crate) fn readme(&self) -> Option<&Readme> {
@@ -196,9 +165,83 @@ impl PyProjectToml {
self.tool.as_ref()?.uv.as_ref()?.build_backend.as_ref()
}

/// See [`BuildSystem::check_build_system`].
/// Returns user-facing warnings if the `[build-system]` table looks suspicious.
///
/// Example of a valid table:
///
/// ```toml
/// [build-system]
/// requires = ["uv_build>=0.4.15,<5"]
/// build-backend = "uv_build"
/// ```
pub fn check_build_system(&self, uv_version: &str) -> Vec<String> {
self.build_system.check_build_system(uv_version)
let mut warnings = Vec::new();
if self.build_system.build_backend.as_deref() != Some("uv_build") {
warnings.push(format!(
r#"The value for `build_system.build-backend` should be `"uv_build"`, not `"{}"`"#,
self.build_system.build_backend.clone().unwrap_or_default()
));
}

let uv_version =
Version::from_str(uv_version).expect("uv's own version is not PEP 440 compliant");
let next_minor = uv_version.release().get(1).copied().unwrap_or_default() + 1;
let next_breaking = Version::new([0, next_minor]);

let expected = || {
format!(
"Expected a single uv requirement in `build-system.requires`, found `{}`",
toml::to_string(&self.build_system.requires).unwrap_or_default()
)
};

let [uv_requirement] = &self.build_system.requires.as_slice() else {
warnings.push(expected());
return warnings;
};
if uv_requirement.name.as_str() != "uv-build" {
warnings.push(expected());
return warnings;
}
let bounded = match &uv_requirement.version_or_url {
None => false,
Some(VersionOrUrl::Url(_)) => {
// We can't validate the url
true
}
Some(VersionOrUrl::VersionSpecifier(specifier)) => {
// We don't check how wide the range is (that's up to the user), we just
// check that the current version is compliant, to avoid accidentally using a
// too new or too old uv, and we check that an upper bound exists. The latter
// is very important to allow making breaking changes in uv without breaking
// the existing immutable source distributions on pypi.
if !specifier.contains(&uv_version) {
// This is allowed to happen when testing prereleases, but we should still warn.
warnings.push(format!(
r#"`build_system.requires = ["{uv_requirement}"]` does not contain the
current uv version {uv_version}"#,
));
}
Ranges::from(specifier.clone())
.bounding_range()
.map(|bounding_range| bounding_range.1 != Bound::Unbounded)
.unwrap_or(false)
}
};

if !bounded {
warnings.push(format!(
"`build_system.requires = [\"{}\"]` is missing an \
upper bound on the `uv_build` version such as `<{next_breaking}`. \
Without bounding the `uv_build` version, the source distribution will break \
when a future, breaking version of `uv_build` is released.",
// Use an underscore consistently, to avoid confusing users between a package name with dash and a
// module name with underscore
uv_requirement.verbatim()
));
}

warnings
}
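// The upper-bound check above, in isolation: convert the PEP 440 specifier
// set into ranges and inspect the upper end of the bounding range. A sketch
// assuming the same `VersionSpecifiers` and `version_ranges::Ranges` types
// imported by this file:
fn has_upper_bound_sketch(specifiers: &VersionSpecifiers) -> bool {
    Ranges::from(specifiers.clone())
        .bounding_range()
        .map(|(_, upper)| upper != Bound::Unbounded)
        .unwrap_or(false)
}
// `>=0.4.15,<0.5` passes; a bare `>=0.4.15` (or no specifier at all) does
// not, which is exactly what triggers the missing-upper-bound warning above.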

/// Validate and convert a `pyproject.toml` to core metadata.
@@ -346,7 +389,97 @@ impl PyProjectToml {
"2.3"
};

let (license, license_expression, license_files) = self.license_metadata(root)?;
// TODO(konsti): Issue a warning on old license metadata once PEP 639 is universal.
let (license, license_expression, license_files) =
if let Some(license_globs) = &self.project.license_files {
let license_expression = match &self.project.license {
None => None,
Some(License::Spdx(license_expression)) => Some(license_expression.clone()),
Some(License::Text { .. } | License::File { .. }) => {
return Err(ValidationError::MixedLicenseGenerations.into());
}
};

let mut license_files = Vec::new();
let mut license_globs_parsed = Vec::new();
for license_glob in license_globs {
let pep639_glob =
PortableGlobParser::Pep639
.parse(license_glob)
.map_err(|err| Error::PortableGlob {
field: license_glob.to_string(),
source: err,
})?;
license_globs_parsed.push(pep639_glob);
}
let license_globs =
GlobDirFilter::from_globs(&license_globs_parsed).map_err(|err| {
Error::GlobSetTooLarge {
field: "tool.uv.build-backend.source-include".to_string(),
source: err,
}
})?;

for entry in WalkDir::new(root)
.sort_by_file_name()
.into_iter()
.filter_entry(|entry| {
license_globs.match_directory(
entry
.path()
.strip_prefix(root)
.expect("walkdir starts with root"),
)
})
{
let entry = entry.map_err(|err| Error::WalkDir {
root: root.to_path_buf(),
err,
})?;
let relative = entry
.path()
.strip_prefix(root)
.expect("walkdir starts with root");
if !license_globs.match_path(relative) {
trace!("Not a license files match: `{}`", relative.user_display());
continue;
}
if !entry.file_type().is_file() {
trace!(
"Not a file in license files match: `{}`",
relative.user_display()
);
continue;
}

debug!("License files match: `{}`", relative.user_display());
license_files.push(relative.portable_display().to_string());
}

// The glob order may be unstable
license_files.sort();

(None, license_expression, license_files)
} else {
match &self.project.license {
None => (None, None, Vec::new()),
Some(License::Spdx(license_expression)) => {
(None, Some(license_expression.clone()), Vec::new())
}
Some(License::Text { text }) => (Some(text.clone()), None, Vec::new()),
Some(License::File { file }) => {
let text = fs_err::read_to_string(root.join(file))?;
(Some(text), None, Vec::new())
}
}
};

// Check that the license expression is a valid SPDX identifier.
if let Some(license_expression) = &license_expression {
if let Err(err) = spdx::Expression::parse(license_expression) {
return Err(ValidationError::InvalidSpdx(license_expression.clone(), err).into());
}
}

// TODO(konsti): https://peps.python.org/pep-0753/#label-normalization (Draft)
let project_urls = self
@@ -391,7 +524,7 @@ impl PyProjectToml {

Ok(Metadata23 {
metadata_version: metadata_version.to_string(),
name: self.project.name.given.clone(),
name: self.project.name.to_string(),
version: self.project.version.to_string(),
// Not supported.
platforms: vec![],
@@ -416,7 +549,7 @@ impl PyProjectToml {
license_files,
classifiers: self.project.classifiers.clone().unwrap_or_default(),
requires_dist: requires_dist.iter().map(ToString::to_string).collect(),
provides_extra: extras.iter().map(ToString::to_string).collect(),
provides_extras: extras.iter().map(ToString::to_string).collect(),
// Not commonly set.
provides_dist: vec![],
// Not supported.
@@ -433,156 +566,6 @@ impl PyProjectToml {
})
}

/// Parse and validate the old (PEP 621) and new (PEP 639) license files.
#[allow(clippy::type_complexity)]
fn license_metadata(
&self,
root: &Path,
) -> Result<(Option<String>, Option<String>, Vec<String>), Error> {
// TODO(konsti): Issue a warning on old license metadata once PEP 639 is universal.
let (license, license_expression, license_files) = if let Some(license_globs) =
&self.project.license_files
{
let license_expression = match &self.project.license {
None => None,
Some(License::Spdx(license_expression)) => Some(license_expression.clone()),
Some(License::Text { .. } | License::File { .. }) => {
return Err(ValidationError::MixedLicenseGenerations.into());
}
};

let mut license_files = Vec::new();
let mut license_globs_parsed = Vec::with_capacity(license_globs.len());
let mut license_glob_matchers = Vec::with_capacity(license_globs.len());

for license_glob in license_globs {
let pep639_glob =
PortableGlobParser::Pep639
.parse(license_glob)
.map_err(|err| Error::PortableGlob {
field: license_glob.to_owned(),
source: err,
})?;
license_glob_matchers.push(pep639_glob.compile_matcher());
license_globs_parsed.push(pep639_glob);
}

// Track whether each user-specified glob matched so we can flag the unmatched ones.
let mut license_globs_matched = vec![false; license_globs_parsed.len()];

let license_globs =
GlobDirFilter::from_globs(&license_globs_parsed).map_err(|err| {
Error::GlobSetTooLarge {
field: "project.license-files".to_string(),
source: err,
}
})?;

for entry in WalkDir::new(root)
.sort_by_file_name()
.into_iter()
.filter_entry(|entry| {
license_globs.match_directory(
entry
.path()
.strip_prefix(root)
.expect("walkdir starts with root"),
)
})
{
let entry = entry.map_err(|err| Error::WalkDir {
root: root.to_path_buf(),
err,
})?;

let relative = entry
.path()
.strip_prefix(root)
.expect("walkdir starts with root");

if !license_globs.match_path(relative) {
trace!("Not a license files match: {}", relative.user_display());
continue;
}

let file_type = entry.file_type();

if !(file_type.is_file() || file_type.is_symlink()) {
trace!(
"Not a file or symlink in license files match: {}",
relative.user_display()
);
continue;
}

error_on_venv(entry.file_name(), entry.path())?;

debug!("License files match: {}", relative.user_display());

for (matched, matcher) in license_globs_matched
.iter_mut()
.zip(license_glob_matchers.iter())
{
if *matched {
continue;
}

if matcher.is_match(relative) {
*matched = true;
}
}

license_files.push(relative.portable_display().to_string());
}

if let Some((pattern, _)) = license_globs_parsed
.into_iter()
.zip(license_globs_matched)
.find(|(_, matched)| !matched)
{
return Err(ValidationError::LicenseGlobNoMatches {
field: "project.license-files".to_string(),
glob: pattern.to_string(),
}
.into());
}

for license_file in &license_files {
let file_path = root.join(license_file);
let bytes = fs_err::read(&file_path)?;
if str::from_utf8(&bytes).is_err() {
return Err(ValidationError::LicenseFileNotUtf8(license_file.clone()).into());
}
}

// The glob order may be unstable
license_files.sort();

(None, license_expression, license_files)
} else {
match &self.project.license {
None => (None, None, Vec::new()),
Some(License::Spdx(license_expression)) => {
(None, Some(license_expression.clone()), Vec::new())
}
Some(License::Text { text }) => (Some(text.clone()), None, Vec::new()),
Some(License::File { file }) => {
let text = fs_err::read_to_string(root.join(file))?;
(Some(text), None, Vec::new())
}
}
};

// Check that the license expression is a valid SPDX identifier.
if let Some(license_expression) = &license_expression {
if let Err(err) = spdx::Expression::parse(license_expression) {
return Err(ValidationError::InvalidSpdx(license_expression.clone(), err).into());
}
}

Ok((license, license_expression, license_files))
}
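// The glob handling above in miniature: each `project.license-files` entry is
// compiled into a matcher and candidate paths are filtered and sorted. A
// reduced sketch using `globset` directly (the real code goes through
// `PortableGlobParser` and `GlobDirFilter`, which add PEP 639 restrictions
// and directory pruning):
fn match_license_files_sketch(
    patterns: &[&str],
    relative_files: &[&str],
) -> Result<Vec<String>, globset::Error> {
    use globset::{Glob, GlobSetBuilder};
    let mut builder = GlobSetBuilder::new();
    for pattern in patterns {
        builder.add(Glob::new(pattern)?);
    }
    let set = builder.build()?;
    let mut matched: Vec<String> = relative_files
        .iter()
        .copied()
        .filter(|path| set.is_match(path))
        .map(str::to_string)
        .collect();
    // The walk order may be unstable, so sort for reproducible metadata.
    matched.sort();
    Ok(matched)
}
// `match_license_files_sketch(&["LICENSE*", "licenses/**"], &["LICENSE-MIT", "src/lib.rs"])`
// yields `["LICENSE-MIT"]`.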

/// Validate and convert the entrypoints in `pyproject.toml`, including console and GUI scripts,
/// to an `entry_points.txt`.
///
@@ -637,14 +620,12 @@ impl PyProjectToml {

let _ = writeln!(writer, "[{group}]");
for (name, object_reference) in entries {
// More strict than the spec, we enforce the recommendation
if !name
.chars()
.all(|c| c.is_alphanumeric() || c == '.' || c == '-' || c == '_')
{
warn!(
"Entrypoint names should consist of letters, numbers, dots, underscores and \
dashes; non-compliant name: {name}"
);
return Err(ValidationError::InvalidName(name.to_string()));
}

// TODO(konsti): Validate that the object references are valid Python identifiers.
@@ -664,7 +645,7 @@ impl PyProjectToml {
#[serde(rename_all = "kebab-case")]
struct Project {
/// The name of the project.
name: VerbatimPackageName,
name: PackageName,
/// The version of the project.
version: Version,
/// The summary description of the project in one line.
@@ -722,7 +703,7 @@ struct Project {
/// The optional `project.readme` key in a pyproject.toml as specified in
/// <https://packaging.python.org/en/latest/specifications/pyproject-toml/#readme>.
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged, rename_all_fields = "kebab-case")]
#[serde(untagged, rename_all = "kebab-case")]
pub(crate) enum Readme {
/// Relative path to the README.
String(PathBuf),
@@ -732,7 +713,7 @@ pub(crate) enum Readme {
content_type: String,
charset: Option<String>,
},
/// The full description of the project as an inline value.
/// The full description of the project as inline value.
Text {
text: String,
content_type: String,
@@ -744,9 +725,9 @@ impl Readme {
/// If the readme is a file, return the path to the file.
pub(crate) fn path(&self) -> Option<&Path> {
match self {
Self::String(path) => Some(path),
Self::File { file, .. } => Some(file),
Self::Text { .. } => None,
Readme::String(path) => Some(path),
Readme::File { file, .. } => Some(file),
Readme::Text { .. } => None,
}
}
}
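// How the untagged enum behaves in practice: serde tries each variant in
// declaration order, so a bare string parses as a path and tables are
// distinguished by their keys. A self-contained sketch (mirroring, not
// reusing, the `Readme` enum above):
#[derive(serde::Deserialize)]
#[serde(untagged)]
enum ReadmeSketch {
    Path(std::path::PathBuf),
    File {
        file: std::path::PathBuf,
        #[serde(rename = "content-type")]
        content_type: String,
    },
    Text {
        text: String,
        #[serde(rename = "content-type")]
        content_type: String,
    },
}
// `readme = "README.md"` hits `Path`, while
// `readme = { file = "README.md", content-type = "text/markdown" }` hits
// `File`; a table matching no variant produces the "data did not match any
// variant of untagged enum Readme" error asserted in the tests below.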
@@ -801,6 +782,18 @@ pub(crate) enum Contact {
Email { email: String },
}

/// The `[build-system]` section of a pyproject.toml as specified in PEP 517.
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
struct BuildSystem {
/// PEP 508 dependencies required to execute the build system.
requires: Vec<SerdeVerbatim<Requirement<VerbatimParsedUrl>>>,
/// A string naming a Python object that will be used to perform the build.
build_backend: Option<String>,
/// <https://peps.python.org/pep-0517/#in-tree-build-backends>
backend_path: Option<Vec<String>>,
}

/// The `tool` section as specified in PEP 517.
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "kebab-case")]
@@ -817,100 +810,6 @@ pub(crate) struct ToolUv {
build_backend: Option<BuildBackendSettings>,
}

/// The `[build-system]` section of a pyproject.toml as specified in PEP 517.
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
struct BuildSystem {
/// PEP 508 dependencies required to execute the build system.
requires: Vec<SerdeVerbatim<Requirement<VerbatimParsedUrl>>>,
/// A string naming a Python object that will be used to perform the build.
build_backend: Option<String>,
/// <https://peps.python.org/pep-0517/#in-tree-build-backends>
backend_path: Option<Vec<String>>,
}

impl BuildSystem {
/// Check if the `[build-system]` table matches the uv build backend expectations and return
/// a list of warnings if it looks suspicious.
///
/// Example of a valid table:
///
/// ```toml
/// [build-system]
/// requires = ["uv_build>=0.4.15,<0.5.0"]
/// build-backend = "uv_build"
/// ```
pub(crate) fn check_build_system(&self, uv_version: &str) -> Vec<String> {
let mut warnings = Vec::new();
if self.build_backend.as_deref() != Some("uv_build") {
warnings.push(format!(
r#"The value for `build_system.build-backend` should be `"uv_build"`, not `"{}"`"#,
self.build_backend.clone().unwrap_or_default()
));
}

let uv_version =
Version::from_str(uv_version).expect("uv's own version is not PEP 440 compliant");
let next_minor = uv_version.release().get(1).copied().unwrap_or_default() + 1;
let next_breaking = Version::new([0, next_minor]);

let expected = || {
format!(
"Expected a single uv requirement in `build-system.requires`, found `{}`",
toml::to_string(&self.requires).unwrap_or_default()
)
};

let [uv_requirement] = &self.requires.as_slice() else {
warnings.push(expected());
return warnings;
};
if uv_requirement.name.as_str() != "uv-build" {
warnings.push(expected());
return warnings;
}
let bounded = match &uv_requirement.version_or_url {
None => false,
Some(VersionOrUrl::Url(_)) => {
// We can't validate the url
true
}
Some(VersionOrUrl::VersionSpecifier(specifier)) => {
// We don't check how wide the range is (that's up to the user), we just
// check that the current version is compliant, to avoid accidentally using a
// too new or too old uv, and we check that an upper bound exists. The latter
// is very important to allow making breaking changes in uv without breaking
// the existing immutable source distributions on pypi.
if !specifier.contains(&uv_version) {
// This is allowed to happen when testing prereleases, but we should still warn.
warnings.push(format!(
r#"`build_system.requires = ["{uv_requirement}"]` does not contain the
current uv version {uv_version}"#,
));
}
Ranges::from(specifier.clone())
.bounding_range()
.map(|bounding_range| bounding_range.1 != Bound::Unbounded)
.unwrap_or(false)
}
};

if !bounded {
warnings.push(format!(
"`build_system.requires = [\"{}\"]` is missing an \
upper bound on the `uv_build` version such as `<{next_breaking}`. \
Without bounding the `uv_build` version, the source distribution will break \
when a future, breaking version of `uv_build` is released.",
// Use an underscore consistently, to avoid confusing users between a package name with dash and a
// module name with underscore
uv_requirement.verbatim()
));
}

warnings
}
}

#[cfg(test)]
mod tests {
use super::*;
@@ -927,7 +826,7 @@ mod tests {
{payload}

[build-system]
requires = ["uv_build>=0.4.15,<0.5.0"]
requires = ["uv_build>=0.4.15,<5"]
build-backend = "uv_build"
"#
}
@@ -941,28 +840,6 @@ mod tests {
formatted
}

#[test]
fn uppercase_package_name() {
let contents = r#"
[project]
name = "Hello-World"
version = "0.1.0"

[build-system]
requires = ["uv_build>=0.4.15,<0.5.0"]
build-backend = "uv_build"
"#;
let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
let temp_dir = TempDir::new().unwrap();

let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();
assert_snapshot!(metadata.core_metadata_format(), @r"
Metadata-Version: 2.3
Name: Hello-World
Version: 0.1.0
");
}

#[test]
fn valid() {
let temp_dir = TempDir::new().unwrap();
@@ -1032,12 +909,12 @@ mod tests {
foo-bar = "foo:bar"

[build-system]
requires = ["uv_build>=0.4.15,<0.5.0"]
requires = ["uv_build>=0.4.15,<5"]
build-backend = "uv_build"
"#
};

let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();

assert_snapshot!(metadata.core_metadata_format(), @r###"
@@ -1088,65 +965,6 @@ mod tests {
"###);
}

#[test]
fn readme() {
let temp_dir = TempDir::new().unwrap();

fs_err::write(
temp_dir.path().join("Readme.md"),
indoc! {r"
# Foo

This is the foo library.
"},
)
.unwrap();

fs_err::write(
temp_dir.path().join("License.txt"),
indoc! {r#"
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"#},
)
.unwrap();

let contents = indoc! {r#"
# See https://github.com/pypa/sampleproject/blob/main/pyproject.toml for another example

[project]
name = "hello-world"
version = "0.1.0"
description = "A Python package"
readme = { file = "Readme.md", content-type = "text/markdown" }
requires_python = ">=3.12"

[build-system]
requires = ["uv_build>=0.4.15,<0.5"]
build-backend = "uv_build"
"#
};

let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();

assert_snapshot!(metadata.core_metadata_format(), @r"
Metadata-Version: 2.3
Name: hello-world
Version: 0.1.0
Summary: A Python package
Description-Content-Type: text/markdown

# Foo

This is the foo library.
");
}

#[test]
fn self_extras() {
let temp_dir = TempDir::new().unwrap();
@@ -1218,12 +1036,12 @@ mod tests {
foo-bar = "foo:bar"

[build-system]
requires = ["uv_build>=0.4.15,<0.5.0"]
requires = ["uv_build>=0.4.15,<5"]
build-backend = "uv_build"
"#
};

let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
let metadata = pyproject_toml.to_metadata(temp_dir.path()).unwrap();

assert_snapshot!(metadata.core_metadata_format(), @r###"
@@ -1284,9 +1102,9 @@ mod tests {
#[test]
fn build_system_valid() {
let contents = extend_project("");
let pyproject_toml: PyProjectToml = toml::from_str(&contents).unwrap();
let pyproject_toml = PyProjectToml::parse(&contents).unwrap();
assert_snapshot!(
pyproject_toml.check_build_system("0.4.15+test").join("\n"),
pyproject_toml.check_build_system("1.0.0+test").join("\n"),
@""
);
}
@@ -1302,7 +1120,7 @@ mod tests {
requires = ["uv_build"]
build-backend = "uv_build"
"#};
let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
assert_snapshot!(
pyproject_toml.check_build_system("0.4.15+test").join("\n"),
@r###"`build_system.requires = ["uv_build"]` is missing an upper bound on the `uv_build` version such as `<0.5`. Without bounding the `uv_build` version, the source distribution will break when a future, breaking version of `uv_build` is released."###
@@ -1317,10 +1135,10 @@ mod tests {
version = "0.1.0"

[build-system]
requires = ["uv_build>=0.4.15,<0.5.0", "wheel"]
requires = ["uv_build>=0.4.15,<5", "wheel"]
build-backend = "uv_build"
"#};
let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
assert_snapshot!(
pyproject_toml.check_build_system("0.4.15+test").join("\n"),
@"Expected a single uv requirement in `build-system.requires`, found ``"
@@ -1338,7 +1156,7 @@ mod tests {
requires = ["setuptools"]
build-backend = "uv_build"
"#};
let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
assert_snapshot!(
pyproject_toml.check_build_system("0.4.15+test").join("\n"),
@"Expected a single uv requirement in `build-system.requires`, found ``"
@@ -1353,10 +1171,10 @@ mod tests {
version = "0.1.0"

[build-system]
requires = ["uv_build>=0.4.15,<0.5.0"]
requires = ["uv_build>=0.4.15,<5"]
build-backend = "setuptools"
"#};
let pyproject_toml: PyProjectToml = toml::from_str(contents).unwrap();
let pyproject_toml = PyProjectToml::parse(contents).unwrap();
assert_snapshot!(
pyproject_toml.check_build_system("0.4.15+test").join("\n"),
@r###"The value for `build_system.build-backend` should be `"uv_build"`, not `"setuptools"`"###
@@ -1367,7 +1185,7 @@ mod tests {
fn minimal() {
let contents = extend_project("");

let metadata = toml::from_str::<PyProjectToml>(&contents)
let metadata = PyProjectToml::parse(&contents)
.unwrap()
.to_metadata(Path::new("/do/not/read"))
.unwrap();
@@ -1386,14 +1204,15 @@ mod tests {
"#
});

let err = toml::from_str::<PyProjectToml>(&contents).unwrap_err();
assert_snapshot!(format_err(err), @r#"
TOML parse error at line 4, column 10
let err = PyProjectToml::parse(&contents).unwrap_err();
assert_snapshot!(format_err(err), @r###"
Invalid pyproject.toml
Caused by: TOML parse error at line 4, column 10
|
4 | readme = { path = "Readme.md" }
| ^^^^^^^^^^^^^^^^^^^^^^
data did not match any variant of untagged enum Readme
"#);
"###);
}

#[test]
@@ -1403,7 +1222,7 @@ mod tests {
"#
});

let err = toml::from_str::<PyProjectToml>(&contents)
let err = PyProjectToml::parse(&contents)
.unwrap()
.to_metadata(Path::new("/do/not/read"))
.unwrap_err();
@@ -1425,14 +1244,14 @@ mod tests {
"#
});

let err = toml::from_str::<PyProjectToml>(&contents)
let err = PyProjectToml::parse(&contents)
.unwrap()
.to_metadata(Path::new("/do/not/read"))
.unwrap_err();
assert_snapshot!(format_err(err), @r"
Invalid project metadata
assert_snapshot!(format_err(err), @r###"
Invalid pyproject.toml
Caused by: `project.description` must be a single line
");
"###);
}

#[test]
@@ -1443,14 +1262,14 @@ mod tests {
"#
});

let err = toml::from_str::<PyProjectToml>(&contents)
let err = PyProjectToml::parse(&contents)
.unwrap()
.to_metadata(Path::new("/do/not/read"))
.unwrap_err();
assert_snapshot!(format_err(err), @r"
Invalid project metadata
assert_snapshot!(format_err(err), @r###"
Invalid pyproject.toml
Caused by: When `project.license-files` is defined, `project.license` must be an SPDX expression string
");
"###);
}

#[test]
@@ -1459,7 +1278,7 @@ mod tests {
license = "MIT OR Apache-2.0"
"#
});
let metadata = toml::from_str::<PyProjectToml>(&contents)
let metadata = PyProjectToml::parse(&contents)
.unwrap()
.to_metadata(Path::new("/do/not/read"))
.unwrap();
@@ -1477,17 +1296,17 @@ mod tests {
license = "MIT XOR Apache-2"
"#
});
let err = toml::from_str::<PyProjectToml>(&contents)
let err = PyProjectToml::parse(&contents)
.unwrap()
.to_metadata(Path::new("/do/not/read"))
.unwrap_err();
// TODO(konsti): We mess up the indentation in the error.
assert_snapshot!(format_err(err), @r"
Invalid project metadata
Caused by: `project.license` is not a valid SPDX expression: MIT XOR Apache-2
assert_snapshot!(format_err(err), @r###"
Invalid pyproject.toml
Caused by: `project.license` is not a valid SPDX expression: `MIT XOR Apache-2`
Caused by: MIT XOR Apache-2
^^^ unknown term
");
"###);
}

#[test]
@@ -1497,18 +1316,18 @@ mod tests {
"#
});

let err = toml::from_str::<PyProjectToml>(&contents)
let err = PyProjectToml::parse(&contents)
.unwrap()
.to_metadata(Path::new("/do/not/read"))
.unwrap_err();
assert_snapshot!(format_err(err), @r"
Invalid project metadata
assert_snapshot!(format_err(err), @r###"
Invalid pyproject.toml
Caused by: Dynamic metadata is not supported
");
"###);
}

fn script_error(contents: &str) -> String {
let err = toml::from_str::<PyProjectToml>(contents)
let err = PyProjectToml::parse(contents)
.unwrap()
.to_entry_points()
.unwrap_err();
@@ -1522,7 +1341,17 @@ mod tests {
foo = "bar"
"#
});
assert_snapshot!(script_error(&contents), @"Entrypoint groups must consist of letters and numbers separated by dots, invalid group: a@b");
assert_snapshot!(script_error(&contents), @"Entrypoint groups must consist of letters and numbers separated by dots, invalid group: `a@b`");
}

#[test]
fn invalid_entry_point_name() {
let contents = extend_project(indoc! {r#"
[project.scripts]
"a@b" = "bar"
"#
});
assert_snapshot!(script_error(&contents), @"Entrypoint names must consist of letters, numbers, dots, underscores and dashes; invalid name: `a@b`");
}
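// The name rule asserted above as a standalone predicate: letters, numbers,
// dots, underscores, and dashes only (stricter than the entry points
// specification, which merely recommends this). A sketch:
fn is_valid_entry_point_name_sketch(name: &str) -> bool {
    !name.is_empty()
        && name
            .chars()
            .all(|c| c.is_alphanumeric() || c == '.' || c == '-' || c == '_')
}
// `is_valid_entry_point_name_sketch("foo-bar")` is true, while `"a@b"` fails,
// matching the snapshot above. (The non-empty check is an addition of this
// sketch, not taken from the validator.)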

#[test]

@@ -1,5 +1,5 @@
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};
use std::path::PathBuf;
use uv_macros::OptionsMetadata;

/// Settings for the uv build backend (`uv_build`).
@@ -70,9 +70,6 @@ pub struct BuildBackendSettings {
pub default_excludes: bool,

/// Glob expressions matching files and directories to exclude from the source distribution.
///
/// These exclusions are also applied to wheels to ensure that a wheel built from a source tree
/// is consistent with a wheel built from a source distribution.
#[option(
default = r#"[]"#,
value_type = "list[str]",
@@ -158,7 +155,7 @@ pub struct BuildBackendSettings {
/// with this package as build requirement use the include directory to find additional header
/// files.
/// - `purelib` and `platlib`: Installed to the `site-packages` directory. It is not recommended
/// to use these two options.
/// to uses these two options.
// TODO(konsti): We should show a flat example instead.
// ```toml
// [tool.uv.build-backend.data]
@@ -168,7 +165,7 @@ pub struct BuildBackendSettings {
#[option(
default = r#"{}"#,
value_type = "dict[str, str]",
example = r#"data = { headers = "include/headers", scripts = "bin" }"#
example = r#"data = { "headers": "include/headers", "scripts": "bin" }"#
)]
pub data: WheelDataIncludes,
}
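// A worked example of the data setting in the shape the TODO above sketches
// (hypothetical paths):
//
// ```toml
// [tool.uv.build-backend.data]
// headers = "include/headers"
// scripts = "bin"
// ```
//
// Files under `include/headers` land in `<name>-<version>.data/headers/` and
// files under `bin` in `<name>-<version>.data/scripts/` inside the wheel;
// installers then scatter them to the matching install scheme directories.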
|
||||
|
|
@ -207,16 +204,16 @@ pub enum ModuleName {
|
|||
#[serde(default, rename_all = "kebab-case", deny_unknown_fields)]
|
||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||
pub struct WheelDataIncludes {
|
||||
purelib: Option<PathBuf>,
|
||||
platlib: Option<PathBuf>,
|
||||
headers: Option<PathBuf>,
|
||||
scripts: Option<PathBuf>,
|
||||
data: Option<PathBuf>,
|
||||
purelib: Option<String>,
|
||||
platlib: Option<String>,
|
||||
headers: Option<String>,
|
||||
scripts: Option<String>,
|
||||
data: Option<String>,
|
||||
}
|
||||
|
||||
impl WheelDataIncludes {
|
||||
/// Yield all data directories name and corresponding paths.
|
||||
pub fn iter(&self) -> impl Iterator<Item = (&'static str, &Path)> {
|
||||
pub fn iter(&self) -> impl Iterator<Item = (&'static str, &str)> {
|
||||
[
|
||||
("purelib", self.purelib.as_deref()),
|
||||
("platlib", self.platlib.as_deref()),
|
||||
|
|
|
|||
|
|
@ -1,8 +1,7 @@
|
|||
use crate::metadata::DEFAULT_EXCLUDES;
|
||||
use crate::wheel::build_exclude_matcher;
|
||||
use crate::{
|
||||
BuildBackendSettings, DirectoryWriter, Error, FileList, ListWriter, PyProjectToml,
|
||||
error_on_venv, find_roots,
|
||||
BuildBackendSettings, DirectoryWriter, Error, FileList, ListWriter, PyProjectToml, find_roots,
|
||||
};
|
||||
use flate2::Compression;
|
||||
use flate2::write::GzEncoder;
|
||||
|
|
@ -10,7 +9,7 @@ use fs_err::File;
|
|||
use globset::{Glob, GlobSet};
|
||||
use std::io;
|
||||
use std::io::{BufReader, Cursor};
|
||||
use std::path::{Component, Path, PathBuf};
|
||||
use std::path::{Path, PathBuf};
|
||||
use tar::{EntryType, Header};
|
||||
use tracing::{debug, trace};
|
||||
use uv_distribution_filename::{SourceDistExtension, SourceDistFilename};
|
||||
|
|
@ -24,9 +23,9 @@ pub fn build_source_dist(
|
|||
source_tree: &Path,
|
||||
source_dist_directory: &Path,
|
||||
uv_version: &str,
|
||||
show_warnings: bool,
|
||||
) -> Result<SourceDistFilename, Error> {
|
||||
let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
|
||||
let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
|
||||
let pyproject_toml = PyProjectToml::parse(&contents)?;
|
||||
let filename = SourceDistFilename {
|
||||
name: pyproject_toml.name().clone(),
|
||||
version: pyproject_toml.version().clone(),
|
||||
|
|
@ -34,7 +33,7 @@ pub fn build_source_dist(
|
|||
};
|
||||
let source_dist_path = source_dist_directory.join(filename.to_string());
|
||||
let writer = TarGzWriter::new(&source_dist_path)?;
|
||||
write_source_dist(source_tree, writer, uv_version, show_warnings)?;
|
||||
write_source_dist(source_tree, writer, uv_version)?;
|
||||
Ok(filename)
|
||||
}
|
||||
|
||||
|
|
@ -42,9 +41,9 @@ pub fn build_source_dist(
|
|||
pub fn list_source_dist(
|
||||
source_tree: &Path,
|
||||
uv_version: &str,
|
||||
show_warnings: bool,
|
||||
) -> Result<(SourceDistFilename, FileList), Error> {
|
||||
let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
|
||||
let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
|
||||
let pyproject_toml = PyProjectToml::parse(&contents)?;
|
||||
let filename = SourceDistFilename {
|
||||
name: pyproject_toml.name().clone(),
|
||||
version: pyproject_toml.version().clone(),
|
||||
|
|
@ -52,7 +51,7 @@ pub fn list_source_dist(
|
|||
};
|
||||
let mut files = FileList::new();
|
||||
let writer = ListWriter::new(&mut files);
|
||||
write_source_dist(source_tree, writer, uv_version, show_warnings)?;
|
||||
write_source_dist(source_tree, writer, uv_version)?;
|
||||
Ok((filename, files))
|
||||
}
|
||||
|
||||
|
|
@ -61,7 +60,6 @@ fn source_dist_matcher(
|
|||
source_tree: &Path,
|
||||
pyproject_toml: &PyProjectToml,
|
||||
settings: BuildBackendSettings,
|
||||
show_warnings: bool,
|
||||
) -> Result<(GlobDirFilter, GlobSet), Error> {
|
||||
// File and directories to include in the source directory
|
||||
let mut include_globs = Vec::new();
|
||||
|
|
@ -76,7 +74,6 @@ fn source_dist_matcher(
|
|||
&settings.module_root,
|
||||
settings.module_name.as_ref(),
|
||||
settings.namespace,
|
||||
show_warnings,
|
||||
)?;
|
||||
for module_relative in modules_relative {
|
||||
// The wheel must not include any files included by the source distribution (at least until we
|
||||
|
|
@ -106,7 +103,7 @@ fn source_dist_matcher(
|
|||
.and_then(|readme| readme.path())
|
||||
{
|
||||
let readme = uv_fs::normalize_path(readme);
|
||||
trace!("Including readme at: {}", readme.user_display());
|
||||
trace!("Including readme at: `{}`", readme.user_display());
|
||||
let readme = readme.portable_display().to_string();
|
||||
let glob = Glob::new(&globset::escape(&readme)).expect("escaped globset is parseable");
|
||||
include_globs.push(glob);
|
||||
|
|
@ -114,7 +111,7 @@ fn source_dist_matcher(
|
|||
|
||||
// Include the license files
for license_files in pyproject_toml.license_files_source_dist() {
trace!("Including license files at: {license_files}`");
trace!("Including license files at: `{license_files}`");
let glob = PortableGlobParser::Pep639
.parse(license_files)
.map_err(|err| Error::PortableGlob {

@@ -126,18 +123,12 @@ fn source_dist_matcher(
// Include the data files
for (name, directory) in settings.data.iter() {
let directory = uv_fs::normalize_path(directory);
trace!("Including data ({}) at: {}", name, directory.user_display());
if directory
.components()
.next()
.is_some_and(|component| !matches!(component, Component::CurDir | Component::Normal(_)))
{
return Err(Error::InvalidDataRoot {
name: name.to_string(),
path: directory.to_path_buf(),
});
}
let directory = uv_fs::normalize_path(Path::new(directory));
trace!(
"Including data ({}) at: `{}`",
name,
directory.user_display()
);
let directory = directory.portable_display().to_string();
let glob = PortableGlobParser::Uv
.parse(&format!("{}/**", globset::escape(&directory)))

@@ -149,7 +140,7 @@ fn source_dist_matcher(
}

debug!(
"Source distribution includes: {:?}",
"Source distribution includes: `{:?}`",
include_globs
.iter()
.map(ToString::to_string)

@@ -184,9 +175,9 @@ fn write_source_dist(
source_tree: &Path,
mut writer: impl DirectoryWriter,
uv_version: &str,
show_warnings: bool,
) -> Result<SourceDistFilename, Error> {
let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
let pyproject_toml = PyProjectToml::parse(&contents)?;
for warning in pyproject_toml.check_build_system(uv_version) {
warn_user_once!("{warning}");
}

@@ -220,7 +211,7 @@ fn write_source_dist(
)?;

let (include_matcher, exclude_matcher) =
source_dist_matcher(source_tree, &pyproject_toml, settings, show_warnings)?;
source_dist_matcher(source_tree, &pyproject_toml, settings)?;

let mut files_visited = 0;
for entry in WalkDir::new(source_tree)

@@ -261,12 +252,10 @@ fn write_source_dist(
.expect("walkdir starts with root");

if !include_matcher.match_path(relative) || exclude_matcher.is_match(relative) {
trace!("Excluding from sdist: {}", relative.user_display());
trace!("Excluding from sdist: `{}`", relative.user_display());
continue;
}

error_on_venv(entry.file_name(), entry.path())?;

let entry_path = Path::new(&top_level)
.join(relative)
.portable_display()

@@ -299,10 +288,6 @@ impl TarGzWriter {
impl DirectoryWriter for TarGzWriter {
fn write_bytes(&mut self, path: &str, bytes: &[u8]) -> Result<(), Error> {
let mut header = Header::new_gnu();
// Work around bug in Python's std tar module
// https://github.com/python/cpython/issues/141707
// https://github.com/astral-sh/uv/pull/17043#issuecomment-3636841022
header.set_entry_type(EntryType::Regular);
header.set_size(bytes.len() as u64);
// Reasonable default to avoid 0o000 permissions, the user's umask will be applied on
// unpacking.

@@ -316,10 +301,6 @@ impl DirectoryWriter for TarGzWriter {
fn write_file(&mut self, path: &str, file: &Path) -> Result<(), Error> {
let metadata = fs_err::metadata(file)?;
let mut header = Header::new_gnu();
// Work around bug in Python's std tar module
// https://github.com/python/cpython/issues/141707
// https://github.com/astral-sh/uv/pull/17043#issuecomment-3636841022
header.set_entry_type(EntryType::Regular);
// Preserve the executable bit, especially for scripts
#[cfg(unix)]
let executable_bit = {
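Both `TarGzWriter` hunks carry the same workaround: the entry type is pinned to a regular file so Python's `tarfile` can read the archive, and a non-zero mode is set so unpacking never produces `0o000` files. A minimal sketch of that header setup with the `tar` crate — the helper name and the 644/755 defaults are illustrative, not uv's exact code:

```rust
use tar::{EntryType, Header};

/// Hypothetical helper: build a GNU tar header for a regular file.
fn regular_file_header(len: u64, executable: bool) -> Header {
    let mut header = Header::new_gnu();
    // Pin the entry type explicitly; relying on the default can trip up
    // Python's std tar module (see the CPython issue linked above).
    header.set_entry_type(EntryType::Regular);
    header.set_size(len);
    // Non-zero default permissions; the unpacking user's umask still applies.
    header.set_mode(if executable { 0o755 } else { 0o644 });
    header.set_cksum();
    header
}
```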
@@ -1,11 +1,10 @@
use base64::{Engine, prelude::BASE64_URL_SAFE_NO_PAD as base64};
use fs_err::File;
use globset::{GlobSet, GlobSetBuilder};
use itertools::Itertools;
use rustc_hash::FxHashSet;
use sha2::{Digest, Sha256};
use std::io::{BufReader, Read, Write};
use std::path::{Component, Path, PathBuf};
use std::path::{Path, PathBuf};
use std::{io, mem};
use tracing::{debug, trace};
use walkdir::WalkDir;

@@ -19,8 +18,7 @@ use uv_warnings::warn_user_once;

use crate::metadata::DEFAULT_EXCLUDES;
use crate::{
BuildBackendSettings, DirectoryWriter, Error, FileList, ListWriter, PyProjectToml,
error_on_venv, find_roots,
BuildBackendSettings, DirectoryWriter, Error, FileList, ListWriter, PyProjectToml, find_roots,
};

/// Build a wheel from the source tree and place it in the output directory.

@@ -29,9 +27,9 @@ pub fn build_wheel(
wheel_dir: &Path,
metadata_directory: Option<&Path>,
uv_version: &str,
show_warnings: bool,
) -> Result<WheelFilename, Error> {
let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
let pyproject_toml = PyProjectToml::parse(&contents)?;
for warning in pyproject_toml.check_build_system(uv_version) {
warn_user_once!("{warning}");
}

@@ -58,7 +56,6 @@ pub fn build_wheel(
&filename,
uv_version,
wheel_writer,
show_warnings,
)?;

Ok(filename)

@@ -68,9 +65,9 @@ pub fn build_wheel(
pub fn list_wheel(
source_tree: &Path,
uv_version: &str,
show_warnings: bool,
) -> Result<(WheelFilename, FileList), Error> {
let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
let pyproject_toml = PyProjectToml::parse(&contents)?;
for warning in pyproject_toml.check_build_system(uv_version) {
warn_user_once!("{warning}");
}

@@ -88,14 +85,7 @@ pub fn list_wheel(

let mut files = FileList::new();
let writer = ListWriter::new(&mut files);
write_wheel(
source_tree,
&pyproject_toml,
&filename,
uv_version,
writer,
show_warnings,
)?;
write_wheel(source_tree, &pyproject_toml, &filename, uv_version, writer)?;
Ok((filename, files))
}

@@ -105,7 +95,6 @@ fn write_wheel(
filename: &WheelFilename,
uv_version: &str,
mut wheel_writer: impl DirectoryWriter,
show_warnings: bool,
) -> Result<(), Error> {
let settings = pyproject_toml
.settings()

@@ -141,7 +130,6 @@ fn write_wheel(
&settings.module_root,
settings.module_name.as_ref(),
settings.namespace,
show_warnings,
)?;

let mut files_visited = 0;

@@ -187,12 +175,10 @@ fn write_wheel(
.strip_prefix(&src_root)
.expect("walkdir starts with root");
if exclude_matcher.is_match(match_path) {
trace!("Excluding from module: {}", match_path.user_display());
trace!("Excluding from module: `{}`", match_path.user_display());
continue;
}

error_on_venv(entry.file_name(), entry.path())?;

let entry_path = entry_path.portable_display().to_string();
debug!("Adding to wheel: {entry_path}");
wheel_writer.write_dir_entry(&entry, &entry_path)?;

@@ -220,20 +206,7 @@ fn write_wheel(

// Add the data files
for (name, directory) in settings.data.iter() {
debug!(
"Adding {name} data files from: {}",
directory.user_display()
);
if directory
.components()
.next()
.is_some_and(|component| !matches!(component, Component::CurDir | Component::Normal(_)))
{
return Err(Error::InvalidDataRoot {
name: name.to_string(),
path: directory.to_path_buf(),
});
}
debug!("Adding {name} data files from: `{directory}`");
let data_dir = format!(
"{}-{}.data/{}/",
pyproject_toml.name().as_dist_info_name(),

@@ -269,9 +242,9 @@ pub fn build_editable(
wheel_dir: &Path,
metadata_directory: Option<&Path>,
uv_version: &str,
show_warnings: bool,
) -> Result<WheelFilename, Error> {
let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
let pyproject_toml = PyProjectToml::parse(&contents)?;
for warning in pyproject_toml.check_build_system(uv_version) {
warn_user_once!("{warning}");
}

@@ -305,7 +278,6 @@ pub fn build_editable(
&settings.module_root,
settings.module_name.as_ref(),
settings.namespace,
show_warnings,
)?;

wheel_writer.write_bytes(

@@ -313,7 +285,7 @@ pub fn build_editable(
src_root.as_os_str().as_encoded_bytes(),
)?;

debug!("Adding metadata files to: {}", wheel_path.user_display());
debug!("Adding metadata files to: `{}`", wheel_path.user_display());
let dist_info_dir = write_dist_info(
&mut wheel_writer,
&pyproject_toml,

@@ -332,7 +304,8 @@ pub fn metadata(
metadata_directory: &Path,
uv_version: &str,
) -> Result<String, Error> {
let pyproject_toml = PyProjectToml::parse(&source_tree.join("pyproject.toml"))?;
let contents = fs_err::read_to_string(source_tree.join("pyproject.toml"))?;
let pyproject_toml = PyProjectToml::parse(&contents)?;
for warning in pyproject_toml.check_build_system(uv_version) {
warn_user_once!("{warning}");
}

@@ -373,7 +346,7 @@ struct RecordEntry {
///
/// While the spec would allow backslashes, we always use portable paths with forward slashes.
path: String,
/// The urlsafe-base64-nopad encoded SHA256 of the files.
/// The SHA256 of the files.
hash: String,
/// The size of the file in bytes.
size: usize,

@@ -408,7 +381,7 @@ fn write_hashed(
}
Ok(RecordEntry {
path: path.to_string(),
hash: base64.encode(hasher.finalize()),
hash: format!("{:x}", hasher.finalize()),
size,
})
}
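The `RecordEntry` doc comment and `write_hashed` change hinge on the RECORD hash encoding: one side emits the SHA-256 digest as URL-safe base64 without padding (matching the `sha256=...` rows in the snapshots below), the other as lowercase hex. A self-contained sketch of producing a base64-style row with the `sha2` and `base64` crates — the path and contents are made up:

```rust
use base64::{Engine, prelude::BASE64_URL_SAFE_NO_PAD};
use sha2::{Digest, Sha256};

fn main() {
    let contents = b"print('hello')\n";
    // URL-safe base64, no padding, over the raw SHA-256 digest.
    let hash = BASE64_URL_SAFE_NO_PAD.encode(Sha256::digest(contents));
    // A RECORD row: path, `sha256=<digest>`, size in bytes.
    println!("pkg/__init__.py,sha256={hash},{}", contents.len());
}
```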
@@ -538,17 +511,15 @@ fn wheel_subdir_from_globs(
.expect("walkdir starts with root");

if !matcher.match_path(relative) {
trace!("Excluding {}: {}", globs_field, relative.user_display());
trace!("Excluding {}: `{}`", globs_field, relative.user_display());
continue;
}

error_on_venv(entry.file_name(), entry.path())?;

let license_path = Path::new(target)
.join(relative)
.portable_display()
.to_string();
debug!("Adding for {}: {}", globs_field, relative.user_display());
debug!("Adding for {}: `{}`", globs_field, relative.user_display());
wheel_writer.write_dir_entry(&entry, &license_path)?;
}
Ok(())

@@ -650,8 +621,8 @@ impl ZipDirectoryWriter {
path: &str,
executable_bit: bool,
) -> Result<Box<dyn Write + 'slf>, Error> {
// Set file permissions: 644 (rw-r--r--) for regular files, 755 (rwxr-xr-x) for executables
let permissions = if executable_bit { 0o755 } else { 0o644 };
// 644 is the default of the zip crate.
let permissions = if executable_bit { 775 } else { 664 };
let options = zip::write::SimpleFileOptions::default()
.unix_permissions(permissions)
.compression_method(self.compression);

@@ -663,14 +634,11 @@ impl ZipDirectoryWriter {
impl DirectoryWriter for ZipDirectoryWriter {
fn write_bytes(&mut self, path: &str, bytes: &[u8]) -> Result<(), Error> {
trace!("Adding {}", path);
// Set appropriate permissions for metadata files (644 = rw-r--r--)
let options = zip::write::SimpleFileOptions::default()
.unix_permissions(0o644)
.compression_method(self.compression);
let options = zip::write::SimpleFileOptions::default().compression_method(self.compression);
self.writer.start_file(path, options)?;
self.writer.write_all(bytes)?;

let hash = base64.encode(Sha256::new().chain_update(bytes).finalize());
let hash = format!("{:x}", Sha256::new().chain_update(bytes).finalize());
self.record.push(RecordEntry {
path: path.to_string(),
hash,

@@ -748,7 +716,7 @@ impl FilesystemWriter {
impl DirectoryWriter for FilesystemWriter {
fn write_bytes(&mut self, path: &str, bytes: &[u8]) -> Result<(), Error> {
trace!("Adding {}", path);
let hash = base64.encode(Sha256::new().chain_update(bytes).finalize());
let hash = format!("{:x}", Sha256::new().chain_update(bytes).finalize());
self.record.push(RecordEntry {
path: path.to_string(),
hash,

@@ -824,14 +792,14 @@ mod test {
fn test_record() {
let record = vec![RecordEntry {
path: "built_by_uv/__init__.py".to_string(),
hash: "ifhp5To6AGGlLAIz5kQtTXLegKii00BtnqC_05fteGU".to_string(),
hash: "89f869e53a3a0061a52c0233e6442d4d72de80a8a2d3406d9ea0bfd397ed7865".to_string(),
size: 37,
}];

let mut writer = Vec::new();
write_record(&mut writer, "built_by_uv-0.1.0", record).unwrap();
assert_snapshot!(String::from_utf8(writer).unwrap(), @r"
built_by_uv/__init__.py,sha256=ifhp5To6AGGlLAIz5kQtTXLegKii00BtnqC_05fteGU,37
built_by_uv/__init__.py,sha256=89f869e53a3a0061a52c0233e6442d4d72de80a8a2d3406d9ea0bfd397ed7865,37
built_by_uv-0.1.0/RECORD,,
");
}

@@ -840,7 +808,7 @@ mod test {
#[test]
fn test_prepare_metadata() {
let metadata_dir = TempDir::new().unwrap();
let built_by_uv = Path::new("../../test/packages/built-by-uv");
let built_by_uv = Path::new("../../scripts/packages/built-by-uv");
metadata(built_by_uv, metadata_dir.path(), "1.0.0+test").unwrap();

let mut files: Vec<_> = WalkDir::new(metadata_dir.path())

@@ -890,9 +858,9 @@ mod test {
.path()
.join("built_by_uv-0.1.0.dist-info/RECORD");
assert_snapshot!(fs_err::read_to_string(record_file).unwrap(), @r###"
built_by_uv-0.1.0.dist-info/WHEEL,sha256=PaG_oOj9G2zCRqoLK0SjWBVZbGAMtIXDmm-MEGw9Wo0,83
built_by_uv-0.1.0.dist-info/entry_points.txt,sha256=-IO6yaq6x6HSl-zWH96rZmgYvfyHlH00L5WQoCpz-YI,50
built_by_uv-0.1.0.dist-info/METADATA,sha256=m6EkVvKrGmqx43b_VR45LHD37IZxPYC0NI6Qx9_UXLE,474
built_by_uv-0.1.0.dist-info/WHEEL,sha256=3da1bfa0e8fd1b6cc246aa0b2b44a35815596c600cb485c39a6f8c106c3d5a8d,83
built_by_uv-0.1.0.dist-info/entry_points.txt,sha256=f883bac9aabac7a1d297ecd61fdeab666818bdfc87947d342f9590a02a73f982,50
built_by_uv-0.1.0.dist-info/METADATA,sha256=9ba12456f2ab1a6ab1e376ff551e392c70f7ec86713d80b4348e90c7dfd45cb1,474
built_by_uv-0.1.0.dist-info/RECORD,,
"###);
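The `ZipDirectoryWriter` hunks are about the Unix mode stored on each zip entry: one side writes `0o755`/`0o644` explicitly, including for metadata files, while the other leaves the crate default (note that its `775`/`664` are decimal literals, not the octal `0o775`/`0o664` the surrounding comment suggests). A sketch of setting an explicit mode with the `zip` crate — file name and contents are placeholders:

```rust
use std::io::Write;
use zip::write::SimpleFileOptions;
use zip::{CompressionMethod, ZipWriter};

fn main() -> zip::result::ZipResult<()> {
    let file = std::fs::File::create("example.whl")?;
    let mut writer = ZipWriter::new(file);
    // rw-r--r-- for regular files; 0o755 when the executable bit matters.
    let options = SimpleFileOptions::default()
        .unix_permissions(0o644)
        .compression_method(CompressionMethod::Deflated);
    writer.start_file("pkg/__init__.py", options)?;
    writer.write_all(b"print('hello')\n")?;
    writer.finish()?;
    Ok(())
}
```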
@@ -1,10 +1,11 @@
[package]
name = "uv-build-frontend"
version = "0.0.8"
description = "This is an internal component crate of uv"
version = "0.0.1"
description = "Build wheels from source distributions"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

@@ -16,16 +17,13 @@ doctest = false
workspace = true

[dependencies]
uv-auth = { workspace = true }
uv-cache-key = { workspace = true }
uv-configuration = { workspace = true }
uv-distribution = { workspace = true }
uv-distribution-types = { workspace = true }
uv-fs = { workspace = true }
uv-normalize = { workspace = true }
uv-pep440 = { workspace = true }
uv-pep508 = { workspace = true }
uv-preview = { workspace = true }
uv-pypi-types = { workspace = true }
uv-python = { workspace = true }
uv-static = { workspace = true }

@@ -50,4 +48,4 @@ tracing = { workspace = true }
rustc-hash = { workspace = true }

[dev-dependencies]
insta = { workspace = true }
insta = { version = "1.40.0" }
@@ -1,13 +0,0 @@
<!-- This file is generated. DO NOT EDIT -->

# uv-build-frontend

This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
is unstable and will have frequent breaking changes.

This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-build-frontend).

See uv's
[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
for details on versioning.
@@ -13,8 +13,8 @@ use tracing::error;
use uv_configuration::BuildOutput;
use uv_distribution_types::IsBuildBackendError;
use uv_fs::Simplified;
use uv_normalize::PackageName;
use uv_pep440::Version;
use uv_pep508::PackageName;
use uv_types::AnyErrorBuild;

/// e.g. `pygraphviz/graphviz_wrap.c:3020:10: fatal error: graphviz/cgraph.h: No such file or directory`

@@ -46,10 +46,9 @@ static LD_NOT_FOUND_RE: LazyLock<Regex> = LazyLock::new(|| {
static WHEEL_NOT_FOUND_RE: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"error: invalid command 'bdist_wheel'").unwrap());

/// e.g. `ModuleNotFoundError`
static MODULE_NOT_FOUND: LazyLock<Regex> = LazyLock::new(|| {
Regex::new("ModuleNotFoundError: No module named ['\"]([^'\"]+)['\"]").unwrap()
});
/// e.g. `ModuleNotFoundError: No module named 'torch'`
static TORCH_NOT_FOUND_RE: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"ModuleNotFoundError: No module named 'torch'").unwrap());

/// e.g. `ModuleNotFoundError: No module named 'distutils'`
static DISTUTILS_NOT_FOUND_RE: LazyLock<Regex> =

@@ -91,10 +90,6 @@ pub enum Error {
NoSourceDistBuilds,
#[error("Cyclic build dependency detected for `{0}`")]
CyclicBuildDependency(PackageName),
#[error(
"Extra build requirement `{0}` was declared with `match-runtime = true`, but `{1}` does not declare static metadata, making runtime-matching impossible"
)]
UnmatchedRuntime(PackageName, PackageName),
}

impl IsBuildBackendError for Error {

@@ -110,8 +105,7 @@ impl IsBuildBackendError for Error {
| Self::Virtualenv(_)
| Self::NoSourceDistBuild(_)
| Self::NoSourceDistBuilds
| Self::CyclicBuildDependency(_)
| Self::UnmatchedRuntime(_, _) => false,
| Self::CyclicBuildDependency(_) => false,
Self::CommandFailed(_, _)
| Self::BuildBackend(_)
| Self::MissingHeader(_)

@@ -136,59 +130,6 @@ pub struct MissingHeaderCause {
version_id: Option<String>,
}

/// Extract the package name from a version specifier string.
/// Uses PEP 508 naming rules but more lenient for hinting purposes.
fn extract_package_name(version_id: &str) -> &str {
// https://peps.python.org/pep-0508/#names
// ^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$ with re.IGNORECASE
// Since we're only using this for a hint, we're more lenient than what we would be doing if this was used for parsing
let end = version_id
.char_indices()
.take_while(|(_, char)| matches!(char, 'A'..='Z' | 'a'..='z' | '0'..='9' | '.' | '-' | '_'))
.last()
.map_or(0, |(i, c)| i + c.len_utf8());

if end == 0 {
version_id
} else {
&version_id[..end]
}
}
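`extract_package_name` above scans for the longest leading run of PEP 508 name-like characters and falls back to the whole string. A standalone copy with a couple of example inputs — the assertions are illustrative:

```rust
/// Lenient PEP 508-style name extraction: take the longest leading run of
/// name characters; fall back to the whole string if nothing matches.
fn extract_package_name(version_id: &str) -> &str {
    let end = version_id
        .char_indices()
        .take_while(|(_, c)| matches!(c, 'A'..='Z' | 'a'..='z' | '0'..='9' | '.' | '-' | '_'))
        .last()
        .map_or(0, |(i, c)| i + c.len_utf8());
    if end == 0 { version_id } else { &version_id[..end] }
}

fn main() {
    // The version specifier is cut at the first disallowed character.
    assert_eq!(extract_package_name("foo==1.2"), "foo");
    // `-` and `.` are allowed, so a version-id like this passes through whole.
    assert_eq!(extract_package_name("pygraphviz-1.11"), "pygraphviz-1.11");
}
```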

/// Write a hint about missing build dependencies.
fn hint_build_dependency(
f: &mut std::fmt::Formatter<'_>,
display_name: &str,
package_name: &str,
package: &str,
) -> std::fmt::Result {
let table_key = if package_name.contains('.') {
format!("\"{package_name}\"")
} else {
package_name.to_string()
};
write!(
f,
"This error likely indicates that `{}` depends on `{}`, but doesn't declare it as a build dependency. \
If `{}` is a first-party package, consider adding `{}` to its `{}`. \
Otherwise, either add it to your `pyproject.toml` under:\n\
\n\
[tool.uv.extra-build-dependencies]\n\
{} = [\"{}\"]\n\
\n\
or `{}` into the environment and re-run with `{}`.",
display_name.cyan(),
package.cyan(),
package_name.cyan(),
package.cyan(),
"build-system.requires".green(),
table_key.cyan(),
package.cyan(),
format!("uv pip install {package}").green(),
"--no-build-isolation".green(),
)
}

impl Display for MissingHeaderCause {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match &self.missing_library {

@@ -249,15 +190,29 @@ impl Display for MissingHeaderCause {
if let (Some(package_name), Some(package_version)) =
(&self.package_name, &self.package_version)
{
hint_build_dependency(
write!(
f,
&format!("{package_name}@{package_version}"),
package_name.as_str(),
package,
"This error likely indicates that `{}` depends on `{}`, but doesn't declare it as a build dependency. If `{}` is a first-party package, consider adding `{}` to its `{}`. Otherwise, `{}` into the environment and re-run with `{}`.",
format!("{package_name}@{package_version}").cyan(),
package.cyan(),
package_name.cyan(),
package.cyan(),
"build-system.requires".green(),
format!("uv pip install {package}").green(),
"--no-build-isolation".green(),
)
} else if let Some(version_id) = &self.version_id {
let package_name = extract_package_name(version_id);
hint_build_dependency(f, package_name, package_name, package)
write!(
f,
"This error likely indicates that `{}` depends on `{}`, but doesn't declare it as a build dependency. If `{}` is a first-party package, consider adding `{}` to its `{}`. Otherwise, `{}` into the environment and re-run with `{}`.",
version_id.cyan(),
package.cyan(),
version_id.cyan(),
package.cyan(),
"build-system.requires".green(),
format!("uv pip install {package}").green(),
"--no-build-isolation".green(),
)
} else {
write!(
f,

@@ -392,22 +347,13 @@ impl Error {
Some(MissingLibrary::Linker(library.to_string()))
} else if WHEEL_NOT_FOUND_RE.is_match(line.trim()) {
Some(MissingLibrary::BuildDependency("wheel".to_string()))
} else if TORCH_NOT_FOUND_RE.is_match(line.trim()) {
Some(MissingLibrary::BuildDependency("torch".to_string()))
} else if DISTUTILS_NOT_FOUND_RE.is_match(line.trim()) {
Some(MissingLibrary::DeprecatedModule(
"distutils".to_string(),
Version::new([3, 12]),
))
} else if let Some(caps) = MODULE_NOT_FOUND.captures(line.trim()) {
if let Some(module_match) = caps.get(1) {
let module_name = module_match.as_str();
let package_name = match crate::pipreqs::MODULE_MAPPING.lookup(module_name) {
Some(package) => package.to_string(),
None => module_name.to_string(),
};
Some(MissingLibrary::BuildDependency(package_name))
} else {
None
}
} else {
None
}

@@ -468,8 +414,8 @@ mod test {
use std::process::ExitStatus;
use std::str::FromStr;
use uv_configuration::BuildOutput;
use uv_normalize::PackageName;
use uv_pep440::Version;
use uv_pep508::PackageName;

#[test]
fn missing_header() {

@@ -619,7 +565,7 @@ mod test {
.to_string()
.replace("exit status: ", "exit code: ");
let formatted = anstream::adapter::strip_str(&formatted);
insta::assert_snapshot!(formatted, @r#"
insta::assert_snapshot!(formatted, @r###"
Failed building wheel through setup.py (exit code: 0)

[stderr]

@@ -630,13 +576,8 @@ mod test {

error: invalid command 'bdist_wheel'

hint: This error likely indicates that `pygraphviz-1.11` depends on `wheel`, but doesn't declare it as a build dependency. If `pygraphviz-1.11` is a first-party package, consider adding `wheel` to its `build-system.requires`. Otherwise, either add it to your `pyproject.toml` under:

[tool.uv.extra-build-dependencies]
"pygraphviz-1.11" = ["wheel"]

or `uv pip install wheel` into the environment and re-run with `--no-build-isolation`.
"#);
hint: This error likely indicates that `pygraphviz-1.11` depends on `wheel`, but doesn't declare it as a build dependency. If `pygraphviz-1.11` is a first-party package, consider adding `wheel` to its `build-system.requires`. Otherwise, `uv pip install wheel` into the environment and re-run with `--no-build-isolation`.
"###);
}

#[test]
@@ -3,9 +3,7 @@
//! <https://packaging.python.org/en/latest/specifications/source-distribution-format/>

mod error;
mod pipreqs;

use std::borrow::Cow;
use std::ffi::OsString;
use std::fmt::Formatter;
use std::fmt::Write;

@@ -21,26 +19,23 @@ use fs_err as fs;
use indoc::formatdoc;
use itertools::Itertools;
use rustc_hash::FxHashMap;
use serde::de::{self, IntoDeserializer, SeqAccess, Visitor, value};
use serde::{Deserialize, Deserializer};
use serde::de::{IntoDeserializer, SeqAccess, Visitor, value};
use serde::{Deserialize, Deserializer, de};
use tempfile::TempDir;
use tokio::io::AsyncBufReadExt;
use tokio::process::Command;
use tokio::sync::{Mutex, Semaphore};
use tracing::{Instrument, debug, info_span, instrument, warn};
use uv_auth::CredentialsCache;

use uv_cache_key::cache_digest;
use uv_configuration::{BuildKind, BuildOutput, SourceStrategy};
use uv_configuration::PreviewMode;
use uv_configuration::{BuildKind, BuildOutput, ConfigSettings, SourceStrategy};
use uv_distribution::BuildRequires;
use uv_distribution_types::{
ConfigSettings, ExtraBuildRequirement, ExtraBuildRequires, IndexLocations, Requirement,
Resolution,
};
use uv_fs::{LockedFile, LockedFileMode};
use uv_distribution_types::{IndexLocations, Requirement, Resolution};
use uv_fs::LockedFile;
use uv_fs::{PythonExt, Simplified};
use uv_normalize::PackageName;
use uv_pep440::Version;
use uv_preview::Preview;
use uv_pep508::PackageName;
use uv_pypi_types::VerbatimParsedUrl;
use uv_python::{Interpreter, PythonEnvironment};
use uv_static::EnvVars;

@@ -286,14 +281,12 @@ impl SourceBuild {
workspace_cache: &WorkspaceCache,
config_settings: ConfigSettings,
build_isolation: BuildIsolation<'_>,
extra_build_requires: &ExtraBuildRequires,
build_stack: &BuildStack,
build_kind: BuildKind,
mut environment_variables: FxHashMap<OsString, OsString>,
level: BuildOutput,
concurrent_builds: usize,
credentials_cache: &CredentialsCache,
preview: Preview,
preview: PreviewMode,
) -> Result<Self, Error> {
let temp_dir = build_context.cache().venv_dir()?;

@@ -303,6 +296,8 @@ impl SourceBuild {
source.to_path_buf()
};

let default_backend: Pep517Backend = DEFAULT_BACKEND.clone();

// Check if we have a PEP 517 build backend.
let (pep517_backend, project) = Self::extract_pep517_backend(
&source_tree,

@@ -311,7 +306,7 @@ impl SourceBuild {
locations,
source_strategy,
workspace_cache,
credentials_cache,
&default_backend,
)
.await
.map_err(|err| *err)?;

@@ -327,29 +322,6 @@ impl SourceBuild {
.or(fallback_package_version)
.cloned();

let extra_build_dependencies = package_name
.as_ref()
.and_then(|name| extra_build_requires.get(name).cloned())
.unwrap_or_default()
.into_iter()
.map(|requirement| {
match requirement {
ExtraBuildRequirement {
requirement,
match_runtime: true,
} if requirement.source.is_empty() => {
Err(Error::UnmatchedRuntime(
requirement.name.clone(),
// SAFETY: if `package_name` is `None`, the iterator is empty.
package_name.clone().unwrap(),
))
}
requirement => Ok(requirement),
}
})
.map_ok(Requirement::from)
.collect::<Result<Vec<_>, _>>()?;

// Create a virtual environment, or install into the shared environment if requested.
let venv = if let Some(venv) = build_isolation.shared_environment(package_name.as_ref()) {
venv.clone()

@@ -359,9 +331,7 @@ impl SourceBuild {
interpreter.clone(),
uv_virtualenv::Prompt::None,
false,
uv_virtualenv::OnExisting::Remove(
uv_virtualenv::RemovalReason::TemporaryEnvironment,
),
false,
false,
false,
false,

@@ -374,17 +344,11 @@ impl SourceBuild {
if build_isolation.is_isolated(package_name.as_ref()) {
debug!("Resolving build requirements");

let dependency_sources = if extra_build_dependencies.is_empty() {
"`build-system.requires`"
} else {
"`build-system.requires` and `extra-build-dependencies`"
};

let resolved_requirements = Self::get_resolved_requirements(
build_context,
source_build_context,
&default_backend,
&pep517_backend,
extra_build_dependencies,
build_stack,
)
.await?;

@@ -392,7 +356,7 @@ impl SourceBuild {
build_context
.install(&resolved_requirements, &venv, build_stack)
.await
.map_err(|err| Error::RequirementsInstall(dependency_sources, err.into()))?;
.map_err(|err| Error::RequirementsInstall("`build-system.requires`", err.into()))?;
} else {
debug!("Proceeding without build isolation");
}

@@ -454,7 +418,6 @@ impl SourceBuild {
&environment_variables,
&modified_path,
&temp_dir,
credentials_cache,
)
.await?;
}

@@ -493,16 +456,12 @@ impl SourceBuild {
"uv-setuptools-{}.lock",
cache_digest(&canonical_source_path)
));
source_tree_lock = LockedFile::acquire(
lock_path,
LockedFileMode::Exclusive,
self.source_tree.to_string_lossy(),
)
.await
.inspect_err(|err| {
warn!("Failed to acquire build lock: {err}");
})
.ok();
source_tree_lock = LockedFile::acquire(lock_path, self.source_tree.to_string_lossy())
.await
.inspect_err(|err| {
warn!("Failed to acquire build lock: {err}");
})
.ok();
}
Ok(source_tree_lock)
}

@@ -510,20 +469,18 @@ impl SourceBuild {
async fn get_resolved_requirements(
build_context: &impl BuildContext,
source_build_context: SourceBuildContext,
default_backend: &Pep517Backend,
pep517_backend: &Pep517Backend,
extra_build_dependencies: Vec<Requirement>,
build_stack: &BuildStack,
) -> Result<Resolution, Error> {
Ok(
if pep517_backend.requirements == DEFAULT_BACKEND.requirements
&& extra_build_dependencies.is_empty()
{
if pep517_backend.requirements == default_backend.requirements {
let mut resolution = source_build_context.default_resolution.lock().await;
if let Some(resolved_requirements) = &*resolution {
resolved_requirements.clone()
} else {
let resolved_requirements = build_context
.resolve(&DEFAULT_BACKEND.requirements, build_stack)
.resolve(&default_backend.requirements, build_stack)
.await
.map_err(|err| {
Error::RequirementsResolve("`setup.py` build", err.into())

@@ -532,25 +489,12 @@ impl SourceBuild {
resolved_requirements
}
} else {
let (requirements, dependency_sources) = if extra_build_dependencies.is_empty() {
(
Cow::Borrowed(&pep517_backend.requirements),
"`build-system.requires`",
)
} else {
// If there are extra build dependencies, we need to resolve them together with
// the backend requirements.
let mut requirements = pep517_backend.requirements.clone();
requirements.extend(extra_build_dependencies);
(
Cow::Owned(requirements),
"`build-system.requires` and `extra-build-dependencies`",
)
};
build_context
.resolve(&requirements, build_stack)
.resolve(&pep517_backend.requirements, build_stack)
.await
.map_err(|err| Error::RequirementsResolve(dependency_sources, err.into()))?
.map_err(|err| {
Error::RequirementsResolve("`build-system.requires`", err.into())
})?
},
)
}

@@ -563,14 +507,16 @@ impl SourceBuild {
locations: &IndexLocations,
source_strategy: SourceStrategy,
workspace_cache: &WorkspaceCache,
credentials_cache: &CredentialsCache,
default_backend: &Pep517Backend,
) -> Result<(Pep517Backend, Option<Project>), Box<Error>> {
match fs::read_to_string(source_tree.join("pyproject.toml")) {
Ok(toml) => {
let pyproject_toml = toml_edit::Document::from_str(&toml)
.map_err(Error::InvalidPyprojectTomlSyntax)?;
let pyproject_toml = PyProjectToml::deserialize(pyproject_toml.into_deserializer())
.map_err(Error::InvalidPyprojectTomlSchema)?;
let pyproject_toml: toml_edit::ImDocument<_> =
toml_edit::ImDocument::from_str(&toml)
.map_err(Error::InvalidPyprojectTomlSyntax)?;
let pyproject_toml: PyProjectToml =
PyProjectToml::deserialize(pyproject_toml.into_deserializer())
.map_err(Error::InvalidPyprojectTomlSchema)?;

let backend = if let Some(build_system) = pyproject_toml.build_system {
// If necessary, lower the requirements.

@@ -592,7 +538,6 @@ impl SourceBuild {
locations,
source_strategy,
workspace_cache,
credentials_cache,
)
.await
.map_err(Error::Lowering)?;

@@ -661,8 +606,7 @@ impl SourceBuild {
);
}
}

DEFAULT_BACKEND.clone()
default_backend.clone()
};
Ok((backend, pyproject_toml.project))
}

@@ -678,7 +622,7 @@ impl SourceBuild {
// the default backend, to match `build`. `pip` uses `setup.py` directly in this
// case, but plans to make PEP 517 builds the default in the future.
// See: https://github.com/pypa/pip/issues/9175.
Ok((DEFAULT_BACKEND.clone(), None))
Ok((default_backend.clone(), None))
}
Err(err) => Err(Box::new(err.into())),
}

@@ -965,7 +909,6 @@ async fn create_pep517_build_environment(
environment_variables: &FxHashMap<OsString, OsString>,
modified_path: &OsString,
temp_dir: &TempDir,
credentials_cache: &CredentialsCache,
) -> Result<(), Error> {
// Write the hook output to a file so that we can read it back reliably.
let outfile = temp_dir

@@ -1060,7 +1003,6 @@ async fn create_pep517_build_environment(
locations,
source_strategy,
workspace_cache,
credentials_cache,
)
.await
.map_err(Error::Lowering)?;

@@ -1167,16 +1109,8 @@ impl PythonRunner {
.envs(environment_variables)
.env(EnvVars::PATH, modified_path)
.env(EnvVars::VIRTUAL_ENV, venv.root())
// NOTE: it would be nice to get colored output from build backends,
// but setting CLICOLOR_FORCE=1 changes the output of underlying
// tools, which might mess with wrappers trying to parse their
// output.
.env(EnvVars::CLICOLOR_FORCE, "1")
.env(EnvVars::PYTHONIOENCODING, "utf-8:backslashreplace")
// Remove potentially-sensitive environment variables.
.env_remove(EnvVars::PYX_API_KEY)
.env_remove(EnvVars::UV_API_KEY)
.env_remove(EnvVars::PYX_AUTH_TOKEN)
.env_remove(EnvVars::UV_AUTH_TOKEN)
.stdout(std::process::Stdio::piped())
.stderr(std::process::Stdio::piped())
.spawn()
@@ -1,32 +0,0 @@
use std::str::FromStr;
use std::sync::LazyLock;

use rustc_hash::FxHashMap;
use uv_normalize::PackageName;

/// A mapping from module name to PyPI package name.
pub(crate) struct ModuleMap<'a>(FxHashMap<&'a str, PackageName>);

impl<'a> ModuleMap<'a> {
/// Generate a [`ModuleMap`] from a string representation, encoded in `${module}:{package}` format.
fn from_str(source: &'a str) -> Self {
let mut mapping = FxHashMap::default();
for line in source.lines() {
if let Some((module, package)) = line.split_once(':') {
let module = module.trim();
let package = PackageName::from_str(package.trim()).unwrap();
mapping.insert(module, package);
}
}
Self(mapping)
}

/// Look up a PyPI package name for a given module name.
pub(crate) fn lookup(&self, module: &str) -> Option<&PackageName> {
self.0.get(module)
}
}

/// A mapping from module name to PyPI package name.
pub(crate) static MODULE_MAPPING: LazyLock<ModuleMap> =
LazyLock::new(|| ModuleMap::from_str(include_str!("pipreqs/mapping")));
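The `pipreqs` module above embeds a module-to-package table in a plain `module:package` line format. A sketch of the same parsing reduced to std types, plus an example lookup — the two mapping entries are illustrative, not necessarily in uv's table:

```rust
use std::collections::HashMap;

/// Parse `module:package` lines into a lookup table, ignoring malformed lines.
fn parse_mapping(source: &str) -> HashMap<&str, &str> {
    source
        .lines()
        .filter_map(|line| line.split_once(':'))
        .map(|(module, package)| (module.trim(), package.trim()))
        .collect()
}

fn main() {
    let mapping = parse_mapping("cv2:opencv-python\nyaml:pyyaml\n");
    // The import name often differs from the distribution name on PyPI.
    assert_eq!(mapping.get("yaml"), Some(&"pyyaml"));
}
```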
@@ -1,201 +0,0 @@
[Deleted file: the standard Apache License, Version 2.0 (January 2004, http://www.apache.org/licenses/), removed in full.]
[File diff suppressed because it is too large]
@@ -1,22 +1,19 @@
[package]
name = "uv-build"
version = "0.9.18"
description = "A Python build backend"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }
version = "0.7.22"
edition.workspace = true
rust-version.workspace = true
homepage.workspace = true
documentation.workspace = true
repository.workspace = true
authors.workspace = true
license.workspace = true

[dependencies]
uv-build-backend = { workspace = true }
uv-logging = { workspace = true }
uv-version = { workspace = true }

anstream = { workspace = true }
anyhow = { workspace = true }
tracing-subscriber = { workspace = true, features = ["env-filter"] }

[lints]
workspace = true
@@ -1,6 +1,6 @@
[project]
name = "uv-build"
version = "0.9.18"
version = "0.7.22"
description = "The uv build backend"
authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
requires-python = ">=3.8"
@@ -7,7 +7,7 @@ def main():
"Use `uv build` or another build frontend instead.",
file=sys.stderr,
)
if "--help" in sys.argv or "-h" in sys.argv:
if "--help" in sys.argv:
sys.exit(0)
else:
sys.exit(1)
@@ -1,32 +1,10 @@
use anyhow::{Context, Result, bail};
use std::env;
use std::io::Write;
use std::path::PathBuf;

use anyhow::{Context, Result, bail};
use tracing_subscriber::filter::LevelFilter;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
use tracing_subscriber::{EnvFilter, Layer};

use uv_logging::UvFormat;

/// Entrypoint for the `uv-build` Python package.
fn main() -> Result<()> {
// Support configuring the log level with `RUST_LOG` (shows only the error level by default) and
// color.
//
// This configuration is a simplified version of the uv logging configuration. When using
// uv_build through uv proper, the uv logging configuration applies.
let filter = EnvFilter::builder()
.with_default_directive(LevelFilter::OFF.into())
.from_env()
.context("Invalid RUST_LOG directives")?;
let stderr_layer = tracing_subscriber::fmt::layer()
.event_format(UvFormat::default())
.with_writer(std::sync::Mutex::new(anstream::stderr()))
.with_filter(filter);
tracing_subscriber::registry().with(stderr_layer).init();

// Handrolled to avoid the large clap dependency
let mut args = env::args_os();
// Skip the name of the binary

@@ -44,7 +22,6 @@ fn main() -> Result<()> {
&env::current_dir()?,
&sdist_directory,
uv_version::version(),
false,
)?;
// Tell the build frontend about the name of the artifact we built
writeln!(&mut std::io::stdout(), "{filename}").context("stdout is closed")?;

@@ -57,7 +34,6 @@ fn main() -> Result<()> {
&wheel_directory,
metadata_directory.as_deref(),
uv_version::version(),
false,
)?;
// Tell the build frontend about the name of the artifact we built
writeln!(&mut std::io::stdout(), "{filename}").context("stdout is closed")?;

@@ -70,7 +46,6 @@ fn main() -> Result<()> {
&wheel_directory,
metadata_directory.as_deref(),
uv_version::version(),
false,
)?;
// Tell the build frontend about the name of the artifact we built
writeln!(&mut std::io::stdout(), "{filename}").context("stdout is closed")?;
@@ -1,10 +1,10 @@
[package]
name = "uv-cache-info"
version = "0.0.8"
description = "This is an internal component crate of uv"
version = "0.0.1"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

@@ -16,8 +16,6 @@ doctest = false
workspace = true

[dependencies]
uv-fs = { workspace = true }

fs-err = { workspace = true }
globwalk = { workspace = true }
schemars = { workspace = true, optional = true }
@@ -1,13 +0,0 @@
<!-- This file is generated. DO NOT EDIT -->

# uv-cache-info

This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
is unstable and will have frequent breaking changes.

This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-cache-info).

See uv's
[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
for details on versioning.
@@ -1,12 +1,11 @@
use std::borrow::Cow;
use std::cmp::max;
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};

use serde::Deserialize;
use tracing::{debug, warn};

use uv_fs::Simplified;

use crate::git_info::{Commit, Tags};
use crate::glob::cluster_globs;
use crate::timestamp::Timestamp;

@@ -64,7 +63,7 @@ impl CacheInfo {
pub fn from_directory(directory: &Path) -> Result<Self, CacheInfoError> {
let mut commit = None;
let mut tags = None;
let mut last_changed: Option<(PathBuf, Timestamp)> = None;
let mut timestamp = None;
let mut directories = BTreeMap::new();
let mut env = BTreeMap::new();

@@ -129,12 +128,7 @@ impl CacheInfo {
);
continue;
}
let timestamp = Timestamp::from_metadata(&metadata);
if last_changed.as_ref().is_none_or(|(_, prev_timestamp)| {
*prev_timestamp < Timestamp::from_metadata(&metadata)
}) {
last_changed = Some((path, timestamp));
}
timestamp = max(timestamp, Some(Timestamp::from_metadata(&metadata)));
}
CacheKey::Directory { dir } => {
// Treat the path as a directory.

@@ -264,25 +258,14 @@ impl CacheInfo {
}
continue;
}
let timestamp = Timestamp::from_metadata(&metadata);
if last_changed.as_ref().is_none_or(|(_, prev_timestamp)| {
*prev_timestamp < Timestamp::from_metadata(&metadata)
}) {
last_changed = Some((entry.into_path(), timestamp));
}
timestamp = max(timestamp, Some(Timestamp::from_metadata(&metadata)));
}
}
}

let timestamp = if let Some((path, timestamp)) = last_changed {
debug!(
"Computed cache info: {timestamp:?}, {commit:?}, {tags:?}, {env:?}, {directories:?}. Most recently modified: {}",
path.user_display()
);
Some(timestamp)
} else {
None
};
debug!(
"Computed cache info: {timestamp:?}, {commit:?}, {tags:?}, {env:?}, {directories:?}"
);

Ok(Self {
timestamp,
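The `CacheInfo` hunks swap a bare `max` over timestamps for a `last_changed` pair that also remembers which path was newest, so the debug log can name the most recently modified file. The core of that bookkeeping, reduced to std types (Rust 1.82+ for `Option::is_none_or`; names are illustrative):

```rust
use std::path::PathBuf;
use std::time::SystemTime;

/// Keep the newest (path, timestamp) pair seen so far.
fn observe(last_changed: &mut Option<(PathBuf, SystemTime)>, path: PathBuf, ts: SystemTime) {
    if last_changed.as_ref().is_none_or(|(_, prev)| *prev < ts) {
        *last_changed = Some((path, ts));
    }
}

fn main() {
    let mut last_changed: Option<(PathBuf, SystemTime)> = None;
    observe(&mut last_changed, PathBuf::from("pyproject.toml"), SystemTime::now());
    if let Some((path, _)) = &last_changed {
        println!("most recently modified: {}", path.display());
    }
}
```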
@@ -1,10 +1,11 @@
[package]
name = "uv-cache-key"
version = "0.0.8"
description = "This is an internal component crate of uv"
version = "0.0.1"
description = "Generic functionality for caching paths, URLs, and other resources across platforms."
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }
@@ -1,13 +0,0 @@
<!-- This file is generated. DO NOT EDIT -->

# uv-cache-key

This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
is unstable and will have frequent breaking changes.

This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-cache-key).

See uv's
[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
for details on versioning.
@@ -4,7 +4,7 @@ use std::hash::{Hash, Hasher};
use std::ops::Deref;

use url::Url;
use uv_redacted::{DisplaySafeUrl, DisplaySafeUrlError};
use uv_redacted::DisplaySafeUrl;

use crate::cache_key::{CacheKey, CacheKeyHasher};

@@ -98,7 +98,7 @@ impl CanonicalUrl {
Self(url)
}

pub fn parse(url: &str) -> Result<Self, DisplaySafeUrlError> {
pub fn parse(url: &str) -> Result<Self, url::ParseError> {
Ok(Self::new(&DisplaySafeUrl::parse(url)?))
}
}

@@ -139,18 +139,8 @@ impl std::fmt::Display for CanonicalUrl {
/// `https://github.com/pypa/package.git#subdirectory=pkg_b` would map to different
/// [`CanonicalUrl`] values, but the same [`RepositoryUrl`], since they map to the same
/// resource.
///
/// The additional information it holds should only be used to discriminate between
/// sources that hold the exact same commit in their canonical representation,
/// but may differ in the contents such as when Git LFS is enabled.
///
/// A different cache key will be computed when Git LFS is enabled.
/// When Git LFS is `false` or `None`, the cache key remains unchanged.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
pub struct RepositoryUrl {
repo_url: DisplaySafeUrl,
with_lfs: Option<bool>,
}
pub struct RepositoryUrl(DisplaySafeUrl);

impl RepositoryUrl {
pub fn new(url: &DisplaySafeUrl) -> Self {

@@ -171,31 +161,19 @@ impl RepositoryUrl {
url.set_fragment(None);
url.set_query(None);

Self {
repo_url: url,
with_lfs: None,
}
Self(url)
}

pub fn parse(url: &str) -> Result<Self, DisplaySafeUrlError> {
pub fn parse(url: &str) -> Result<Self, url::ParseError> {
Ok(Self::new(&DisplaySafeUrl::parse(url)?))
}

#[must_use]
pub fn with_lfs(mut self, lfs: Option<bool>) -> Self {
self.with_lfs = lfs;
self
}
}

impl CacheKey for RepositoryUrl {
fn cache_key(&self, state: &mut CacheKeyHasher) {
// `as_str` gives the serialisation of a url (which has a spec) and so insulates against
// possible changes in how the URL crate does hashing.
self.repo_url.as_str().cache_key(state);
if let Some(true) = self.with_lfs {
1u8.cache_key(state);
}
self.0.as_str().cache_key(state);
}
}
@ -203,10 +181,7 @@ impl Hash for RepositoryUrl {
|
|||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
// `as_str` gives the serialisation of a url (which has a spec) and so insulates against
|
||||
// possible changes in how the URL crate does hashing.
|
||||
self.repo_url.as_str().hash(state);
|
||||
if let Some(true) = self.with_lfs {
|
||||
1u8.hash(state);
|
||||
}
|
||||
self.0.as_str().hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@@ -214,13 +189,13 @@ impl Deref for RepositoryUrl {
type Target = Url;

fn deref(&self) -> &Self::Target {
&self.repo_url
&self.0
}
}

impl std::fmt::Display for RepositoryUrl {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(&self.repo_url, f)
std::fmt::Display::fmt(&self.0, f)
}
}

@@ -229,7 +204,7 @@ mod tests {
use super::*;

#[test]
fn user_credential_does_not_affect_cache_key() -> Result<(), DisplaySafeUrlError> {
fn user_credential_does_not_affect_cache_key() -> Result<(), url::ParseError> {
let mut hasher = CacheKeyHasher::new();
CanonicalUrl::parse("https://example.com/pypa/sample-namespace-packages.git@2.0.0")?
.cache_key(&mut hasher);

@@ -279,7 +254,7 @@ mod tests {
}

#[test]
fn canonical_url() -> Result<(), DisplaySafeUrlError> {
fn canonical_url() -> Result<(), url::ParseError> {
// Two URLs should be considered equal regardless of the `.git` suffix.
assert_eq!(
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,

@@ -308,14 +283,6 @@ mod tests {
)?,
);

// Two URLs should _not_ be considered equal if they differ in Git LFS enablement.
assert_ne!(
CanonicalUrl::parse(
"git+https://github.com/pypa/sample-namespace-packages.git#lfs=true"
)?,
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
);

// Two URLs should _not_ be considered equal if they request different commit tags.
assert_ne!(
CanonicalUrl::parse(

@@ -368,7 +335,7 @@ mod tests {
}

#[test]
fn repository_url() -> Result<(), DisplaySafeUrlError> {
fn repository_url() -> Result<(), url::ParseError> {
// Two URLs should be considered equal regardless of the `.git` suffix.
assert_eq!(
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
@@ -411,76 +378,6 @@ mod tests {
)?,
);

// Two URLs should be considered equal if they map to the same repository, even if they
// differ in Git LFS enablement.
assert_eq!(
RepositoryUrl::parse(
"git+https://github.com/pypa/sample-namespace-packages.git#lfs=true"
)?,
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
);

Ok(())
}

#[test]
fn repository_url_with_lfs() -> Result<(), DisplaySafeUrlError> {
let mut hasher = CacheKeyHasher::new();
RepositoryUrl::parse("https://example.com/pypa/sample-namespace-packages.git@2.0.0")?
.cache_key(&mut hasher);
let repo_url_basic = hasher.finish();

let mut hasher = CacheKeyHasher::new();
RepositoryUrl::parse(
"https://user:foo@example.com/pypa/sample-namespace-packages.git@2.0.0#foo=bar",
)?
.cache_key(&mut hasher);
let repo_url_with_fragments = hasher.finish();

assert_eq!(
repo_url_basic, repo_url_with_fragments,
"repository urls should have the exact cache keys as fragments are removed",
);

let mut hasher = CacheKeyHasher::new();
RepositoryUrl::parse(
"https://user:foo@example.com/pypa/sample-namespace-packages.git@2.0.0#foo=bar",
)?
.with_lfs(None)
.cache_key(&mut hasher);
let git_url_with_fragments = hasher.finish();

assert_eq!(
repo_url_with_fragments, git_url_with_fragments,
"both structs should have the exact cache keys as fragments are still removed",
);

let mut hasher = CacheKeyHasher::new();
RepositoryUrl::parse(
"https://user:foo@example.com/pypa/sample-namespace-packages.git@2.0.0#foo=bar",
)?
.with_lfs(Some(false))
.cache_key(&mut hasher);
let git_url_with_fragments_and_lfs_false = hasher.finish();

assert_eq!(
git_url_with_fragments, git_url_with_fragments_and_lfs_false,
"both structs should have the exact cache keys as lfs false should not influence them",
);

let mut hasher = CacheKeyHasher::new();
RepositoryUrl::parse(
"https://user:foo@example.com/pypa/sample-namespace-packages.git@2.0.0#foo=bar",
)?
.with_lfs(Some(true))
.cache_key(&mut hasher);
let git_url_with_fragments_and_lfs_true = hasher.finish();

assert_ne!(
git_url_with_fragments, git_url_with_fragments_and_lfs_true,
"both structs should have different cache keys as one has Git LFS enabled",
);

Ok(())
}
}
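The `RepositoryUrl` change folds an optional Git LFS flag into the cache key: only `Some(true)`
feeds an extra discriminator byte to the hasher, so keys for `None` and `Some(false)` stay
byte-identical to the pre-LFS layout, as the tests above verify. A hedged sketch with `std::hash`
standing in for uv's `CacheKeyHasher`:

```rust
use std::hash::{Hash, Hasher};

/// Illustrative stand-in for the struct in the hunks above.
struct RepoKey {
    repo_url: String,
    with_lfs: Option<bool>,
}

impl Hash for RepoKey {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Hash the serialized URL, as the original does via `as_str`.
        self.repo_url.hash(state);
        // Only an enabled LFS flag perturbs the key.
        if let Some(true) = self.with_lfs {
            1u8.hash(state);
        }
    }
}
```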
@@ -1,10 +1,11 @@
[package]
name = "uv-cache"
version = "0.0.8"
description = "This is an internal component crate of uv"
version = "0.0.1"
description = "Generate stable hash digests across versions and platforms."
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

@@ -34,6 +35,5 @@ rustc-hash = { workspace = true }
same-file = { workspace = true }
serde = { workspace = true, features = ["derive"] }
tempfile = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
walkdir = { workspace = true }
@@ -1,13 +0,0 @@
<!-- This file is generated. DO NOT EDIT -->

# uv-cache

This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
is unstable and will have frequent breaking changes.

This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-cache).

See uv's
[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
for details on versioning.
@@ -3,7 +3,7 @@ use std::path::{Path, PathBuf};
use uv_static::EnvVars;

use crate::Cache;
use clap::{Parser, ValueHint};
use clap::Parser;
use tracing::{debug, warn};

#[derive(Parser, Debug, Clone)]

@@ -27,7 +27,7 @@ pub struct CacheArgs {
/// `%LOCALAPPDATA%\uv\cache` on Windows.
///
/// To view the location of the cache directory, run `uv cache dir`.
#[arg(global = true, long, env = EnvVars::UV_CACHE_DIR, value_hint = ValueHint::DirPath)]
#[arg(global = true, long, env = EnvVars::UV_CACHE_DIR)]
pub cache_dir: Option<PathBuf>,
}

@@ -82,7 +82,7 @@ impl TryFrom<CacheArgs> for Cache {
type Error = io::Error;

fn try_from(value: CacheArgs) -> Result<Self, Self::Error> {
Self::from_settings(value.no_cache, value.cache_dir)
Cache::from_settings(value.no_cache, value.cache_dir)
}
}
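The `--cache-dir` change adds `value_hint = ValueHint::DirPath`, which lets shell completion
scripts offer directories for the flag. A minimal clap sketch, assuming clap 4 with the `derive`
and `env` features (the env name mirrors the hunk above; the struct is hypothetical):

```rust
use std::path::PathBuf;

use clap::{Parser, ValueHint};

#[derive(Parser, Debug)]
struct Args {
    /// Path to the cache directory; completions will suggest directories.
    #[arg(long, env = "UV_CACHE_DIR", value_hint = ValueHint::DirPath)]
    cache_dir: Option<PathBuf>,
}

fn main() {
    let args = Args::parse();
    println!("{:?}", args.cache_dir);
}
```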
@@ -7,10 +7,11 @@ use std::str::FromStr;
use std::sync::Arc;

use rustc_hash::FxHashMap;
use tracing::{debug, trace, warn};
use tracing::debug;

pub use archive::ArchiveId;
use uv_cache_info::Timestamp;
use uv_fs::{LockedFile, LockedFileError, LockedFileMode, Simplified, cachedir, directories};
use uv_fs::{LockedFile, cachedir, directories};
use uv_normalize::PackageName;
use uv_pypi_types::ResolutionMetadata;

@@ -21,7 +22,6 @@ use crate::removal::Remover;
pub use crate::removal::{Removal, rm_rf};
pub use crate::wheel::WheelCache;
use crate::wheel::WheelCacheKind;
pub use archive::ArchiveId;

mod archive;
mod by_timestamp;

@@ -35,17 +35,6 @@ mod wheel;
/// Must be kept in-sync with the version in [`CacheBucket::to_str`].
pub const ARCHIVE_VERSION: u8 = 0;

/// Error locking a cache entry or shard
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error(transparent)]
Io(#[from] io::Error),
#[error("Could not make the path absolute")]
Absolute(#[source] io::Error),
#[error("Could not acquire lock")]
Acquire(#[from] LockedFileError),
}

/// A [`CacheEntry`] which may or may not exist yet.
#[derive(Debug, Clone)]
pub struct CacheEntry(PathBuf);

@@ -91,14 +80,9 @@ impl CacheEntry {
}

/// Acquire the [`CacheEntry`] as an exclusive lock.
pub async fn lock(&self) -> Result<LockedFile, Error> {
pub async fn lock(&self) -> Result<LockedFile, io::Error> {
fs_err::create_dir_all(self.dir())?;
Ok(LockedFile::acquire(
self.path(),
LockedFileMode::Exclusive,
self.path().display(),
)
.await?)
LockedFile::acquire(self.path(), self.path().display()).await
}
}

@@ -125,14 +109,9 @@ impl CacheShard {
}

/// Acquire the cache entry as an exclusive lock.
pub async fn lock(&self) -> Result<LockedFile, Error> {
pub async fn lock(&self) -> Result<LockedFile, io::Error> {
fs_err::create_dir_all(self.as_ref())?;
Ok(LockedFile::acquire(
self.join(".lock"),
LockedFileMode::Exclusive,
self.display(),
)
.await?)
LockedFile::acquire(self.join(".lock"), self.display()).await
}

/// Return the [`CacheShard`] as a [`PathBuf`].

@@ -156,8 +135,6 @@ impl Deref for CacheShard {
}

/// The main cache abstraction.
///
/// While the cache is active, it holds a read (shared) lock that prevents cache cleaning
#[derive(Debug, Clone)]
pub struct Cache {
/// The cache directory.

@@ -169,9 +146,6 @@ pub struct Cache {
/// Included to ensure that the temporary directory exists for the length of the operation, but
/// is dropped at the end as appropriate.
temp_dir: Option<Arc<tempfile::TempDir>>,
/// Ensure that `uv cache` operations don't remove items from the cache that are used by another
/// uv process.
lock_file: Option<Arc<LockedFile>>,
}

impl Cache {

@@ -181,7 +155,6 @@ impl Cache {
root: root.into(),
refresh: Refresh::None(Timestamp::now()),
temp_dir: None,
lock_file: None,
}
}

@@ -192,7 +165,6 @@ impl Cache {
root: temp_dir.path().to_path_buf(),
refresh: Refresh::None(Timestamp::now()),
temp_dir: Some(Arc::new(temp_dir)),
lock_file: None,
})
}

@@ -202,69 +174,6 @@ impl Cache {
Self { refresh, ..self }
}

/// Acquire a lock that allows removing entries from the cache.
pub async fn with_exclusive_lock(self) -> Result<Self, LockedFileError> {
let Self {
root,
refresh,
temp_dir,
lock_file,
} = self;

// Release the existing lock, avoid deadlocks from a cloned cache.
if let Some(lock_file) = lock_file {
drop(
Arc::try_unwrap(lock_file).expect(
"cloning the cache before acquiring an exclusive lock causes a deadlock",
),
);
}
let lock_file = LockedFile::acquire(
root.join(".lock"),
LockedFileMode::Exclusive,
root.simplified_display(),
)
.await?;

Ok(Self {
root,
refresh,
temp_dir,
lock_file: Some(Arc::new(lock_file)),
})
}

/// Acquire a lock that allows removing entries from the cache, if available.
///
/// If the lock is not immediately available, returns [`Err`] with self.
pub fn with_exclusive_lock_no_wait(self) -> Result<Self, Self> {
let Self {
root,
refresh,
temp_dir,
lock_file,
} = self;

match LockedFile::acquire_no_wait(
root.join(".lock"),
LockedFileMode::Exclusive,
root.simplified_display(),
) {
Some(lock_file) => Ok(Self {
root,
refresh,
temp_dir,
lock_file: Some(Arc::new(lock_file)),
}),
None => Err(Self {
root,
refresh,
temp_dir,
lock_file,
}),
}
}

/// Return the root of the cache.
pub fn root(&self) -> &Path {
&self.root
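`with_exclusive_lock` has to release any shared lock a cloned `Cache` may still hold before
upgrading, which is why it unwraps the `Arc` and panics on aliasing rather than deadlocking
against itself. A reduced sketch of that ownership check, with a toy `Lock` type in place of uv's
`LockedFile`:

```rust
use std::sync::Arc;

struct Lock; // stand-in for a held file lock

fn release_before_upgrade(lock_file: Option<Arc<Lock>>) {
    // If another clone still holds the Arc, re-acquiring exclusively would
    // deadlock against ourselves, so fail loudly instead.
    if let Some(lock_file) = lock_file {
        drop(Arc::try_unwrap(lock_file).unwrap_or_else(|_| {
            panic!("cloning the cache before acquiring an exclusive lock causes a deadlock")
        }));
    }
    // ...acquire the exclusive lock here...
}
```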
@@ -401,8 +310,10 @@ impl Cache {
self.temp_dir.is_some()
}

/// Populate the cache scaffold.
fn create_base_files(root: &PathBuf) -> io::Result<()> {
/// Initialize the [`Cache`].
pub fn init(self) -> Result<Self, io::Error> {
let root = &self.root;

// Create the cache directory, if it doesn't exist.
fs_err::create_dir_all(root)?;

@@ -448,101 +359,21 @@ impl Cache {
.join(".git"),
)?;

Ok(())
}

/// Initialize the [`Cache`].
pub async fn init(self) -> Result<Self, Error> {
let root = &self.root;

Self::create_base_files(root)?;

// Block cache removal operations from interfering.
let lock_file = match LockedFile::acquire(
root.join(".lock"),
LockedFileMode::Shared,
root.simplified_display(),
)
.await
{
Ok(lock_file) => Some(Arc::new(lock_file)),
Err(err)
if err
.as_io_error()
.is_some_and(|err| err.kind() == io::ErrorKind::Unsupported) =>
{
warn!(
"Shared locking is not supported by the current platform or filesystem, \
reduced parallel process safety with `uv cache clean` and `uv cache prune`."
);
None
}
Err(err) => return Err(err.into()),
};

Ok(Self {
root: std::path::absolute(root).map_err(Error::Absolute)?,
lock_file,
root: std::path::absolute(root)?,
..self
})
}

/// Initialize the [`Cache`], assuming that there are no other uv processes running.
pub fn init_no_wait(self) -> Result<Option<Self>, Error> {
let root = &self.root;

Self::create_base_files(root)?;

// Block cache removal operations from interfering.
let Some(lock_file) = LockedFile::acquire_no_wait(
root.join(".lock"),
LockedFileMode::Shared,
root.simplified_display(),
) else {
return Ok(None);
};
Ok(Some(Self {
root: std::path::absolute(root).map_err(Error::Absolute)?,
lock_file: Some(Arc::new(lock_file)),
..self
}))
}

/// Clear the cache, removing all entries.
pub fn clear(self, reporter: Box<dyn CleanReporter>) -> Result<Removal, io::Error> {
// Remove everything but `.lock`, Windows does not allow removal of a locked file
let mut removal = Remover::new(reporter).rm_rf(&self.root, true)?;
let Self {
root, lock_file, ..
} = self;

// Remove the `.lock` file, unlocking it first
if let Some(lock) = lock_file {
drop(lock);
fs_err::remove_file(root.join(".lock"))?;
}
removal.num_files += 1;

// Remove the root directory
match fs_err::remove_dir(root) {
Ok(()) => {
removal.num_dirs += 1;
}
// On Windows, when `--force` is used, the `.lock` file can exist and be unremovable,
// so we make this non-fatal
Err(err) if err.kind() == io::ErrorKind::DirectoryNotEmpty => {
trace!("Failed to remove root cache directory: not empty");
}
Err(err) => return Err(err),
}

Ok(removal)
pub fn clear(&self, reporter: Box<dyn CleanReporter>) -> Result<Removal, io::Error> {
Remover::new(reporter).rm_rf(&self.root)
}

/// Remove a package from the cache.
///
/// Returns the number of entries removed from the cache.
pub fn remove(&self, name: &PackageName) -> io::Result<Removal> {
pub fn remove(&self, name: &PackageName) -> Result<Removal, io::Error> {
// Collect the set of referenced archives.
let references = self.find_archive_references()?;

@@ -576,7 +407,6 @@ impl Cache {
if entry.file_name() == "CACHEDIR.TAG"
|| entry.file_name() == ".gitignore"
|| entry.file_name() == ".git"
|| entry.file_name() == ".lock"
{
continue;
}

@@ -1155,10 +985,6 @@ pub enum CacheBucket {
Builds,
/// Reusable virtual environments used to invoke Python tools.
Environments,
/// Cached Python downloads
Python,
/// Downloaded tool binaries (e.g., Ruff).
Binaries,
}

impl CacheBucket {

@@ -1172,7 +998,7 @@ impl CacheBucket {
Self::Interpreter => "interpreter-v4",
// Note that when bumping this, you'll also need to bump it
// in `crates/uv/tests/it/cache_clean.rs`.
Self::Simple => "simple-v18",
Self::Simple => "simple-v16",
// Note that when bumping this, you'll also need to bump it
// in `crates/uv/tests/it/cache_prune.rs`.
Self::Wheels => "wheels-v5",

@@ -1181,8 +1007,6 @@ impl CacheBucket {
Self::Archive => "archive-v0",
Self::Builds => "builds-v0",
Self::Environments => "environments-v2",
Self::Python => "python-v0",
Self::Binaries => "binaries-v0",
}
}

@@ -1284,13 +1108,7 @@ impl CacheBucket {
let root = cache.bucket(self);
summary += rm_rf(root)?;
}
Self::Git
| Self::Interpreter
| Self::Archive
| Self::Builds
| Self::Environments
| Self::Python
| Self::Binaries => {
Self::Git | Self::Interpreter | Self::Archive | Self::Builds | Self::Environments => {
// Nothing to do.
}
}

@@ -1309,7 +1127,6 @@ impl CacheBucket {
Self::Archive,
Self::Builds,
Self::Environments,
Self::Binaries,
]
.iter()
.copied()

@@ -1386,31 +1203,36 @@ impl Refresh {

/// Combine two [`Refresh`] policies, taking the "max" of the two policies.
#[must_use]
pub fn combine(self, other: Self) -> Self {
pub fn combine(self, other: Refresh) -> Self {
/// Return the maximum of two timestamps.
fn max(a: Timestamp, b: Timestamp) -> Timestamp {
if a > b { a } else { b }
}

match (self, other) {
// If the policy is `None`, return the existing refresh policy.
// Take the `max` of the two timestamps.
(Self::None(t1), Self::None(t2)) => Self::None(t1.max(t2)),
(Self::None(t1), Self::All(t2)) => Self::All(t1.max(t2)),
(Self::None(t1), Self::Packages(packages, paths, t2)) => {
Self::Packages(packages, paths, t1.max(t2))
(Self::None(t1), Refresh::None(t2)) => Refresh::None(max(t1, t2)),
(Self::None(t1), Refresh::All(t2)) => Refresh::All(max(t1, t2)),
(Self::None(t1), Refresh::Packages(packages, paths, t2)) => {
Refresh::Packages(packages, paths, max(t1, t2))
}

// If the policy is `All`, refresh all packages.
(Self::All(t1), Self::None(t2) | Self::All(t2) | Self::Packages(.., t2)) => {
Self::All(t1.max(t2))
}
(Self::All(t1), Refresh::None(t2)) => Refresh::All(max(t1, t2)),
(Self::All(t1), Refresh::All(t2)) => Refresh::All(max(t1, t2)),
(Self::All(t1), Refresh::Packages(.., t2)) => Refresh::All(max(t1, t2)),

// If the policy is `Packages`, take the "max" of the two policies.
(Self::Packages(packages, paths, t1), Self::None(t2)) => {
Self::Packages(packages, paths, t1.max(t2))
(Self::Packages(packages, paths, t1), Refresh::None(t2)) => {
Refresh::Packages(packages, paths, max(t1, t2))
}
(Self::Packages(.., t1), Self::All(t2)) => Self::All(t1.max(t2)),
(Self::Packages(packages1, paths1, t1), Self::Packages(packages2, paths2, t2)) => {
Self::Packages(
(Self::Packages(.., t1), Refresh::All(t2)) => Refresh::All(max(t1, t2)),
(Self::Packages(packages1, paths1, t1), Refresh::Packages(packages2, paths2, t2)) => {
Refresh::Packages(
packages1.into_iter().chain(packages2).collect(),
paths1.into_iter().chain(paths2).collect(),
t1.max(t2),
max(t1, t2),
)
}
}
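`Refresh::combine` now relies on `Timestamp: Ord` (`t1.max(t2)`) instead of a local `max` helper,
and the `All` arm absorbs every right-hand side through a single or-pattern. The same lattice on a
simplified two-variant enum (a sketch of the hunk above, not uv's full type):

```rust
/// Simplified refresh policy: `All` dominates, and timestamps always
/// combine via `max`.
#[derive(Debug, PartialEq)]
enum Refresh {
    None(u64),
    All(u64),
}

impl Refresh {
    fn combine(self, other: Self) -> Self {
        match (self, other) {
            (Self::None(t1), Self::None(t2)) => Self::None(t1.max(t2)),
            (Self::None(t1), Self::All(t2))
            | (Self::All(t1), Self::None(t2) | Self::All(t2)) => Self::All(t1.max(t2)),
        }
    }
}

fn main() {
    assert_eq!(Refresh::None(3).combine(Refresh::All(5)), Refresh::All(5));
}
```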
@@ -10,7 +10,7 @@ use crate::CleanReporter;
/// Remove a file or directory and all its contents, returning a [`Removal`] with
/// the number of files and directories removed, along with a total byte count.
pub fn rm_rf(path: impl AsRef<Path>) -> io::Result<Removal> {
Remover::default().rm_rf(path, false)
Remover::default().rm_rf(path)
}

/// A builder for a [`Remover`] that can remove files and directories.

@@ -29,13 +29,9 @@ impl Remover {

/// Remove a file or directory and all its contents, returning a [`Removal`] with
/// the number of files and directories removed, along with a total byte count.
pub(crate) fn rm_rf(
&self,
path: impl AsRef<Path>,
skip_locked_file: bool,
) -> io::Result<Removal> {
pub(crate) fn rm_rf(&self, path: impl AsRef<Path>) -> io::Result<Removal> {
let mut removal = Removal::default();
removal.rm_rf(path.as_ref(), self.reporter.as_deref(), skip_locked_file)?;
removal.rm_rf(path.as_ref(), self.reporter.as_deref())?;
Ok(removal)
}
}

@@ -56,12 +52,7 @@ pub struct Removal {

impl Removal {
/// Recursively remove a file or directory and all its contents.
fn rm_rf(
&mut self,
path: &Path,
reporter: Option<&dyn CleanReporter>,
skip_locked_file: bool,
) -> io::Result<()> {
fn rm_rf(&mut self, path: &Path, reporter: Option<&dyn CleanReporter>) -> io::Result<()> {
let metadata = match fs_err::symlink_metadata(path) {
Ok(metadata) => metadata,
Err(err) if err.kind() == io::ErrorKind::NotFound => return Ok(()),

@@ -73,22 +64,9 @@ impl Removal {

// Remove the file.
self.total_bytes += metadata.len();
if metadata.is_symlink() {
#[cfg(windows)]
{
use std::os::windows::fs::FileTypeExt;

if metadata.file_type().is_symlink_dir() {
remove_dir(path)?;
} else {
remove_file(path)?;
}
}

#[cfg(not(windows))]
{
remove_file(path)?;
}
if cfg!(windows) && metadata.is_symlink() {
// Remove the junction.
remove_dir(path)?;
} else {
remove_file(path)?;
}

@@ -109,44 +87,18 @@ impl Removal {
if set_readable(dir).unwrap_or(false) {
// Retry the operation; if we _just_ `self.rm_rf(dir)` and continue,
// `walkdir` may give us duplicate entries for the directory.
return self.rm_rf(path, reporter, skip_locked_file);
return self.rm_rf(path, reporter);
}
}
}
}

let entry = entry?;

// Remove the exclusive lock last.
if skip_locked_file
&& entry.file_name() == ".lock"
&& entry
.path()
.strip_prefix(path)
.is_ok_and(|suffix| suffix == Path::new(".lock"))
{
continue;
}

if entry.file_type().is_symlink() && {
#[cfg(windows)]
{
use std::os::windows::fs::FileTypeExt;
entry.file_type().is_symlink_dir()
}
#[cfg(not(windows))]
{
false
}
} {
if cfg!(windows) && entry.file_type().is_symlink() {
// Remove the junction.
self.num_files += 1;
remove_dir(entry.path())?;
} else if entry.file_type().is_dir() {
// Remove the directory with the exclusive lock last.
if skip_locked_file && entry.path() == path {
continue;
}

self.num_dirs += 1;

// The contents should have been removed by now, but sometimes a race condition is
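The removal hunks replace a blanket `cfg!(windows) && is_symlink()` test with
`FileTypeExt::is_symlink_dir`: on Windows, a directory symlink or junction must be removed with
`remove_dir`, while file symlinks take `remove_file`. A sketch of that dispatch, using `std::fs`
directly rather than uv's `fs_err` wrappers:

```rust
use std::fs;
use std::io;
use std::path::Path;

/// Remove a symlink correctly on both OS families.
fn remove_symlink(path: &Path) -> io::Result<()> {
    let metadata = fs::symlink_metadata(path)?;
    #[cfg(windows)]
    {
        use std::os::windows::fs::FileTypeExt;
        // Directory symlinks and junctions need `remove_dir` on Windows.
        if metadata.file_type().is_symlink_dir() {
            return fs::remove_dir(path);
        }
    }
    let _ = &metadata; // unused on non-Windows targets
    fs::remove_file(path)
}
```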
@@ -15,7 +15,7 @@ pub enum WheelCache<'a> {
Path(&'a DisplaySafeUrl),
/// An editable dependency, which we key by URL.
Editable(&'a DisplaySafeUrl),
/// A Git dependency, which we key by URL (including LFS state) and SHA.
/// A Git dependency, which we key by URL and SHA.
///
/// Note that this variant only exists for source distributions; wheels can't be delivered
/// through Git.

@@ -26,20 +26,20 @@ impl WheelCache<'_> {
/// The root directory for a cache bucket.
pub fn root(&self) -> PathBuf {
match self {
Self::Index(IndexUrl::Pypi(_)) => WheelCacheKind::Pypi.root(),
Self::Index(url) => WheelCacheKind::Index
WheelCache::Index(IndexUrl::Pypi(_)) => WheelCacheKind::Pypi.root(),
WheelCache::Index(url) => WheelCacheKind::Index
.root()
.join(cache_digest(&CanonicalUrl::new(url.url()))),
Self::Url(url) => WheelCacheKind::Url
WheelCache::Url(url) => WheelCacheKind::Url
.root()
.join(cache_digest(&CanonicalUrl::new(url))),
Self::Path(url) => WheelCacheKind::Path
WheelCache::Path(url) => WheelCacheKind::Path
.root()
.join(cache_digest(&CanonicalUrl::new(url))),
Self::Editable(url) => WheelCacheKind::Editable
WheelCache::Editable(url) => WheelCacheKind::Editable
.root()
.join(cache_digest(&CanonicalUrl::new(url))),
Self::Git(url, sha) => WheelCacheKind::Git
WheelCache::Git(url, sha) => WheelCacheKind::Git
.root()
.join(cache_digest(&CanonicalUrl::new(url)))
.join(sha),
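`WheelCache::root` keys each bucket by a digest of the canonicalized URL, with the commit SHA as a
further subdirectory for Git sources. A hedged sketch of that layout; `DefaultHasher` here is only
illustrative, since uv's `cache_digest` must be stable across versions and platforms:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::path::PathBuf;

/// bucket root / digest(canonical URL) / SHA
fn git_wheel_dir(bucket_root: PathBuf, canonical_url: &str, sha: &str) -> PathBuf {
    let mut hasher = DefaultHasher::new();
    canonical_url.hash(&mut hasher);
    bucket_root
        .join(format!("{:016x}", hasher.finish()))
        .join(sha)
}
```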
@@ -1,10 +1,11 @@
[package]
name = "uv-cli"
version = "0.0.8"
description = "This is an internal component crate of uv"
version = "0.0.1"
description = "The command line interface for the uv binary."
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

@@ -16,14 +17,12 @@ doctest = false
workspace = true

[dependencies]
uv-auth = { workspace = true }
uv-cache = { workspace = true, features = ["clap"] }
uv-configuration = { workspace = true, features = ["clap"] }
uv-distribution-types = { workspace = true }
uv-install-wheel = { workspace = true, features = ["clap"], default-features = false }
uv-normalize = { workspace = true }
uv-pep508 = { workspace = true }
uv-preview = { workspace = true }
uv-pypi-types = { workspace = true }
uv-python = { workspace = true, features = ["clap", "schemars"] }
uv-redacted = { workspace = true }

@@ -43,7 +42,7 @@ serde = { workspace = true }
url = { workspace = true }

[dev-dependencies]
insta = { workspace = true }
insta = { version = "1.40.0", features = ["filters", "json"] }

[features]
default = []
@@ -1,13 +0,0 @@
<!-- This file is generated. DO NOT EDIT -->

# uv-cli

This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
is unstable and will have frequent breaking changes.

This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-cli).

See uv's
[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
for details on versioning.
@@ -266,6 +266,9 @@ enum Resolver {
/// These represent a subset of the `virtualenv` interface that uv supports by default.
#[derive(Args)]
pub struct VenvCompatArgs {
#[clap(long, hide = true)]
clear: bool,

#[clap(long, hide = true)]
no_seed: bool,

@@ -286,6 +289,12 @@ impl CompatArgs for VenvCompatArgs {
/// behavior. If an argument is passed that does _not_ match uv's behavior, this method will
/// return an error.
fn validate(&self) -> Result<()> {
if self.clear {
warn_user!(
"virtualenv's `--clear` has no effect (uv always clears the virtual environment)"
);
}

if self.no_seed {
warn_user!(
"virtualenv's `--no-seed` has no effect (uv omits seed packages by default)"
File diff suppressed because it is too large.

@@ -1,9 +1,8 @@
use anstream::eprintln;

use uv_cache::Refresh;
use uv_configuration::{BuildIsolation, Reinstall, Upgrade};
use uv_distribution_types::{ConfigSettings, PackageConfigSettings, Requirement};
use uv_resolver::{ExcludeNewer, ExcludeNewerPackage, PrereleaseMode};
use uv_configuration::ConfigSettings;
use uv_resolver::PrereleaseMode;
use uv_settings::{Combine, PipOptions, ResolverInstallerOptions, ResolverOptions};
use uv_warnings::owo_colors::OwoColorize;

@@ -63,14 +62,12 @@ impl From<ResolverArgs> for PipOptions {
pre,
fork_strategy,
config_setting,
config_settings_package,
no_build_isolation,
no_build_isolation_package,
build_isolation,
exclude_newer,
link_mode,
no_sources,
exclude_newer_package,
} = args;

Self {

@@ -87,18 +84,12 @@ impl From<ResolverArgs> for PipOptions {
},
config_settings: config_setting
.map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
config_settings_package: config_settings_package.map(|config_settings| {
config_settings
.into_iter()
.collect::<PackageConfigSettings>()
}),
no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
no_build_isolation_package: Some(no_build_isolation_package),
exclude_newer,
exclude_newer_package: exclude_newer_package.map(ExcludeNewerPackage::from_iter),
link_mode,
no_sources: if no_sources { Some(true) } else { None },
..Self::from(index_args)
..PipOptions::from(index_args)
}
}
}

@@ -113,7 +104,6 @@ impl From<InstallerArgs> for PipOptions {
index_strategy,
keyring_provider,
config_setting,
config_settings_package,
no_build_isolation,
build_isolation,
exclude_newer,

@@ -121,7 +111,6 @@ impl From<InstallerArgs> for PipOptions {
compile_bytecode,
no_compile_bytecode,
no_sources,
exclude_newer_package,
} = args;

Self {

@@ -131,18 +120,12 @@ impl From<InstallerArgs> for PipOptions {
keyring_provider,
config_settings: config_setting
.map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
config_settings_package: config_settings_package.map(|config_settings| {
config_settings
.into_iter()
.collect::<PackageConfigSettings>()
}),
no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
exclude_newer,
exclude_newer_package: exclude_newer_package.map(ExcludeNewerPackage::from_iter),
link_mode,
compile_bytecode: flag(compile_bytecode, no_compile_bytecode, "compile-bytecode"),
no_sources: if no_sources { Some(true) } else { None },
..Self::from(index_args)
..PipOptions::from(index_args)
}
}
}

@@ -164,7 +147,6 @@ impl From<ResolverInstallerArgs> for PipOptions {
pre,
fork_strategy,
config_setting,
config_settings_package,
no_build_isolation,
no_build_isolation_package,
build_isolation,

@@ -173,7 +155,6 @@ impl From<ResolverInstallerArgs> for PipOptions {
compile_bytecode,
no_compile_bytecode,
no_sources,
exclude_newer_package,
} = args;

Self {

@@ -192,19 +173,13 @@ impl From<ResolverInstallerArgs> for PipOptions {
fork_strategy,
config_settings: config_setting
.map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
config_settings_package: config_settings_package.map(|config_settings| {
config_settings
.into_iter()
.collect::<PackageConfigSettings>()
}),
no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
no_build_isolation_package: Some(no_build_isolation_package),
exclude_newer,
exclude_newer_package: exclude_newer_package.map(ExcludeNewerPackage::from_iter),
link_mode,
compile_bytecode: flag(compile_bytecode, no_compile_bytecode, "compile-bytecode"),
no_sources: if no_sources { Some(true) } else { None },
..Self::from(index_args)
..PipOptions::from(index_args)
}
}
}

@@ -222,7 +197,7 @@ impl From<FetchArgs> for PipOptions {
index_strategy,
keyring_provider,
exclude_newer,
..Self::from(index_args)
..PipOptions::from(index_args)
}
}
}

@@ -263,7 +238,7 @@ impl From<IndexArgs> for PipOptions {
.filter_map(Maybe::into_option)
.collect()
}),
..Self::default()
..PipOptions::default()
}
}
}

@@ -285,14 +260,12 @@ pub fn resolver_options(
pre,
fork_strategy,
config_setting,
config_settings_package,
no_build_isolation,
no_build_isolation_package,
build_isolation,
exclude_newer,
link_mode,
no_sources,
exclude_newer_package,
} = resolver_args;

let BuildOptionsArgs {

@@ -334,10 +307,8 @@ pub fn resolver_options(
.filter_map(Maybe::into_option)
.collect()
}),
upgrade: Upgrade::from_args(
flag(upgrade, no_upgrade, "no-upgrade"),
upgrade_package.into_iter().map(Requirement::from).collect(),
),
upgrade: flag(upgrade, no_upgrade, "no-upgrade"),
upgrade_package: Some(upgrade_package),
index_strategy,
keyring_provider,
resolution,

@@ -350,23 +321,10 @@ pub fn resolver_options(
dependency_metadata: None,
config_settings: config_setting
.map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
config_settings_package: config_settings_package.map(|config_settings| {
config_settings
.into_iter()
.collect::<PackageConfigSettings>()
}),
build_isolation: BuildIsolation::from_args(
flag(no_build_isolation, build_isolation, "build-isolation"),
no_build_isolation_package,
),
extra_build_dependencies: None,
extra_build_variables: None,
exclude_newer: ExcludeNewer::from_args(
exclude_newer,
exclude_newer_package.unwrap_or_default(),
),
no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
no_build_isolation_package: Some(no_build_isolation_package),
exclude_newer,
link_mode,
torch_backend: None,
no_build: flag(no_build, build, "build"),
no_build_package: Some(no_build_package),
no_binary: flag(no_binary, binary, "binary"),

@@ -395,12 +353,10 @@ pub fn resolver_installer_options(
pre,
fork_strategy,
config_setting,
config_settings_package,
no_build_isolation,
no_build_isolation_package,
build_isolation,
exclude_newer,
exclude_newer_package,
link_mode,
compile_bytecode,
no_compile_bytecode,

@@ -448,14 +404,18 @@ pub fn resolver_installer_options(
.filter_map(Maybe::into_option)
.collect()
}),
upgrade: Upgrade::from_args(
flag(upgrade, no_upgrade, "upgrade"),
upgrade_package.into_iter().map(Requirement::from).collect(),
),
reinstall: Reinstall::from_args(
flag(reinstall, no_reinstall, "reinstall"),
reinstall_package,
),
upgrade: flag(upgrade, no_upgrade, "upgrade"),
upgrade_package: if upgrade_package.is_empty() {
None
} else {
Some(upgrade_package)
},
reinstall: flag(reinstall, no_reinstall, "reinstall"),
reinstall_package: if reinstall_package.is_empty() {
None
} else {
Some(reinstall_package)
},
index_strategy,
keyring_provider,
resolution,

@@ -468,19 +428,13 @@ pub fn resolver_installer_options(
dependency_metadata: None,
config_settings: config_setting
.map(|config_settings| config_settings.into_iter().collect::<ConfigSettings>()),
config_settings_package: config_settings_package.map(|config_settings| {
config_settings
.into_iter()
.collect::<PackageConfigSettings>()
}),
build_isolation: BuildIsolation::from_args(
flag(no_build_isolation, build_isolation, "build-isolation"),
no_build_isolation_package,
),
extra_build_dependencies: None,
extra_build_variables: None,
no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
no_build_isolation_package: if no_build_isolation_package.is_empty() {
None
} else {
Some(no_build_isolation_package)
},
exclude_newer,
exclude_newer_package: exclude_newer_package.map(ExcludeNewerPackage::from_iter),
link_mode,
compile_bytecode: flag(compile_bytecode, no_compile_bytecode, "compile-bytecode"),
no_build: flag(no_build, build, "build"),

@@ -496,6 +450,5 @@ pub fn resolver_installer_options(
Some(no_binary_package)
},
no_sources: if no_sources { Some(true) } else { None },
torch_backend: None,
}
}
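Several hunks above funnel `--foo`/`--no-foo` pairs through a `flag(...)` helper and map empty
package lists to `None` so that "not passed" stays distinct from "passed but empty". A sketch of
both conventions; the signatures are assumptions inferred from the call sites, not uv's exact
helpers:

```rust
/// Resolve a `--foo` / `--no-foo` pair into an optional setting; clap
/// normally rejects passing both flags, so that arm is simplified here.
fn flag(yes: bool, no: bool, name: &str) -> Option<bool> {
    match (yes, no) {
        (true, false) => Some(true),
        (false, true) => Some(false),
        (false, false) => None,
        (true, true) => panic!("`--{name}` and `--no-{name}` are mutually exclusive"),
    }
}

/// Empty CLI lists serialize as "unset" rather than "set to nothing".
fn packages_or_none(packages: Vec<String>) -> Option<Vec<String>> {
    if packages.is_empty() { None } else { Some(packages) }
}
```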
@@ -3,9 +3,7 @@
use std::fmt;

use serde::Serialize;

use uv_normalize::PackageName;
use uv_pep508::uv_pep440::Version;
use uv_pep508::{PackageName, uv_pep440::Version};

/// Information about the git repository where uv was built from.
#[derive(Serialize)]
@@ -1,13 +1,7 @@
[package]
name = "uv-client"
version = "0.0.8"
description = "This is an internal component crate of uv"
version = "0.0.1"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

[lib]
doctest = false

@@ -28,7 +22,6 @@ uv-normalize = { workspace = true }
uv-pep440 = { workspace = true }
uv-pep508 = { workspace = true }
uv-platform-tags = { workspace = true }
uv-preview = { workspace = true }
uv-pypi-types = { workspace = true }
uv-small-str = { workspace = true }
uv-redacted = { workspace = true }

@@ -38,14 +31,12 @@ uv-version = { workspace = true }
uv-warnings = { workspace = true }

anyhow = { workspace = true }
astral-tl = { workspace = true }
async-trait = { workspace = true }
async_http_range_reader = { workspace = true }
async_zip = { workspace = true }
bytecheck = { workspace = true }
fs-err = { workspace = true, features = ["tokio"] }
futures = { workspace = true }
h2 = { workspace = true }
html-escape = { workspace = true }
http = { workspace = true }
itertools = { workspace = true }

@@ -61,6 +52,7 @@ serde = { workspace = true }
serde_json = { workspace = true }
sys-info = { workspace = true }
thiserror = { workspace = true }
tl = { workspace = true }
tokio = { workspace = true }
tokio-util = { workspace = true }
tracing = { workspace = true }

@@ -68,13 +60,9 @@ url = { workspace = true }

[dev-dependencies]
anyhow = { workspace = true }
http-body-util = { workspace = true }
hyper = { workspace = true }
hyper-util = { workspace = true }
insta = { workspace = true }
rcgen = { workspace = true }
rustls = { workspace = true }
http-body-util = { version = "0.1.2" }
hyper = { version = "1.4.1", features = ["server", "http1"] }
hyper-util = { version = "0.1.8", features = ["tokio"] }
insta = { version = "1.40.0", features = ["filters", "json", "redactions"] }
tokio = { workspace = true }
tokio-rustls = { workspace = true }
wiremock = { workspace = true }
tempfile = { workspace = true }
@@ -1,13 +1,5 @@
<!-- This file is generated. DO NOT EDIT -->
# `pypi-client`

# uv-client
A general-use client for interacting with PyPI.

This crate is an internal component of [uv](https://crates.io/crates/uv). The Rust API exposed here
is unstable and will have frequent breaking changes.

This version (0.0.8) is a component of [uv 0.9.18](https://crates.io/crates/uv/0.9.18). The source
can be found [here](https://github.com/astral-sh/uv/blob/0.9.18/crates/uv-client).

See uv's
[crate versioning policy](https://docs.astral.sh/uv/reference/policies/versioning/#crate-versioning)
for details on versioning.
Loosely modeled after Orogene's `oro-client`.
@@ -1,12 +1,12 @@
use std::error::Error;
use std::fmt::Debug;
use std::fmt::Write;
use std::num::ParseIntError;
use std::path::Path;
use std::sync::Arc;
use std::time::Duration;
use std::{env, io, iter};

use anyhow::Context;
use anyhow::anyhow;
use http::{
HeaderMap, HeaderName, HeaderValue, Method, StatusCode,

@@ -21,36 +21,32 @@ use reqwest_middleware::{ClientWithMiddleware, Middleware};
use reqwest_retry::policies::ExponentialBackoff;
use reqwest_retry::{
DefaultRetryableStrategy, RetryTransientMiddleware, Retryable, RetryableStrategy,
default_on_request_error,
};
use thiserror::Error;
use tracing::{debug, trace};
use url::ParseError;
use url::Url;

use uv_auth::{AuthMiddleware, Credentials, CredentialsCache, Indexes, PyxTokenStore};
use uv_auth::Credentials;
use uv_auth::{AuthMiddleware, Indexes};
use uv_configuration::{KeyringProviderType, TrustedHost};
use uv_fs::Simplified;
use uv_pep508::MarkerEnvironment;
use uv_platform_tags::Platform;
use uv_preview::Preview;
use uv_redacted::DisplaySafeUrl;
use uv_redacted::DisplaySafeUrlError;
use uv_static::EnvVars;
use uv_version::version;
use uv_warnings::warn_user_once;

use crate::Connectivity;
use crate::linehaul::LineHaul;
use crate::middleware::OfflineMiddleware;
use crate::tls::read_identity;
use crate::{Connectivity, WrappedReqwestError};

pub const DEFAULT_RETRIES: u32 = 3;

/// Maximum number of redirects to follow before giving up.
///
/// This is the default used by [`reqwest`].
pub const DEFAULT_MAX_REDIRECTS: u32 = 10;
const DEFAULT_MAX_REDIRECTS: u32 = 10;

/// Selectively skip parts or the entire auth middleware.
#[derive(Debug, Clone, Copy, Default)]

@@ -69,19 +65,15 @@ pub enum AuthIntegration {
#[derive(Debug, Clone)]
pub struct BaseClientBuilder<'a> {
keyring: KeyringProviderType,
preview: Preview,
allow_insecure_host: Vec<TrustedHost>,
native_tls: bool,
built_in_root_certs: bool,
retries: u32,
pub connectivity: Connectivity,
markers: Option<&'a MarkerEnvironment>,
platform: Option<&'a Platform>,
auth_integration: AuthIntegration,
/// Global authentication cache for a uv invocation to share credentials across uv clients.
credentials_cache: Arc<CredentialsCache>,
indexes: Indexes,
timeout: Duration,
default_timeout: Duration,
extra_middleware: Option<ExtraMiddleware>,
proxies: Vec<Proxy>,
redirect_policy: RedirectPolicy,

@@ -89,10 +81,6 @@ pub struct BaseClientBuilder<'a> {
///
/// A policy allowing propagation is insecure and should only be available for test code.
cross_origin_credential_policy: CrossOriginCredentialsPolicy,
/// Optional custom reqwest client to use instead of creating a new one.
custom_client: Option<Client>,
/// uv subcommand in which this client is being used
subcommand: Option<Vec<String>>,
}

/// The policy for handling HTTP redirects.

@@ -104,16 +92,13 @@ pub enum RedirectPolicy {
BypassMiddleware,
/// Handle redirects manually, re-triggering our custom middleware for each request.
RetriggerMiddleware,
/// No redirect for non-cloneable (e.g., streaming) requests with custom redirect logic.
NoRedirect,
}

impl RedirectPolicy {
pub fn reqwest_policy(self) -> reqwest::redirect::Policy {
match self {
Self::BypassMiddleware => reqwest::redirect::Policy::default(),
Self::RetriggerMiddleware => reqwest::redirect::Policy::none(),
Self::NoRedirect => reqwest::redirect::Policy::none(),
RedirectPolicy::BypassMiddleware => reqwest::redirect::Policy::default(),
RedirectPolicy::RetriggerMiddleware => reqwest::redirect::Policy::none(),
}
}
}

@@ -132,61 +117,32 @@ impl Debug for ExtraMiddleware {

impl Default for BaseClientBuilder<'_> {
fn default() -> Self {
Self::new()
}
}

impl BaseClientBuilder<'_> {
pub fn new() -> Self {
Self {
keyring: KeyringProviderType::default(),
preview: Preview::default(),
allow_insecure_host: vec![],
native_tls: false,
built_in_root_certs: false,
connectivity: Connectivity::Online,
retries: DEFAULT_RETRIES,
markers: None,
platform: None,
auth_integration: AuthIntegration::default(),
credentials_cache: Arc::new(CredentialsCache::default()),
indexes: Indexes::new(),
timeout: Duration::from_secs(30),
default_timeout: Duration::from_secs(30),
extra_middleware: None,
proxies: vec![],
redirect_policy: RedirectPolicy::default(),
cross_origin_credential_policy: CrossOriginCredentialsPolicy::Secure,
custom_client: None,
subcommand: None,
}
}
}

impl<'a> BaseClientBuilder<'a> {
pub fn new(
connectivity: Connectivity,
native_tls: bool,
allow_insecure_host: Vec<TrustedHost>,
preview: Preview,
timeout: Duration,
retries: u32,
) -> Self {
Self {
preview,
allow_insecure_host,
native_tls,
retries,
connectivity,
timeout,
..Self::default()
}
}

/// Use a custom reqwest client instead of creating a new one.
///
/// This allows you to provide your own reqwest client with custom configuration.
/// Note that some configuration options from this builder will still be applied
/// to the client via middleware.
#[must_use]
pub fn custom_client(mut self, client: Client) -> Self {
self.custom_client = Some(client);
self
}

#[must_use]
pub fn keyring(mut self, keyring_type: KeyringProviderType) -> Self {
self.keyring = keyring_type;

@@ -211,15 +167,28 @@ impl<'a> BaseClientBuilder<'a> {
self
}

#[must_use]
pub fn native_tls(mut self, native_tls: bool) -> Self {
self.native_tls = native_tls;
self
/// Read the retry count from [`EnvVars::UV_HTTP_RETRIES`] if set; otherwise, make no change.
///
/// Errors when [`EnvVars::UV_HTTP_RETRIES`] is not a valid u32.
pub fn retries_from_env(self) -> anyhow::Result<Self> {
// TODO(zanieb): We should probably parse this in another layer, but there's not a natural
// fit for it right now
if let Some(value) = env::var_os(EnvVars::UV_HTTP_RETRIES) {
Ok(self.retries(
value
.to_string_lossy()
.as_ref()
.parse::<u32>()
.context("Failed to parse `UV_HTTP_RETRIES`")?,
))
} else {
Ok(self)
}
}

#[must_use]
pub fn built_in_root_certs(mut self, built_in_root_certs: bool) -> Self {
self.built_in_root_certs = built_in_root_certs;
pub fn native_tls(mut self, native_tls: bool) -> Self {
self.native_tls = native_tls;
self
}
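`retries_from_env` only overrides the retry count when `UV_HTTP_RETRIES` is set, and turns a parse
failure into a hard error instead of silently ignoring it. A standalone sketch using `anyhow`, as
the hunk does (the env name matches; the function shape is simplified):

```rust
use std::env;

use anyhow::{Context, Result};

/// Unset means "keep the default"; set-but-invalid is an error.
fn retries_from_env(default: u32) -> Result<u32> {
    match env::var_os("UV_HTTP_RETRIES") {
        Some(value) => value
            .to_string_lossy()
            .as_ref()
            .parse::<u32>()
            .context("Failed to parse `UV_HTTP_RETRIES`"),
        None => Ok(default),
    }
}
```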
@@ -248,8 +217,8 @@ impl<'a> BaseClientBuilder<'a> {
}

#[must_use]
pub fn timeout(mut self, timeout: Duration) -> Self {
self.timeout = timeout;
pub fn default_timeout(mut self, default_timeout: Duration) -> Self {
self.default_timeout = default_timeout;
self
}

@@ -283,36 +252,12 @@ impl<'a> BaseClientBuilder<'a> {
self
}

#[must_use]
pub fn subcommand(mut self, subcommand: Vec<String>) -> Self {
self.subcommand = Some(subcommand);
self
}

pub fn credentials_cache(&self) -> &CredentialsCache {
&self.credentials_cache
}

/// See [`CredentialsCache::store_credentials_from_url`].
pub fn store_credentials_from_url(&self, url: &DisplaySafeUrl) -> bool {
self.credentials_cache.store_credentials_from_url(url)
}

/// See [`CredentialsCache::store_credentials`].
pub fn store_credentials(&self, url: &DisplaySafeUrl, credentials: Credentials) {
self.credentials_cache.store_credentials(url, credentials);
}

pub fn is_native_tls(&self) -> bool {
self.native_tls
}

pub fn is_offline(&self) -> bool {
matches!(self.connectivity, Connectivity::Offline)
}

/// Create a [`RetryPolicy`] for the client.
pub fn retry_policy(&self) -> ExponentialBackoff {
fn retry_policy(&self) -> ExponentialBackoff {
let mut builder = ExponentialBackoff::builder();
if env::var_os(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY).is_some() {
builder = builder.retry_bounds(Duration::from_millis(0), Duration::from_millis(0));

@@ -321,14 +266,63 @@ impl<'a> BaseClientBuilder<'a> {
}

pub fn build(&self) -> BaseClient {
let timeout = self.timeout;
// Create user agent.
let mut user_agent_string = format!("uv/{}", version());

// Add linehaul metadata.
if let Some(markers) = self.markers {
let linehaul = LineHaul::new(markers, self.platform);
if let Ok(output) = serde_json::to_string(&linehaul) {
let _ = write!(user_agent_string, " {output}");
}
}

// Check for the presence of an `SSL_CERT_FILE`.
let ssl_cert_file_exists = env::var_os(EnvVars::SSL_CERT_FILE).is_some_and(|path| {
let path_exists = Path::new(&path).exists();
if !path_exists {
warn_user_once!(
"Ignoring invalid `SSL_CERT_FILE`. File does not exist: {}.",
path.simplified_display().cyan()
);
}
path_exists
});

// Timeout options, matching https://doc.rust-lang.org/nightly/cargo/reference/config.html#httptimeout
// `UV_REQUEST_TIMEOUT` is provided for backwards compatibility with v0.1.6
let timeout = env::var(EnvVars::UV_HTTP_TIMEOUT)
.or_else(|_| env::var(EnvVars::UV_REQUEST_TIMEOUT))
.or_else(|_| env::var(EnvVars::HTTP_TIMEOUT))
.and_then(|value| {
value.parse::<u64>()
.map(Duration::from_secs)
.or_else(|_| {
// On parse error, warn and use the default timeout
warn_user_once!("Ignoring invalid value from environment for `UV_HTTP_TIMEOUT`. Expected an integer number of seconds, got \"{value}\".");
Ok(self.default_timeout)
})
})
.unwrap_or(self.default_timeout);
debug!("Using request timeout of {}s", timeout.as_secs());

// Use the custom client if provided, otherwise create a new one
let (raw_client, raw_dangerous_client) = match &self.custom_client {
Some(client) => (client.clone(), client.clone()),
None => self.create_secure_and_insecure_clients(timeout),
};
// Create a secure client that validates certificates.
let raw_client = self.create_client(
&user_agent_string,
timeout,
ssl_cert_file_exists,
Security::Secure,
self.redirect_policy,
);

// Create an insecure client that accepts invalid certificates.
let raw_dangerous_client = self.create_client(
&user_agent_string,
timeout,
ssl_cert_file_exists,
Security::Insecure,
self.redirect_policy,
);

// Wrap in any relevant middleware and handle connectivity.
let client = RedirectClientWithMiddleware {
@ -351,7 +345,6 @@ impl<'a> BaseClientBuilder<'a> {
|
|||
dangerous_client,
|
||||
raw_dangerous_client,
|
||||
timeout,
|
||||
credentials_cache: self.credentials_cache.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -378,112 +371,14 @@ impl<'a> BaseClientBuilder<'a> {
|
|||
raw_client: existing.raw_client.clone(),
|
||||
raw_dangerous_client: existing.raw_dangerous_client.clone(),
|
||||
timeout: existing.timeout,
|
||||
credentials_cache: existing.credentials_cache.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn create_secure_and_insecure_clients(&self, timeout: Duration) -> (Client, Client) {
|
||||
// Create user agent.
|
||||
let mut user_agent_string = format!("uv/{}", version());
|
||||
|
||||
// Add linehaul metadata.
|
||||
let linehaul = LineHaul::new(self.markers, self.platform, self.subcommand.clone());
|
||||
if let Ok(output) = serde_json::to_string(&linehaul) {
|
||||
let _ = write!(user_agent_string, " {output}");
|
||||
}
|
||||
|
||||
// Checks for the presence of `SSL_CERT_FILE`.
|
||||
// Certificate loading support is delegated to `rustls-native-certs`.
|
||||
// See https://github.com/rustls/rustls-native-certs/blob/813790a297ad4399efe70a8e5264ca1b420acbec/src/lib.rs#L118-L125
|
||||
let ssl_cert_file_exists = env::var_os(EnvVars::SSL_CERT_FILE).is_some_and(|path| {
|
||||
let path_exists = Path::new(&path).exists();
|
||||
if !path_exists {
|
||||
warn_user_once!(
|
||||
"Ignoring invalid `SSL_CERT_FILE`. File does not exist: {}.",
|
||||
path.simplified_display().cyan()
|
||||
);
|
||||
}
|
||||
path_exists
|
||||
});
|
||||
|
||||
// Checks for the presence of `SSL_CERT_DIR`.
|
||||
// Certificate loading support is delegated to `rustls-native-certs`.
|
||||
// See https://github.com/rustls/rustls-native-certs/blob/813790a297ad4399efe70a8e5264ca1b420acbec/src/lib.rs#L118-L125
|
||||
let ssl_cert_dir_exists = env::var_os(EnvVars::SSL_CERT_DIR)
|
||||
.filter(|v| !v.is_empty())
|
||||
.is_some_and(|dirs| {
|
||||
// Parse `SSL_CERT_DIR`, with support for multiple entries using
|
||||
// a platform-specific delimiter (`:` on Unix, `;` on Windows)
|
||||
let (existing, missing): (Vec<_>, Vec<_>) =
|
||||
env::split_paths(&dirs).partition(|p| p.exists());
|
||||
|
||||
if existing.is_empty() {
|
||||
let end_note = if missing.len() == 1 {
|
||||
"The directory does not exist."
|
||||
} else {
|
||||
"The entries do not exist."
|
||||
};
|
||||
warn_user_once!(
|
||||
"Ignoring invalid `SSL_CERT_DIR`. {end_note}: {}.",
|
||||
missing
|
||||
.iter()
|
||||
.map(Simplified::simplified_display)
|
||||
.join(", ")
|
||||
.cyan()
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Warn on any missing entries
|
||||
if !missing.is_empty() {
|
||||
let end_note = if missing.len() == 1 {
|
||||
"The following directory does not exist:"
|
||||
} else {
|
||||
"The following entries do not exist:"
|
||||
};
|
||||
warn_user_once!(
|
||||
"Invalid entries in `SSL_CERT_DIR`. {end_note}: {}.",
|
||||
missing
|
||||
.iter()
|
||||
.map(Simplified::simplified_display)
|
||||
.join(", ")
|
||||
.cyan()
|
||||
);
|
||||
}
|
||||
|
||||
// Proceed while ignoring missing entries
|
||||
true
|
||||
});
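// A hedged sketch of the `SSL_CERT_DIR` splitting above: `env::split_paths`
// honors the platform path-list separator (`:` on Unix, `;` on Windows), and
// missing entries are filtered out rather than failing the whole variable.
// The function name is illustrative only.
fn existing_cert_dirs_sketch() -> Vec<std::path::PathBuf> {
    let Some(dirs) = std::env::var_os("SSL_CERT_DIR") else {
        return Vec::new();
    };
    std::env::split_paths(&dirs)
        .filter(|path| path.exists())
        .collect()
}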

        // Create a secure client that validates certificates.
        let raw_client = self.create_client(
            &user_agent_string,
            timeout,
            ssl_cert_file_exists,
            ssl_cert_dir_exists,
            Security::Secure,
            self.redirect_policy,
        );

        // Create an insecure client that accepts invalid certificates.
        let raw_dangerous_client = self.create_client(
            &user_agent_string,
            timeout,
            ssl_cert_file_exists,
            ssl_cert_dir_exists,
            Security::Insecure,
            self.redirect_policy,
        );

        (raw_client, raw_dangerous_client)
    }

    fn create_client(
        &self,
        user_agent: &str,
        timeout: Duration,
        ssl_cert_file_exists: bool,
        ssl_cert_dir_exists: bool,
        security: Security,
        redirect_policy: RedirectPolicy,
    ) -> Client {

@@ -493,7 +388,7 @@ impl<'a> BaseClientBuilder<'a> {
            .user_agent(user_agent)
            .pool_max_idle_per_host(20)
            .read_timeout(timeout)
            .tls_built_in_root_certs(self.built_in_root_certs)
            .tls_built_in_root_certs(false)
            .redirect(redirect_policy.reqwest_policy());

        // If necessary, accept invalid certificates.

@@ -502,7 +397,7 @@ impl<'a> BaseClientBuilder<'a> {
            Security::Insecure => client_builder.danger_accept_invalid_certs(true),
        };

        let client_builder = if self.native_tls || ssl_cert_file_exists || ssl_cert_dir_exists {
        let client_builder = if self.native_tls || ssl_cert_file_exists {
            client_builder.tls_built_in_native_certs(true)
        } else {
            client_builder.tls_built_in_webpki_certs(true)

@@ -536,30 +431,6 @@ impl<'a> BaseClientBuilder<'a> {
    fn apply_middleware(&self, client: Client) -> ClientWithMiddleware {
        match self.connectivity {
            Connectivity::Online => {
                // Create a base client to use in the authentication middleware.
                let base_client = {
                    let mut client = reqwest_middleware::ClientBuilder::new(client.clone());

                    // Avoid uncloneable errors with a streaming body during publish.
                    if self.retries > 0 {
                        // Initialize the retry strategy.
                        let retry_strategy = RetryTransientMiddleware::new_with_policy_and_strategy(
                            self.retry_policy(),
                            UvRetryableStrategy,
                        );
                        client = client.with(retry_strategy);
                    }

                    // When supplied, add the extra middleware.
                    if let Some(extra_middleware) = &self.extra_middleware {
                        for middleware in &extra_middleware.0 {
                            client = client.with_arc(middleware.clone());
                        }
                    }

                    client.build()
                };

                let mut client = reqwest_middleware::ClientBuilder::new(client);

                // Avoid uncloneable errors with a streaming body during publish.

@@ -572,38 +443,20 @@ impl<'a> BaseClientBuilder<'a> {
                    client = client.with(retry_strategy);
                }

                // When supplied, add the extra middleware.
                if let Some(extra_middleware) = &self.extra_middleware {
                    for middleware in &extra_middleware.0 {
                        client = client.with_arc(middleware.clone());
                    }
                }

                // Initialize the authentication middleware to set headers.
                match self.auth_integration {
                    AuthIntegration::Default => {
                        let mut auth_middleware = AuthMiddleware::new()
                            .with_cache_arc(self.credentials_cache.clone())
                            .with_base_client(base_client)
                        let auth_middleware = AuthMiddleware::new()
                            .with_indexes(self.indexes.clone())
                            .with_keyring(self.keyring.to_provider())
                            .with_preview(self.preview);
                        if let Ok(token_store) = PyxTokenStore::from_settings() {
                            auth_middleware = auth_middleware.with_pyx_token_store(token_store);
                        }
                            .with_keyring(self.keyring.to_provider());
                        client = client.with(auth_middleware);
                    }
                    AuthIntegration::OnlyAuthenticated => {
                        let mut auth_middleware = AuthMiddleware::new()
                            .with_cache_arc(self.credentials_cache.clone())
                            .with_base_client(base_client)
                        let auth_middleware = AuthMiddleware::new()
                            .with_indexes(self.indexes.clone())
                            .with_keyring(self.keyring.to_provider())
                            .with_preview(self.preview)
                            .with_only_authenticated(true);
                        if let Ok(token_store) = PyxTokenStore::from_settings() {
                            auth_middleware = auth_middleware.with_pyx_token_store(token_store);
                        }

                        client = client.with(auth_middleware);
                    }
                    AuthIntegration::NoAuthMiddleware => {

@@ -611,6 +464,13 @@ impl<'a> BaseClientBuilder<'a> {
                    }
                }

                // When supplied, add the extra middleware.
                if let Some(extra_middleware) = &self.extra_middleware {
                    for middleware in &extra_middleware.0 {
                        client = client.with_arc(middleware.clone());
                    }
                }

                client.build()
            }
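// A hedged sketch of the middleware stacking pattern above, using the public
// `reqwest-middleware`/`reqwest-retry` APIs; the retry count is an example
// value and the function name is illustrative.
fn wrap_with_retries_sketch(client: reqwest::Client) -> reqwest_middleware::ClientWithMiddleware {
    let policy =
        reqwest_retry::policies::ExponentialBackoff::builder().build_with_max_retries(3);
    reqwest_middleware::ClientBuilder::new(client)
        // Retries run first, then any additional middleware layers.
        .with(reqwest_retry::RetryTransientMiddleware::new_with_policy(policy))
        .build()
}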
            Connectivity::Offline => reqwest_middleware::ClientBuilder::new(client)

@@ -639,8 +499,6 @@ pub struct BaseClient {
    allow_insecure_host: Vec<TrustedHost>,
    /// The number of retries to attempt on transient errors.
    retries: u32,
    /// Global authentication cache for a uv invocation to share credentials across uv clients.
    credentials_cache: Arc<CredentialsCache>,
}

#[derive(Debug, Clone, Copy)]

@@ -663,7 +521,7 @@ impl BaseClient {

    /// Executes a request, applying redirect policy.
    pub async fn execute(&self, req: Request) -> reqwest_middleware::Result<Response> {
        let client = self.for_host(&DisplaySafeUrl::from_url(req.url().clone()));
        let client = self.for_host(&DisplaySafeUrl::from(req.url().clone()));
        client.execute(req).await
    }

@@ -686,15 +544,7 @@ impl BaseClient {

    /// The [`RetryPolicy`] for the client.
    pub fn retry_policy(&self) -> ExponentialBackoff {
        let mut builder = ExponentialBackoff::builder();
        if env::var_os(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY).is_some() {
            builder = builder.retry_bounds(Duration::from_millis(0), Duration::from_millis(0));
        }
        builder.build_with_max_retries(self.retries)
    }

    pub fn credentials_cache(&self) -> &CredentialsCache {
        &self.credentials_cache
        ExponentialBackoff::builder().build_with_max_retries(self.retries)
    }
}
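// A hedged sketch of consuming the policy above: per `reqwest-retry`,
// `should_retry` takes the request start time and the number of past retries
// and yields a `RetryDecision`. The retry count is an example value.
fn retry_decision_sketch(retries: u32) -> reqwest_retry::RetryDecision {
    use reqwest_retry::RetryPolicy;
    let policy =
        reqwest_retry::policies::ExponentialBackoff::builder().build_with_max_retries(retries);
    // Zero past retries: the policy decides whether a first retry fits the budget.
    policy.should_retry(std::time::SystemTime::now(), 0)
}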

@@ -713,17 +563,17 @@ pub struct RedirectClientWithMiddleware {

impl RedirectClientWithMiddleware {
    /// Convenience method to make a `GET` request to a URL.
    pub fn get<U: IntoUrl>(&self, url: U) -> RequestBuilder<'_> {
    pub fn get<U: IntoUrl>(&self, url: U) -> RequestBuilder {
        RequestBuilder::new(self.client.get(url), self)
    }

    /// Convenience method to make a `POST` request to a URL.
    pub fn post<U: IntoUrl>(&self, url: U) -> RequestBuilder<'_> {
    pub fn post<U: IntoUrl>(&self, url: U) -> RequestBuilder {
        RequestBuilder::new(self.client.post(url), self)
    }

    /// Convenience method to make a `HEAD` request to a URL.
    pub fn head<U: IntoUrl>(&self, url: U) -> RequestBuilder<'_> {
    pub fn head<U: IntoUrl>(&self, url: U) -> RequestBuilder {
        RequestBuilder::new(self.client.head(url), self)
    }

@@ -732,7 +582,6 @@ impl RedirectClientWithMiddleware {
        match self.redirect_policy {
            RedirectPolicy::BypassMiddleware => self.client.execute(req).await,
            RedirectPolicy::RetriggerMiddleware => self.execute_with_redirect_handling(req).await,
            RedirectPolicy::NoRedirect => self.client.execute(req).await,
        }
    }

@@ -783,7 +632,7 @@ impl RedirectClientWithMiddleware {
}

impl From<RedirectClientWithMiddleware> for ClientWithMiddleware {
    fn from(item: RedirectClientWithMiddleware) -> Self {
    fn from(item: RedirectClientWithMiddleware) -> ClientWithMiddleware {
        item.client
    }
}

@@ -798,7 +647,7 @@ fn request_into_redirect(
    res: &Response,
    cross_origin_credentials_policy: CrossOriginCredentialsPolicy,
) -> reqwest_middleware::Result<Option<Request>> {
    let original_req_url = DisplaySafeUrl::from_url(req.url().clone());
    let original_req_url = DisplaySafeUrl::from(req.url().clone());
    let status = res.status();
    let should_redirect = match status {
        StatusCode::MOVED_PERMANENTLY

@@ -851,7 +700,7 @@ fn request_into_redirect(
    let mut redirect_url = match DisplaySafeUrl::parse(location) {
        Ok(url) => url,
        // Per RFC 7231, URLs should be resolved against the request URL.
        Err(DisplaySafeUrlError::Url(ParseError::RelativeUrlWithoutBase)) => original_req_url.join(location).map_err(|err| {
        Err(ParseError::RelativeUrlWithoutBase) => original_req_url.join(location).map_err(|err| {
            reqwest_middleware::Error::Middleware(anyhow!(
                "Invalid HTTP {status} 'Location' value `{location}` relative to `{original_req_url}`: {err}"
            ))

@@ -1029,7 +878,7 @@ impl RetryableStrategy for UvRetryableStrategy {
            None | Some(Retryable::Fatal)
                if res
                    .as_ref()
                    .is_err_and(|err| is_transient_network_error(err)) =>
                    .is_err_and(|err| is_extended_transient_error(err)) =>
            {
                Some(Retryable::Transient)
            }

@@ -1057,15 +906,12 @@ impl RetryableStrategy for UvRetryableStrategy {
    }
}

/// Whether the error looks like a network error that should be retried.
/// Check for additional transient error kinds not supported by the default retry strategy in `reqwest_retry`.
///
/// There are two cases that the default retry strategy is missing:
/// * Inside the reqwest or reqwest-middleware error is an `io::Error` such as a broken pipe
/// * When streaming a response, a reqwest error may be hidden several layers behind errors
///   of different crates processing the stream, including `io::Error` layers.
pub fn is_transient_network_error(err: &(dyn Error + 'static)) -> bool {
/// These cases should be safe to retry with [`Retryable::Transient`].
pub fn is_extended_transient_error(err: &dyn Error) -> bool {
    // First, try to show a nice trace log
    if let Some((Some(status), Some(url))) = find_source::<WrappedReqwestError>(&err)
    if let Some((Some(status), Some(url))) = find_source::<crate::WrappedReqwestError>(&err)
        .map(|request_err| (request_err.status(), request_err.url()))
    {
        trace!("Considering retry of response HTTP {status} for {url}");

@@ -1073,88 +919,22 @@ pub fn is_transient_network_error(err: &(dyn Error + 'static)) -> bool {
        trace!("Considering retry of error: {err:?}");
    }

    let mut has_known_error = false;
    // IO Errors or reqwest errors may be nested through custom IO errors or stream processing
    // crates
    let mut current_source = Some(err);
    while let Some(source) = current_source {
        if let Some(reqwest_err) = source.downcast_ref::<WrappedReqwestError>() {
            has_known_error = true;
            if let reqwest_middleware::Error::Reqwest(reqwest_err) = &**reqwest_err {
                if default_on_request_error(reqwest_err) == Some(Retryable::Transient) {
                    trace!("Retrying nested reqwest middleware error");
                    return true;
                }
                if is_retryable_status_error(reqwest_err) {
                    trace!("Retrying nested reqwest middleware status code error");
                    return true;
                }
            }

            trace!("Cannot retry nested reqwest middleware error");
        } else if let Some(reqwest_err) = source.downcast_ref::<reqwest::Error>() {
            has_known_error = true;
            if default_on_request_error(reqwest_err) == Some(Retryable::Transient) {
                trace!("Retrying nested reqwest error");
                return true;
            }
            if is_retryable_status_error(reqwest_err) {
                trace!("Retrying nested reqwest status code error");
                return true;
            }

            trace!("Cannot retry nested reqwest error");
        } else if source.downcast_ref::<h2::Error>().is_some() {
            // All h2 errors look like errors that should be retried
            // https://github.com/astral-sh/uv/issues/15916
            trace!("Retrying nested h2 error");
    // IO Errors may be nested through custom IO errors.
    for io_err in find_sources::<io::Error>(&err) {
        if io_err.kind() == io::ErrorKind::ConnectionReset
            || io_err.kind() == io::ErrorKind::UnexpectedEof
            || io_err.kind() == io::ErrorKind::BrokenPipe
        {
            trace!("Retrying error: `ConnectionReset` or `UnexpectedEof`");
            return true;
        } else if let Some(io_err) = source.downcast_ref::<io::Error>() {
            has_known_error = true;
            let retryable_io_err_kinds = [
                // https://github.com/astral-sh/uv/issues/12054
                io::ErrorKind::BrokenPipe,
                // From reqwest-middleware
                io::ErrorKind::ConnectionAborted,
                // https://github.com/astral-sh/uv/issues/3514
                io::ErrorKind::ConnectionReset,
                // https://github.com/astral-sh/uv/issues/14699
                io::ErrorKind::InvalidData,
                // https://github.com/astral-sh/uv/issues/9246
                io::ErrorKind::UnexpectedEof,
            ];
            if retryable_io_err_kinds.contains(&io_err.kind()) {
                trace!("Retrying error: `{}`", io_err.kind());
                return true;
            }

            trace!(
                "Cannot retry IO error `{}`, not a retryable IO error kind",
                io_err.kind()
            );
        }

        current_source = source.source();
        trace!("Cannot retry IO error: not one of `ConnectionReset` or `UnexpectedEof`");
    }

    if !has_known_error {
        trace!("Cannot retry error: Neither an IO error nor a reqwest error");
    }
    trace!("Cannot retry error: not an IO error");
    false
}

/// Whether the error is a status code error that is retryable.
///
/// Port of `reqwest_retry::default_on_request_success`.
fn is_retryable_status_error(reqwest_err: &reqwest::Error) -> bool {
    let Some(status) = reqwest_err.status() else {
        return false;
    };
    status.is_server_error()
        || status == StatusCode::REQUEST_TIMEOUT
        || status == StatusCode::TOO_MANY_REQUESTS
}
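// An isolated illustration of the predicate above: server errors plus 408
// (Request Timeout) and 429 (Too Many Requests) are treated as retryable.
// Uses `http::StatusCode` directly; the function name is illustrative.
fn is_retryable_status_sketch(status: http::StatusCode) -> bool {
    status.is_server_error()
        || status == http::StatusCode::REQUEST_TIMEOUT
        || status == http::StatusCode::TOO_MANY_REQUESTS
}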

/// Find the first source error of a specific type.
///
/// See <https://github.com/seanmonstar/reqwest/issues/1602#issuecomment-1220996681>

@@ -1169,21 +949,22 @@ fn find_source<E: Error + 'static>(orig: &dyn Error) -> Option<&E> {
    None
}

// TODO(konsti): Remove once we find a native home for `retries_from_env`
#[derive(Debug, Error)]
pub enum RetryParsingError {
    #[error("Failed to parse `UV_HTTP_RETRIES`")]
    ParseInt(#[from] ParseIntError),
/// Return all errors in the chain of a specific type.
///
/// This handles cases such as nested `io::Error`s.
///
/// See <https://github.com/seanmonstar/reqwest/issues/1602#issuecomment-1220996681>
fn find_sources<E: Error + 'static>(orig: &dyn Error) -> impl Iterator<Item = &E> {
    iter::successors(find_source::<E>(orig), |&err| find_source(err))
}
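// A hedged sketch of the chain walk that `find_sources` enables: successive
// `source()` hops can surface an `io::Error` buried under adapter layers.
// Standalone and standard-library-only; the name is illustrative.
fn first_io_error_kind_sketch(
    err: &(dyn std::error::Error + 'static),
) -> Option<std::io::ErrorKind> {
    let mut current = Some(err);
    while let Some(source) = current {
        if let Some(io_err) = source.downcast_ref::<std::io::Error>() {
            return Some(io_err.kind());
        }
        // Step one level deeper into the error chain.
        current = source.source();
    }
    None
}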

#[cfg(test)]
mod tests {
    use super::*;

    use anyhow::Result;
    use insta::assert_debug_snapshot;

    use reqwest::{Client, Method};
    use wiremock::matchers::{method, path};
    use wiremock::matchers::method;
    use wiremock::{Mock, MockServer, ResponseTemplate};

    use crate::base_client::request_into_redirect;

@@ -1376,71 +1157,4 @@ mod tests {

        Ok(())
    }

    /// Enumerate which status codes we are retrying.
    #[tokio::test]
    async fn retried_status_codes() -> Result<()> {
        let server = MockServer::start().await;
        let client = Client::default();
        let middleware_client = ClientWithMiddleware::default();
        let mut retried = Vec::new();
        for status in 100..599 {
            // Test all standard status codes and an example of a non-RFC code used in the wild.
            if StatusCode::from_u16(status)?.canonical_reason().is_none() && status != 420 {
                continue;
            }

            Mock::given(path(format!("/{status}")))
                .respond_with(ResponseTemplate::new(status))
                .mount(&server)
                .await;

            let response = middleware_client
                .get(format!("{}/{}", server.uri(), status))
                .send()
                .await;

            let middleware_retry =
                DefaultRetryableStrategy.handle(&response) == Some(Retryable::Transient);

            let response = client
                .get(format!("{}/{}", server.uri(), status))
                .send()
                .await?;

            let uv_retry = match response.error_for_status() {
                Ok(_) => false,
                Err(err) => is_transient_network_error(&err),
            };

            // Ensure we're retrying the same status codes as the reqwest_retry crate. We may choose
            // to deviate from this later.
            assert_eq!(middleware_retry, uv_retry);
            if uv_retry {
                retried.push(status);
            }
        }

        assert_debug_snapshot!(retried, @r"
        [
            100,
            102,
            408,
            429,
            500,
            501,
            502,
            503,
            504,
            505,
            506,
            507,
            508,
            510,
            511,
        ]
        ");

        Ok(())
    }
}
@@ -14,33 +14,13 @@ use uv_fs::write_atomic;
use uv_redacted::DisplaySafeUrl;

use crate::BaseClient;
use crate::base_client::is_transient_network_error;
use crate::error::ProblemDetails;
use crate::base_client::is_extended_transient_error;
use crate::{
    Error, ErrorKind,
    httpcache::{AfterResponse, BeforeRequest, CachePolicy, CachePolicyBuilder},
    rkyvutil::OwnedArchive,
};

/// Extract problem details from an HTTP response if it has the correct content type
///
/// Note: This consumes the response body, so it should only be called when there's an error status.
async fn extract_problem_details(response: Response) -> Option<ProblemDetails> {
    match response.bytes().await {
        Ok(bytes) => match serde_json::from_slice(&bytes) {
            Ok(details) => Some(details),
            Err(err) => {
                warn!("Failed to parse problem details: {err}");
                None
            }
        },
        Err(err) => {
            warn!("Failed to read response body for problem details: {err}");
            None
        }
    }
}
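// A hedged usage sketch for the helper above: the body is consumed, so only
// call it once an error status has been established. `Response` and
// `ProblemDetails` are the types already in scope in this file; the function
// name is illustrative.
async fn problem_details_on_error_sketch(response: Response) -> Option<ProblemDetails> {
    if response.status().is_client_error() || response.status().is_server_error() {
        extract_problem_details(response).await
    } else {
        None
    }
}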

/// A trait that generalizes (de)serialization at a high level.
///
/// The main purpose of this trait is to make the `CachedClient` work for

@@ -137,17 +117,17 @@ impl<CallbackError: std::error::Error + 'static> CachedClientError<CallbackError
    /// Adds to existing errors if any, in case different layers retried.
    fn with_retries(self, retries: u32) -> Self {
        match self {
            Self::Client {
            CachedClientError::Client {
                retries: existing_retries,
                err,
            } => Self::Client {
            } => CachedClientError::Client {
                retries: Some(existing_retries.unwrap_or_default() + retries),
                err,
            },
            Self::Callback {
            CachedClientError::Callback {
                retries: existing_retries,
                err,
            } => Self::Callback {
            } => CachedClientError::Callback {
                retries: Some(existing_retries.unwrap_or_default() + retries),
                err,
            },

@@ -156,15 +136,15 @@ impl<CallbackError: std::error::Error + 'static> CachedClientError<CallbackError

    fn retries(&self) -> Option<u32> {
        match self {
            Self::Client { retries, .. } => *retries,
            Self::Callback { retries, .. } => *retries,
            CachedClientError::Client { retries, .. } => *retries,
            CachedClientError::Callback { retries, .. } => *retries,
        }
    }

    fn error(&self) -> &(dyn std::error::Error + 'static) {
    fn error(&self) -> &dyn std::error::Error {
        match self {
            Self::Client { err, .. } => err,
            Self::Callback { err, .. } => err,
            CachedClientError::Client { err, .. } => err,
            CachedClientError::Callback { err, .. } => err,
        }
    }
}

@@ -196,12 +176,20 @@ impl<E: Into<Self> + std::error::Error + 'static> From<CachedClientError<E>> for
            CachedClientError::Client {
                retries: Some(retries),
                err,
            } => Self::new(err.into_kind(), retries),
            } => ErrorKind::RequestWithRetries {
                source: Box::new(err.into_kind()),
                retries,
            }
            .into(),
            CachedClientError::Client { retries: None, err } => err,
            CachedClientError::Callback {
                retries: Some(retries),
                err,
            } => Self::new(err.into().into_kind(), retries),
            } => ErrorKind::RequestWithRetries {
                source: Box::new(err.into().into_kind()),
                retries,
            }
            .into(),
            CachedClientError::Callback { retries: None, err } => err.into(),
        }
    }

@@ -316,7 +304,7 @@ impl CachedClient {
            .await?
        } else {
            debug!("No cache entry for: {}", req.url());
            let (response, cache_policy) = self.fresh_request(req, cache_control).await?;
            let (response, cache_policy) = self.fresh_request(req).await?;
            CachedResponse::ModifiedOrNew {
                response,
                cache_policy,

@@ -330,13 +318,8 @@ impl CachedClient {
                        "Broken fresh cache entry (for payload) at {}, removing: {err}",
                        cache_entry.path().display()
                    );
                    self.resend_and_heal_cache(
                        fresh_req,
                        cache_entry,
                        cache_control,
                        response_callback,
                    )
                    .await
                    self.resend_and_heal_cache(fresh_req, cache_entry, response_callback)
                        .await
                }
            },
            CachedResponse::NotModified { cached, new_policy } => {

@@ -356,13 +339,8 @@ impl CachedClient {
                            (for payload) at {}, removing: {err}",
                            cache_entry.path().display()
                        );
                        self.resend_and_heal_cache(
                            fresh_req,
                            cache_entry,
                            cache_control,
                            response_callback,
                        )
                        .await
                        self.resend_and_heal_cache(fresh_req, cache_entry, response_callback)
                            .await
                    }
                }
            }

@@ -377,13 +355,8 @@ impl CachedClient {
        // ETag didn't match). We need to make a fresh request.
        if response.status() == http::StatusCode::NOT_MODIFIED {
            warn!("Server returned unusable 304 for: {}", fresh_req.url());
            self.resend_and_heal_cache(
                fresh_req,
                cache_entry,
                cache_control,
                response_callback,
            )
            .await
            self.resend_and_heal_cache(fresh_req, cache_entry, response_callback)
                .await
        } else {
            self.run_response_callback(
                cache_entry,

@@ -406,10 +379,9 @@ impl CachedClient {
        &self,
        req: Request,
        cache_entry: &CacheEntry,
        cache_control: CacheControl<'_>,
        response_callback: Callback,
    ) -> Result<Payload, CachedClientError<CallBackError>> {
        let (response, cache_policy) = self.fresh_request(req, cache_control).await?;
        let (response, cache_policy) = self.fresh_request(req).await?;

        let payload = self
            .run_response_callback(cache_entry, cache_policy, response, async |resp| {

@@ -429,11 +401,10 @@ impl CachedClient {
        &self,
        req: Request,
        cache_entry: &CacheEntry,
        cache_control: CacheControl<'_>,
        response_callback: Callback,
    ) -> Result<Payload::Target, CachedClientError<CallBackError>> {
        let _ = fs_err::tokio::remove_file(&cache_entry.path()).await;
        let (response, cache_policy) = self.fresh_request(req, cache_control).await?;
        let (response, cache_policy) = self.fresh_request(req).await?;
        self.run_response_callback(cache_entry, cache_policy, response, response_callback)
            .await
    }

@@ -472,8 +443,7 @@ impl CachedClient {
            .await
    }

    #[instrument(name = "read_and_parse_cache", skip_all, fields(file = %cache_entry.path().display()
    ))]
    #[instrument(name="read_and_parse_cache", skip_all, fields(file = %cache_entry.path().display()))]
    async fn read_cache(cache_entry: &CacheEntry) -> Option<DataWithCachePolicy> {
        match DataWithCachePolicy::from_path_async(cache_entry.path()).await {
            Ok(data) => Some(data),

@@ -506,13 +476,20 @@ impl CachedClient {
    ) -> Result<CachedResponse, Error> {
        // Apply the cache control header, if necessary.
        match cache_control {
            CacheControl::None | CacheControl::AllowStale | CacheControl::Override(..) => {}
            CacheControl::None | CacheControl::AllowStale => {}
            CacheControl::MustRevalidate => {
                req.headers_mut().insert(
                    http::header::CACHE_CONTROL,
                    http::HeaderValue::from_static("no-cache"),
                );
            }
            CacheControl::Override(value) => {
                req.headers_mut().insert(
                    http::header::CACHE_CONTROL,
                    http::HeaderValue::from_str(value)
                        .map_err(|_| ErrorKind::InvalidCacheControl(value.to_string()))?,
                );
            }
        }
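// A hedged sketch of the header mapping above, isolated with plain `http`
// types: `MustRevalidate` pins `Cache-Control: no-cache` on the outgoing
// request. The function name is illustrative only.
fn force_revalidate_sketch(req: &mut reqwest::Request) {
    req.headers_mut().insert(
        http::header::CACHE_CONTROL,
        http::HeaderValue::from_static("no-cache"),
    );
}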
        Ok(match cached.cache_policy.before_request(&mut req) {
            BeforeRequest::Fresh => {

@@ -522,13 +499,8 @@ impl CachedClient {
            BeforeRequest::Stale(new_cache_policy_builder) => match cache_control {
                CacheControl::None | CacheControl::MustRevalidate | CacheControl::Override(_) => {
                    debug!("Found stale response for: {}", req.url());
                    self.send_cached_handle_stale(
                        req,
                        cache_control,
                        cached,
                        new_cache_policy_builder,
                    )
                    .await?
                    self.send_cached_handle_stale(req, cached, new_cache_policy_builder)
                        .await?
                }
                CacheControl::AllowStale => {
                    debug!("Found stale (but allowed) response for: {}", req.url());

@@ -538,10 +510,10 @@ impl CachedClient {
            BeforeRequest::NoMatch => {
                // This shouldn't happen; if it does, we'll override the cache.
                warn!(
                    "Cached response doesn't match current request for: {}",
                    "Cached request doesn't match current request for: {}",
                    req.url()
                );
                let (response, cache_policy) = self.fresh_request(req, cache_control).await?;
                let (response, cache_policy) = self.fresh_request(req).await?;
                CachedResponse::ModifiedOrNew {
                    response,
                    cache_policy,

@@ -553,50 +525,19 @@ impl CachedClient {
    async fn send_cached_handle_stale(
        &self,
        req: Request,
        cache_control: CacheControl<'_>,
        cached: DataWithCachePolicy,
        new_cache_policy_builder: CachePolicyBuilder,
    ) -> Result<CachedResponse, Error> {
        let url = DisplaySafeUrl::from_url(req.url().clone());
        let url = DisplaySafeUrl::from(req.url().clone());
        debug!("Sending revalidation request for: {url}");
        let mut response = self
        let response = self
            .0
            .execute(req)
            .instrument(info_span!("revalidation_request", url = url.as_str()))
            .await
            .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?;

        // Check for HTTP error status and extract problem details if available
        if let Err(status_error) = response.error_for_status_ref() {
            // Clone the response to extract problem details before the error consumes it
            let problem_details = if response
                .headers()
                .get("content-type")
                .and_then(|ct| ct.to_str().ok())
                .map(|ct| ct == "application/problem+json")
                .unwrap_or(false)
            {
                extract_problem_details(response).await
            } else {
                None
            };
            return Err(ErrorKind::from_reqwest_with_problem_details(
                url.clone(),
                status_error,
                problem_details,
            )
            .into());
        }

        // If the user set a custom `Cache-Control` header, override it.
        if let CacheControl::Override(header) = cache_control {
            response.headers_mut().insert(
                http::header::CACHE_CONTROL,
                http::HeaderValue::from_str(header)
                    .expect("Cache-Control header must be valid UTF-8"),
            );
        }

            .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?
            .error_for_status()
            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
        match cached
            .cache_policy
            .after_response(new_cache_policy_builder, &response)

@@ -625,51 +566,25 @@ impl CachedClient {
    async fn fresh_request(
        &self,
        req: Request,
        cache_control: CacheControl<'_>,
    ) -> Result<(Response, Option<Box<CachePolicy>>), Error> {
        let url = DisplaySafeUrl::from_url(req.url().clone());
        let url = DisplaySafeUrl::from(req.url().clone());
        trace!("Sending fresh {} request for {}", req.method(), url);
        let cache_policy_builder = CachePolicyBuilder::new(&req);
        let mut response = self
        let response = self
            .0
            .execute(req)
            .await
            .map_err(|err| ErrorKind::from_reqwest_middleware(url.clone(), err))?;

        // If the user set a custom `Cache-Control` header, override it.
        if let CacheControl::Override(header) = cache_control {
            response.headers_mut().insert(
                http::header::CACHE_CONTROL,
                http::HeaderValue::from_str(header)
                    .expect("Cache-Control header must be valid UTF-8"),
            );
        }

        let retry_count = response
            .extensions()
            .get::<reqwest_retry::RetryCount>()
            .map(|retries| retries.value());

        if let Err(status_error) = response.error_for_status_ref() {
            let problem_details = if response
                .headers()
                .get("content-type")
                .and_then(|ct| ct.to_str().ok())
                .map(|ct| ct.starts_with("application/problem+json"))
                .unwrap_or(false)
            {
                extract_problem_details(response).await
            } else {
                None
            };
            return Err(CachedClientError::<Error>::Client {
                retries: retry_count,
                err: ErrorKind::from_reqwest_with_problem_details(
                    url,
                    status_error,
                    problem_details,
                )
                .into(),
                err: ErrorKind::from_reqwest(url, status_error).into(),
            }
            .into());
        }

@@ -737,21 +652,19 @@ impl CachedClient {

        if result
            .as_ref()
            .is_err_and(|err| is_transient_network_error(err.error()))
            .is_err_and(|err| is_extended_transient_error(err.error()))
        {
            // If middleware already retried, consider that in our retry budget
            let total_retries = past_retries + middleware_retries;
            let retry_decision = retry_policy.should_retry(start_time, total_retries);
            if let reqwest_retry::RetryDecision::Retry { execute_after } = retry_decision {
                debug!(
                    "Transient failure while handling response from {}; retrying...",
                    req.url(),
                );
                let duration = execute_after
                    .duration_since(SystemTime::now())
                    .unwrap_or_else(|_| Duration::default());

                debug!(
                    "Transient failure while handling response from {}; retrying after {:.1}s...",
                    req.url(),
                    duration.as_secs_f32(),
                );
                tokio::time::sleep(duration).await;
                past_retries += 1;
                continue;

@@ -777,7 +690,6 @@ impl CachedClient {
        &self,
        req: Request,
        cache_entry: &CacheEntry,
        cache_control: CacheControl<'_>,
        response_callback: Callback,
    ) -> Result<Payload, CachedClientError<CallBackError>> {
        let mut past_retries = 0;

@@ -786,7 +698,7 @@ impl CachedClient {
        loop {
            let fresh_req = req.try_clone().expect("HTTP request must be cloneable");
            let result = self
                .skip_cache(fresh_req, cache_entry, cache_control, &response_callback)
                .skip_cache(fresh_req, cache_entry, &response_callback)
                .await;

            // Check if the middleware already performed retries

@@ -798,19 +710,18 @@ impl CachedClient {
            if result
                .as_ref()
                .err()
                .is_some_and(|err| is_transient_network_error(err.error()))
                .is_some_and(|err| is_extended_transient_error(err.error()))
            {
                let total_retries = past_retries + middleware_retries;
                let retry_decision = retry_policy.should_retry(start_time, total_retries);
                if let reqwest_retry::RetryDecision::Retry { execute_after } = retry_decision {
                    debug!(
                        "Transient failure while handling response from {}; retrying...",
                        req.url(),
                    );
                    let duration = execute_after
                        .duration_since(SystemTime::now())
                        .unwrap_or_else(|_| Duration::default());
                    debug!(
                        "Transient failure while handling response from {}; retrying after {}s...",
                        req.url(),
                        duration.as_secs(),
                    );
                    tokio::time::sleep(duration).await;
                    past_retries += 1;
                    continue;
@@ -1,11 +1,9 @@
use async_http_range_reader::AsyncHttpRangeReaderError;
use async_zip::error::ZipError;
use serde::Deserialize;
use std::fmt::{Display, Formatter};
use std::ops::Deref;
use std::path::PathBuf;

use uv_cache::Error as CacheError;
use async_http_range_reader::AsyncHttpRangeReaderError;
use async_zip::error::ZipError;

use uv_distribution_filename::{WheelFilename, WheelFilenameError};
use uv_normalize::PackageName;
use uv_redacted::DisplaySafeUrl;

@@ -13,112 +11,19 @@ use uv_redacted::DisplaySafeUrl;
use crate::middleware::OfflineError;
use crate::{FlatIndexError, html};

/// RFC 9457 Problem Details for HTTP APIs
///
/// This structure represents the standard format for machine-readable details
/// of errors in HTTP response bodies as defined in RFC 9457.
#[derive(Debug, Clone, Deserialize)]
pub struct ProblemDetails {
    /// A URI reference that identifies the problem type.
    /// When dereferenced, it SHOULD provide human-readable documentation for the problem type.
    #[serde(rename = "type", default = "default_problem_type")]
    pub problem_type: String,

    /// A short, human-readable summary of the problem type.
    pub title: Option<String>,

    /// The HTTP status code generated by the origin server for this occurrence of the problem.
    pub status: Option<u16>,

    /// A human-readable explanation specific to this occurrence of the problem.
    pub detail: Option<String>,

    /// A URI reference that identifies the specific occurrence of the problem.
    pub instance: Option<String>,
}

/// Default problem type URI as per RFC 9457
#[inline]
fn default_problem_type() -> String {
    "about:blank".to_string()
}

impl ProblemDetails {
    /// Get a human-readable description of the problem
    pub fn description(&self) -> Option<String> {
        match self {
            Self {
                title: Some(title),
                detail: Some(detail),
                ..
            } => Some(format!("Server message: {title}, {detail}")),
            Self {
                title: Some(title), ..
            } => Some(format!("Server message: {title}")),
            Self {
                detail: Some(detail),
                ..
            } => Some(format!("Server message: {detail}")),
            Self {
                status: Some(status),
                ..
            } => Some(format!("HTTP error {status}")),
            _ => None,
        }
    }
}

#[derive(Debug)]
#[derive(Debug, thiserror::Error)]
#[error(transparent)]
pub struct Error {
    kind: Box<ErrorKind>,
    retries: u32,
}

impl Display for Error {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        if self.retries > 0 {
            write!(
                f,
                "Request failed after {retries} {subject}",
                retries = self.retries,
                subject = if self.retries > 1 { "retries" } else { "retry" }
            )
        } else {
            Display::fmt(&self.kind, f)
        }
    }
}
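// A trivial sketch of the pluralization rule used above: one failure is a
// "retry", anything more is "retries". The helper name is illustrative.
fn retry_subject_sketch(retries: u32) -> &'static str {
    if retries > 1 { "retries" } else { "retry" }
}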

impl std::error::Error for Error {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        if self.retries > 0 {
            Some(&self.kind)
        } else {
            self.kind.source()
        }
    }
}

impl Error {
    /// Create a new [`Error`] with the given [`ErrorKind`] and number of retries.
    pub fn new(kind: ErrorKind, retries: u32) -> Self {
        Self {
            kind: Box::new(kind),
            retries,
        }
    }

    /// Return the number of retries that were attempted before this error was returned.
    pub fn retries(&self) -> u32 {
        self.retries
    }

    /// Convert this error into an [`ErrorKind`].
    /// Convert this error into its [`ErrorKind`] variant.
    pub fn into_kind(self) -> ErrorKind {
        *self.kind
    }

    /// Return the [`ErrorKind`] of this error.
    /// Get a reference to the [`ErrorKind`] variant of this error.
    pub fn kind(&self) -> &ErrorKind {
        &self.kind
    }

@@ -133,11 +38,6 @@ impl Error {
        ErrorKind::BadHtml { source: err, url }.into()
    }

    /// Create a new error from a `MessagePack` parsing error.
    pub(crate) fn from_msgpack_err(err: rmp_serde::decode::Error, url: DisplaySafeUrl) -> Self {
        ErrorKind::BadMessagePack { source: err, url }.into()
    }

    /// Returns `true` if this error corresponds to an offline error.
    pub(crate) fn is_offline(&self) -> bool {
        matches!(&*self.kind, ErrorKind::Offline(_))

@@ -178,7 +78,7 @@ impl Error {

        // The server returned a "Method Not Allowed" error, indicating it doesn't support
        // HEAD requests, so we can't check for range requests.
        ErrorKind::WrappedReqwestError(_, err) => {
        ErrorKind::WrappedReqwestError(_url, err) => {
            if let Some(status) = err.status() {
                // If the server doesn't support HEAD requests, we can't check for range
                // requests.

@@ -243,7 +143,6 @@ impl From<ErrorKind> for Error {
    fn from(kind: ErrorKind) -> Self {
        Self {
            kind: Box::new(kind),
            retries: 0,
        }
    }
}

@@ -273,15 +172,11 @@ pub enum ErrorKind {
    /// Make sure the package name is spelled correctly and that you've
    /// configured the right registry to fetch it from.
    #[error("Package `{0}` was not found in the registry")]
    RemotePackageNotFound(PackageName),
    PackageNotFound(String),

    /// The package was not found in the local (file-based) index.
    #[error("Package `{0}` was not found in the local index")]
    LocalPackageNotFound(PackageName),

    /// The root was not found in the local (file-based) index.
    #[error("Local index not found at: `{}`", _0.display())]
    LocalIndexNotFound(PathBuf),
    FileNotFound(String),

    /// The metadata file could not be parsed.
    #[error("Couldn't parse metadata of {0} from {1}")]

@@ -291,12 +186,16 @@ pub enum ErrorKind {
        #[source] Box<uv_pypi_types::MetadataError>,
    ),

    /// The metadata file was not found in the wheel.
    #[error("Metadata file `{0}` was not found in {1}")]
    MetadataNotFound(WheelFilename, String),

    /// An error that happened while making a request or in a reqwest middleware.
    #[error("Failed to fetch: `{0}`")]
    WrappedReqwestError(DisplaySafeUrl, #[source] WrappedReqwestError),

    /// Add the number of failed retries to the error.
    #[error("Request failed after {retries} {subject}", subject = if *retries > 1 { "retries" } else { "retry" })]
    #[error("Request failed after {retries} retries")]
    RequestWithRetries {
        source: Box<ErrorKind>,
        retries: u32,

@@ -314,12 +213,6 @@ pub enum ErrorKind {
        url: DisplaySafeUrl,
    },

    #[error("Received some unexpected MessagePack from {}", url)]
    BadMessagePack {
        source: rmp_serde::decode::Error,
        url: DisplaySafeUrl,
    },

    #[error("Failed to read zip with range requests: `{0}`")]
    AsyncHttpRangeReader(DisplaySafeUrl, #[source] AsyncHttpRangeReaderError),

@@ -338,9 +231,6 @@ pub enum ErrorKind {
    #[error("Failed to write to the client cache")]
    CacheWrite(#[source] std::io::Error),

    #[error("Failed to acquire lock on the client cache")]
    CacheLock(#[source] CacheError),

    #[error(transparent)]
    Io(std::io::Error),

@@ -375,12 +265,10 @@ pub enum ErrorKind {
}

impl ErrorKind {
    /// Create an [`ErrorKind`] from a [`reqwest::Error`].
    pub(crate) fn from_reqwest(url: DisplaySafeUrl, error: reqwest::Error) -> Self {
        Self::WrappedReqwestError(url, WrappedReqwestError::from(error))
    }

    /// Create an [`ErrorKind`] from a [`reqwest_middleware::Error`].
    pub(crate) fn from_reqwest_middleware(
        url: DisplaySafeUrl,
        err: reqwest_middleware::Error,

@@ -391,19 +279,7 @@ impl ErrorKind {
        }
    }

        Self::WrappedReqwestError(url, WrappedReqwestError::from(err))
    }

    /// Create an [`ErrorKind`] from a [`reqwest::Error`] with problem details.
    pub(crate) fn from_reqwest_with_problem_details(
        url: DisplaySafeUrl,
        error: reqwest::Error,
        problem_details: Option<ProblemDetails>,
    ) -> Self {
        Self::WrappedReqwestError(
            url,
            WrappedReqwestError::with_problem_details(error.into(), problem_details),
        )
        Self::WrappedReqwestError(url, WrappedReqwestError(err))
    }
}

@@ -413,26 +289,12 @@ impl ErrorKind {
/// Wraps a [`reqwest_middleware::Error`] instead of a [`reqwest::Error`], since the actual reqwest
/// error may be below some context in the [`anyhow::Error`].
#[derive(Debug)]
pub struct WrappedReqwestError {
    error: reqwest_middleware::Error,
    problem_details: Option<Box<ProblemDetails>>,
}
pub struct WrappedReqwestError(reqwest_middleware::Error);

impl WrappedReqwestError {
    /// Create a new `WrappedReqwestError` with optional problem details
    pub fn with_problem_details(
        error: reqwest_middleware::Error,
        problem_details: Option<ProblemDetails>,
    ) -> Self {
        Self {
            error,
            problem_details: problem_details.map(Box::new),
        }
    }

    /// Return the inner [`reqwest::Error`] from the error chain, if it exists.
    fn inner(&self) -> Option<&reqwest::Error> {
        match &self.error {
        match &self.0 {
            reqwest_middleware::Error::Reqwest(err) => Some(err),
            reqwest_middleware::Error::Middleware(err) => err.chain().find_map(|err| {
                if let Some(err) = err.downcast_ref::<reqwest::Error>() {

@@ -494,19 +356,13 @@ impl WrappedReqwestError {

impl From<reqwest::Error> for WrappedReqwestError {
    fn from(error: reqwest::Error) -> Self {
        Self {
            error: error.into(),
            problem_details: None,
        }
        Self(error.into())
    }
}

impl From<reqwest_middleware::Error> for WrappedReqwestError {
    fn from(error: reqwest_middleware::Error) -> Self {
        Self {
            error,
            problem_details: None,
        }
        Self(error)
    }
}

@@ -514,7 +370,7 @@ impl Deref for WrappedReqwestError {
    type Target = reqwest_middleware::Error;

    fn deref(&self) -> &Self::Target {
        &self.error
        &self.0
    }
}

@@ -523,15 +379,9 @@ impl Display for WrappedReqwestError {
        if self.is_likely_offline() {
            // Insert an extra hint, we'll show the wrapped error through `source`
            f.write_str("Could not connect, are you offline?")
        } else if let Some(problem_details) = &self.problem_details {
            // Show problem details if available
            match problem_details.description() {
                None => Display::fmt(&self.error, f),
                Some(message) => f.write_str(&message),
            }
        } else {
            // Show the wrapped error
            Display::fmt(&self.error, f)
            Display::fmt(&self.0, f)
        }
    }
}

@@ -540,117 +390,10 @@ impl std::error::Error for WrappedReqwestError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        if self.is_likely_offline() {
            // `Display` is inserting an extra message, so we need to show the wrapped error
            Some(&self.error)
        } else if self.problem_details.is_some() {
            // `Display` is showing problem details, so show the wrapped error as source
            Some(&self.error)
            Some(&self.0)
        } else {
            // `Display` is showing the wrapped error, continue with its source
            self.error.source()
            self.0.source()
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_problem_details_parsing() {
        let json = r#"{
            "type": "https://example.com/probs/out-of-credit",
            "title": "You do not have enough credit.",
            "detail": "Your current balance is 30, but that costs 50.",
            "status": 403,
            "instance": "/account/12345/msgs/abc"
        }"#;

        let problem_details: ProblemDetails = serde_json::from_slice(json.as_bytes()).unwrap();
        assert_eq!(
            problem_details.problem_type,
            "https://example.com/probs/out-of-credit"
        );
        assert_eq!(
            problem_details.title,
            Some("You do not have enough credit.".to_string())
        );
        assert_eq!(
            problem_details.detail,
            Some("Your current balance is 30, but that costs 50.".to_string())
        );
        assert_eq!(problem_details.status, Some(403));
        assert_eq!(
            problem_details.instance,
            Some("/account/12345/msgs/abc".to_string())
        );
    }

    #[test]
    fn test_problem_details_default_type() {
        let json = r#"{
            "detail": "Something went wrong",
            "status": 500
        }"#;

        let problem_details: ProblemDetails = serde_json::from_slice(json.as_bytes()).unwrap();
        assert_eq!(problem_details.problem_type, "about:blank");
        assert_eq!(
            problem_details.detail,
            Some("Something went wrong".to_string())
        );
        assert_eq!(problem_details.status, Some(500));
    }

    #[test]
    fn test_problem_details_description() {
        let json = r#"{
            "detail": "Detailed error message",
            "title": "Error Title",
            "status": 400
        }"#;

        let problem_details: ProblemDetails = serde_json::from_slice(json.as_bytes()).unwrap();
        assert_eq!(
            problem_details.description().unwrap(),
            "Server message: Error Title, Detailed error message"
        );

        let json_no_detail = r#"{
            "title": "Error Title",
            "status": 400
        }"#;

        let problem_details: ProblemDetails =
            serde_json::from_slice(json_no_detail.as_bytes()).unwrap();
        assert_eq!(
            problem_details.description().unwrap(),
            "Server message: Error Title"
        );

        let json_minimal = r#"{
            "status": 400
        }"#;

        let problem_details: ProblemDetails =
            serde_json::from_slice(json_minimal.as_bytes()).unwrap();
        assert_eq!(problem_details.description().unwrap(), "HTTP error 400");
    }

    #[test]
    fn test_problem_details_with_extensions() {
        let json = r#"{
            "type": "https://example.com/probs/out-of-credit",
            "title": "You do not have enough credit.",
            "detail": "Your current balance is 30, but that costs 50.",
            "status": 403,
            "balance": 30,
            "accounts": ["/account/12345", "/account/67890"]
        }"#;

        let problem_details: ProblemDetails = serde_json::from_slice(json.as_bytes()).unwrap();
        assert_eq!(
            problem_details.title,
            Some("You do not have enough credit.".to_string())
        );
    }
}
@@ -14,7 +14,7 @@ use uv_redacted::DisplaySafeUrl;
use uv_small_str::SmallString;

use crate::cached_client::{CacheControl, CachedClientError};
use crate::html::SimpleDetailHTML;
use crate::html::SimpleHtml;
use crate::{CachedClient, Connectivity, Error, ErrorKind, OwnedArchive};

#[derive(Debug, thiserror::Error)]

@@ -189,13 +189,13 @@ impl<'a> FlatIndexClient<'a> {
    async {
        // Use the response URL, rather than the request URL, as the base for relative URLs.
        // This ensures that we handle redirects and other URL transformations correctly.
        let url = DisplaySafeUrl::from_url(response.url().clone());
        let url = DisplaySafeUrl::from(response.url().clone());

        let text = response
            .text()
            .await
            .map_err(|err| ErrorKind::from_reqwest(url.clone(), err))?;
        let SimpleDetailHTML { base, files } = SimpleDetailHTML::parse(&text, &url)
        let SimpleHtml { base, files } = SimpleHtml::parse(&text, &url)
            .map_err(|err| Error::from_html_err(err, url.clone()))?;

        // Convert to a reference-counted string.

@@ -204,7 +204,7 @@ impl<'a> FlatIndexClient<'a> {
        let unarchived: Vec<File> = files
            .into_iter()
            .filter_map(|file| {
                match File::try_from_pypi(file, &base) {
                match File::try_from(file, &base) {
                    Ok(file) => Some(file),
                    Err(err) => {
                        // Ignore files with unparsable version specifiers.

@@ -305,7 +305,6 @@ impl<'a> FlatIndexClient<'a> {
            upload_time_utc_ms: None,
            url: FileLocation::AbsoluteUrl(UrlString::from(url)),
            yanked: None,
            zstd: None,
        };

        let Some(filename) = DistFilename::try_from_normalized_filename(filename) else {

@@ -321,63 +320,6 @@ impl<'a> FlatIndexClient<'a> {
            index: flat_index.clone(),
        });
    }

    dists.sort_by(|a, b| {
        a.filename
            .cmp(&b.filename)
            .then_with(|| a.index.cmp(&b.index))
    });

    Ok(FlatIndexEntries::from_entries(dists))
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use fs_err::File;
    use std::io::Write;
    use tempfile::tempdir;

    #[test]
    fn read_from_directory_sorts_distributions() {
        let dir = tempdir().unwrap();

        let filenames = [
            "beta-2.0.0-py3-none-any.whl",
            "alpha-1.0.0.tar.gz",
            "alpha-1.0.0-py3-none-any.whl",
        ];

        for name in &filenames {
            let mut file = File::create(dir.path().join(name)).unwrap();
            file.write_all(b"").unwrap();
        }

        let entries = FlatIndexClient::read_from_directory(
            dir.path(),
            &IndexUrl::parse(&dir.path().to_string_lossy(), None).unwrap(),
        )
        .unwrap();

        let actual = entries
            .entries
            .iter()
            .map(|entry| entry.filename.to_string())
            .collect::<Vec<_>>();

        let mut expected = filenames
            .iter()
            .map(|name| DistFilename::try_from_normalized_filename(name).unwrap())
            .collect::<Vec<_>>();

        expected.sort();

        let expected = expected
            .into_iter()
            .map(|filename| filename.to_string())
            .collect::<Vec<_>>();

        assert_eq!(actual, expected);
    }
}
@@ -3,32 +3,32 @@ use std::str::FromStr;
use jiff::Timestamp;
use tl::HTMLTag;
use tracing::{debug, instrument, warn};
use url::Url;

use uv_normalize::PackageName;
use uv_pep440::VersionSpecifiers;
use uv_pypi_types::{BaseUrl, CoreMetadata, Hashes, PypiFile, Yanked};
use uv_pypi_types::{BaseUrl, CoreMetadata, File, Hashes, Yanked};
use uv_pypi_types::{HashError, LenientVersionSpecifiers};
use uv_redacted::{DisplaySafeUrl, DisplaySafeUrlError};
use uv_redacted::DisplaySafeUrl;

/// A parsed structure from PyPI "HTML" index format for a single package.
#[derive(Debug, Clone)]
pub(crate) struct SimpleDetailHTML {
pub(crate) struct SimpleHtml {
    /// The [`BaseUrl`] to which all relative URLs should be resolved.
    pub(crate) base: BaseUrl,
    /// The list of [`PypiFile`]s available for download sorted by filename.
    pub(crate) files: Vec<PypiFile>,
    /// The list of [`File`]s available for download sorted by filename.
    pub(crate) files: Vec<File>,
}

impl SimpleDetailHTML {
    /// Parse the list of [`PypiFile`]s from the simple HTML page returned by the given URL.
impl SimpleHtml {
    /// Parse the list of [`File`]s from the simple HTML page returned by the given URL.
    #[instrument(skip_all, fields(url = % url))]
    pub(crate) fn parse(text: &str, url: &DisplaySafeUrl) -> Result<Self, Error> {
    pub(crate) fn parse(text: &str, url: &Url) -> Result<Self, Error> {
        let dom = tl::parse(text, tl::ParserOptions::default())?;

        // Parse the first `<base>` tag, if any, to determine the base URL to which all
        // relative URLs should be resolved. The HTML spec requires that the `<base>` tag
        // appear before other tags with attribute values of URLs.
        let base = BaseUrl::from(
        let base = BaseUrl::from(DisplaySafeUrl::from(
            dom.nodes()
                .iter()
                .filter_map(|node| node.as_tag())

@@ -38,10 +38,10 @@ impl SimpleDetailHTML {
                .transpose()?
                .flatten()
                .unwrap_or_else(|| url.clone()),
        );
        ));

        // Parse each `<a>` tag, to extract the filename, hash, and URL.
        let mut files: Vec<PypiFile> = dom
        let mut files: Vec<File> = dom
            .nodes()
            .iter()
            .filter_map(|node| node.as_tag())

@@ -67,20 +67,19 @@ impl SimpleDetailHTML {
    }

    /// Parse the `href` from a `<base>` tag.
    fn parse_base(base: &HTMLTag) -> Result<Option<DisplaySafeUrl>, Error> {
    fn parse_base(base: &HTMLTag) -> Result<Option<Url>, Error> {
        let Some(Some(href)) = base.attributes().get("href") else {
            return Ok(None);
        };
        let href = std::str::from_utf8(href.as_bytes())?;
        let url =
            DisplaySafeUrl::parse(href).map_err(|err| Error::UrlParse(href.to_string(), err))?;
        let url = Url::parse(href).map_err(|err| Error::UrlParse(href.to_string(), err))?;
        Ok(Some(url))
    }
|
||||
|
||||
/// Parse a [`PypiFile`] from an `<a>` tag.
|
||||
/// Parse a [`File`] from an `<a>` tag.
|
||||
///
|
||||
/// Returns `None` if the `<a>` doesn't have an `href` attribute.
|
||||
fn parse_anchor(link: &HTMLTag) -> Result<Option<PypiFile>, Error> {
|
||||
/// Returns `None` if the `<a>` don't doesn't have an `href` attribute.
|
||||
fn parse_anchor(link: &HTMLTag) -> Result<Option<File>, Error> {
|
||||
// Extract the href.
|
||||
let Some(href) = link
|
||||
.attributes()
|
||||
|
|
@ -213,7 +212,7 @@ impl SimpleDetailHTML {
|
|||
.map(|upload_time| html_escape::decode_html_entities(upload_time))
|
||||
.and_then(|upload_time| Timestamp::from_str(&upload_time).ok());
|
||||
|
||||
Ok(Some(PypiFile {
|
||||
Ok(Some(File {
|
||||
core_metadata,
|
||||
yanked,
|
||||
requires_python,
|
||||
|
|
@ -226,56 +225,6 @@ impl SimpleDetailHTML {
|
|||
}
|
||||
}
|
||||
|
||||
/// A parsed structure from PyPI "HTML" index format listing all available packages.
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct SimpleIndexHtml {
|
||||
/// The list of project names available in the index.
|
||||
pub(crate) projects: Vec<PackageName>,
|
||||
}
|
||||
|
||||
impl SimpleIndexHtml {
|
||||
/// Parse the list of project names from the Simple API index HTML page.
|
||||
pub(crate) fn parse(text: &str) -> Result<Self, Error> {
|
||||
let dom = tl::parse(text, tl::ParserOptions::default())?;
|
||||
|
||||
// Parse each `<a>` tag to extract the project name.
|
||||
let parser = dom.parser();
|
||||
let mut projects = dom
|
||||
.nodes()
|
||||
.iter()
|
||||
.filter_map(|node| node.as_tag())
|
||||
.filter(|link| link.name().as_bytes() == b"a")
|
||||
.filter_map(|link| Self::parse_anchor_project_name(link, parser))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// Sort for deterministic ordering.
|
||||
projects.sort_unstable();
|
||||
|
||||
Ok(Self { projects })
|
||||
}
|
||||
|
||||
/// Parse a project name from an `<a>` tag.
|
||||
///
|
||||
/// Returns `None` if the `<a>` doesn't have an `href` attribute or text content.
|
||||
fn parse_anchor_project_name(link: &HTMLTag, parser: &tl::Parser) -> Option<PackageName> {
|
||||
// Extract the href.
|
||||
link.attributes()
|
||||
.get("href")
|
||||
.flatten()
|
||||
.filter(|bytes| !bytes.as_bytes().is_empty())?;
|
||||
|
||||
// Extract the text content, which should be the project name.
|
||||
let inner_text = link.inner_text(parser);
|
||||
let project_name = inner_text.trim();
|
||||
|
||||
if project_name.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
PackageName::from_str(project_name).ok()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum Error {
|
||||
#[error(transparent)]
|
||||
|
|
@ -285,7 +234,7 @@ pub enum Error {
|
|||
FromUtf8(#[from] std::string::FromUtf8Error),
|
||||
|
||||
#[error("Failed to parse URL: {0}")]
|
||||
UrlParse(String, #[source] DisplaySafeUrlError),
|
||||
UrlParse(String, #[source] url::ParseError),
|
||||
|
||||
#[error(transparent)]
|
||||
HtmlParse(#[from] tl::ParseError),
|
||||
|
|
@ -325,10 +274,10 @@ mod tests {
|
|||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleDetailHTML::parse(text, &base).unwrap();
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
SimpleDetailHTML {
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
DisplaySafeUrl {
|
||||
scheme: "https",
|
||||
|
|
@ -347,7 +296,7 @@ mod tests {
|
|||
},
|
||||
),
|
||||
files: [
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: None,
|
||||
filename: "Jinja2-3.1.2-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
|
|
@ -382,10 +331,10 @@ mod tests {
|
|||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleDetailHTML::parse(text, &base).unwrap();
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
SimpleDetailHTML {
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
DisplaySafeUrl {
|
||||
scheme: "https",
|
||||
|
|
@ -404,7 +353,7 @@ mod tests {
|
|||
},
|
||||
),
|
||||
files: [
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: None,
|
||||
filename: "Jinja2-3.1.2-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
|
|
@ -442,10 +391,10 @@ mod tests {
|
|||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleDetailHTML::parse(text, &base).unwrap();
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
SimpleDetailHTML {
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
DisplaySafeUrl {
|
||||
scheme: "https",
|
||||
|
|
@ -464,7 +413,7 @@ mod tests {
|
|||
},
|
||||
),
|
||||
files: [
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: None,
|
||||
filename: "Jinja2-3.1.2-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
|
|
@ -499,10 +448,10 @@ mod tests {
|
|||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleDetailHTML::parse(text, &base).unwrap();
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
SimpleDetailHTML {
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
DisplaySafeUrl {
|
||||
scheme: "https",
|
||||
|
|
@ -521,7 +470,7 @@ mod tests {
|
|||
},
|
||||
),
|
||||
files: [
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: None,
|
||||
filename: "Jinja2-3.1.2+233fca715f49-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
|
|
@ -556,10 +505,10 @@ mod tests {
|
|||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleDetailHTML::parse(text, &base).unwrap();
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
SimpleDetailHTML {
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
DisplaySafeUrl {
|
||||
scheme: "https",
|
||||
|
|
@ -578,7 +527,7 @@ mod tests {
|
|||
},
|
||||
),
|
||||
files: [
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: None,
|
||||
filename: "Jinja2-3.1.2-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
|
|
@ -613,10 +562,10 @@ mod tests {
|
|||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleDetailHTML::parse(text, &base).unwrap();
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
SimpleDetailHTML {
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
DisplaySafeUrl {
|
||||
scheme: "https",
|
||||
|
|
@ -635,7 +584,7 @@ mod tests {
|
|||
},
|
||||
),
|
||||
files: [
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: None,
|
||||
filename: "torchtext-0.17.0+cpu-cp39-cp39-win_amd64.whl",
|
||||
hashes: Hashes {
|
||||
|
|
@ -668,10 +617,10 @@ mod tests {
|
|||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleDetailHTML::parse(text, &base).unwrap();
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
SimpleDetailHTML {
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
DisplaySafeUrl {
|
||||
scheme: "https",
|
||||
|
|
@ -690,7 +639,7 @@ mod tests {
|
|||
},
|
||||
),
|
||||
files: [
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: None,
|
||||
filename: "Jinja2-3.1.2-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
|
|
@ -723,10 +672,10 @@ mod tests {
|
|||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
";
|
||||
let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleDetailHTML::parse(text, &base).unwrap();
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
SimpleDetailHTML {
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
DisplaySafeUrl {
|
||||
scheme: "https",
|
||||
|
|
@ -761,10 +710,10 @@ mod tests {
|
|||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleDetailHTML::parse(text, &base).unwrap();
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
SimpleDetailHTML {
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
DisplaySafeUrl {
|
||||
scheme: "https",
|
||||
|
|
@ -799,10 +748,10 @@ mod tests {
|
|||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleDetailHTML::parse(text, &base).unwrap();
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
SimpleDetailHTML {
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
DisplaySafeUrl {
|
||||
scheme: "https",
|
||||
|
|
@ -821,7 +770,7 @@ mod tests {
|
|||
},
|
||||
),
|
||||
files: [
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: None,
|
||||
filename: "Jinja2-3.1.2-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
|
|
@ -854,10 +803,10 @@ mod tests {
|
|||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleDetailHTML::parse(text, &base).unwrap();
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
SimpleDetailHTML {
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
DisplaySafeUrl {
|
||||
scheme: "https",
|
||||
|
|
@ -876,7 +825,7 @@ mod tests {
|
|||
},
|
||||
),
|
||||
files: [
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: None,
|
||||
filename: "Jinja2-3.1.2-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
|
|
@ -909,11 +858,11 @@ mod tests {
|
|||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleDetailHTML::parse(text, &base);
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base);
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
Ok(
|
||||
SimpleDetailHTML {
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
DisplaySafeUrl {
|
||||
scheme: "https",
|
||||
|
|
@ -932,7 +881,7 @@ mod tests {
|
|||
},
|
||||
),
|
||||
files: [
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: None,
|
||||
filename: "Jinja2-3.1.2-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
|
|
@ -966,11 +915,11 @@ mod tests {
|
|||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleDetailHTML::parse(text, &base);
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base);
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
Ok(
|
||||
SimpleDetailHTML {
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
DisplaySafeUrl {
|
||||
scheme: "https",
|
||||
|
|
@ -989,7 +938,7 @@ mod tests {
|
|||
},
|
||||
),
|
||||
files: [
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: None,
|
||||
filename: "Jinja2-3.1.2-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
|
|
@ -1023,8 +972,8 @@ mod tests {
|
|||
</html>
|
||||
<!--TIMESTAMP 1703347410-->
|
||||
"#;
|
||||
let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleDetailHTML::parse(text, &base).unwrap_err();
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap_err();
|
||||
insta::assert_snapshot!(result, @"Unsupported hash algorithm (expected one of: `md5`, `sha256`, `sha384`, `sha512`, or `blake2b`) on: `blake2=6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61`");
|
||||
}
|
||||
|
||||
|
|
@ -1040,13 +989,11 @@ mod tests {
|
|||
</body>
|
||||
</html>
|
||||
"#;
|
||||
let base = DisplaySafeUrl::parse(
|
||||
"https://storage.googleapis.com/jax-releases/jax_cuda_releases.html",
|
||||
)
|
||||
.unwrap();
|
||||
let result = SimpleDetailHTML::parse(text, &base).unwrap();
|
||||
let base = Url::parse("https://storage.googleapis.com/jax-releases/jax_cuda_releases.html")
|
||||
.unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
SimpleDetailHTML {
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
DisplaySafeUrl {
|
||||
scheme: "https",
|
||||
|
|
@ -1065,7 +1012,7 @@ mod tests {
|
|||
},
|
||||
),
|
||||
files: [
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: None,
|
||||
filename: "jaxlib-0.1.52+cuda100-cp36-none-manylinux2010_x86_64.whl",
|
||||
hashes: Hashes {
|
||||
|
|
@ -1081,7 +1028,7 @@ mod tests {
|
|||
url: "https://storage.googleapis.com/jax-releases/cuda100/jaxlib-0.1.52+cuda100-cp36-none-manylinux2010_x86_64.whl",
|
||||
yanked: None,
|
||||
},
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: None,
|
||||
filename: "jaxlib-0.1.52+cuda100-cp37-none-manylinux2010_x86_64.whl",
|
||||
hashes: Hashes {
|
||||
|
|
@ -1124,11 +1071,11 @@ mod tests {
|
|||
</body>
|
||||
</html>
|
||||
"#;
|
||||
let base = DisplaySafeUrl::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask/")
|
||||
let base = Url::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask/")
|
||||
.unwrap();
|
||||
let result = SimpleDetailHTML::parse(text, &base).unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
SimpleDetailHTML {
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
DisplaySafeUrl {
|
||||
scheme: "https",
|
||||
|
|
@ -1147,7 +1094,7 @@ mod tests {
|
|||
},
|
||||
),
|
||||
files: [
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: None,
|
||||
filename: "Flask-0.1.tar.gz",
|
||||
hashes: Hashes {
|
||||
|
|
@ -1165,7 +1112,7 @@ mod tests {
|
|||
url: "0.1/Flask-0.1.tar.gz",
|
||||
yanked: None,
|
||||
},
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: None,
|
||||
filename: "Flask-0.10.1.tar.gz",
|
||||
hashes: Hashes {
|
||||
|
|
@ -1183,7 +1130,7 @@ mod tests {
|
|||
url: "0.10.1/Flask-0.10.1.tar.gz",
|
||||
yanked: None,
|
||||
},
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: None,
|
||||
filename: "flask-3.0.1.tar.gz",
|
||||
hashes: Hashes {
|
||||
|
|
@ -1228,10 +1175,10 @@ mod tests {
|
|||
</body>
|
||||
</html>
|
||||
"#;
|
||||
let base = DisplaySafeUrl::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleDetailHTML::parse(text, &base).unwrap();
|
||||
let base = Url::parse("https://download.pytorch.org/whl/jinja2/").unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
SimpleDetailHTML {
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
DisplaySafeUrl {
|
||||
scheme: "https",
|
||||
|
|
@ -1250,7 +1197,7 @@ mod tests {
|
|||
},
|
||||
),
|
||||
files: [
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: None,
|
||||
filename: "Jinja2-3.1.2-py3-none-any.whl",
|
||||
hashes: Hashes {
|
||||
|
|
@ -1300,11 +1247,11 @@ mod tests {
|
|||
</body>
|
||||
</html>
|
||||
"#;
|
||||
let base = DisplaySafeUrl::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask/")
|
||||
let base = Url::parse("https://account.d.codeartifact.us-west-2.amazonaws.com/pypi/shared-packages-pypi/simple/flask/")
|
||||
.unwrap();
|
||||
let result = SimpleDetailHTML::parse(text, &base).unwrap();
|
||||
let result = SimpleHtml::parse(text, &base).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
SimpleDetailHTML {
|
||||
SimpleHtml {
|
||||
base: BaseUrl(
|
||||
DisplaySafeUrl {
|
||||
scheme: "https",
|
||||
|
|
@ -1323,7 +1270,7 @@ mod tests {
|
|||
},
|
||||
),
|
||||
files: [
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: Some(
|
||||
Bool(
|
||||
true,
|
||||
|
|
@ -1343,7 +1290,7 @@ mod tests {
|
|||
url: "/whl/Jinja2-3.1.2-py3-none-any.whl",
|
||||
yanked: None,
|
||||
},
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: Some(
|
||||
Bool(
|
||||
true,
|
||||
|
|
@ -1363,7 +1310,7 @@ mod tests {
|
|||
url: "/whl/Jinja2-3.1.3-py3-none-any.whl",
|
||||
yanked: None,
|
||||
},
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: Some(
|
||||
Bool(
|
||||
false,
|
||||
|
|
@ -1383,7 +1330,7 @@ mod tests {
|
|||
url: "/whl/Jinja2-3.1.4-py3-none-any.whl",
|
||||
yanked: None,
|
||||
},
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: Some(
|
||||
Bool(
|
||||
false,
|
||||
|
|
@ -1403,7 +1350,7 @@ mod tests {
|
|||
url: "/whl/Jinja2-3.1.5-py3-none-any.whl",
|
||||
yanked: None,
|
||||
},
|
||||
PypiFile {
|
||||
File {
|
||||
core_metadata: Some(
|
||||
Bool(
|
||||
true,
|
||||
|
|
@ -1427,180 +1374,4 @@ mod tests {
|
|||
}
|
||||
"#);
|
||||
}
|
||||
|
||||
/// Test parsing Simple API index (root) HTML.
|
||||
#[test]
|
||||
fn parse_simple_index() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Simple Index</title>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Simple Index</h1>
|
||||
<a href="/simple/flask/">flask</a><br/>
|
||||
<a href="/simple/jinja2/">jinja2</a><br/>
|
||||
<a href="/simple/requests/">requests</a><br/>
|
||||
</body>
|
||||
</html>
|
||||
"#;
|
||||
let result = SimpleIndexHtml::parse(text).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
SimpleIndexHtml {
|
||||
projects: [
|
||||
PackageName(
|
||||
"flask",
|
||||
),
|
||||
PackageName(
|
||||
"jinja2",
|
||||
),
|
||||
PackageName(
|
||||
"requests",
|
||||
),
|
||||
],
|
||||
}
|
||||
"#);
|
||||
}
|
||||
|
||||
/// Test that project names are sorted.
|
||||
#[test]
|
||||
fn parse_simple_index_sorted() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<a href="/simple/zebra/">zebra</a><br/>
|
||||
<a href="/simple/apple/">apple</a><br/>
|
||||
<a href="/simple/monkey/">monkey</a><br/>
|
||||
</body>
|
||||
</html>
|
||||
"#;
|
||||
let result = SimpleIndexHtml::parse(text).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
SimpleIndexHtml {
|
||||
projects: [
|
||||
PackageName(
|
||||
"apple",
|
||||
),
|
||||
PackageName(
|
||||
"monkey",
|
||||
),
|
||||
PackageName(
|
||||
"zebra",
|
||||
),
|
||||
],
|
||||
}
|
||||
"#);
|
||||
}
|
||||
|
||||
/// Test that links without `href` attributes are ignored.
|
||||
#[test]
|
||||
fn parse_simple_index_missing_href() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<h1>Simple Index</h1>
|
||||
<a href="/simple/flask/">flask</a><br/>
|
||||
<a>no-href-project</a><br/>
|
||||
<a href="/simple/requests/">requests</a><br/>
|
||||
</body>
|
||||
</html>
|
||||
"#;
|
||||
let result = SimpleIndexHtml::parse(text).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
SimpleIndexHtml {
|
||||
projects: [
|
||||
PackageName(
|
||||
"flask",
|
||||
),
|
||||
PackageName(
|
||||
"requests",
|
||||
),
|
||||
],
|
||||
}
|
||||
"#);
|
||||
}
|
||||
|
||||
/// Test that links with empty `href` attributes are ignored.
|
||||
#[test]
|
||||
fn parse_simple_index_empty_href() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<a href="">empty-href</a><br/>
|
||||
<a href="/simple/flask/">flask</a><br/>
|
||||
</body>
|
||||
</html>
|
||||
"#;
|
||||
let result = SimpleIndexHtml::parse(text).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
SimpleIndexHtml {
|
||||
projects: [
|
||||
PackageName(
|
||||
"flask",
|
||||
),
|
||||
],
|
||||
}
|
||||
"#);
|
||||
}
|
||||
|
||||
/// Test that links with empty text content are ignored.
|
||||
#[test]
|
||||
fn parse_simple_index_empty_text() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<a href="/simple/empty/"></a><br/>
|
||||
<a href="/simple/flask/">flask</a><br/>
|
||||
<a href="/simple/whitespace/"> </a><br/>
|
||||
</body>
|
||||
</html>
|
||||
"#;
|
||||
let result = SimpleIndexHtml::parse(text).unwrap();
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
SimpleIndexHtml {
|
||||
projects: [
|
||||
PackageName(
|
||||
"flask",
|
||||
),
|
||||
],
|
||||
}
|
||||
"#);
|
||||
}
|
||||
|
||||
/// Test parsing with case variations and normalization.
|
||||
#[test]
|
||||
fn parse_simple_index_case_variations() {
|
||||
let text = r#"
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<body>
|
||||
<a href="/simple/Flask/">Flask</a><br/>
|
||||
<a href="/simple/django/">django</a><br/>
|
||||
<a href="/simple/PyYAML/">PyYAML</a><br/>
|
||||
</body>
|
||||
</html>
|
||||
"#;
|
||||
let result = SimpleIndexHtml::parse(text).unwrap();
|
||||
// Note: We preserve the case as returned by the server
|
||||
insta::assert_debug_snapshot!(result, @r#"
|
||||
SimpleIndexHtml {
|
||||
projects: [
|
||||
PackageName(
|
||||
"django",
|
||||
),
|
||||
PackageName(
|
||||
"flask",
|
||||
),
|
||||
PackageName(
|
||||
"pyyaml",
|
||||
),
|
||||
],
|
||||
}
|
||||
"#);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
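Both the retained `SimpleHtml` detail parser and the `main`-only `SimpleIndexHtml` index parser above share one parsing pattern: walk the DOM produced by the `tl` crate, keep only `<a>` tags, and pull data out of their attributes. A self-contained sketch of that pattern (`anchor_hrefs` is a hypothetical helper for illustration; the types in the diff are crate-private):

```rust
/// Collect the `href` of every `<a>` tag in an HTML document, skipping
/// anchors without one -- the same filtering the diff performs before
/// building file entries or project names.
fn anchor_hrefs(text: &str) -> Result<Vec<String>, tl::ParseError> {
    let dom = tl::parse(text, tl::ParserOptions::default())?;
    Ok(dom
        .nodes()
        .iter()
        .filter_map(|node| node.as_tag())
        .filter(|tag| tag.name().as_bytes() == b"a")
        .filter_map(|tag| tag.attributes().get("href").flatten())
        .map(|href| String::from_utf8_lossy(href.as_bytes()).into_owned())
        .collect())
}

fn main() {
    let html = r#"<a href="/simple/flask/">flask</a><a>no-href</a>"#;
    assert_eq!(anchor_hrefs(html).unwrap(), ["/simple/flask/"]);
}
```

The `.get("href").flatten()` step mirrors the diff exactly: `tl` returns a nested `Option` for attribute lookups, so the outer `Option` (attribute absent) and the inner `Option` (attribute present but without a value) collapse into a single check.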
@@ -179,7 +179,7 @@ struct CacheControlParser<'b, I> {
 impl<'b, B: 'b + ?Sized + AsRef<[u8]>, I: Iterator<Item = &'b B>> CacheControlParser<'b, I> {
     /// Create a new parser of zero or more `Cache-Control` header values. The
     /// given iterator should yield elements that satisfy `AsRef<[u8]>`.
-    fn new<II: IntoIterator<IntoIter = I>>(headers: II) -> Self {
+    fn new<II: IntoIterator<IntoIter = I>>(headers: II) -> CacheControlParser<'b, I> {
         let mut directives = headers.into_iter();
         let cur = directives.next().map(AsRef::as_ref).unwrap_or(b"");
         CacheControlParser {
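For context on what `CacheControlParser` iterates over: `Cache-Control` is a comma-separated list of directives that may be spread across several header values. A toy sketch of that flattening follows; the real parser is considerably more careful (it works over raw bytes and handles malformed input), so this shows only the shape of the iteration.

```rust
/// Toy directive splitter: flatten several `Cache-Control` header values
/// into normalized directive strings. Quoted-string handling is omitted.
fn directives<'a>(headers: impl IntoIterator<Item = &'a str>) -> Vec<String> {
    headers
        .into_iter()
        .flat_map(|value| value.split(','))
        .map(|directive| directive.trim().to_ascii_lowercase())
        .filter(|directive| !directive.is_empty())
        .collect()
}

fn main() {
    let got = directives(["max-age=604800, immutable", "no-transform"]);
    assert_eq!(got, ["max-age=604800", "immutable", "no-transform"]);
}
```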
@@ -563,7 +563,7 @@ impl ArchivedCachePolicy {
             ArchivedMethod::Get | ArchivedMethod::Head
         ) {
             tracing::trace!(
-                "Response from {} is not storable because of the request method {:?}",
+                "Cached request {} is not storable because of its method {:?}",
                 self.request.uri,
                 self.request.method
             );
@@ -575,8 +575,8 @@ impl ArchivedCachePolicy {
         // below, but we can bail out early here.
         if !self.response.has_final_status() {
             tracing::trace!(
-                "Response from {} is not storable because it has \
-                 a non-final status code {:?}",
+                "Cached request {} is not storable because its response has \
+                 non-final status code {:?}",
                 self.request.uri,
                 self.response.status,
             );
@@ -591,8 +591,8 @@ impl ArchivedCachePolicy {
         // itself.
         if self.response.status == 206 || self.response.status == 304 {
             tracing::trace!(
-                "Response from {} is not storable because it has \
-                 an unsupported status code {:?}",
+                "Cached request {} is not storable because its response has \
+                 unsupported status code {:?}",
                 self.request.uri,
                 self.response.status,
             );
@@ -605,7 +605,7 @@ impl ArchivedCachePolicy {
         // S3.)
         if self.request.headers.cc.no_store {
             tracing::trace!(
-                "Response from {} is not storable because its request has \
+                "Cached request {} is not storable because its request has \
                 a 'no-store' cache-control directive",
                 self.request.uri,
             );
@@ -614,7 +614,7 @@ impl ArchivedCachePolicy {
         // "the no-store cache directive is not present in the response"
         if self.response.headers.cc.no_store {
             tracing::trace!(
-                "Response from {} is not storable because it has \
+                "Cached request {} is not storable because its response has \
                 a 'no-store' cache-control directive",
                 self.request.uri,
             );
@@ -631,8 +631,8 @@ impl ArchivedCachePolicy {
         // private).
         if self.response.headers.cc.private {
             tracing::trace!(
-                "Response from {} is not storable because this is a shared \
-                 cache and has a 'private' cache-control directive",
+                "Cached request {} is not storable because this is a shared \
+                 cache and its response has a 'private' cache-control directive",
                 self.request.uri,
             );
             return false;
@@ -642,7 +642,7 @@ impl ArchivedCachePolicy {
         // explicitly allows shared caching"
         if self.request.headers.authorization && !self.allows_authorization_storage() {
             tracing::trace!(
-                "Response from {} is not storable because this is a shared \
+                "Cached request {} is not storable because this is a shared \
                 cache and the request has an 'Authorization' header set and \
                 the response has indicated that caching requests with an \
                 'Authorization' header is allowed",
@@ -657,7 +657,7 @@ impl ArchivedCachePolicy {
         // "a public response directive"
         if self.response.headers.cc.public {
             tracing::trace!(
-                "Response from {} is storable because it has \
+                "Cached request {} is storable because its response has \
                 a 'public' cache-control directive",
                 self.request.uri,
             );
@@ -666,8 +666,8 @@ impl ArchivedCachePolicy {
         // "a private response directive, if the cache is not shared"
         if !self.config.shared && self.response.headers.cc.private {
             tracing::trace!(
-                "Response from {} is storable because this is a shared cache \
-                 and has a 'private' cache-control directive",
+                "Cached request {} is storable because this is a shared cache \
+                 and its response has a 'private' cache-control directive",
                 self.request.uri,
             );
             return true;
@@ -675,7 +675,7 @@ impl ArchivedCachePolicy {
         // "an Expires header field"
         if self.response.headers.expires_unix_timestamp.is_some() {
             tracing::trace!(
-                "Response from {} is storable because it has an \
+                "Cached request {} is storable because its response has an \
                 'Expires' header set",
                 self.request.uri,
             );
@@ -684,7 +684,7 @@ impl ArchivedCachePolicy {
         // "a max-age response directive"
         if self.response.headers.cc.max_age_seconds.is_some() {
             tracing::trace!(
-                "Response from {} is storable because it has an \
+                "Cached request {} is storable because its response has an \
                 'max-age' cache-control directive",
                 self.request.uri,
             );
@@ -693,8 +693,8 @@ impl ArchivedCachePolicy {
         // "if the cache is shared: an s-maxage response directive"
         if self.config.shared && self.response.headers.cc.s_maxage_seconds.is_some() {
             tracing::trace!(
-                "Response from {} is storable because this is a shared cache \
-                 and has a 's-maxage' cache-control directive",
+                "Cached request {} is storable because this is a shared cache \
+                 and its response has a 's-maxage' cache-control directive",
                 self.request.uri,
             );
             return true;
@@ -705,7 +705,7 @@ impl ArchivedCachePolicy {
         // "a status code that is defined as heuristically cacheable"
         if HEURISTICALLY_CACHEABLE_STATUS_CODES.contains(&self.response.status.into()) {
             tracing::trace!(
-                "Response from {} is storable because it has a \
+                "Cached request {} is storable because its response has a \
                 heuristically cacheable status code {:?}",
                 self.request.uri,
                 self.response.status,
@@ -713,7 +713,7 @@ impl ArchivedCachePolicy {
             return true;
         }
         tracing::trace!(
-            "Response from {} is not storable because it does not meet any \
+            "Cached response {} is not storable because it does not meet any \
             of the necessary criteria (e.g., it doesn't have an 'Expires' \
             header set or a 'max-age' cache-control directive)",
             self.request.uri,
@@ -766,7 +766,7 @@ impl ArchivedCachePolicy {
         // [RFC 9111 S5.2.1.4]: https://www.rfc-editor.org/rfc/rfc9111.html#section-5.2.1.4
         if reqcc.no_cache {
             tracing::trace!(
-                "Request to {} does not have a fresh cache entry because \
+                "Request {} does not have a fresh cache because \
                 it has a 'no-cache' cache-control directive",
                 request.url(),
             );
@@ -780,7 +780,7 @@ impl ArchivedCachePolicy {
         if let Some(&max_age) = reqcc.max_age_seconds.as_ref() {
             if age > max_age {
                 tracing::trace!(
-                    "Request to {} does not have a fresh cache entry because \
+                    "Request {} does not have a fresh cache because \
                     the cached response's age is {} seconds and the max age \
                     allowed by the request is {} seconds",
                     request.url(),
@@ -800,7 +800,7 @@ impl ArchivedCachePolicy {
             let time_to_live = freshness_lifetime.saturating_sub(unix_timestamp(now));
             if time_to_live < min_fresh {
                 tracing::trace!(
-                    "Request to {} does not have a fresh cache entry because \
+                    "Request {} does not have a fresh cache because \
                     the request set a 'min-fresh' cache-control directive, \
                     and its time-to-live is {} seconds but it needs to be \
                     at least {} seconds",
@@ -818,7 +818,7 @@ impl ArchivedCachePolicy {
         let allows_stale = self.allows_stale(now);
         if !allows_stale {
             tracing::trace!(
-                "Request to {} does not have a fresh cache entry because \
+                "Request {} does not have a fresh cache because \
                 its age is {} seconds, it is greater than the freshness \
                 lifetime of {} seconds and stale cached responses are not \
                 allowed",
@@ -846,7 +846,7 @@ impl ArchivedCachePolicy {
         // [RFC 9111 S5.2.2.2]: https://www.rfc-editor.org/rfc/rfc9111.html#section-5.2.2.2
         if self.response.headers.cc.must_revalidate {
             tracing::trace!(
-                "Request to {} has a cached response that does not \
+                "Cached request {} has a cached response that does not \
                 permit staleness because the response has a 'must-revalidate' \
                 cache-control directive set",
                 self.request.uri,
@@ -865,7 +865,7 @@ impl ArchivedCachePolicy {
                 .saturating_sub(self.freshness_lifetime().as_secs());
             if stale_amount <= max_stale.into() {
                 tracing::trace!(
-                    "Request to {} has a cached response that allows staleness \
+                    "Cached request {} has a cached response that allows staleness \
                     in this case because the stale amount is {} seconds and the \
                     'max-stale' cache-control directive set by the cached request \
                     is {} seconds",
@@ -885,7 +885,7 @@ impl ArchivedCachePolicy {
         //
         // [RFC 9111 S4.2.4]: https://www.rfc-editor.org/rfc/rfc9111.html#section-4.2.4
         tracing::trace!(
-            "Request to {} has a cached response that does not allow staleness",
+            "Cached request {} has a cached response that does not allow staleness",
             self.request.uri,
         );
         false
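Every message change above sits on the storability path from RFC 9111 Section 3: a handful of hard refusals, followed by positive criteria of which any one suffices. A condensed sketch of that decision order (the flags are illustrative stand-ins for uv's archived request/response fields; the method, final-status, and `Authorization` checks from the earlier hunks are omitted):

```rust
/// Illustrative inputs to the storability decision.
struct Signals {
    shared_cache: bool,
    request_no_store: bool,
    response_no_store: bool,
    response_private: bool,
    response_public: bool,
    has_expires: bool,
    has_max_age: bool,
    has_s_maxage: bool,
    heuristically_cacheable_status: bool,
}

fn is_storable(s: &Signals) -> bool {
    // Hard refusals: 'no-store' on either side, or 'private' in a shared cache.
    if s.request_no_store || s.response_no_store {
        return false;
    }
    if s.shared_cache && s.response_private {
        return false;
    }
    // Otherwise at least one positive criterion must hold.
    s.response_public
        || (!s.shared_cache && s.response_private)
        || s.has_expires
        || s.has_max_age
        || (s.shared_cache && s.has_s_maxage)
        || s.heuristically_cacheable_status
}
```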
@@ -1,15 +1,14 @@
 pub use base_client::{
-    AuthIntegration, BaseClient, BaseClientBuilder, DEFAULT_MAX_REDIRECTS, DEFAULT_RETRIES,
-    ExtraMiddleware, RedirectClientWithMiddleware, RedirectPolicy, RequestBuilder,
-    RetryParsingError, UvRetryableStrategy, is_transient_network_error,
+    AuthIntegration, BaseClient, BaseClientBuilder, DEFAULT_RETRIES, ExtraMiddleware,
+    RedirectClientWithMiddleware, RequestBuilder, UvRetryableStrategy, is_extended_transient_error,
 };
 pub use cached_client::{CacheControl, CachedClient, CachedClientError, DataWithCachePolicy};
 pub use error::{Error, ErrorKind, WrappedReqwestError};
 pub use flat_index::{FlatIndexClient, FlatIndexEntries, FlatIndexEntry, FlatIndexError};
 pub use linehaul::LineHaul;
 pub use registry_client::{
-    Connectivity, MetadataFormat, RegistryClient, RegistryClientBuilder, SimpleDetailMetadata,
-    SimpleDetailMetadatum, SimpleIndexMetadata, VersionFiles,
+    Connectivity, MetadataFormat, RegistryClient, RegistryClientBuilder, SimpleMetadata,
+    SimpleMetadatum, VersionFiles,
 };
 pub use rkyvutil::{Deserializer, OwnedArchive, Serializer, Validator};

@@ -5,14 +5,12 @@ use tracing::instrument;

 use uv_pep508::MarkerEnvironment;
 use uv_platform_tags::{Os, Platform};
-use uv_static::EnvVars;
 use uv_version::version;

 #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
 pub struct Installer {
     pub name: Option<String>,
     pub version: Option<String>,
-    pub subcommand: Option<Vec<String>>,
 }

 #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
@@ -64,20 +62,11 @@ pub struct LineHaul {
 impl LineHaul {
     /// Initializes Linehaul information based on PEP 508 markers.
     #[instrument(name = "linehaul", skip_all)]
-    pub fn new(
-        markers: Option<&MarkerEnvironment>,
-        platform: Option<&Platform>,
-        subcommand: Option<Vec<String>>,
-    ) -> Self {
+    pub fn new(markers: &MarkerEnvironment, platform: Option<&Platform>) -> Self {
         // https://github.com/pypa/pip/blob/24.0/src/pip/_internal/network/session.py#L87
-        let looks_like_ci = [
-            EnvVars::BUILD_BUILDID,
-            EnvVars::BUILD_ID,
-            EnvVars::CI,
-            EnvVars::PIP_IS_CI,
-        ]
-        .iter()
-        .find_map(|&var_name| env::var(var_name).ok().map(|_| true));
+        let looks_like_ci = ["BUILD_BUILDID", "BUILD_ID", "CI", "PIP_IS_CI"]
+            .iter()
+            .find_map(|&var_name| env::var(var_name).ok().map(|_| true));

         let libc = match platform.map(Platform::os) {
             Some(Os::Manylinux { major, minor }) => Some(Libc {
@@ -128,19 +117,18 @@ impl LineHaul {
             installer: Option::from(Installer {
                 name: Some("uv".to_string()),
                 version: Some(version().to_string()),
-                subcommand,
             }),
-            python: markers.map(|markers| markers.python_full_version().version.to_string()),
+            python: Some(markers.python_full_version().version.to_string()),
             implementation: Option::from(Implementation {
-                name: markers.map(|markers| markers.platform_python_implementation().to_string()),
-                version: markers.map(|markers| markers.python_full_version().version.to_string()),
+                name: Some(markers.platform_python_implementation().to_string()),
+                version: Some(markers.python_full_version().version.to_string()),
             }),
             distro,
             system: Option::from(System {
-                name: markers.map(|markers| markers.platform_system().to_string()),
-                release: markers.map(|markers| markers.platform_release().to_string()),
+                name: Some(markers.platform_system().to_string()),
+                release: Some(markers.platform_release().to_string()),
             }),
-            cpu: markers.map(|markers| markers.platform_machine().to_string()),
+            cpu: Some(markers.platform_machine().to_string()),
             // Should probably always be None in uv.
             openssl_version: None,
             // Should probably always be None in uv.
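The `main` side of this diff threads an `Option<&MarkerEnvironment>` through `LineHaul::new`, turning each unconditional `Some(markers.…)` into a `markers.map(…)` so the report can be built before a marker environment is resolved. A minimal sketch of that pattern, with `Markers` as a hypothetical stand-in for `MarkerEnvironment`:

```rust
/// Hypothetical stand-in for the marker environment.
struct Markers {
    platform_machine: String,
}

/// With an optional environment, each field maps to `None` when markers are
/// absent, instead of requiring a resolved environment up front.
fn cpu(markers: Option<&Markers>) -> Option<String> {
    markers.map(|markers| markers.platform_machine.clone())
}

fn main() {
    assert_eq!(cpu(None), None);
    let markers = Markers { platform_machine: "x86_64".into() };
    assert_eq!(cpu(Some(&markers)).as_deref(), Some("x86_64"));
}
```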