mirror of https://github.com/astral-sh/uv
Compare commits
No commits in common. "main" and "0.1.4" have entirely different histories.

@@ -1,9 +0,0 @@
[alias]
dev = "run --package uv-dev --features dev"

# statically link the C runtime so the executable does not depend on
# that shared/dynamic library.
#
# See: https://github.com/astral-sh/ruff/issues/11503
[target.'cfg(all(target_env = "msvc", target_os = "windows"))']
rustflags = ["-C", "target-feature=+crt-static"]
@@ -1,81 +0,0 @@
# /// script
# requires-python = ">=3.12"
# dependencies = []
# ///

"""Post-edit hook to auto-format files after Claude edits."""

import json
import subprocess
import sys
from pathlib import Path


def format_rust(file_path: str, cwd: str) -> None:
    """Format Rust files with cargo fmt."""
    try:
        subprocess.run(
            ["cargo", "fmt", "--", file_path],
            cwd=cwd,
            capture_output=True,
        )
    except FileNotFoundError:
        pass


def format_python(file_path: str, cwd: str) -> None:
    """Format Python files with ruff."""
    try:
        subprocess.run(
            ["uvx", "ruff", "format", file_path],
            cwd=cwd,
            capture_output=True,
        )
    except FileNotFoundError:
        pass


def format_prettier(file_path: str, cwd: str, prose_wrap: bool = False) -> None:
    """Format files with prettier."""
    args = ["npx", "prettier", "--write"]
    if prose_wrap:
        args.extend(["--prose-wrap", "always"])
    args.append(file_path)
    try:
        subprocess.run(args, cwd=cwd, capture_output=True)
    except FileNotFoundError:
        pass


def main() -> None:
    import os

    input_data = json.load(sys.stdin)

    tool_name = input_data.get("tool_name")
    tool_input = input_data.get("tool_input", {})
    file_path = tool_input.get("file_path")

    # Only process Write, Edit, and MultiEdit tools
    if tool_name not in ("Write", "Edit", "MultiEdit"):
        return

    if not file_path:
        return

    cwd = os.environ.get("CLAUDE_PROJECT_DIR", os.getcwd())
    path = Path(file_path)
    ext = path.suffix

    if ext == ".rs":
        format_rust(file_path, cwd)
    elif ext in (".py", ".pyi"):
        format_python(file_path, cwd)
    elif ext in (".json5", ".yaml", ".yml"):
        format_prettier(file_path, cwd)
    elif ext == ".md":
        format_prettier(file_path, cwd, prose_wrap=True)


if __name__ == "__main__":
    main()
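The hook reads a single JSON payload from stdin, looks up `tool_name` and `tool_input.file_path`, and dispatches on the file extension. A minimal local smoke test might look like the sketch below (not part of the diff; the payload only mirrors the keys the hook actually reads, and the file path is illustrative):

import json
import subprocess

# Hypothetical PostToolUse-style payload, shaped after the fields the hook reads.
payload = {"tool_name": "Edit", "tool_input": {"file_path": "README.md"}}

# Pipe it to the hook using the same command that the hooks configuration
# in the next block registers.
subprocess.run(
    ["uv", "run", ".claude/hooks/post-edit-format.py"],
    input=json.dumps(payload),
    text=True,
    check=True,
)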
@@ -1,15 +0,0 @@
{
  "hooks": {
    "PostToolUse": [
      {
        "matcher": "Edit|Write|MultiEdit",
        "hooks": [
          {
            "type": "command",
            "command": "uv run .claude/hooks/post-edit-format.py"
          }
        ]
      }
    ]
  }
}
@@ -1,15 +1,3 @@
[profile.default]
# Mark tests that take longer than 10s as slow.
# Terminate after 120s as a stop-gap measure to terminate on deadlock.
slow-timeout = { period = "10s", terminate-after = 12 }

[test-groups]
serial = { max-threads = 1 }

[[profile.default.overrides]]
filter = 'test(native_auth)'
test-group = 'serial'

[[profile.default.overrides]]
filter = 'package(uv-keyring)'
test-group = 'serial'
# Mark tests that take longer than 10s as slow
slow-timeout = "10s"
@@ -16,8 +16,5 @@ indent_size = 4
[*.snap]
trim_trailing_whitespace = false

[crates/uv/tests/help.rs]
trim_trailing_whitespace = false

[*.md]
max_line_length = 100
@@ -1,4 +1 @@
* text=auto eol=lf
*.inc linguist-language=Rust

uv.schema.json linguist-generated=true text=auto eol=lf
@@ -0,0 +1,10 @@
<!--
Thank you for taking the time to report an issue! We're glad to have you involved with uv.

If you're filing a bug report, please consider including the following information:

* A minimal code snippet that reproduces the bug.
* The command you invoked (e.g., `uv pip sync requirements.txt`), ideally including the `--verbose` flag.
* The current uv platform.
* The current uv version (`uv --version`).
-->
@@ -1,43 +0,0 @@
name: Bug report
description: Report an error or unexpected behavior
labels: ["bug"]
body:
  - type: markdown
    attributes:
      value: |
        **Please review [our guide on interacting with the issue tracker](https://github.com/astral-sh/uv/issues/9452) before opening a new issue.**

  - type: textarea
    attributes:
      label: Summary
      description: |
        A clear and concise description of the bug, including [a minimal reproducible example](https://docs.astral.sh/uv/reference/troubleshooting/reproducible-examples/).
        If we cannot reproduce the bug, it is unlikely that we will be able to help you.

        Please include the full output of uv with the complete error message.
    validations:
      required: true

  - type: input
    attributes:
      label: Platform
      description: What operating system and architecture are you using? (see `uname -orsm`)
      placeholder: e.g., macOS 14 arm64, Windows 11 x86_64, Ubuntu 20.04 amd64
    validations:
      required: true

  - type: input
    attributes:
      label: Version
      description: What version of uv are you using? (see `uv self version`)
      placeholder: e.g., uv 0.5.20 (1c17662b3 2025-01-15)
    validations:
      required: true

  - type: input
    attributes:
      label: Python version
      description: What version of Python are you using? (see `uv run python --version`)
      placeholder: e.g., Python 3.12.6
    validations:
      required: false
@@ -1,23 +0,0 @@
name: Feature request
description: Suggest a new feature or improvement
labels: ["enhancement"]
body:
  - type: markdown
    attributes:
      value: |
        **Please review [our guide on interacting with the issue tracker](https://github.com/astral-sh/uv/issues/9452) before opening a new issue.**

  - type: textarea
    attributes:
      label: Summary
      description: |
        A clear and concise description of what new feature or behavior you would like to see. If applicable, please describe the current behavior as well.
    validations:
      required: true

  - type: textarea
    attributes:
      label: Example
      description: Provide an example of how the user experience would change or how the new feature would be used.
    validations:
      required: false
@@ -1,31 +0,0 @@
name: Question
description: Ask a question about uv
labels: ["question"]
body:
  - type: markdown
    attributes:
      value: |
        **Please review [our guide on interacting with the issue tracker](https://github.com/astral-sh/uv/issues/9452) before opening a new issue.**

  - type: textarea
    attributes:
      label: Question
      description: Describe your question in detail.
    validations:
      required: true

  - type: input
    attributes:
      label: Platform
      description: What operating system and architecture are you using? (see `uname -orsm`)
      placeholder: e.g., macOS 14 arm64, Windows 11 x86_64, Ubuntu 20.04 amd64
    validations:
      required: false

  - type: input
    attributes:
      label: Version
      description: What version of uv are you using? (see `uv self version`)
      placeholder: e.g., uv 0.5.20 (1c17662b3 2025-01-15)
    validations:
      required: false
@@ -1,8 +0,0 @@
blank_issues_enabled: true
contact_links:
  - name: Documentation
    url: https://docs.astral.sh/uv
    about: Please consult the documentation before creating an issue.
  - name: Community
    url: https://discord.com/invite/astral-sh
    about: Join our Discord community to ask questions and collaborate.
@@ -1,148 +0,0 @@
{
  $schema: "https://docs.renovatebot.com/renovate-schema.json",
  dependencyDashboard: true,
  suppressNotifications: ["prEditedNotification"],
  extends: [
    "github>astral-sh/renovate-config",
    // For tool versions defined in GitHub Actions:
    "customManagers:githubActionsVersions",
  ],
  labels: ["internal"],
  schedule: ["* 0-3 * * 1"],
  semanticCommits: "disabled",
  separateMajorMinor: false,
  enabledManagers: ["github-actions", "pre-commit", "cargo", "custom.regex"],
  cargo: {
    // See https://docs.renovatebot.com/configuration-options/#rangestrategy
    rangeStrategy: "update-lockfile",
    managerFilePatterns: ["/^Cargo\\.toml$/", "/^crates/.*Cargo\\.toml$/"],
  },
  "pre-commit": {
    enabled: true,
  },
  packageRules: [
    // Pin GitHub Actions to immutable SHAs.
    {
      matchDepTypes: ["action"],
      pinDigests: true,
    },
    // Annotate GitHub Actions SHAs with a SemVer version.
    {
      extends: ["helpers:pinGitHubActionDigests"],
      extractVersion: "^(?<version>v?\\d+\\.\\d+\\.\\d+)$",
      versioning: "regex:^v?(?<major>\\d+)(\\.(?<minor>\\d+)\\.(?<patch>\\d+))?$",
    },
    {
      // Disable updates of `zip-rs`; intentionally pinned for now due to ownership change
      // See: https://github.com/astral-sh/uv/issues/3642
      matchPackageNames: ["/zip/"],
      matchManagers: ["cargo"],
      enabled: false,
    },
    {
      // Create dedicated branches to update references to dependencies in the documentation.
      matchFileNames: ["docs/**/*.md"],
      commitMessageTopic: "documentation references to {{{depName}}}",
      semanticCommitType: "docs",
      semanticCommitScope: null,
      additionalBranchPrefix: "docs-",
    },
    {
      // Group upload/download artifact updates, the versions are dependent
      groupName: "Artifact GitHub Actions dependencies",
      matchManagers: ["github-actions"],
      matchDatasources: ["gitea-tags", "github-tags"],
      matchPackageNames: ["/actions/.*-artifact/"],
      description: "Weekly update of artifact-related GitHub Actions dependencies",
    },
    {
      // This package rule disables updates for GitHub runners:
      // we'd only pin them to a specific version
      // if there was a deliberate reason to do so
      groupName: "GitHub runners",
      matchManagers: ["github-actions"],
      matchDatasources: ["github-runners"],
      description: "Disable PRs updating GitHub runners (e.g. 'runs-on: macos-14')",
      enabled: false,
    },
    {
      groupName: "pre-commit dependencies",
      matchManagers: ["pre-commit"],
      description: "Weekly update of pre-commit dependencies",
    },
    {
      groupName: "Rust dev-dependencies",
      matchManagers: ["cargo"],
      matchDepTypes: ["devDependencies"],
      description: "Weekly update of Rust development dependencies",
    },
    {
      // We don't really use PyO3 in this project; it's pulled in as an optional feature
      // of the PEP 440 and PEP 508 crates, which we vendored and forked.
      groupName: "pyo3",
      matchManagers: ["cargo"],
      matchPackageNames: ["/pyo3/"],
      description: "Weekly update of pyo3 dependencies",
      enabled: false,
    },
    {
      groupName: "pubgrub",
      matchManagers: ["cargo"],
      matchDepNames: ["pubgrub", "version-ranges"],
      description: "version-ranges and pubgrub are in the same Git repository",
    },
    {
      commitMessageTopic: "MSRV",
      matchManagers: ["custom.regex"],
      matchDepNames: ["msrv"],
      // We have a rolling support policy for the MSRV
      // 2 releases back * 6 weeks per release * 7 days per week + 1
      minimumReleaseAge: "85 days",
      internalChecksFilter: "strict",
      groupName: "MSRV",
    },
    {
      matchManagers: ["custom.regex"],
      matchDepNames: ["rust"],
      commitMessageTopic: "Rust",
    },
  ],
  customManagers: [
    // Update major GitHub actions references in documentation.
    {
      customType: "regex",
      managerFilePatterns: ["/^docs/.*\\.md$/"],
      matchStrings: [
        "\\suses: (?<depName>[\\w-]+/[\\w-]+)(?<path>/.*)?@(?<currentValue>.+?)\\s",
      ],
      datasourceTemplate: "github-tags",
      versioningTemplate: "regex:^v(?<major>\\d+)$",
    },
    // Minimum supported Rust toolchain version
    {
      customType: "regex",
      managerFilePatterns: ["/(^|/)Cargo\\.toml?$/"],
      matchStrings: [
        'rust-version\\s*=\\s*"(?<currentValue>\\d+\\.\\d+(\\.\\d+)?)"',
      ],
      depNameTemplate: "msrv",
      packageNameTemplate: "rust-lang/rust",
      datasourceTemplate: "github-releases",
    },
    // Rust toolchain version
    {
      customType: "regex",
      managerFilePatterns: ["/(^|/)rust-toolchain\\.toml?$/"],
      matchStrings: [
        'channel\\s*=\\s*"(?<currentValue>\\d+\\.\\d+(\\.\\d+)?)"',
      ],
      depNameTemplate: "rust",
      packageNameTemplate: "rust-lang/rust",
      datasourceTemplate: "github-releases",
    },
  ],
  vulnerabilityAlerts: {
    commitMessageSuffix: "",
    labels: ["internal", "security"],
  },
}
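The `msrv` and `rust` custom managers above are plain regex scrapes over TOML files. As a rough illustration of what the MSRV pattern matches (a sketch; Renovate's `(?<name>...)` groups become `(?P<name>...)` in Python, and the version number below is made up, not the project's actual MSRV):

import re

# The MSRV manager's matchString, translated to Python's named-group syntax.
MSRV_PATTERN = r'rust-version\s*=\s*"(?P<currentValue>\d+\.\d+(\.\d+)?)"'

line = 'rust-version = "1.75"'  # illustrative Cargo.toml line
match = re.search(MSRV_PATTERN, line)
assert match is not None and match.group("currentValue") == "1.75"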
File diff suppressed because it is too large
@@ -1,19 +1,11 @@
# Build and publish Docker images.
# Build and publish a Docker image.
#
# Uses Depot for multi-platform builds. Includes both a `uv` base image, which
# is just the binary in a scratch image, and a set of extra, common images with
# the uv binary installed.
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a local
# artifacts job within `cargo-dist`.
#
# Images are built on all runs.
#
# On release, assumed to run as a subworkflow of .github/workflows/release.yml;
# specifically, as a local artifacts job within `cargo-dist`. In this case,
# images are published based on the `plan`.
#
# TODO(charlie): Ideally, the publish step would happen as a publish job within
# `cargo-dist`, but sharing the built image as an artifact between jobs is
# challenging.
name: "Docker images"
# TODO(charlie): Ideally, the publish step would happen as a publish job within `cargo-dist`, but
# sharing the built image as an artifact between jobs is challenging.
name: "Build Docker image"

on:
workflow_call:
@ -23,413 +15,54 @@ on:
|
|||
type: string
|
||||
pull_request:
|
||||
paths:
|
||||
# We want to ensure that the maturin builds still work when we change
|
||||
# Project metadata
|
||||
- pyproject.toml
|
||||
- Cargo.toml
|
||||
- .cargo/config.toml
|
||||
# Toolchain or dependency versions
|
||||
- Cargo.lock
|
||||
- rust-toolchain.toml
|
||||
# The Dockerfile itself
|
||||
- Dockerfile
|
||||
# And the workflow itself
|
||||
- .github/workflows/build-docker.yml
|
||||
|
||||
env:
|
||||
UV_GHCR_IMAGE: ghcr.io/${{ github.repository_owner }}/uv
|
||||
UV_DOCKERHUB_IMAGE: docker.io/astral/uv
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
docker-plan:
|
||||
name: plan
|
||||
docker-publish:
|
||||
name: Build Docker image (ghcr.io/astral-sh/uv)
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
login: ${{ steps.plan.outputs.login }}
|
||||
push: ${{ steps.plan.outputs.push }}
|
||||
tag: ${{ steps.plan.outputs.tag }}
|
||||
action: ${{ steps.plan.outputs.action }}
|
||||
steps:
|
||||
- name: Set push variable
|
||||
env:
|
||||
DRY_RUN: ${{ inputs.plan == '' || fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
TAG: ${{ inputs.plan != '' && fromJson(inputs.plan).announcement_tag }}
|
||||
IS_LOCAL_PR: ${{ github.event.pull_request.head.repo.full_name == 'astral-sh/uv' }}
|
||||
id: plan
|
||||
run: |
|
||||
if [ "${DRY_RUN}" == "false" ]; then
|
||||
echo "login=true" >> "$GITHUB_OUTPUT"
|
||||
echo "push=true" >> "$GITHUB_OUTPUT"
|
||||
echo "tag=${TAG}" >> "$GITHUB_OUTPUT"
|
||||
echo "action=build and publish" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
echo "login=${IS_LOCAL_PR}" >> "$GITHUB_OUTPUT"
|
||||
echo "push=false" >> "$GITHUB_OUTPUT"
|
||||
echo "tag=dry-run" >> "$GITHUB_OUTPUT"
|
||||
echo "action=build" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
docker-publish-base:
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-build') }}
|
||||
name: ${{ needs.docker-plan.outputs.action }} uv
|
||||
needs:
|
||||
- docker-plan
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write # for Depot OIDC and GHCR signing
|
||||
packages: write # for GHCR image pushes
|
||||
attestations: write # for GHCR attestations
|
||||
environment:
|
||||
name: ${{ needs.docker-plan.outputs.push == 'true' && 'release' || '' }}
|
||||
outputs:
|
||||
image-tags: ${{ steps.meta.outputs.tags }}
|
||||
image-annotations: ${{ steps.meta.outputs.annotations }}
|
||||
image-digest: ${{ steps.build.outputs.digest }}
|
||||
image-version: ${{ steps.meta.outputs.version }}
|
||||
name: release
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
persist-credentials: false
|
||||
|
||||
# Login to DockerHub (when not pushing, it's to avoid rate-limiting)
|
||||
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
|
||||
if: ${{ needs.docker-plan.outputs.login == 'true' }}
|
||||
with:
|
||||
username: ${{ needs.docker-plan.outputs.push == 'true' && 'astral' || 'astralshbot' }}
|
||||
password: ${{ needs.docker-plan.outputs.push == 'true' && secrets.DOCKERHUB_TOKEN_RW || secrets.DOCKERHUB_TOKEN_RO }}
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
|
||||
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
|
||||
- uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ghcr.io/astral-sh/uv
|
||||
|
||||
- name: Check tag consistency
|
||||
if: ${{ needs.docker-plan.outputs.push == 'true' }}
|
||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
run: |
|
||||
version=$(grep "version = " pyproject.toml | sed -e 's/version = "\(.*\)"/\1/g')
|
||||
if [ "${TAG}" != "${version}" ]; then
|
||||
if [ "${{ fromJson(inputs.plan).announcement_tag }}" != "${version}" ]; then
|
||||
echo "The input tag does not match the version from pyproject.toml:" >&2
|
||||
echo "${TAG}" >&2
|
||||
echo "${{ fromJson(inputs.plan).announcement_tag }}" >&2
|
||||
echo "${version}" >&2
|
||||
exit 1
|
||||
else
|
||||
echo "Releasing ${version}"
|
||||
fi
|
||||
env:
|
||||
TAG: ${{ needs.docker-plan.outputs.tag }}
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
|
||||
env:
|
||||
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
|
||||
with:
|
||||
images: |
|
||||
${{ env.UV_GHCR_IMAGE }}
|
||||
${{ env.UV_DOCKERHUB_IMAGE }}
|
||||
# Defining this makes sure the org.opencontainers.image.version OCI label becomes the actual release version and not the branch name
|
||||
tags: |
|
||||
type=raw,value=dry-run,enable=${{ needs.docker-plan.outputs.push == 'false' }}
|
||||
type=pep440,pattern={{ version }},value=${{ needs.docker-plan.outputs.tag }},enable=${{ needs.docker-plan.outputs.push }}
|
||||
type=pep440,pattern={{ major }}.{{ minor }},value=${{ needs.docker-plan.outputs.tag }},enable=${{ needs.docker-plan.outputs.push }}
|
||||
|
||||
- name: Build and push by digest
|
||||
id: build
|
||||
uses: depot/build-push-action@9785b135c3c76c33db102e45be96a25ab55cd507 # v1.16.2
|
||||
with:
|
||||
project: 7hd4vdzmw5 # astral-sh/uv
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: ${{ needs.docker-plan.outputs.push }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
# TODO(zanieb): Annotations are not supported by Depot yet and are ignored
|
||||
annotations: ${{ steps.meta.outputs.annotations }}
|
||||
|
||||
- name: Generate artifact attestation for base image
|
||||
if: ${{ needs.docker-plan.outputs.push == 'true' }}
|
||||
uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
|
||||
with:
|
||||
subject-name: ${{ env.UV_GHCR_IMAGE }}
|
||||
subject-digest: ${{ steps.build.outputs.digest }}
|
||||
|
||||
docker-publish-extra:
|
||||
name: ${{ needs.docker-plan.outputs.action }} ${{ matrix.image-mapping }}
|
||||
runs-on: ubuntu-latest
|
||||
environment:
|
||||
name: ${{ needs.docker-plan.outputs.push == 'true' && 'release' || '' }}
|
||||
needs:
|
||||
- docker-plan
|
||||
- docker-publish-base
|
||||
permissions:
|
||||
id-token: write # for Depot OIDC and GHCR signing
|
||||
packages: write # for GHCR image pushes
|
||||
attestations: write # for GHCR attestations
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
# Mapping of base image followed by a comma followed by one or more base tags (comma separated)
|
||||
# Note, org.opencontainers.image.version label will use the first base tag (use the most specific tag first)
|
||||
image-mapping:
|
||||
- alpine:3.22,alpine3.22,alpine
|
||||
- alpine:3.21,alpine3.21
|
||||
- debian:trixie-slim,trixie-slim,debian-slim
|
||||
- buildpack-deps:trixie,trixie,debian
|
||||
- debian:bookworm-slim,bookworm-slim
|
||||
- buildpack-deps:bookworm,bookworm
|
||||
- python:3.14-alpine3.23,python3.14-alpine3.23,python3.14-alpine
|
||||
- python:3.13-alpine3.23,python3.13-alpine3.23,python3.13-alpine
|
||||
- python:3.12-alpine3.23,python3.12-alpine3.23,python3.12-alpine
|
||||
- python:3.11-alpine3.23,python3.11-alpine3.23,python3.11-alpine
|
||||
- python:3.10-alpine3.23,python3.10-alpine3.23,python3.10-alpine
|
||||
- python:3.9-alpine3.22,python3.9-alpine3.22,python3.9-alpine
|
||||
- python:3.8-alpine3.20,python3.8-alpine3.20,python3.8-alpine
|
||||
- python:3.14-trixie,python3.14-trixie
|
||||
- python:3.13-trixie,python3.13-trixie
|
||||
- python:3.12-trixie,python3.12-trixie
|
||||
- python:3.11-trixie,python3.11-trixie
|
||||
- python:3.10-trixie,python3.10-trixie
|
||||
- python:3.9-trixie,python3.9-trixie
|
||||
- python:3.14-slim-trixie,python3.14-trixie-slim
|
||||
- python:3.13-slim-trixie,python3.13-trixie-slim
|
||||
- python:3.12-slim-trixie,python3.12-trixie-slim
|
||||
- python:3.11-slim-trixie,python3.11-trixie-slim
|
||||
- python:3.10-slim-trixie,python3.10-trixie-slim
|
||||
- python:3.9-slim-trixie,python3.9-trixie-slim
|
||||
- python:3.14-bookworm,python3.14-bookworm
|
||||
- python:3.13-bookworm,python3.13-bookworm
|
||||
- python:3.12-bookworm,python3.12-bookworm
|
||||
- python:3.11-bookworm,python3.11-bookworm
|
||||
- python:3.10-bookworm,python3.10-bookworm
|
||||
- python:3.9-bookworm,python3.9-bookworm
|
||||
- python:3.8-bookworm,python3.8-bookworm
|
||||
- python:3.14-slim-bookworm,python3.14-bookworm-slim
|
||||
- python:3.13-slim-bookworm,python3.13-bookworm-slim
|
||||
- python:3.12-slim-bookworm,python3.12-bookworm-slim
|
||||
- python:3.11-slim-bookworm,python3.11-bookworm-slim
|
||||
- python:3.10-slim-bookworm,python3.10-bookworm-slim
|
||||
- python:3.9-slim-bookworm,python3.9-bookworm-slim
|
||||
- python:3.8-slim-bookworm,python3.8-bookworm-slim
|
||||
steps:
|
||||
# Login to DockerHub (when not pushing, it's to avoid rate-limiting)
|
||||
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
|
||||
if: ${{ needs.docker-plan.outputs.login == 'true' }}
|
||||
with:
|
||||
username: ${{ needs.docker-plan.outputs.push == 'true' && 'astral' || 'astralshbot' }}
|
||||
password: ${{ needs.docker-plan.outputs.push == 'true' && secrets.DOCKERHUB_TOKEN_RW || secrets.DOCKERHUB_TOKEN_RO }}
|
||||
|
||||
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5
|
||||
|
||||
- name: Generate Dynamic Dockerfile Tags
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the image and tags from the matrix variable
|
||||
IFS=',' read -r BASE_IMAGE BASE_TAGS <<< "${{ matrix.image-mapping }}"
|
||||
|
||||
# Generate Dockerfile content
|
||||
cat <<EOF > Dockerfile
|
||||
FROM ${BASE_IMAGE}
|
||||
COPY --from=${UV_GHCR_IMAGE}:latest /uv /uvx /usr/local/bin/
|
||||
ENV UV_TOOL_BIN_DIR="/usr/local/bin"
|
||||
ENTRYPOINT []
|
||||
CMD ["/usr/local/bin/uv"]
|
||||
EOF
|
||||
|
||||
# Initialize a variable to store all tag docker metadata patterns
|
||||
TAG_PATTERNS=""
|
||||
|
||||
# Loop through all base tags and append its docker metadata pattern to the list
|
||||
# Order is on purpose such that the label org.opencontainers.image.version has the first pattern with the full version
|
||||
IFS=','; for TAG in ${BASE_TAGS}; do
|
||||
TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ version }},suffix=-${TAG},value=${VERSION}\n"
|
||||
TAG_PATTERNS="${TAG_PATTERNS}type=pep440,pattern={{ major }}.{{ minor }},suffix=-${TAG},value=${VERSION}\n"
|
||||
TAG_PATTERNS="${TAG_PATTERNS}type=raw,value=${TAG}\n"
|
||||
done
|
||||
|
||||
# Remove the trailing newline from the pattern list
|
||||
TAG_PATTERNS="${TAG_PATTERNS%\\n}"
|
||||
|
||||
# Export tag patterns using the multiline env var syntax
|
||||
{
|
||||
echo "TAG_PATTERNS<<EOF"
|
||||
echo -e "${TAG_PATTERNS}"
|
||||
echo EOF
|
||||
} >> $GITHUB_ENV
|
||||
env:
|
||||
VERSION: ${{ needs.docker-plan.outputs.tag }}
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
|
||||
# ghcr.io prefers index level annotations
|
||||
env:
|
||||
DOCKER_METADATA_ANNOTATIONS_LEVELS: index
|
||||
with:
|
||||
images: |
|
||||
${{ env.UV_GHCR_IMAGE }}
|
||||
${{ env.UV_DOCKERHUB_IMAGE }}
|
||||
flavor: |
|
||||
latest=false
|
||||
tags: |
|
||||
${{ env.TAG_PATTERNS }}
|
||||
|
||||
- name: Build and push
|
||||
id: build-and-push
|
||||
uses: depot/build-push-action@9785b135c3c76c33db102e45be96a25ab55cd507 # v1.16.2
|
||||
- name: "Build and push Docker image"
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
project: 7hd4vdzmw5 # astral-sh/uv
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: ${{ needs.docker-plan.outputs.push }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
# Reuse the builder
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
push: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
tags: ghcr.io/astral-sh/uv:latest,ghcr.io/astral-sh/uv:${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || 'dry-run' }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
# TODO(zanieb): Annotations are not supported by Depot yet and are ignored
|
||||
annotations: ${{ steps.meta.outputs.annotations }}
|
||||
|
||||
- name: Generate artifact attestation
|
||||
if: ${{ needs.docker-plan.outputs.push == 'true' }}
|
||||
uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
|
||||
with:
|
||||
subject-name: ${{ env.UV_GHCR_IMAGE }}
|
||||
subject-digest: ${{ steps.build-and-push.outputs.digest }}
|
||||
|
||||
# Push annotations manually.
|
||||
# See `docker-annotate-base` for details.
|
||||
- name: Add annotations to images
|
||||
if: ${{ needs.docker-plan.outputs.push == 'true' }}
|
||||
env:
|
||||
IMAGES: "${{ env.UV_GHCR_IMAGE }} ${{ env.UV_DOCKERHUB_IMAGE }}"
|
||||
DIGEST: ${{ steps.build-and-push.outputs.digest }}
|
||||
TAGS: ${{ steps.meta.outputs.tags }}
|
||||
ANNOTATIONS: ${{ steps.meta.outputs.annotations }}
|
||||
run: |
|
||||
set -x
|
||||
readarray -t lines <<< "$ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done
|
||||
for image in $IMAGES; do
|
||||
readarray -t lines < <(grep "^${image}:" <<< "$TAGS"); tags=(); for line in "${lines[@]}"; do tags+=(-t "$line"); done
|
||||
docker buildx imagetools create \
|
||||
"${annotations[@]}" \
|
||||
"${tags[@]}" \
|
||||
"${image}@${DIGEST}"
|
||||
done
|
||||
|
||||
# See `docker-annotate-base` for details.
|
||||
- name: Export manifest digest
|
||||
id: manifest-digest
|
||||
if: ${{ needs.docker-plan.outputs.push == 'true' }}
|
||||
env:
|
||||
IMAGE: ${{ env.UV_GHCR_IMAGE }}
|
||||
VERSION: ${{ steps.meta.outputs.version }}
|
||||
run: |
|
||||
digest="$(
|
||||
docker buildx imagetools inspect \
|
||||
"${IMAGE}:${VERSION}" \
|
||||
--format '{{json .Manifest}}' \
|
||||
| jq -r '.digest'
|
||||
)"
|
||||
echo "digest=${digest}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
# See `docker-annotate-base` for details.
|
||||
- name: Generate artifact attestation
|
||||
if: ${{ needs.docker-plan.outputs.push == 'true' }}
|
||||
uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
|
||||
with:
|
||||
subject-name: ${{ env.UV_GHCR_IMAGE }}
|
||||
subject-digest: ${{ steps.manifest-digest.outputs.digest }}
|
||||
|
||||
# Annotate the base image
|
||||
docker-annotate-base:
|
||||
name: annotate uv
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write # for GHCR signing
|
||||
packages: write # for GHCR image pushes
|
||||
attestations: write # for GHCR attestations
|
||||
environment:
|
||||
name: ${{ needs.docker-plan.outputs.push == 'true' && 'release' || '' }}
|
||||
needs:
|
||||
- docker-plan
|
||||
- docker-publish-base
|
||||
- docker-publish-extra
|
||||
if: ${{ needs.docker-plan.outputs.push == 'true' }}
|
||||
steps:
|
||||
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
|
||||
with:
|
||||
username: astral
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN_RW }}
|
||||
|
||||
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# Depot doesn't support annotating images, so we need to do so manually
|
||||
# afterwards. Mutating the manifest is desirable regardless, because we
|
||||
# want to bump the base image to appear at the top of the list on GHCR.
|
||||
# However, once annotation support is added to Depot, this step can be
|
||||
# minimized to just touch the GHCR manifest.
|
||||
- name: Add annotations to images
|
||||
env:
|
||||
IMAGES: "${{ env.UV_GHCR_IMAGE }} ${{ env.UV_DOCKERHUB_IMAGE }}"
|
||||
DIGEST: ${{ needs.docker-publish-base.outputs.image-digest }}
|
||||
TAGS: ${{ needs.docker-publish-base.outputs.image-tags }}
|
||||
ANNOTATIONS: ${{ needs.docker-publish-base.outputs.image-annotations }}
|
||||
# The readarray part is used to make sure the quoting and special characters are preserved on expansion (e.g. spaces)
|
||||
# The final command becomes `docker buildx imagetools create --annotation 'index:foo=1' --annotation 'index:bar=2' ... -t tag1 -t tag2 ... <IMG>@sha256:<sha256>`
|
||||
run: |
|
||||
set -x
|
||||
readarray -t lines <<< "$ANNOTATIONS"; annotations=(); for line in "${lines[@]}"; do annotations+=(--annotation "$line"); done
|
||||
for image in $IMAGES; do
|
||||
readarray -t lines < <(grep "^${image}:" <<< "$TAGS"); tags=(); for line in "${lines[@]}"; do tags+=(-t "$line"); done
|
||||
docker buildx imagetools create \
|
||||
"${annotations[@]}" \
|
||||
"${tags[@]}" \
|
||||
"${image}@${DIGEST}"
|
||||
done
|
||||
|
||||
# Now that we've modified the manifest, we need to attest it again.
|
||||
# Note we only generate an attestation for GHCR.
|
||||
- name: Export manifest digest
|
||||
id: manifest-digest
|
||||
env:
|
||||
IMAGE: ${{ env.UV_GHCR_IMAGE }}
|
||||
VERSION: ${{ needs.docker-publish-base.outputs.image-version }}
|
||||
# To sign the manifest, we need it's digest. Unfortunately "docker
|
||||
# buildx imagetools create" does not (yet) have a clean way of sharing
|
||||
# the digest of the manifest it creates (see docker/buildx#2407), so
|
||||
# we use a separate command to retrieve it.
|
||||
# imagetools inspect [TAG] --format '{{json .Manifest}}' gives us
|
||||
# the machine readable JSON description of the manifest, and the
|
||||
# jq command extracts the digest from this. The digest is then
|
||||
# sent to the Github step output file for sharing with other steps.
|
||||
run: |
|
||||
digest="$(
|
||||
docker buildx imagetools inspect \
|
||||
"${IMAGE}:${VERSION}" \
|
||||
--format '{{json .Manifest}}' \
|
||||
| jq -r '.digest'
|
||||
)"
|
||||
echo "digest=${digest}" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Generate artifact attestation
|
||||
uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
|
||||
with:
|
||||
subject-name: ${{ env.UV_GHCR_IMAGE }}
|
||||
subject-digest: ${{ steps.manifest-digest.outputs.digest }}
|
||||
|
|
|
|||
File diff suppressed because it is too large
@@ -1,33 +0,0 @@
# Publish a release to crates.io.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a publish job
# within `cargo-dist`.
name: "Publish to crates.io"

on:
  workflow_call:
    inputs:
      plan:
        required: true
        type: string

jobs:
  crates-publish-uv:
    name: Upload uv to crates.io
    runs-on: ubuntu-latest
    environment:
      name: release
    permissions:
      contents: read
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false
      # TODO(zanieb): Switch to trusted publishing once published
      # - uses: rust-lang/crates-io-auth-action@v1
      #   id: auth
      - name: Publish workspace crates
        # Note `--no-verify` is safe because we do a publish dry-run elsewhere in CI
        run: cargo publish --workspace --no-verify
        env:
          CARGO_REGISTRY_TOKEN: ${{ secrets.CRATES_TOKEN }}
@@ -1,156 +0,0 @@
# Publish the uv documentation.
#
# Assumed to run as a subworkflow of .github/workflows/release.yml; specifically, as a post-announce
# job within `cargo-dist`.
name: mkdocs

on:
  workflow_dispatch:
    inputs:
      ref:
        description: "The commit SHA, tag, or branch to publish. Uses the default branch if not specified."
        default: ""
        type: string
  workflow_call:
    inputs:
      plan:
        required: true
        type: string

permissions: {}
jobs:
|
||||
mkdocs:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
VERSION: ${{ (inputs.plan != '' && fromJson(inputs.plan).announcement_tag) || inputs.ref }}
|
||||
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
ref: ${{ inputs.ref }}
|
||||
fetch-depth: 0
|
||||
persist-credentials: false
|
||||
|
||||
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
|
||||
with:
|
||||
python-version: 3.12
|
||||
|
||||
- uses: Swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 # v2.8.0
|
||||
|
||||
- name: "Generate reference documentation"
|
||||
run: |
|
||||
cargo dev generate-options-reference
|
||||
cargo dev generate-cli-reference
|
||||
cargo dev generate-env-vars-reference
|
||||
|
||||
- name: "Set docs display name"
|
||||
run: |
|
||||
version="${VERSION}"
|
||||
# if version is missing, use 'latest'
|
||||
if [ -z "$version" ]; then
|
||||
echo "Using 'latest' as version"
|
||||
version="latest"
|
||||
fi
|
||||
|
||||
# Use version as display name for now
|
||||
display_name="$version"
|
||||
|
||||
echo "DISPLAY_NAME=$display_name" >> $GITHUB_ENV
|
||||
|
||||
- name: "Set branch name"
|
||||
run: |
|
||||
version="${VERSION}"
|
||||
display_name="${DISPLAY_NAME}"
|
||||
timestamp="$(date +%s)"
|
||||
|
||||
# create branch_display_name from display_name by replacing all
|
||||
# characters disallowed in git branch names with hyphens
|
||||
branch_display_name="$(echo "$display_name" | tr -c '[:alnum:]._' '-' | tr -s '-')"
|
||||
|
||||
echo "BRANCH_NAME=update-docs-$branch_display_name-$timestamp" >> $GITHUB_ENV
|
||||
echo "TIMESTAMP=$timestamp" >> $GITHUB_ENV
|
||||
|
||||
- name: "Add SSH key"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
|
||||
with:
|
||||
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
|
||||
|
||||
- name: "Install Insiders dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: pip install -r docs/requirements-insiders.txt
|
||||
|
||||
- name: "Install dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: pip install -r docs/requirements.txt
|
||||
|
||||
- name: "Build Insiders docs"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: mkdocs build --strict -f mkdocs.insiders.yml
|
||||
|
||||
- name: "Build docs"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: mkdocs build --strict -f mkdocs.public.yml
|
||||
|
||||
- name: "Clone docs repo"
|
||||
run: |
|
||||
version="${VERSION}"
|
||||
git clone https://${ASTRAL_DOCS_PAT}@github.com/astral-sh/docs.git astral-docs
|
||||
env:
|
||||
ASTRAL_DOCS_PAT: ${{ secrets.ASTRAL_DOCS_PAT }}
|
||||
|
||||
- name: "Copy docs"
|
||||
run: rm -rf astral-docs/site/uv && mkdir -p astral-docs/site && cp -r site/uv astral-docs/site/
|
||||
|
||||
- name: "Commit docs"
|
||||
working-directory: astral-docs
|
||||
run: |
|
||||
branch_name="${BRANCH_NAME}"
|
||||
|
||||
git config user.name "astral-docs-bot"
|
||||
git config user.email "176161322+astral-docs-bot@users.noreply.github.com"
|
||||
|
||||
git checkout -b $branch_name
|
||||
git add site/uv
|
||||
git commit -m "Update uv documentation for $version"
|
||||
|
||||
- name: "Create Pull Request"
|
||||
working-directory: astral-docs
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
|
||||
run: |
|
||||
version="${VERSION}"
|
||||
display_name="${DISPLAY_NAME}"
|
||||
branch_name="${BRANCH_NAME}"
|
||||
|
||||
# set the PR title
|
||||
pull_request_title="Update uv documentation for $display_name"
|
||||
|
||||
# Delete any existing pull requests that are open for this version
|
||||
# by checking against pull_request_title because the new PR will
|
||||
# supersede the old one.
|
||||
gh pr list --state open --json title --jq '.[] | select(.title == "$pull_request_title") | .number' | \
|
||||
xargs -I {} gh pr close {}
|
||||
|
||||
# push the branch to GitHub
|
||||
git push origin $branch_name
|
||||
|
||||
# create the PR
|
||||
gh pr create --base main --head $branch_name \
|
||||
--title "$pull_request_title" \
|
||||
--body "Automated documentation update for $display_name" \
|
||||
--label "documentation"
|
||||
|
||||
- name: "Merge Pull Request"
|
||||
if: ${{ inputs.plan != '' && !fromJson(inputs.plan).announcement_tag_is_implicit }}
|
||||
working-directory: astral-docs
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.ASTRAL_DOCS_PAT }}
|
||||
run: |
|
||||
branch_name="${BRANCH_NAME}"
|
||||
|
||||
# auto-merge the PR if the build was triggered by a release. Manual builds should be reviewed by a human.
|
||||
# give the PR a few seconds to be created before trying to auto-merge it
|
||||
sleep 10
|
||||
gh pr merge --squash $branch_name
|
||||
|
|
@@ -12,38 +12,22 @@ on:
type: string

jobs:
pypi-publish-uv:
name: Upload uv to PyPI
pypi-publish:
name: Upload to PyPI
runs-on: ubuntu-latest
environment:
name: release
permissions:
id-token: write # For PyPI's trusted publishing
# For PyPI's trusted publishing.
id-token: write
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
- uses: actions/download-artifact@v3
with:
pattern: wheels_uv-*
path: wheels_uv
merge-multiple: true
- name: Publish to PyPI
run: uv publish -v wheels_uv/*

pypi-publish-uv-build:
name: Upload uv-build to PyPI
runs-on: ubuntu-latest
environment:
name: release
permissions:
id-token: write # For PyPI's trusted publishing
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
name: wheels
path: wheels
- name: Publish to PyPi
uses: pypa/gh-action-pypi-publish@release/v1
with:
pattern: wheels_uv_build-*
path: wheels_uv_build
merge-multiple: true
- name: Publish to PyPI
run: uv publish -v wheels_uv_build/*
skip-existing: true
packages-dir: wheels
verbose: true
@@ -1,21 +1,20 @@
# This file was autogenerated by dist: https://axodotdev.github.io/cargo-dist
#
# Copyright 2022-2024, axodotdev
# Copyright 2022-2023, axodotdev
# SPDX-License-Identifier: MIT or Apache-2.0
#
# CI that:
#
# * checks for a Git Tag that looks like a release
# * builds artifacts with dist (archives, installers, hashes)
# * builds artifacts with cargo-dist (archives, installers, hashes)
# * uploads those artifacts to temporary workflow zip
# * on success, uploads the artifacts to a GitHub Release
# * on success, uploads the artifacts to a Github Release
#
# Note that the GitHub Release will be created with a generated
# Note that the Github Release will be created with a generated
# title/body based on your changelogs.

name: Release

permissions:
"contents": "write"
contents: write

# This task will run whenever you workflow_dispatch with a tag that looks like a version
# like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc.
@ -24,22 +23,21 @@ permissions:
|
|||
# must be a Cargo-style SemVer Version (must have at least major.minor.patch).
|
||||
#
|
||||
# If PACKAGE_NAME is specified, then the announcement will be for that
|
||||
# package (erroring out if it doesn't have the given version or isn't dist-able).
|
||||
# package (erroring out if it doesn't have the given version or isn't cargo-dist-able).
|
||||
#
|
||||
# If PACKAGE_NAME isn't specified, then the announcement will be for all
|
||||
# (dist-able) packages in the workspace with that version (this mode is
|
||||
# (cargo-dist-able) packages in the workspace with that version (this mode is
|
||||
# intended for workspaces with only one dist-able package, or with all dist-able
|
||||
# packages versioned/released in lockstep).
|
||||
#
|
||||
# If you push multiple tags at once, separate instances of this workflow will
|
||||
# spin up, creating an independent announcement for each one. However, GitHub
|
||||
# spin up, creating an independent announcement for each one. However Github
|
||||
# will hard limit this to 3 tags per commit, as it will assume more tags is a
|
||||
# mistake.
|
||||
#
|
||||
# If there's a prerelease-style suffix to the version, then the release(s)
|
||||
# will be marked as a prerelease.
|
||||
on:
|
||||
pull_request:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
|
|
@ -49,9 +47,9 @@ on:
|
|||
type: string
|
||||
|
||||
jobs:
|
||||
# Run 'dist plan' (or host) to determine what tasks we need to do
|
||||
# Run 'cargo dist plan' (or host) to determine what tasks we need to do
|
||||
plan:
|
||||
runs-on: "depot-ubuntu-latest-4"
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
val: ${{ steps.plan.outputs.manifest }}
|
||||
tag: ${{ (inputs.tag != 'dry-run' && inputs.tag) || '' }}
|
||||
|
|
@ -60,20 +58,14 @@ jobs:
|
|||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Install dist
|
||||
- name: Install cargo-dist
|
||||
# we specify bash to get pipefail; it guards against the `curl` command
|
||||
# failing. otherwise `sh` won't catch that `curl` returned non-0
|
||||
shell: bash
|
||||
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.2/cargo-dist-installer.sh | sh"
|
||||
- name: Cache dist
|
||||
uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
|
||||
with:
|
||||
name: cargo-dist-cache
|
||||
path: ~/.cargo/bin/dist
|
||||
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.8.0/cargo-dist-installer.sh | sh"
|
||||
# sure would be cool if github gave us proper conditionals...
|
||||
# so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
|
||||
# functionality based on whether this is a pull_request, and whether it's from a fork.
|
||||
|
|
@ -81,15 +73,15 @@ jobs:
|
|||
# but also really annoying to build CI around when it needs secrets to work right.)
|
||||
- id: plan
|
||||
run: |
|
||||
dist ${{ (inputs.tag && inputs.tag != 'dry-run' && format('host --steps=create --tag={0}', inputs.tag)) || 'plan' }} --output-format=json > plan-dist-manifest.json
|
||||
echo "dist ran successfully"
|
||||
cat plan-dist-manifest.json
|
||||
echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
||||
cargo dist ${{ inputs.tag && (inputs.tag != 'dry-run' && format('host --steps=create --tag={0}', inputs.tag) || format('host --steps=create', inputs.tag)) || (github.event.pull_request.head.repo.fork && 'plan' || 'host --steps=check') }} --output-format=json > dist-manifest.json
|
||||
echo "cargo dist ran successfully"
|
||||
cat dist-manifest.json
|
||||
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
||||
- name: "Upload dist-manifest.json"
|
||||
uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: artifacts-plan-dist-manifest
|
||||
path: plan-dist-manifest.json
|
||||
name: artifacts
|
||||
path: dist-manifest.json
|
||||
|
||||
custom-build-binaries:
|
||||
needs:
|
||||
|
|
@ -100,64 +92,43 @@ jobs:
|
|||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
|
||||
custom-build-docker:
|
||||
needs:
|
||||
- plan
|
||||
if: ${{ needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload' || inputs.tag == 'dry-run' }}
|
||||
uses: ./.github/workflows/build-docker.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
permissions:
|
||||
"attestations": "write"
|
||||
"contents": "read"
|
||||
"id-token": "write"
|
||||
"packages": "write"
|
||||
|
||||
# Build and package all the platform-agnostic(ish) things
|
||||
build-global-artifacts:
|
||||
needs:
|
||||
- plan
|
||||
- custom-build-binaries
|
||||
- custom-build-docker
|
||||
runs-on: "depot-ubuntu-latest-4"
|
||||
runs-on: "ubuntu-20.04"
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Install cached dist
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
|
||||
with:
|
||||
name: cargo-dist-cache
|
||||
path: ~/.cargo/bin/
|
||||
- run: chmod +x ~/.cargo/bin/dist
|
||||
- name: Install cargo-dist
|
||||
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.8.0/cargo-dist-installer.sh | sh"
|
||||
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
|
||||
- name: Fetch local artifacts
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
name: artifacts
|
||||
path: target/distrib/
|
||||
merge-multiple: true
|
||||
- id: cargo-dist
|
||||
shell: bash
|
||||
run: |
|
||||
dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
|
||||
echo "dist ran successfully"
|
||||
cargo dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
|
||||
echo "cargo dist ran successfully"
|
||||
|
||||
# Parse out what we just built and upload it to scratch storage
|
||||
echo "paths<<EOF" >> "$GITHUB_OUTPUT"
|
||||
jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT"
|
||||
jq --raw-output ".artifacts[]?.path | select( . != null )" dist-manifest.json >> "$GITHUB_OUTPUT"
|
||||
echo "EOF" >> "$GITHUB_OUTPUT"
|
||||
|
||||
cp dist-manifest.json "$BUILD_MANIFEST_NAME"
|
||||
- name: "Upload artifacts"
|
||||
uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: artifacts-build-global
|
||||
name: artifacts
|
||||
path: |
|
||||
${{ steps.cargo-dist.outputs.paths }}
|
||||
${{ env.BUILD_MANIFEST_NAME }}
|
||||
|
|
@ -165,47 +136,38 @@ jobs:
|
|||
host:
|
||||
needs:
|
||||
- plan
|
||||
- custom-build-binaries
|
||||
- custom-build-docker
|
||||
- build-global-artifacts
|
||||
# Only run if we're "publishing", and only if plan, local and global didn't fail (skipped is fine)
|
||||
if: ${{ always() && needs.plan.result == 'success' && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }}
|
||||
# Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
|
||||
if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') }}
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
runs-on: "depot-ubuntu-latest-4"
|
||||
runs-on: "ubuntu-20.04"
|
||||
outputs:
|
||||
val: ${{ steps.host.outputs.manifest }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: Install cached dist
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
|
||||
with:
|
||||
name: cargo-dist-cache
|
||||
path: ~/.cargo/bin/
|
||||
- run: chmod +x ~/.cargo/bin/dist
|
||||
- name: Install cargo-dist
|
||||
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.8.0/cargo-dist-installer.sh | sh"
|
||||
# Fetch artifacts from scratch-storage
|
||||
- name: Fetch artifacts
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
name: artifacts
|
||||
path: target/distrib/
|
||||
merge-multiple: true
|
||||
# This is a harmless no-op for GitHub Releases, hosting for that happens in "announce"
|
||||
# This is a harmless no-op for Github Releases, hosting for that happens in "announce"
|
||||
- id: host
|
||||
shell: bash
|
||||
run: |
|
||||
dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
|
||||
cargo dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
|
||||
echo "artifacts uploaded and released successfully"
|
||||
cat dist-manifest.json
|
||||
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
||||
- name: "Upload dist-manifest.json"
|
||||
uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
# Overwrite the previous copy
|
||||
name: artifacts-dist-manifest
|
||||
name: artifacts
|
||||
path: dist-manifest.json
|
||||
|
||||
custom-publish-pypi:
|
||||
|
|
@ -219,83 +181,40 @@ jobs:
|
|||
secrets: inherit
|
||||
# publish jobs get escalated permissions
|
||||
permissions:
|
||||
"id-token": "write"
|
||||
"packages": "write"
|
||||
id-token: write
|
||||
packages: write
|
||||
|
||||
custom-publish-crates:
|
||||
needs:
|
||||
- plan
|
||||
- host
|
||||
- custom-publish-pypi # DIRTY: see #16989
|
||||
if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
|
||||
uses: ./.github/workflows/publish-crates.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
# publish jobs get escalated permissions
|
||||
permissions:
|
||||
"contents": "read"
|
||||
|
||||
# Create a GitHub Release while uploading all files to it
|
||||
# Create a Github Release while uploading all files to it
|
||||
announce:
|
||||
needs:
|
||||
- plan
|
||||
- host
|
||||
- custom-publish-pypi
|
||||
- custom-publish-crates
|
||||
# use "always() && ..." to allow us to wait for all publish jobs while
|
||||
# still allowing individual publish jobs to skip themselves (for prereleases).
|
||||
# "host" however must run to completion, no skipping allowed!
|
||||
if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') && (needs.custom-publish-crates.result == 'skipped' || needs.custom-publish-crates.result == 'success') }}
|
||||
runs-on: "depot-ubuntu-latest-4"
|
||||
permissions:
|
||||
"attestations": "write"
|
||||
"contents": "write"
|
||||
"id-token": "write"
|
||||
if: ${{ always() && needs.host.result == 'success' && (needs.custom-publish-pypi.result == 'skipped' || needs.custom-publish-pypi.result == 'success') }}
|
||||
runs-on: "ubuntu-20.04"
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
# Create a GitHub Release while uploading all files to it
|
||||
- name: "Download GitHub Artifacts"
|
||||
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093
|
||||
- name: "Download Github Artifacts"
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
name: artifacts
|
||||
path: artifacts
|
||||
merge-multiple: true
|
||||
- name: Cleanup
|
||||
run: |
|
||||
# Remove the granular manifests
|
||||
rm -f artifacts/*-dist-manifest.json
|
||||
- name: Attest
|
||||
uses: actions/attest-build-provenance@c074443f1aee8d4aeeae555aebba3282517141b2
|
||||
- name: Create Github Release
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
subject-path: |
|
||||
artifacts/*.json
|
||||
artifacts/*.sh
|
||||
artifacts/*.ps1
|
||||
artifacts/*.zip
|
||||
artifacts/*.tar.gz
|
||||
- name: Create GitHub Release
|
||||
env:
|
||||
PRERELEASE_FLAG: "${{ fromJson(needs.host.outputs.val).announcement_is_prerelease && '--prerelease' || '' }}"
|
||||
ANNOUNCEMENT_TITLE: "${{ fromJson(needs.host.outputs.val).announcement_title }}"
|
||||
ANNOUNCEMENT_BODY: "${{ fromJson(needs.host.outputs.val).announcement_github_body }}"
|
||||
RELEASE_COMMIT: "${{ github.sha }}"
|
||||
run: |
|
||||
# Write and read notes from a file to avoid quoting issues breaking things
|
||||
echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt
|
||||
|
||||
gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/*
|
||||
|
||||
custom-publish-docs:
|
||||
needs:
|
||||
- plan
|
||||
- announce
|
||||
uses: ./.github/workflows/publish-docs.yml
|
||||
with:
|
||||
plan: ${{ needs.plan.outputs.val }}
|
||||
secrets: inherit
|
||||
tag: ${{ needs.plan.outputs.tag }}
|
||||
name: ${{ fromJson(needs.host.outputs.val).announcement_title }}
|
||||
body: ${{ fromJson(needs.host.outputs.val).announcement_github_body }}
|
||||
prerelease: ${{ fromJson(needs.host.outputs.val).announcement_is_prerelease }}
|
||||
artifacts: "artifacts/*"
|
||||
|
|
|
|||
|
|
@ -1,92 +0,0 @@
|
|||
# Configures a drive for testing in CI.
|
||||
#
|
||||
# When using standard GitHub Actions runners, a `D:` drive is present and has
|
||||
# performance characteristics similar to or better than a ReFS dev drive. Sometimes
|
||||
# using a larger runner is still more performant (e.g., when running the test
|
||||
# suite) and we need to create a dev drive. This script automatically configures
|
||||
# the appropriate drive.
|
||||
#
|
||||
# When using GitHub Actions' "larger runners", the `D:` drive is not present and
|
||||
# we create a DevDrive mount on `C:`. This is purported to be more performant
|
||||
# than a ReFS drive, though we did not see a change when we switched over.
|
||||
#
|
||||
# When using Depot runners, the underlying infrastructure is EC2, which does not
|
||||
# support Hyper-V. The `New-VHD` commandlet only works with Hyper-V, but we can
|
||||
# create a ReFS drive using `diskpart` and `format` directly. We cannot use a
|
||||
# DevDrive, as that also requires Hyper-V. The Depot runners use `D:` already,
|
||||
# so we must check if it's a Depot runner first, and we use `V:` as the target
|
||||
# instead.
|
||||
|
||||
|
||||
if ($env:DEPOT_RUNNER -eq "1") {
|
||||
Write-Output "DEPOT_RUNNER detected, setting up custom dev drive..."
|
||||
|
||||
# Create VHD and configure drive using diskpart
|
||||
$vhdPath = "C:\uv_dev_drive.vhdx"
|
||||
@"
|
||||
create vdisk file="$vhdPath" maximum=25600 type=expandable
|
||||
attach vdisk
|
||||
create partition primary
|
||||
active
|
||||
assign letter=V
|
||||
"@ | diskpart
|
||||
|
||||
# Format the drive as ReFS
|
||||
format V: /fs:ReFS /q /y
|
||||
$Drive = "V:"
|
||||
|
||||
Write-Output "Custom dev drive created at $Drive"
|
||||
} elseif (Test-Path "D:\") {
|
||||
# Note `Get-PSDrive` is not sufficient because the drive letter is assigned.
|
||||
Write-Output "Using existing drive at D:"
|
||||
$Drive = "D:"
|
||||
} else {
|
||||
# The size (25 GB) is chosen empirically to be large enough for our
|
||||
# workflows; larger drives can take longer to set up.
|
||||
$Volume = New-VHD -Path C:/uv_dev_drive.vhdx -SizeBytes 25GB |
|
||||
Mount-VHD -Passthru |
|
||||
Initialize-Disk -Passthru |
|
||||
New-Partition -AssignDriveLetter -UseMaximumSize |
|
||||
Format-Volume -DevDrive -Confirm:$false -Force
|
||||
|
||||
$Drive = "$($Volume.DriveLetter):"
|
||||
|
||||
# Set the drive as trusted
|
||||
# See https://learn.microsoft.com/en-us/windows/dev-drive/#how-do-i-designate-a-dev-drive-as-trusted
|
||||
fsutil devdrv trust $Drive
|
||||
|
||||
# Disable antivirus filtering on dev drives
|
||||
# See https://learn.microsoft.com/en-us/windows/dev-drive/#how-do-i-configure-additional-filters-on-dev-drive
|
||||
fsutil devdrv enable /disallowAv
|
||||
|
||||
# Remount so the changes take effect
|
||||
Dismount-VHD -Path C:/uv_dev_drive.vhdx
|
||||
Mount-VHD -Path C:/uv_dev_drive.vhdx
|
||||
|
||||
# Show some debug information
|
||||
Write-Output $Volume
|
||||
fsutil devdrv query $Drive
|
||||
|
||||
Write-Output "Using Dev Drive at $Volume"
|
||||
}
|
||||
|
||||
$Tmp = "$($Drive)\uv-tmp"
|
||||
|
||||
# Create the directory ahead of time in an attempt to avoid race-conditions
|
||||
New-Item $Tmp -ItemType Directory
|
||||
|
||||
# Move Cargo to the dev drive
|
||||
New-Item -Path "$($Drive)/.cargo/bin" -ItemType Directory -Force
|
||||
if (Test-Path "C:/Users/runneradmin/.cargo") {
|
||||
Copy-Item -Path "C:/Users/runneradmin/.cargo/*" -Destination "$($Drive)/.cargo/" -Recurse -Force
|
||||
}
|
||||
|
||||
Write-Output `
|
||||
"DEV_DRIVE=$($Drive)" `
|
||||
"TMP=$($Tmp)" `
|
||||
"TEMP=$($Tmp)" `
|
||||
"RUSTUP_HOME=$($Drive)/.rustup" `
|
||||
"CARGO_HOME=$($Drive)/.cargo" `
|
||||
"UV_WORKSPACE=$($Drive)/uv" `
|
||||
"PATH=$($Drive)/.cargo/bin;$env:PATH" `
|
||||
>> $env:GITHUB_ENV
|
||||
|
|
@ -1,52 +0,0 @@
|
|||
# Sync Python releases and create a pull request.
|
||||
#
|
||||
# Based on: https://github.com/astral-sh/rye/blob/57b7c089e494138aae29a130afb2e17f447970bf/.github/workflows/sync-python-releases.yml
|
||||
name: "Sync Python downloads"
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: "0 0 * * *"
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
sync:
|
||||
if: github.repository == 'astral-sh/uv'
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
|
||||
with:
|
||||
version: "latest"
|
||||
enable-cache: true
|
||||
- name: Sync Python Releases
|
||||
run: |
|
||||
uv run -- fetch-download-metadata.py
|
||||
working-directory: ./crates/uv-python
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Sync Sysconfig Targets
|
||||
run: ${GITHUB_WORKSPACE}/crates/uv-dev/sync_sysconfig_targets.sh
|
||||
working-directory: ./crates/uv-dev
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: "Create Pull Request"
|
||||
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
|
||||
with:
|
||||
commit-message: "Sync latest Python releases"
|
||||
add-paths: |
|
||||
crates/uv-python/download-metadata.json
|
||||
crates/uv-dev/src/generate_sysconfig_mappings.rs
|
||||
crates/uv-python/src/sysconfig/generated_mappings.rs
|
||||
branch: "sync-python-releases"
|
||||
title: "Sync latest Python releases"
|
||||
body: "Automated update for Python releases."
|
||||
base: "main"
|
||||
draft: true
|
||||
|
|
@ -1,24 +0,0 @@
|
|||
name: zizmor
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["main"]
|
||||
pull_request:
|
||||
branches: ["**"]
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
zizmor:
|
||||
name: Run zizmor
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
security-events: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Run zizmor
|
||||
uses: zizmorcore/zizmor-action@5ca5fc7a4779c5263a3ffa0e1f693009994446d1 # v0.1.2
|
||||
|
|
@ -1,15 +1,10 @@
|
|||
# Insta snapshots.
|
||||
*.pending-snap
|
||||
|
||||
# Generated by Cargo
|
||||
# will have compiled files and executables
|
||||
/vendor/
|
||||
debug/
|
||||
target-alpine/
|
||||
target/
|
||||
|
||||
# Bootstrapped Python versions
|
||||
/bin/
|
||||
bin/
|
||||
|
||||
# These are backup files generated by rustfmt
|
||||
**/*.rs.bk
|
||||
|
|
@ -24,27 +19,9 @@ __pycache__
|
|||
*.so
|
||||
*.pyd
|
||||
*.dll
|
||||
/dist
|
||||
/crates/uv-build/dist
|
||||
|
||||
# Profiling
|
||||
flamegraph.svg
|
||||
perf.data
|
||||
perf.data.old
|
||||
profile.json
|
||||
profile.json.gz
|
||||
|
||||
# MkDocs
|
||||
/site
|
||||
|
||||
# Generated reference docs (use `cargo dev generate-all` to regenerate)
|
||||
/docs/reference/cli.md
|
||||
/docs/reference/environment.md
|
||||
/docs/reference/settings.md
|
||||
|
||||
# macOS
|
||||
**/.DS_Store
|
||||
|
||||
# IDE
|
||||
.idea
|
||||
.vscode
|
||||
|
|
|
|||
|
|
@ -7,12 +7,12 @@ exclude: |
|
|||
|
||||
repos:
|
||||
- repo: https://github.com/abravalheri/validate-pyproject
|
||||
rev: v0.24.1
|
||||
rev: v0.16
|
||||
hooks:
|
||||
- id: validate-pyproject
|
||||
|
||||
- repo: https://github.com/crate-ci/typos
|
||||
rev: v1.37.2
|
||||
rev: v1.18.2
|
||||
hooks:
|
||||
- id: typos
|
||||
|
||||
|
|
@ -25,25 +25,15 @@ repos:
|
|||
types: [rust]
|
||||
pass_filenames: false # This makes it a lot faster
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: cargo-dev-generate-all
|
||||
name: cargo dev generate-all
|
||||
entry: cargo dev generate-all
|
||||
language: system
|
||||
types: [rust]
|
||||
pass_filenames: false
|
||||
files: ^crates/(uv-cli|uv-settings)/
|
||||
|
||||
- repo: https://github.com/pre-commit/mirrors-prettier
|
||||
rev: v3.1.0
|
||||
hooks:
|
||||
- id: prettier
|
||||
types_or: [yaml, json5]
|
||||
types: [yaml]
|
||||
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.13.3
|
||||
rev: v0.2.1
|
||||
hooks:
|
||||
- id: ruff-format
|
||||
- id: ruff
|
||||
args: [--fix, --exit-non-zero-on-fix]
|
||||
args: [--fix, --exit-non-zero-on-fix]
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
.venv
|
||||
CHANGELOG.md
|
||||
PREVIEW-CHANGELOG.md
|
||||
docs/reference/cli.md
|
||||
docs/reference/settings.md
|
||||
docs/reference/environment.md
|
||||
test/ecosystem/home-assistant-core/LICENSE.md
|
||||
docs/guides/integration/gitlab.md
|
||||
|
|
@ -1,24 +1,6 @@
|
|||
# These are versions of Python required for running uv's own test suite. You can add or remove
|
||||
# versions here as needed for tests; this doesn't impact uv's own functionality. They can be
|
||||
# installed through any means you like, e.g. `uv python install` if you already have a build of uv,
|
||||
# `cargo run python install`, or through some other installer.
|
||||
#
|
||||
# In uv's CI in GitHub Actions, they are bootstrapped by an existing released version of uv,
|
||||
# installed by the astral-sh/setup-uv action. If you need a newer or different version, you will
|
||||
# first need to complete a uv release capable of installing that version, get it picked up by
|
||||
# astral-sh/setup-uv, and update its hash in .github/workflows.
|
||||
|
||||
3.14.0
|
||||
3.13.2
|
||||
3.12.9
|
||||
3.11.11
|
||||
3.10.16
|
||||
3.9.21
|
||||
3.8.20
|
||||
# The following are required for packse scenarios
|
||||
3.9.20
|
||||
3.9.12
|
||||
# The following is needed for `==3.13` request tests
|
||||
3.13.0
|
||||
# A pre-release version required for testing
|
||||
3.14.0rc2
|
||||
3.8.12
|
||||
3.8.18
|
||||
3.9.18
|
||||
3.10.13
|
||||
3.11.7
|
||||
3.12.1
|
||||
|
|
|
|||
|
|
@ -1,14 +1,19 @@
|
|||
# Benchmarks
|
||||
|
||||
All benchmarks were computed on macOS using Python 3.12.4 (for non-uv tools), and come with a few
|
||||
All benchmarks were computed on macOS using Python 3.12.0 (for non-uv tools), and come with a few
|
||||
important caveats:
|
||||
|
||||
- Benchmark performance may vary dramatically across different operating systems and filesystems. In
|
||||
particular, uv uses different installation strategies based on the underlying filesystem's
|
||||
- Benchmark performance may vary dramatically across different operating systems and filesystems.
|
||||
In particular, uv uses different installation strategies based on the underlying filesystem's
|
||||
capabilities. (For example, uv uses reflinking on macOS, and hardlinking on Linux.)
|
||||
- Benchmark performance may vary dramatically depending on the set of packages being installed. For
|
||||
example, a resolution that requires building a single intensive source distribution may appear
|
||||
- Benchmark performance may vary dramatically depending on the set of packages being installed.
|
||||
For example, a resolution that requires building a single intensive source distribution may appear
|
||||
very similar across tools, since the bottleneck is tool-agnostic.
|
||||
- Unlike Poetry, both uv and pip-tools do _not_ generate multi-platform lockfiles. As such,
|
||||
Poetry is (by design) doing significantly more work than other tools in the resolution benchmarks.
|
||||
Poetry is included for completeness, as many projects may not _need_ a multi-platform lockfile.
|
||||
However, it's critical to understand that benchmarking uv's resolution time against Poetry is
|
||||
an unfair comparison. (Benchmarking installation, however, _is_ a fair comparison.)
|
||||
|
||||
This document benchmarks against Trio's `docs-requirements.in`, as a representative example of a
|
||||
real-world project.
|
||||
|
|
@ -17,101 +22,92 @@ In each case, a smaller bar (i.e., lower) is better.
|
|||
|
||||
## Warm Installation
|
||||
|
||||
Benchmarking package installation (e.g., `uv sync`) with a warm cache. This is equivalent to
|
||||
removing and recreating a virtual environment, and then populating it with dependencies that you've
|
||||
installed previously on the same machine.
|
||||
Benchmarking package installation (e.g., `uv pip sync`) with a warm cache. This is equivalent
|
||||
to removing and recreating a virtual environment, and then populating it with dependencies that
|
||||
you've installed previously on the same machine.
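For a rough sense of what this scenario looks like outside the benchmark harness described under
"Reproduction" below, a warm-cache install can be sketched with `hyperfine` along these lines (the
`requirements.txt` name is a placeholder for a compiled lockfile; this is not the exact invocation
the scripts use):

```shell
# Recreate the virtual environment before each timed run; uv's cache stays warm.
hyperfine --warmup 1 \
    --prepare "uv venv" \
    "uv pip sync requirements.txt"
```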
|
||||
|
||||

|
||||

|
||||
|
||||
## Cold Installation
|
||||
|
||||
Benchmarking package installation (e.g., `uv sync`) with a cold cache. This is equivalent to running
|
||||
`uv sync` on a new machine or in CI (assuming that the package manager cache is not shared across
|
||||
runs).
|
||||
Benchmarking package installation (e.g., `uv pip sync`) with a cold cache. This is equivalent
|
||||
to running `uv pip sync` on a new machine or in CI (assuming that the package manager cache is
|
||||
not shared across runs).
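A cold-cache install differs only in that uv's cache is cleared before each timed run. As a rough
sketch (placeholder file name, not the harness's exact invocation):

```shell
# Clear the cache and recreate the virtual environment before each timed run.
hyperfine \
    --prepare "uv cache clean && uv venv" \
    "uv pip sync requirements.txt"
```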
|
||||
|
||||

|
||||

|
||||
|
||||
## Warm Resolution
|
||||
|
||||
Benchmarking dependency resolution (e.g., `uv lock`) with a warm cache, but no existing lockfile.
|
||||
This is equivalent to blowing away an existing `requirements.txt` file to regenerate it from a
|
||||
`requirements.in` file.
|
||||
Benchmarking dependency resolution (e.g., `uv pip compile`) with a warm cache, but no existing
|
||||
lockfile. This is equivalent to blowing away an existing `requirements.txt` file to regenerate it
|
||||
from a `requirements.in` file.
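Sketched with `hyperfine` (again, illustrative only; `requirements.in` is a placeholder input), a
warm-cache resolution removes just the compiled output between runs so the cache stays populated:

```shell
# Keep the cache warm; only the compiled output is removed between runs.
hyperfine --warmup 1 \
    --prepare "rm -f requirements.txt" \
    "uv pip compile requirements.in -o requirements.txt"
```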
|
||||
|
||||

|
||||

|
||||
|
||||
## Cold Resolution
|
||||
|
||||
Benchmarking dependency resolution (e.g., `uv lock`) with a cold cache. This is equivalent to
|
||||
running `uv lock` on a new machine or in CI (assuming that the package manager cache is not shared
|
||||
across runs).
|
||||
Benchmarking dependency resolution (e.g., `uv pip compile`) with a cold cache. This is
|
||||
equivalent to running `uv pip compile` on a new machine or in CI (assuming that the package
|
||||
manager cache is not shared across runs).
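The cold-cache counterpart additionally clears uv's cache between runs; as above, this is an
illustrative sketch with placeholder file names rather than the harness's exact invocation:

```shell
# Clear the cache and remove the compiled output before each timed run.
hyperfine \
    --prepare "uv cache clean && rm -f requirements.txt" \
    "uv pip compile requirements.in -o requirements.txt"
```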
|
||||
|
||||

|
||||

|
||||
|
||||
## Reproduction
|
||||
|
||||
All benchmarks were generated using the `scripts/benchmark` package, which wraps
|
||||
[`hyperfine`](https://github.com/sharkdp/hyperfine) to facilitate benchmarking uv against a variety
|
||||
of other tools.
|
||||
All benchmarks were generated using the `scripts/bench/__main__.py` script, which wraps
|
||||
[`hyperfine`](https://github.com/sharkdp/hyperfine) to facilitate benchmarking uv
|
||||
against a variety of other tools.
|
||||
|
||||
The benchmark script itself has several requirements (a setup sketch follows the list):
|
||||
|
||||
- A local uv release build (`cargo build --release`).
|
||||
- An installation of the production `uv` binary in your path.
|
||||
- The [`hyperfine`](https://github.com/sharkdp/hyperfine) command-line tool installed on your
|
||||
system.
|
||||
- A virtual environment with the script's own dependencies installed (`uv venv && uv pip sync scripts/bench/requirements.txt`).
|
||||
- The [`hyperfine`](https://github.com/sharkdp/hyperfine) command-line tool installed on your system.
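One possible way to satisfy these requirements, run from the repository root, looks roughly like
the following; the exact paths (e.g., `scripts/bench/requirements.txt`) depend on which layout of
the benchmark scripts you are using:

```shell
# Build a local release binary of uv.
cargo build --release

# Create a virtual environment and install the benchmark script's own dependencies.
uv venv
uv pip sync scripts/bench/requirements.txt

# hyperfine must also be on your PATH, e.g., via your system package manager.
hyperfine --version
```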
|
||||
|
||||
To benchmark resolution against pip-compile, Poetry, and PDM:
|
||||
|
||||
```shell
|
||||
uv run resolver \
|
||||
--uv-project \
|
||||
python -m scripts.bench \
|
||||
--uv \
|
||||
--poetry \
|
||||
--pdm \
|
||||
--pip-compile \
|
||||
--benchmark resolve-warm --benchmark resolve-cold \
|
||||
--json \
|
||||
../requirements/trio.in
|
||||
scripts/requirements/trio.in \
|
||||
--json
|
||||
```
|
||||
|
||||
To benchmark installation against pip-sync, Poetry, and PDM:
|
||||
|
||||
```shell
|
||||
uv run resolver \
|
||||
--uv-project \
|
||||
python -m scripts.bench \
|
||||
--uv \
|
||||
--poetry \
|
||||
--pdm \
|
||||
--pip-sync \
|
||||
--benchmark install-warm --benchmark install-cold \
|
||||
--json \
|
||||
../requirements/compiled/trio.txt
|
||||
--json
|
||||
```
|
||||
|
||||
Both commands should be run from the `scripts/benchmark` directory.
|
||||
|
||||
After running the benchmark script, you can generate the corresponding graph via:
|
||||
|
||||
```shell
|
||||
cargo run -p uv-dev --all-features render-benchmarks resolve-warm.json --title "Warm Resolution"
|
||||
cargo run -p uv-dev --all-features render-benchmarks resolve-cold.json --title "Cold Resolution"
|
||||
cargo run -p uv-dev --all-features render-benchmarks install-warm.json --title "Warm Installation"
|
||||
cargo run -p uv-dev --all-features render-benchmarks install-cold.json --title "Cold Installation"
|
||||
cargo run -p uv-dev render-benchmarks resolve-warm.json --title "Warm Resolution"
|
||||
cargo run -p uv-dev render-benchmarks resolve-cold.json --title "Cold Resolution"
|
||||
cargo run -p uv-dev render-benchmarks install-warm.json --title "Warm Installation"
|
||||
cargo run -p uv-dev render-benchmarks install-cold.json --title "Cold Installation"
|
||||
```
|
||||
|
||||
You need to install the [Roboto Font](https://fonts.google.com/specimen/Roboto) if the labels are
|
||||
missing in the generated graph.
|
||||
You need to install the [Roboto Font](https://fonts.google.com/specimen/Roboto) if the labels are missing in the generated graph.
|
||||
|
||||
## Acknowledgements
|
||||
|
||||
The inclusion of this `BENCHMARKS.md` file was inspired by the excellent benchmarking documentation
|
||||
in
|
||||
[Orogene](https://github.com/orogene/orogene/blob/472e481b4fc6e97c2b57e69240bf8fe995dfab83/BENCHMARKS.md).
|
||||
in [Orogene](https://github.com/orogene/orogene/blob/472e481b4fc6e97c2b57e69240bf8fe995dfab83/BENCHMARKS.md).
|
||||
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Flaky benchmarks
|
||||
|
||||
If you're seeing high variance when running the cold benchmarks, then it's likely that you're
|
||||
running into throttling or DDoS prevention from your ISP. In that case, ISPs forcefully terminate
|
||||
TCP connections with a TCP reset. We believe this is due to the benchmarks making the exact same
|
||||
requests in a very short time (especially true for `uv`). A possible workaround is to connect to a VPN
|
||||
to bypass your ISP's filtering mechanism.
|
||||
If you're seeing high variance when running the cold benchmarks, then it's likely that you're running into throttling or DDoS prevention from your ISP. In that case, ISPs forcefully terminate TCP connections with a TCP reset. We believe this is due to the benchmarks making the exact same requests in a very short time (especially true for `uv`). A possible workaround is to connect to a VPN to bypass your ISP's filtering mechanism.
|
||||
|
|
|
|||
581
CHANGELOG.md
|
|
@ -1,581 +0,0 @@
|
|||
# Changelog
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
|
||||
## 0.9.18
|
||||
|
||||
Released on 2025-12-16.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add value hints to command line arguments to improve shell completion accuracy ([#17080](https://github.com/astral-sh/uv/pull/17080))
|
||||
- Improve error handling in `uv publish` ([#17096](https://github.com/astral-sh/uv/pull/17096))
|
||||
- Improve rendering of multiline error messages ([#17132](https://github.com/astral-sh/uv/pull/17132))
|
||||
- Support redirects in `uv publish` ([#17130](https://github.com/astral-sh/uv/pull/17130))
|
||||
- Include Docker images with the alpine version, e.g., `python3.x-alpine3.23` ([#17100](https://github.com/astral-sh/uv/pull/17100))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Accept `--torch-backend` in `[tool.uv]` ([#17116](https://github.com/astral-sh/uv/pull/17116))
|
||||
|
||||
### Performance
|
||||
|
||||
- Speed up `uv cache size` ([#17015](https://github.com/astral-sh/uv/pull/17015))
|
||||
- Initialize S3 signer once ([#17092](https://github.com/astral-sh/uv/pull/17092))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid panics due to reads on failed requests ([#17098](https://github.com/astral-sh/uv/pull/17098))
|
||||
- Enforce latest-version in `@latest` requests ([#17114](https://github.com/astral-sh/uv/pull/17114))
|
||||
- Explicitly set `EntryType` for file entries in tar ([#17043](https://github.com/astral-sh/uv/pull/17043))
|
||||
- Ignore `pyproject.toml` index username in lockfile comparison ([#16995](https://github.com/astral-sh/uv/pull/16995))
|
||||
- Relax error when using `uv add` with `UV_GIT_LFS` set ([#17127](https://github.com/astral-sh/uv/pull/17127))
|
||||
- Support file locks on ExFAT on macOS ([#17115](https://github.com/astral-sh/uv/pull/17115))
|
||||
- Change schema for `exclude-newer` into optional string ([#17121](https://github.com/astral-sh/uv/pull/17121))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Drop arm musl caveat from Docker documentation ([#17111](https://github.com/astral-sh/uv/pull/17111))
|
||||
- Fix version reference in resolver example ([#17085](https://github.com/astral-sh/uv/pull/17085))
|
||||
- Better documentation for `exclude-newer*` ([#17079](https://github.com/astral-sh/uv/pull/17079))
|
||||
|
||||
## 0.9.17
|
||||
|
||||
Released on 2025-12-09.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add `torch-tensorrt` and `torchao` to the PyTorch list ([#17053](https://github.com/astral-sh/uv/pull/17053))
|
||||
- Add hint for misplaced `--verbose` in `uv tool run` ([#17020](https://github.com/astral-sh/uv/pull/17020))
|
||||
- Add support for relative durations in `exclude-newer` (a.k.a., dependency cooldowns) ([#16814](https://github.com/astral-sh/uv/pull/16814))
|
||||
- Add support for relocatable nushell activation script ([#17036](https://github.com/astral-sh/uv/pull/17036))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Respect dropped (but explicit) indexes in dependency groups ([#17012](https://github.com/astral-sh/uv/pull/17012))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Improve `source-exclude` reference docs ([#16832](https://github.com/astral-sh/uv/pull/16832))
|
||||
- Recommend `UV_NO_DEV` in Docker installs ([#17030](https://github.com/astral-sh/uv/pull/17030))
|
||||
- Update `UV_VERSION` in docs for GitLab CI/CD ([#17040](https://github.com/astral-sh/uv/pull/17040))
|
||||
|
||||
## 0.9.16
|
||||
|
||||
Released on 2025-12-06.
|
||||
|
||||
### Python
|
||||
|
||||
- Add CPython 3.14.2
|
||||
- Add CPython 3.13.11
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add a 5m default timeout to acquiring file locks to fail faster on deadlock ([#16342](https://github.com/astral-sh/uv/pull/16342))
|
||||
- Add a stub `debug` subcommand to `uv pip` announcing its intentional absence ([#16966](https://github.com/astral-sh/uv/pull/16966))
|
||||
- Add bounds in `uv add --script` ([#16954](https://github.com/astral-sh/uv/pull/16954))
|
||||
- Add brew specific message for `uv self update` ([#16838](https://github.com/astral-sh/uv/pull/16838))
|
||||
- Error when built wheel is for the wrong platform ([#16074](https://github.com/astral-sh/uv/pull/16074))
|
||||
- Filter wheels from PEP 751 files based on `--no-binary` et al in `uv pip compile` ([#16956](https://github.com/astral-sh/uv/pull/16956))
|
||||
- Support `--target` and `--prefix` in `uv pip list`, `uv pip freeze`, and `uv pip show` ([#16955](https://github.com/astral-sh/uv/pull/16955))
|
||||
- Tweak language for build backend validation errors ([#16720](https://github.com/astral-sh/uv/pull/16720))
|
||||
- Use explicit credentials cache instead of global static ([#16768](https://github.com/astral-sh/uv/pull/16768))
|
||||
- Enable SIMD in HTML parsing ([#17010](https://github.com/astral-sh/uv/pull/17010))
|
||||
|
||||
### Preview features
|
||||
|
||||
- Fix missing preview warning in `uv workspace metadata` ([#16988](https://github.com/astral-sh/uv/pull/16988))
|
||||
- Add a `uv auth helper --protocol bazel` command ([#16886](https://github.com/astral-sh/uv/pull/16886))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix Pyston wheel compatibility tags ([#16972](https://github.com/astral-sh/uv/pull/16972))
|
||||
- Allow redundant entries in `tool.uv.build-backend.module-name` but emit warnings ([#16928](https://github.com/astral-sh/uv/pull/16928))
|
||||
- Fix infinite loop in non-attribute re-treats during HTML parsing ([#17010](https://github.com/astral-sh/uv/pull/17010))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Clarify `--project` flag help text to indicate project discovery ([#16965](https://github.com/astral-sh/uv/pull/16965))
|
||||
- Regenerate the crates.io READMEs on release ([#16992](https://github.com/astral-sh/uv/pull/16992))
|
||||
- Update Docker integration guide to prefer `COPY` over `ADD` for simple cases ([#16883](https://github.com/astral-sh/uv/pull/16883))
|
||||
- Update PyTorch documentation to include information about supporting CUDA 13.0.x ([#16957](https://github.com/astral-sh/uv/pull/16957))
|
||||
- Update the versioning policy ([#16710](https://github.com/astral-sh/uv/pull/16710))
|
||||
- Upgrade PyTorch documentation to latest versions ([#16970](https://github.com/astral-sh/uv/pull/16970))
|
||||
|
||||
## 0.9.15
|
||||
|
||||
Released on 2025-12-02.
|
||||
|
||||
### Python
|
||||
|
||||
- Add CPython 3.14.1
|
||||
- Add CPython 3.13.10
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add ROCm 6.4 to `--torch-backend=auto` ([#16919](https://github.com/astral-sh/uv/pull/16919))
|
||||
- Add a Windows manifest to uv binaries ([#16894](https://github.com/astral-sh/uv/pull/16894))
|
||||
- Add LFS toggle to Git sources ([#16143](https://github.com/astral-sh/uv/pull/16143))
|
||||
- Cache source reads during resolution ([#16888](https://github.com/astral-sh/uv/pull/16888))
|
||||
- Allow reading requirements from scripts without an extension ([#16923](https://github.com/astral-sh/uv/pull/16923))
|
||||
- Allow reading requirements from scripts with HTTP(S) paths ([#16891](https://github.com/astral-sh/uv/pull/16891))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Add `UV_HIDE_BUILD_OUTPUT` to omit build logs ([#16885](https://github.com/astral-sh/uv/pull/16885))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix `uv-trampoline-builder` builds from crates.io by moving bundled executables ([#16922](https://github.com/astral-sh/uv/pull/16922))
|
||||
- Respect `NO_COLOR` and always show the command as a header when paging `uv help` output ([#16908](https://github.com/astral-sh/uv/pull/16908))
|
||||
- Use `0o666` permissions for flock files instead of `0o777` ([#16845](https://github.com/astral-sh/uv/pull/16845))
|
||||
- Revert "Bump `astral-tl` to v0.7.10 (#16887)" to narrow down a regression causing hangs in metadata retrieval ([#16938](https://github.com/astral-sh/uv/pull/16938))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Link to the uv version in crates.io member READMEs ([#16939](https://github.com/astral-sh/uv/pull/16939))
|
||||
|
||||
## 0.9.14
|
||||
|
||||
Released on 2025-12-01.
|
||||
|
||||
### Performance
|
||||
|
||||
- Bump `astral-tl` to v0.7.10 to enable SIMD for HTML parsing ([#16887](https://github.com/astral-sh/uv/pull/16887))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Allow earlier post releases with exclusive ordering ([#16881](https://github.com/astral-sh/uv/pull/16881))
|
||||
- Prefer updating existing `.zshenv` over creating a new one in `tool update-shell` ([#16866](https://github.com/astral-sh/uv/pull/16866))
|
||||
- Respect `-e` flags in `uv add` ([#16882](https://github.com/astral-sh/uv/pull/16882))
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Attach subcommand to User-Agent string ([#16837](https://github.com/astral-sh/uv/pull/16837))
|
||||
- Prefer `UV_WORKING_DIR` over `UV_WORKING_DIRECTORY` for consistency ([#16884](https://github.com/astral-sh/uv/pull/16884))
|
||||
|
||||
## 0.9.13
|
||||
|
||||
Released on 2025-11-26.
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Revert "Allow `--with-requirements` to load extensionless inline-metadata scripts" to fix reading of requirements files from streams ([#16861](https://github.com/astral-sh/uv/pull/16861))
|
||||
- Validate URL wheel tags against `Requires-Python` and required environments ([#16824](https://github.com/astral-sh/uv/pull/16824))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Drop unpublished crates from the uv crates.io README ([#16847](https://github.com/astral-sh/uv/pull/16847))
|
||||
- Fix the links to uv in crates.io member READMEs ([#16848](https://github.com/astral-sh/uv/pull/16848))
|
||||
|
||||
## 0.9.12
|
||||
|
||||
Released on 2025-11-24.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Allow `--with-requirements` to load extensionless inline-metadata scripts ([#16744](https://github.com/astral-sh/uv/pull/16744))
|
||||
- Collect and upload PEP 740 attestations during `uv publish` ([#16731](https://github.com/astral-sh/uv/pull/16731))
|
||||
- Prevent `uv export` from overwriting `pyproject.toml` ([#16745](https://github.com/astral-sh/uv/pull/16745))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add a crates.io README for uv ([#16809](https://github.com/astral-sh/uv/pull/16809))
|
||||
- Add documentation for intermediate Docker layers in a workspace ([#16787](https://github.com/astral-sh/uv/pull/16787))
|
||||
- Enumerate workspace members in the uv crate README ([#16811](https://github.com/astral-sh/uv/pull/16811))
|
||||
- Fix documentation links for crates ([#16801](https://github.com/astral-sh/uv/pull/16801))
|
||||
- Generate a crates.io README for uv workspace members ([#16812](https://github.com/astral-sh/uv/pull/16812))
|
||||
- Move the "Export" guide to the projects concept section ([#16835](https://github.com/astral-sh/uv/pull/16835))
|
||||
- Update the cargo install recommendation to use crates ([#16800](https://github.com/astral-sh/uv/pull/16800))
|
||||
- Use the word "internal" in crate descriptions ([#16810](https://github.com/astral-sh/uv/pull/16810))
|
||||
|
||||
## 0.9.11
|
||||
|
||||
Released on 2025-11-20.
|
||||
|
||||
### Python
|
||||
|
||||
- Add CPython 3.15.0a2
|
||||
|
||||
See the [`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20251120) for details.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add SBOM support to `uv export` ([#16523](https://github.com/astral-sh/uv/pull/16523))
|
||||
- Publish to `crates.io` ([#16770](https://github.com/astral-sh/uv/pull/16770))
|
||||
|
||||
### Preview features
|
||||
|
||||
- Add `uv workspace list --paths` ([#16776](https://github.com/astral-sh/uv/pull/16776))
|
||||
- Fix the preview warning on `uv workspace dir` ([#16775](https://github.com/astral-sh/uv/pull/16775))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix `uv init` author serialization via `toml_edit` inline tables ([#16778](https://github.com/astral-sh/uv/pull/16778))
|
||||
- Fix status messages without TTY ([#16785](https://github.com/astral-sh/uv/pull/16785))
|
||||
- Preserve end-of-line comment whitespace when editing `pyproject.toml` ([#16734](https://github.com/astral-sh/uv/pull/16734))
|
||||
- Disable `always-authenticate` when running under Dependabot ([#16773](https://github.com/astral-sh/uv/pull/16773))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Document the new behavior for free-threaded python versions ([#16781](https://github.com/astral-sh/uv/pull/16781))
|
||||
- Improve note about build system in publish guide ([#16788](https://github.com/astral-sh/uv/pull/16788))
|
||||
- Move do not upload publish note out of the guide into concepts ([#16789](https://github.com/astral-sh/uv/pull/16789))
|
||||
|
||||
## 0.9.10
|
||||
|
||||
Released on 2025-11-17.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add support for `SSL_CERT_DIR` ([#16473](https://github.com/astral-sh/uv/pull/16473))
|
||||
- Enforce UTF‑8-encoded license files during `uv build` ([#16699](https://github.com/astral-sh/uv/pull/16699))
|
||||
- Error when a `project.license-files` glob matches nothing ([#16697](https://github.com/astral-sh/uv/pull/16697))
|
||||
- `pip install --target` (and `sync`) install Python if necessary ([#16694](https://github.com/astral-sh/uv/pull/16694))
|
||||
- Account for `python_downloads_json_url` in pre-release Python version warnings ([#16737](https://github.com/astral-sh/uv/pull/16737))
|
||||
- Support HTTP/HTTPS URLs in `uv python --python-downloads-json-url` ([#16542](https://github.com/astral-sh/uv/pull/16542))
|
||||
|
||||
### Preview features
|
||||
|
||||
- Add support for `--upgrade` in `uv python install` ([#16676](https://github.com/astral-sh/uv/pull/16676))
|
||||
- Fix handling of `python install --default` for pre-release Python versions ([#16706](https://github.com/astral-sh/uv/pull/16706))
|
||||
- Add `uv workspace list` to list workspace members ([#16691](https://github.com/astral-sh/uv/pull/16691))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Don't check file URLs for ambiguously parsed credentials ([#16759](https://github.com/astral-sh/uv/pull/16759))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add a "storage" reference document ([#15954](https://github.com/astral-sh/uv/pull/15954))
|
||||
|
||||
## 0.9.9
|
||||
|
||||
Released on 2025-11-12.
|
||||
|
||||
### Deprecations
|
||||
|
||||
- Deprecate use of `--project` in `uv init` ([#16674](https://github.com/astral-sh/uv/pull/16674))
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add iOS support to Python interpreter discovery ([#16686](https://github.com/astral-sh/uv/pull/16686))
|
||||
- Reject ambiguously parsed URLs ([#16622](https://github.com/astral-sh/uv/pull/16622))
|
||||
- Allow explicit values in `uv version --bump` ([#16555](https://github.com/astral-sh/uv/pull/16555))
|
||||
- Warn on use of managed pre-release Python versions when a stable version is available ([#16619](https://github.com/astral-sh/uv/pull/16619))
|
||||
- Allow signing trampolines on Windows by using `.rcdata` to store metadata ([#15068](https://github.com/astral-sh/uv/pull/15068))
|
||||
- Add `--only-emit-workspace` and similar variants to `uv export` ([#16681](https://github.com/astral-sh/uv/pull/16681))
|
||||
|
||||
### Preview features
|
||||
|
||||
- Add `uv workspace dir` command ([#16678](https://github.com/astral-sh/uv/pull/16678))
|
||||
- Add `uv workspace metadata` command ([#16516](https://github.com/astral-sh/uv/pull/16516))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Add `UV_NO_DEFAULT_GROUPS` environment variable ([#16645](https://github.com/astral-sh/uv/pull/16645))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Remove `torch-model-archiver` and `torch-tb-profiler` from PyTorch backend ([#16655](https://github.com/astral-sh/uv/pull/16655))
|
||||
- Fix Pixi environment detection ([#16585](https://github.com/astral-sh/uv/pull/16585))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Fix `CMD` path in FastAPI Dockerfile ([#16701](https://github.com/astral-sh/uv/pull/16701))
|
||||
|
||||
## 0.9.8
|
||||
|
||||
Released on 2025-11-07.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Accept multiple packages in `uv export` ([#16603](https://github.com/astral-sh/uv/pull/16603))
|
||||
- Accept multiple packages in `uv sync` ([#16543](https://github.com/astral-sh/uv/pull/16543))
|
||||
- Add a `uv cache size` command ([#16032](https://github.com/astral-sh/uv/pull/16032))
|
||||
- Add prerelease guidance for build-system resolution failures ([#16550](https://github.com/astral-sh/uv/pull/16550))
|
||||
- Allow Python requests to include `+gil` to require a GIL-enabled interpreter ([#16537](https://github.com/astral-sh/uv/pull/16537))
|
||||
- Avoid pluralizing 'retry' for single value ([#16535](https://github.com/astral-sh/uv/pull/16535))
|
||||
- Enable first-class dependency exclusions ([#16528](https://github.com/astral-sh/uv/pull/16528))
|
||||
- Fix inclusive constraints on available package versions in resolver errors ([#16629](https://github.com/astral-sh/uv/pull/16629))
|
||||
- Improve `uv init` error for invalid directory names ([#16554](https://github.com/astral-sh/uv/pull/16554))
|
||||
- Show help on `uv build -h` ([#16632](https://github.com/astral-sh/uv/pull/16632))
|
||||
- Include the Python variant suffix in "Using Python ..." messages ([#16536](https://github.com/astral-sh/uv/pull/16536))
|
||||
- Log most recently modified file for cache-keys ([#16338](https://github.com/astral-sh/uv/pull/16338))
|
||||
- Update Docker builds to use nightly Rust toolchain with musl v1.2.5 ([#16584](https://github.com/astral-sh/uv/pull/16584))
|
||||
- Add GitHub attestations for uv release artifacts ([#11357](https://github.com/astral-sh/uv/pull/11357))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Expose `UV_NO_GROUP` as an environment variable ([#16529](https://github.com/astral-sh/uv/pull/16529))
|
||||
- Add `UV_NO_SOURCES` as an environment variable ([#15883](https://github.com/astral-sh/uv/pull/15883))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Allow `--check` and `--locked` to be used together in `uv lock` ([#16538](https://github.com/astral-sh/uv/pull/16538))
|
||||
- Allow for unnormalized names in the METADATA file (#16547) ([#16548](https://github.com/astral-sh/uv/pull/16548))
|
||||
- Fix missing value_type for `default-groups` in schema ([#16575](https://github.com/astral-sh/uv/pull/16575))
|
||||
- Respect multi-GPU outputs in `nvidia-smi` ([#15460](https://github.com/astral-sh/uv/pull/15460))
|
||||
- Fix DNS lookup errors in Docker containers ([#8450](https://github.com/astral-sh/uv/issues/8450))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Fix typo in uv tool list doc ([#16625](https://github.com/astral-sh/uv/pull/16625))
|
||||
- Note `uv pip list` name normalization in docs ([#13210](https://github.com/astral-sh/uv/pull/13210))
|
||||
|
||||
### Other changes
|
||||
|
||||
- Update Rust toolchain to 1.91 and MSRV to 1.89 ([#16531](https://github.com/astral-sh/uv/pull/16531))
|
||||
|
||||
## 0.9.7
|
||||
|
||||
Released on 2025-10-30.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add Windows x86-32 emulation support to interpreter architecture checks ([#13475](https://github.com/astral-sh/uv/pull/13475))
|
||||
- Improve readability of progress bars ([#16509](https://github.com/astral-sh/uv/pull/16509))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Drop terminal coloring from `uv auth token` output ([#16504](https://github.com/astral-sh/uv/pull/16504))
|
||||
- Don't use UV_LOCKED to enable `--check` flag ([#16521](https://github.com/astral-sh/uv/pull/16521))
|
||||
|
||||
## 0.9.6
|
||||
|
||||
Released on 2025-10-29.
|
||||
|
||||
This release contains an upgrade to Astral's fork of `async_zip`, which addresses potential sources of ZIP parsing differentials between uv and other Python packaging tooling. See [GHSA-pqhf-p39g-3x64](https://github.com/astral-sh/uv/security/advisories/GHSA-pqhf-p39g-3x64) for additional details.
|
||||
|
||||
### Security
|
||||
|
||||
* Address ZIP parsing differentials ([GHSA-pqhf-p39g-3x64](https://github.com/astral-sh/uv/security/advisories/GHSA-pqhf-p39g-3x64))
|
||||
|
||||
### Python
|
||||
|
||||
- Upgrade GraalPy to 25.0.1 ([#16401](https://github.com/astral-sh/uv/pull/16401))
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add `--clear` to `uv build` to remove old build artifacts ([#16371](https://github.com/astral-sh/uv/pull/16371))
|
||||
- Add `--no-create-gitignore` to `uv build` ([#16369](https://github.com/astral-sh/uv/pull/16369))
|
||||
- Do not error when a virtual environment directory cannot be removed due to a busy error ([#16394](https://github.com/astral-sh/uv/pull/16394))
|
||||
- Improve hint on `pip install --system` when externally managed ([#16392](https://github.com/astral-sh/uv/pull/16392))
|
||||
- Running `uv lock --check` with outdated lockfile will print that `--check` was passed, instead of `--locked` ([#16322](https://github.com/astral-sh/uv/pull/16322))
|
||||
- Update `uv init` template for Maturin ([#16449](https://github.com/astral-sh/uv/pull/16449))
|
||||
- Improve ordering of Python sources in logs ([#16463](https://github.com/astral-sh/uv/pull/16463))
|
||||
- Restore DockerHub release images and annotations ([#16441](https://github.com/astral-sh/uv/pull/16441))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Check for matching Python implementation during `uv python upgrade` ([#16420](https://github.com/astral-sh/uv/pull/16420))
|
||||
- Deterministically order `--find-links` distributions ([#16446](https://github.com/astral-sh/uv/pull/16446))
|
||||
- Don't panic in `uv export --frozen` when the lockfile is outdated ([#16407](https://github.com/astral-sh/uv/pull/16407))
|
||||
- Fix root of `uv tree` when `--package` is used with circular dependencies ([#15908](https://github.com/astral-sh/uv/pull/15908))
|
||||
- Show package list with `pip freeze --quiet` ([#16491](https://github.com/astral-sh/uv/pull/16491))
|
||||
- Limit `uv auth login pyx.dev` retries to 60s ([#16498](https://github.com/astral-sh/uv/pull/16498))
|
||||
- Add an empty group with `uv add --group ... -r ...` ([#16490](https://github.com/astral-sh/uv/pull/16490))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Update docs for maturin build backend init template ([#16469](https://github.com/astral-sh/uv/pull/16469))
|
||||
- Update docs to reflect previous changes to signal forwarding semantics ([#16430](https://github.com/astral-sh/uv/pull/16430))
|
||||
- Add instructions for installing via MacPorts ([#16039](https://github.com/astral-sh/uv/pull/16039))
|
||||
|
||||
## 0.9.5
|
||||
|
||||
Released on 2025-10-21.
|
||||
|
||||
This release contains an upgrade to `astral-tokio-tar`, which addresses a vulnerability in tar extraction on malformed archives with mismatching size information between the ustar header and PAX extensions. While the `astral-tokio-tar` advisory has been graded as "high" due to its potential broader impact, the *specific* impact to uv is **low** due to a lack of novel attacker capability. Specifically, uv only processes tar archives from source distributions, which already possess the capability for full arbitrary code execution by design, meaning that an attacker gains no additional capabilities through `astral-tokio-tar`.
|
||||
|
||||
Regardless, we take the hypothetical risk of parser differentials very seriously. Out of an abundance of caution, we have assigned this upgrade an advisory: https://github.com/astral-sh/uv/security/advisories/GHSA-w476-p2h3-79g9
|
||||
|
||||
### Security
|
||||
|
||||
* Upgrade `astral-tokio-tar` to 0.5.6 to address a parsing differential ([#16387](https://github.com/astral-sh/uv/pull/16387))
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add required environment marker example to hint ([#16244](https://github.com/astral-sh/uv/pull/16244))
|
||||
- Fix typo in MissingTopLevel warning ([#16351](https://github.com/astral-sh/uv/pull/16351))
|
||||
- Improve 403 Forbidden error message to indicate package may not exist ([#16353](https://github.com/astral-sh/uv/pull/16353))
|
||||
- Add a hint on `uv pip install` failure if the `--system` flag is used to select an externally managed interpreter ([#16318](https://github.com/astral-sh/uv/pull/16318))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix backtick escaping for PowerShell ([#16307](https://github.com/astral-sh/uv/pull/16307))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Document metadata consistency expectation ([#15683](https://github.com/astral-sh/uv/pull/15683))
|
||||
- Remove outdated aarch64 musl note ([#16385](https://github.com/astral-sh/uv/pull/16385))
|
||||
|
||||
## 0.9.4
|
||||
|
||||
Released on 2025-10-17.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add CUDA 13.0 support ([#16321](https://github.com/astral-sh/uv/pull/16321))
|
||||
- Add auto-detection for Intel GPU on Windows ([#16280](https://github.com/astral-sh/uv/pull/16280))
|
||||
- Implement display of RFC 9457 HTTP error contexts ([#16199](https://github.com/astral-sh/uv/pull/16199))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid obfuscating pyx tokens in `uv auth token` output ([#16345](https://github.com/astral-sh/uv/pull/16345))
|
||||
|
||||
## 0.9.3
|
||||
|
||||
Released on 2025-10-14.
|
||||
|
||||
### Python
|
||||
|
||||
- Add CPython 3.15.0a1
|
||||
- Add CPython 3.13.9
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Obfuscate secret token values in logs ([#16164](https://github.com/astral-sh/uv/pull/16164))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix workspace with relative pathing ([#16296](https://github.com/astral-sh/uv/pull/16296))
|
||||
|
||||
## 0.9.2
|
||||
|
||||
Released on 2025-10-10.
|
||||
|
||||
### Python
|
||||
|
||||
- Add CPython 3.9.24.
|
||||
- Add CPython 3.10.19.
|
||||
- Add CPython 3.11.14.
|
||||
- Add CPython 3.12.12.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Avoid inferring check URLs for pyx in `uv publish` ([#16234](https://github.com/astral-sh/uv/pull/16234))
|
||||
- Add `uv tool list --show-python` ([#15814](https://github.com/astral-sh/uv/pull/15814))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add missing "added in" to new environment variables in reference ([#16217](https://github.com/astral-sh/uv/pull/16217))
|
||||
|
||||
## 0.9.1
|
||||
|
||||
Released on 2025-10-09.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Log Python choice in `uv init` ([#16182](https://github.com/astral-sh/uv/pull/16182))
|
||||
- Fix `pylock.toml` config conflict error messages ([#16211](https://github.com/astral-sh/uv/pull/16211))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Add `UV_UPLOAD_HTTP_TIMEOUT` and respect `UV_HTTP_TIMEOUT` in uploads ([#16040](https://github.com/astral-sh/uv/pull/16040))
|
||||
- Support `UV_WORKING_DIRECTORY` for setting `--directory` ([#16125](https://github.com/astral-sh/uv/pull/16125))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Allow missing `Scripts` directory ([#16206](https://github.com/astral-sh/uv/pull/16206))
|
||||
- Fix handling of Python requests with pre-releases in ranges ([#16208](https://github.com/astral-sh/uv/pull/16208))
|
||||
- Preserve comments on version bump ([#16141](https://github.com/astral-sh/uv/pull/16141))
|
||||
- Retry all HTTP/2 errors ([#16038](https://github.com/astral-sh/uv/pull/16038))
|
||||
- Treat deleted Windows registry keys as equivalent to missing ones ([#16194](https://github.com/astral-sh/uv/pull/16194))
|
||||
- Ignore pre-release Python versions when a patch version is requested ([#16210](https://github.com/astral-sh/uv/pull/16210))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Document why uv discards upper bounds on `requires-python` ([#15927](https://github.com/astral-sh/uv/pull/15927))
|
||||
- Document uv version environment variables were added in ([#15196](https://github.com/astral-sh/uv/pull/15196))
|
||||
|
||||
## 0.9.0
|
||||
|
||||
Released on 2025-10-07.
|
||||
|
||||
This breaking release is primarily motivated by the release of Python 3.14, which contains some breaking changes (we recommend reading the ["What's new in Python 3.14"](https://docs.python.org/3/whatsnew/3.14.html) page). uv may use Python 3.14 in cases where it previously used 3.13, e.g., if you have not pinned your Python version and do not have any Python versions installed on your machine. While we think this is uncommon, we prefer to be cautious. We've included some additional small changes that could break workflows.
|
||||
|
||||
See our [Python 3.14](https://astral.sh/blog/python-3.14) blog post for some discussion of features we're excited about!
|
||||
|
||||
There are no breaking changes to [`uv_build`](https://docs.astral.sh/uv/concepts/build-backend/). If you have an upper bound in your `[build-system]` table, you should update it.
|
||||
|
||||
### Breaking changes
|
||||
|
||||
- **Python 3.14 is now the default stable version**
|
||||
|
||||
The default Python version has changed from 3.13 to 3.14. This applies to Python version installation when no Python version is requested, e.g., `uv python install`. By default, uv will use the system Python version if present, so this may not cause changes to general use of uv. For example, if Python 3.13 is installed already, then `uv venv` will use that version. If no Python versions are installed on a machine and automatic downloads are enabled, uv will now use 3.14 instead of 3.13, e.g., for `uv venv` or `uvx python`. This change will not affect users who are using a `.python-version` file to pin to a specific Python version.
|
||||
- **Allow use of free-threaded variants in Python 3.14+ without explicit opt-in** ([#16142](https://github.com/astral-sh/uv/pull/16142))
|
||||
|
||||
Previously, free-threaded variants of Python were considered experimental and required explicit opt-in (i.e., with `3.14t`) for usage. Now uv will allow use of free-threaded Python 3.14+ interpreters without explicit selection. The GIL-enabled build of Python will still be preferred, e.g., when performing an installation with `uv python install 3.14`. However, e.g., if a free-threaded interpreter comes before a GIL-enabled build on the `PATH`, it will be used. This change does not apply to free-threaded Python 3.13 interpreters, which will continue to require opt-in.
|
||||
- **Use Python 3.14 stable Docker images** ([#16150](https://github.com/astral-sh/uv/pull/16150))
|
||||
|
||||
Previously, the Python 3.14 images had an `-rc` suffix, e.g., `python:3.14-rc-alpine` or
|
||||
`python:3.14-rc-trixie`. Now, the `-rc` suffix has been removed to match the stable
|
||||
[upstream images](https://hub.docker.com/_/python). The `-rc` image tags will no longer be
|
||||
updated. This change should not break existing workflows.
|
||||
- **Upgrade Alpine Docker image to Alpine 3.22**
|
||||
|
||||
Previously, the `uv:alpine` Docker image was based on Alpine 3.21. Now, this image is based on Alpine 3.22. The previous image can be recovered with `uv:alpine3.21` and will continue to be updated until a future release.
|
||||
- **Upgrade Debian Docker images to Debian 13 "Trixie"**
|
||||
|
||||
Previously, the `uv:debian` and `uv:debian-slim` Docker images were based on Debian 12 "Bookworm". Now, these images are based on Debian 13 "Trixie". The previous images can be recovered with `uv:bookworm` and `uv:bookworm-slim` and will continue to be updated until a future release.
|
||||
- **Fix incorrect output path when a trailing `/` is used in `uv build`** ([#15133](https://github.com/astral-sh/uv/pull/15133))
|
||||
|
||||
When using `uv build` in a workspace, the artifacts are intended to be written to a `dist` directory in the workspace root. A bug caused workspace root determination to fail when the input path included a trailing `/`, causing the `dist` directory to be placed in the child directory. This bug has been fixed in this release. For example, if `uv build child/` is used, the output path will now be in `<workspace root>/dist/` rather than `<workspace root>/child/dist/`.
|
||||
|
||||
### Python
|
||||
|
||||
- Add CPython 3.14.0
|
||||
- Add CPython 3.13.8
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Don't warn when a dependency is constrained by another dependency ([#16149](https://github.com/astral-sh/uv/pull/16149))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix `uv python upgrade / install` output when there is a no-op for one request ([#16158](https://github.com/astral-sh/uv/pull/16158))
|
||||
- Surface pinned-version hint when `uv tool upgrade` can’t move the tool ([#16081](https://github.com/astral-sh/uv/pull/16081))
|
||||
- Ban pre-release versions in `uv python upgrade` requests ([#16160](https://github.com/astral-sh/uv/pull/16160))
|
||||
- Fix `uv python upgrade` replacement of installed binaries on pre-release to stable ([#16159](https://github.com/astral-sh/uv/pull/16159))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Update `uv pip compile` args in `layout.md` ([#16155](https://github.com/astral-sh/uv/pull/16155))
|
||||
|
||||
## 0.8.x
|
||||
|
||||
See [changelogs/0.8.x](./changelogs/0.8.x.md)
|
||||
|
||||
## 0.7.x
|
||||
|
||||
See [changelogs/0.7.x](./changelogs/0.7.x.md)
|
||||
|
||||
## 0.6.x
|
||||
|
||||
See [changelogs/0.6.x](./changelogs/0.6.x.md)
|
||||
|
||||
## 0.5.x
|
||||
|
||||
See [changelogs/0.5.x](./changelogs/0.5.x.md)
|
||||
|
||||
## 0.4.x
|
||||
|
||||
See [changelogs/0.4.x](./changelogs/0.4.x.md)
|
||||
|
||||
## 0.3.x
|
||||
|
||||
See [changelogs/0.3.x](./changelogs/0.3.x.md)
|
||||
|
||||
## 0.2.x
|
||||
|
||||
See [changelogs/0.2.x](./changelogs/0.2.x.md)
|
||||
|
||||
## 0.1.x
|
||||
|
||||
See [changelogs/0.1.x](./changelogs/0.1.x.md)
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
|
||||
|
|
@ -1,125 +0,0 @@
|
|||
# Contributor Covenant Code of Conduct

- [Our Pledge](#our-pledge)
- [Our Standards](#our-standards)
- [Enforcement Responsibilities](#enforcement-responsibilities)
- [Scope](#scope)
- [Enforcement](#enforcement)
- [Enforcement Guidelines](#enforcement-guidelines)
  - [1. Correction](#1-correction)
  - [2. Warning](#2-warning)
  - [3. Temporary Ban](#3-temporary-ban)
  - [4. Permanent Ban](#4-permanent-ban)
- [Attribution](#attribution)

## Our Pledge

We as members, contributors, and leaders pledge to make participation in our community a
harassment-free experience for everyone, regardless of age, body size, visible or invisible
disability, ethnicity, sex characteristics, gender identity and expression, level of experience,
education, socio-economic status, nationality, personal appearance, race, religion, or sexual
identity and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and
healthy community.

## Our Standards

Examples of behavior that contributes to a positive environment for our community include:

- Demonstrating empathy and kindness toward other people
- Being respectful of differing opinions, viewpoints, and experiences
- Giving and gracefully accepting constructive feedback
- Accepting responsibility and apologizing to those affected by our mistakes, and learning from the
  experience
- Focusing on what is best not just for us as individuals, but for the overall community

Examples of unacceptable behavior include:

- The use of sexualized language or imagery, and sexual attention or advances of any kind
- Trolling, insulting or derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or email address, without their
  explicit permission
- Other conduct which could reasonably be considered inappropriate in a professional setting

## Enforcement Responsibilities

Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior
and will take appropriate and fair corrective action in response to any behavior that they deem
inappropriate, threatening, offensive, or harmful.

Community leaders have the right and responsibility to remove, edit, or reject comments, commits,
code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and
will communicate reasons for moderation decisions when appropriate.

## Scope

This Code of Conduct applies within all community spaces, and also applies when an individual is
officially representing the community in public spaces. Examples of representing our community
include using an official e-mail address, posting via an official social media account, or acting as
an appointed representative at an online or offline event.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community
leaders responsible for enforcement at <hey@astral.sh>. All complaints will be reviewed and
investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the reporter of any
incident.

## Enforcement Guidelines

Community leaders will follow these Community Impact Guidelines in determining the consequences for
any action they deem in violation of this Code of Conduct:

### 1. Correction

**Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or
unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing clarity around the
nature of the violation and an explanation of why the behavior was inappropriate. A public apology
may be requested.

### 2. Warning

**Community Impact**: A violation through a single incident or series of actions.

**Consequence**: A warning with consequences for continued behavior. No interaction with the people
involved, including unsolicited interaction with those enforcing the Code of Conduct, for a
specified period of time. This includes avoiding interactions in community spaces as well as
external channels like social media. Violating these terms may lead to a temporary or permanent ban.

### 3. Temporary Ban

**Community Impact**: A serious violation of community standards, including sustained inappropriate
behavior.

**Consequence**: A temporary ban from any sort of interaction or public communication with the
community for a specified period of time. No public or private interaction with the people involved,
including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this
period. Violating these terms may lead to a permanent ban.

### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community standards, including
sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement
of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within the community.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, available
[here](https://www.contributor-covenant.org/version/2/0/code_of_conduct.html).

Community Impact Guidelines were inspired by
[Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity).

For answers to common questions about this code of conduct, see the
[FAQ](https://www.contributor-covenant.org/faq). Translations are available
[here](https://www.contributor-covenant.org/translations).

[homepage]: https://www.contributor-covenant.org
CONTRIBUTING.md

# Contributing

## Finding ways to help

We label issues that would be good for a first time contributor as
[`good first issue`](https://github.com/astral-sh/uv/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22).
These usually do not require significant experience with Rust or the uv code base.

We label issues that we think are a good opportunity for subsequent contributions as
[`help wanted`](https://github.com/astral-sh/uv/issues?q=is%3Aopen+is%3Aissue+label%3A%22help+wanted%22).
These require varying levels of experience with Rust and uv. Often, we want to accomplish these
tasks but do not have the resources to do so ourselves.

You don't need our permission to start on an issue we have labeled as appropriate for community
contribution as described above. However, it's a good idea to indicate that you are going to work on
an issue to avoid concurrent attempts to solve the same problem.

Please check in with us before starting work on an issue that has not been labeled as appropriate
for community contribution. We're happy to receive contributions for other issues, but it's
important to make sure we have consensus on the solution to the problem first.

Outside of issues with the labels above, issues labeled as
[`bug`](https://github.com/astral-sh/uv/issues?q=is%3Aopen+is%3Aissue+label%3A%22bug%22) are the
best candidates for contribution. In contrast, issues labeled with `needs-decision` or
`needs-design` are _not_ good candidates for contribution. Please do not open pull requests for
issues with these labels.

Please do not open pull requests for new features without prior discussion. While we appreciate
exploration of new features, we will almost always close these pull requests immediately. Adding a
new feature to uv creates a long-term maintenance burden and requires strong consensus from the uv
team before it is appropriate to begin work on an implementation.

## Setup

[Rust](https://rustup.rs/) (and a C compiler) are required to build uv.
[Rust](https://rustup.rs/), a C compiler, and CMake are required to build uv.

On Ubuntu and other Debian-based distributions, you can install a C compiler with:

### Linux

On Ubuntu and other Debian-based distributions, you can install the C compiler and CMake with

```shell
sudo apt install build-essential
sudo apt install build-essential cmake
```

On Fedora-based distributions, you can install a C compiler with:

### macOS

CMake may be installed with Homebrew:

```shell
sudo dnf install gcc
```

```shell
brew install cmake
```

The Python bootstrapping script requires `coreutils` and `zstd`; we recommend installing them with
Homebrew:

```shell
brew install coreutils zstd
```

See the [Python](#python) section for instructions on installing the Python versions.

### Windows

You can install CMake from the [installers](https://cmake.org/download/) or with `pipx install cmake`
(make sure that the pipx install path is in `PATH`, pipx complains if it isn't).

## Testing

For running tests, we recommend [nextest](https://nexte.st/).

If tests fail due to a mismatch in the JSON Schema, run: `cargo dev generate-json-schema`.
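As an illustrative sketch (not part of the original guide), a typical local run with nextest,
followed by the schema regeneration mentioned above, might look like:

```shell
# Run the full test suite with nextest (assumes cargo-nextest is installed).
cargo nextest run --workspace

# Regenerate the JSON Schema if the schema tests report a mismatch.
cargo dev generate-json-schema
```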
### Python

Testing uv requires multiple specific Python versions; they can be installed with:
Testing uv requires multiple specific Python versions. You can install them into
`<project root>/bin` via our bootstrapping script:

```shell
cargo run python install
pipx run scripts/bootstrap/install.py
```

The storage directory can be configured with `UV_PYTHON_INSTALL_DIR`. (It must be an absolute path.)
Alternatively, you can install `zstandard` from PyPI, then run:

### Snapshot testing

uv uses [insta](https://insta.rs/) for snapshot testing. It's recommended (but not necessary) to use
`cargo-insta` for a better snapshot review experience. See the
[installation guide](https://insta.rs/docs/cli/) for more information.

In tests, you can use the `uv_snapshot!` macro to simplify creating snapshots for uv commands. For
example:

```rust
#[test]
fn test_add() {
    let context = TestContext::new("3.12");
    uv_snapshot!(context.filters(), context.add().arg("requests"), @"");
}
```

```shell
python3.12 scripts/bootstrap/install.py
```

To run and review a specific snapshot test:

```shell
cargo test --package <package> --test <test> -- <test_name> -- --exact
cargo insta review
```

## Running inside a docker container

Source distributions can run arbitrary code on build and can make unwanted modifications to your
system (https://moyix.blogspot.com/2022/09/someones-been-messing-with-my-subnormals.html,
https://pypi.org/project/nvidia-pyindex/), which can even occur when just resolving requirements. To
prevent this, there's a Docker container you can run commands in:

### Git and Git LFS

A subset of uv tests require both [Git](https://git-scm.com) and [Git LFS](https://git-lfs.com/) to
execute properly.

These tests can be disabled by turning off either the `git` or `git-lfs` uv features.

### Local testing

You can invoke your development version of uv with `cargo run -- <args>`. For example:

```shell
cargo run -- venv
cargo run -- pip install requests
```

## Crate structure

Rust does not allow circular dependencies between crates. To visualize the crate hierarchy, install
[cargo-depgraph](https://github.com/jplatte/cargo-depgraph) and graphviz, then run:

```shell
cargo depgraph --dedup-transitive-deps --workspace-only | dot -Tpng > graph.png
```

## Running inside a Docker container

Source distributions can run arbitrary code on build and can make unwanted modifications to your
system
(["Someone's Been Messing With My Subnormals!" on Blogspot](https://moyix.blogspot.com/2022/09/someones-been-messing-with-my-subnormals.html),
["nvidia-pyindex" on PyPI](https://pypi.org/project/nvidia-pyindex/)), which can even occur when
just resolving requirements. To prevent this, there's a Docker container you can run commands in:

```console
$ docker build -t uv-builder -f crates/uv-dev/builder.dockerfile --load .
```

```bash
docker buildx build -t uv-builder -f builder.dockerfile --load .
# Build for musl to avoid glibc errors, might not be required with your OS version
cargo build --target x86_64-unknown-linux-musl --profile profiling
cargo build --target x86_64-unknown-linux-musl --profile profiling --features vendored-openssl
docker run --rm -it -v $(pwd):/app uv-builder /app/target/x86_64-unknown-linux-musl/profiling/uv-dev resolve-many --cache-dir /app/cache-docker /app/scripts/popular_packages/pypi_10k_most_dependents.txt
```

We recommend using this container if you don't trust the dependency tree of the package(s) you are
trying to resolve or install.

## Profiling and Benchmarking

## Profiling

Please refer to Ruff's
[Profiling Guide](https://github.com/astral-sh/ruff/blob/main/CONTRIBUTING.md#profiling-projects);
it applies to uv, too.

We provide diverse sets of requirements for testing and benchmarking the resolver in
`test/requirements` and for the installer in `test/requirements/compiled`.

### Analysing concurrency

You can use `scripts/benchmark` to benchmark predefined workloads between uv versions and with other
tools, e.g., from the `scripts/benchmark` directory:
You can use [tracing-durations-export](https://github.com/konstin/tracing-durations-export) to
visualize parallel requests and find any spots where uv is CPU-bound. Example usage, with `uv` and
`uv-dev` respectively:

```shell
uv run resolver \
    --uv-pip \
    --poetry \
    --benchmark \
    resolve-cold \
    ../test/requirements/trio.in
```

```bash
RUST_LOG=uv=info TRACING_DURATIONS_FILE=target/traces/jupyter.ndjson cargo run --features tracing-durations-export --profile profiling -- pip compile scripts/requirements/jupyter.in
```

### Analyzing concurrency

You can use [tracing-durations-export](https://github.com/konstin/tracing-durations-export) to
visualize parallel requests and find any spots where uv is CPU-bound. Example usage, with `uv` and
`uv-dev` respectively:

```shell
RUST_LOG=uv=info TRACING_DURATIONS_FILE=target/traces/jupyter.ndjson cargo run --features tracing-durations-export --profile profiling -- pip compile test/requirements/jupyter.in
```

```shell
RUST_LOG=uv=info TRACING_DURATIONS_FILE=target/traces/jupyter.ndjson cargo run --features tracing-durations-export --bin uv-dev --profile profiling -- resolve jupyter
```

### Trace-level logging

You can enable `trace` level logging using the `RUST_LOG` environment variable, i.e.

```shell
RUST_LOG=trace uv
```
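As a further illustration (a combination of commands already shown in this guide, not an additional
documented workflow), the filter can be scoped to uv's own targets while exercising a development
build:

```shell
# Trace-level logs for uv only, against a dev build invoked through Cargo.
RUST_LOG=uv=trace cargo run -- pip install requests
```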
## Documentation

To preview any changes to the documentation locally:

1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).

2. Run `cargo dev generate-all` to update any auto-generated documentation.

3. Run the development server with:

   ```shell
   # For contributors.
   uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.public.yml

   # For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
   uvx --with-requirements docs/requirements-insiders.txt -- mkdocs serve -f mkdocs.insiders.yml
   ```

The documentation should then be available locally at
[http://127.0.0.1:8000/uv/](http://127.0.0.1:8000/uv/).

To update the documentation dependencies, edit `docs/requirements.in` and
`docs/requirements-insiders.in`, then run:

```shell
uv pip compile docs/requirements.in -o docs/requirements.txt --universal -p 3.12
uv pip compile docs/requirements-insiders.in -o docs/requirements-insiders.txt --universal -p 3.12
```

Documentation is deployed automatically on release by publishing to the
[Astral documentation](https://github.com/astral-sh/docs) repository, which itself deploys via
Cloudflare Pages.

After making changes to the documentation, format the markdown files with:

```shell
npx prettier --prose-wrap always --write "**/*.md"
```

Note that the command above requires Node.js and npm to be installed on your system. As an
alternative, you can run this command using Docker:

```console
$ docker run --rm -v .:/src/ -w /src/ node:alpine npx prettier --prose-wrap always --write "**/*.md"
```

## Releases

Releases can only be performed by Astral team members.

Changelog entries and version bumps are automated. First, run:

```shell
./scripts/release.sh
```

Then, editorialize the `CHANGELOG.md` file to ensure entries are consistently styled.

Then, open a pull request, e.g., `Bump version to ...`.

Binary builds will automatically be tested for the release.

After merging the pull request, run the
[release workflow](https://github.com/astral-sh/uv/actions/workflows/release.yml) with the version
tag. **Do not include a leading `v`**. The release will automatically be created on GitHub after
everything else publishes.
File diff suppressed because it is too large
Cargo.toml

[workspace]
members = ["crates/*"]
exclude = [
    "scripts",
    # Needs nightly
    "crates/uv-trampoline",
]
resolver = "2"

[workspace.package]
edition = "2024"
rust-version = "1.89"
edition = "2021"
rust-version = "1.74"
homepage = "https://pypi.org/project/uv/"
documentation = "https://pypi.org/project/uv/"
repository = "https://github.com/astral-sh/uv"
authors = ["uv"]
license = "MIT OR Apache-2.0"

[workspace.dependencies]
uv-auth = { version = "0.0.8", path = "crates/uv-auth" }
uv-bin-install = { version = "0.0.8", path = "crates/uv-bin-install" }
uv-build-backend = { version = "0.0.8", path = "crates/uv-build-backend" }
uv-build-frontend = { version = "0.0.8", path = "crates/uv-build-frontend" }
uv-cache = { version = "0.0.8", path = "crates/uv-cache" }
uv-cache-info = { version = "0.0.8", path = "crates/uv-cache-info" }
uv-cache-key = { version = "0.0.8", path = "crates/uv-cache-key" }
uv-cli = { version = "0.0.8", path = "crates/uv-cli" }
uv-client = { version = "0.0.8", path = "crates/uv-client" }
uv-configuration = { version = "0.0.8", path = "crates/uv-configuration" }
uv-console = { version = "0.0.8", path = "crates/uv-console" }
uv-dirs = { version = "0.0.8", path = "crates/uv-dirs" }
uv-dispatch = { version = "0.0.8", path = "crates/uv-dispatch" }
uv-distribution = { version = "0.0.8", path = "crates/uv-distribution" }
uv-distribution-filename = { version = "0.0.8", path = "crates/uv-distribution-filename" }
uv-distribution-types = { version = "0.0.8", path = "crates/uv-distribution-types" }
uv-extract = { version = "0.0.8", path = "crates/uv-extract" }
uv-flags = { version = "0.0.8", path = "crates/uv-flags" }
uv-fs = { version = "0.0.8", path = "crates/uv-fs", features = ["serde", "tokio"] }
uv-git = { version = "0.0.8", path = "crates/uv-git" }
uv-git-types = { version = "0.0.8", path = "crates/uv-git-types" }
uv-globfilter = { version = "0.0.8", path = "crates/uv-globfilter" }
uv-install-wheel = { version = "0.0.8", path = "crates/uv-install-wheel", default-features = false }
uv-installer = { version = "0.0.8", path = "crates/uv-installer" }
uv-keyring = { version = "0.0.8", path = "crates/uv-keyring" }
uv-logging = { version = "0.0.8", path = "crates/uv-logging" }
uv-macros = { version = "0.0.8", path = "crates/uv-macros" }
uv-metadata = { version = "0.0.8", path = "crates/uv-metadata" }
uv-normalize = { version = "0.0.8", path = "crates/uv-normalize" }
uv-once-map = { version = "0.0.8", path = "crates/uv-once-map" }
uv-options-metadata = { version = "0.0.8", path = "crates/uv-options-metadata" }
uv-performance-memory-allocator = { version = "0.0.8", path = "crates/uv-performance-memory-allocator" }
uv-pep440 = { version = "0.0.8", path = "crates/uv-pep440", features = ["tracing", "rkyv", "version-ranges"] }
uv-pep508 = { version = "0.0.8", path = "crates/uv-pep508", features = ["non-pep508-extensions"] }
uv-platform = { version = "0.0.8", path = "crates/uv-platform" }
uv-platform-tags = { version = "0.0.8", path = "crates/uv-platform-tags" }
uv-preview = { version = "0.0.8", path = "crates/uv-preview" }
uv-publish = { version = "0.0.8", path = "crates/uv-publish" }
uv-pypi-types = { version = "0.0.8", path = "crates/uv-pypi-types" }
uv-python = { version = "0.0.8", path = "crates/uv-python" }
uv-redacted = { version = "0.0.8", path = "crates/uv-redacted" }
uv-requirements = { version = "0.0.8", path = "crates/uv-requirements" }
uv-requirements-txt = { version = "0.0.8", path = "crates/uv-requirements-txt" }
uv-resolver = { version = "0.0.8", path = "crates/uv-resolver" }
uv-scripts = { version = "0.0.8", path = "crates/uv-scripts" }
uv-settings = { version = "0.0.8", path = "crates/uv-settings" }
uv-shell = { version = "0.0.8", path = "crates/uv-shell" }
uv-small-str = { version = "0.0.8", path = "crates/uv-small-str" }
uv-state = { version = "0.0.8", path = "crates/uv-state" }
uv-static = { version = "0.0.8", path = "crates/uv-static" }
uv-tool = { version = "0.0.8", path = "crates/uv-tool" }
uv-torch = { version = "0.0.8", path = "crates/uv-torch" }
uv-trampoline-builder = { version = "0.0.8", path = "crates/uv-trampoline-builder" }
uv-types = { version = "0.0.8", path = "crates/uv-types" }
uv-version = { version = "0.9.18", path = "crates/uv-version" }
uv-virtualenv = { version = "0.0.8", path = "crates/uv-virtualenv" }
uv-warnings = { version = "0.0.8", path = "crates/uv-warnings" }
uv-workspace = { version = "0.0.8", path = "crates/uv-workspace" }

ambient-id = { version = "0.0.7", default-features = false, features = ["astral-reqwest-middleware"] }
anstream = { version = "0.6.15" }
anyhow = { version = "1.0.89" }
arcstr = { version = "1.2.0" }
arrayvec = { version = "0.7.6" }
astral-tokio-tar = { version = "0.5.6" }
async-channel = { version = "2.3.1" }
async-compression = { version = "0.4.12", features = ["bzip2", "gzip", "xz", "zstd"] }
async-trait = { version = "0.1.82" }
async_http_range_reader = { version = "0.9.1", package = "astral_async_http_range_reader" }
async_zip = { version = "0.0.17", package = "astral_async_zip", features = ["bzip2", "deflate", "lzma", "tokio", "xz", "zstd"] }
axoupdater = { version = "0.9.0", default-features = false }
backon = { version = "1.3.0" }
base64 = { version = "0.22.1" }
bitflags = { version = "2.6.0" }
blake2 = { version = "0.10.6" }
boxcar = { version = "0.2.5" }
bytecheck = { version = "0.8.0" }
cargo-util = { version = "0.2.14" }
clap = { version = "4.5.17", features = ["derive", "env", "string", "wrap_help"] }
clap_complete_command = { version = "0.6.1" }
configparser = { version = "3.1.0" }
console = { version = "0.16.0", default-features = false, features = ["std"] }
anstream = { version = "0.6.5" }
anyhow = { version = "1.0.79" }
async-compression = { version = "0.4.6" }
async-trait = { version = "0.1.77" }
async_http_range_reader = { version = "0.6.1" }
async_zip = { git = "https://github.com/charliermarsh/rs-async-zip", rev = "d76801da0943de985254fc6255c0e476b57c5836", features = ["deflate"] }
base64 = { version = "0.21.7" }
cachedir = { version = "0.3.1" }
camino = { version = "1.1.6", features = ["serde1"] }
cargo-util = { version = "0.2.8" }
chrono = { version = "0.4.31" }
clap = { version = "4.4.13" }
configparser = { version = "3.0.4" }
console = { version = "0.15.8", default-features = false }
csv = { version = "1.3.0" }
ctrlc = { version = "3.4.5" }
cyclonedx-bom = { version = "0.8.0" }
dashmap = { version = "6.1.0" }
data-encoding = { version = "2.6.0" }
diskus = { version = "0.9.0", default-features = false }
dotenvy = { version = "0.15.7" }
dunce = { version = "1.0.5" }
either = { version = "1.13.0" }
encoding_rs_io = { version = "0.1.7" }
embed-manifest = { version = "1.5.0" }
etcetera = { version = "0.11.0" }
fastrand = { version = "2.3.0" }
flate2 = { version = "1.0.33", default-features = false, features = ["zlib-rs"] }
fs-err = { version = "3.0.0", features = ["tokio"] }
ctrlc = { version = "3.4.2" }
dashmap = { version = "5.5.3" }
data-encoding = { version = "2.5.0" }
derivative = { version = "2.2.0" }
directories = { version = "5.0.1" }
dirs = { version = "5.0.1" }
dunce = { version = "1.0.4" }
either = { version = "1.9.0" }
flate2 = { version = "1.0.28", default-features = false }
fs-err = { version = "2.11.0" }
fs2 = { version = "0.4.3" }
futures = { version = "0.3.30" }
git2 = { version = "0.18.1" }
glob = { version = "0.3.1" }
globset = { version = "0.4.15" }
globwalk = { version = "0.9.1" }
goblin = { version = "0.10.0", default-features = false, features = ["std", "elf32", "elf64", "endian_fd"] }
h2 = { version = "0.4.7" }
hashbrown = { version = "0.16.0" }
goblin = { version = "0.8.0" }
hex = { version = "0.4.3" }
hmac = { version = "0.12.1" }
home = { version = "0.5.9" }
html-escape = { version = "0.2.13" }
http = { version = "1.1.0" }
indexmap = { version = "2.5.0" }
indicatif = { version = "0.18.0" }
indoc = { version = "2.0.5" }
itertools = { version = "0.14.0" }
jiff = { version = "0.2.0", features = ["serde"] }
junction = { version = "1.2.0" }
mailparse = { version = "0.16.0" }
md-5 = { version = "0.10.6" }
memchr = { version = "2.7.4" }
miette = { version = "7.2.0", features = ["fancy-no-backtrace"] }
http = { version = "0.2.11" }
indexmap = { version = "2.1.0" }
indicatif = { version = "0.17.7" }
indoc = { version = "2.0.4" }
itertools = { version = "0.12.0" }
junction = { version = "1.0.0" }
mailparse = { version = "0.14.0" }
miette = { version = "6.0.0" }
nanoid = { version = "0.4.0" }
nix = { version = "0.30.0", features = ["signal"] }
open = { version = "5.3.2" }
owo-colors = { version = "4.1.0" }
path-slash = { version = "0.2.1" }
pathdiff = { version = "0.2.1" }
percent-encoding = { version = "2.3.1" }
petgraph = { version = "0.8.0" }
proc-macro2 = { version = "1.0.86" }
procfs = { version = "0.17.0", default-features = false, features = ["flate2"] }
pubgrub = { version = "0.3.3", package = "astral-pubgrub" }
quote = { version = "1.0.37" }
rayon = { version = "1.10.0" }
ref-cast = { version = "1.0.24" }
reflink-copy = { version = "0.1.19" }
regex = { version = "1.10.6" }
regex-automata = { version = "0.4.8", default-features = false, features = ["dfa-build", "dfa-search", "perf", "std", "syntax"] }
reqsign = { version = "0.18.0", features = ["aws", "default-context"], default-features = false }
reqwest = { version = "0.12.22", default-features = false, features = ["json", "gzip", "deflate", "zstd", "stream", "system-proxy", "rustls-tls", "rustls-tls-native-roots", "socks", "multipart", "http2", "blocking"] }
reqwest-middleware = { version = "0.4.2", package = "astral-reqwest-middleware", features = ["multipart"] }
reqwest-retry = { version = "0.7.0", package = "astral-reqwest-retry" }
rkyv = { version = "0.8.8", features = ["bytecheck"] }
rmp-serde = { version = "1.3.0" }
rust-netrc = { version = "0.1.2" }
rustc-hash = { version = "2.0.0" }
rustix = { version = "1.0.0", default-features = false, features = ["fs", "std"] }
once_cell = { version = "1.19.0" }
owo-colors = { version = "4.0.0" }
petgraph = { version = "0.6.4" }
platform-info = { version = "2.0.2" }
plist = { version = "1.6.0" }
pubgrub = { git = "https://github.com/zanieb/pubgrub", rev = "9b6d89cb8a0c7902815c8b2ae99106ba322ffb14" }
pyo3 = { version = "0.20.2" }
pyo3-log = { version = "0.9.0" }
pyproject-toml = { version = "0.8.1" }
rand = { version = "0.8.5" }
rayon = { version = "1.8.0" }
reflink-copy = { version = "0.1.14" }
regex = { version = "1.10.2" }
reqwest = { version = "0.11.23", default-features = false, features = ["json", "gzip", "brotli", "stream", "rustls-tls-native-roots"] }
reqwest-middleware = { version = "0.2.4" }
reqwest-retry = { version = "0.3.0" }
rkyv = { version = "0.7.43", features = ["strict", "validation"] }
rmp-serde = { version = "1.1.2" }
rustc-hash = { version = "1.1.0" }
same-file = { version = "1.0.6" }
schemars = { version = "1.0.0", features = ["url2"] }
seahash = { version = "4.1.0" }
secret-service = { version = "5.0.0", features = ["rt-tokio-crypto-rust"] }
security-framework = { version = "3" }
self-replace = { version = "1.5.0" }
serde = { version = "1.0.210", features = ["derive", "rc"] }
serde-untagged = { version = "0.1.6" }
serde_json = { version = "1.0.128" }
serde = { version = "1.0.194" }
serde_json = { version = "1.0.111" }
sha1 = { version = "0.10.6" }
sha2 = { version = "0.10.8" }
smallvec = { version = "1.13.2" }
spdx = { version = "0.13.0" }
syn = { version = "2.0.77" }
sys-info = { version = "0.9.1" }
tar = { version = "0.4.43" }
target-lexicon = { version = "0.13.0" }
tempfile = { version = "3.14.0" }
textwrap = { version = "0.16.1" }
thiserror = { version = "2.0.0" }
astral-tl = { version = "0.7.11" }
tokio = { version = "1.40.0", features = ["fs", "io-util", "macros", "process", "rt", "signal", "sync", "time"] }
tokio-stream = { version = "0.1.16" }
tokio-util = { version = "0.7.12", features = ["compat", "io"] }
toml = { version = "0.9.2", features = ["fast_hash"] }
toml_edit = { version = "0.23.2", features = ["serde"] }
tar = { version = "0.4.40" }
target-lexicon = { version = "0.12.13" }
task-local-extensions = { version = "0.1.4" }
tempfile = { version = "3.9.0" }
textwrap = { version = "0.16.0" }
thiserror = { version = "1.0.56" }
tl = { version = "0.7.7" }
tokio = { version = "1.35.1", features = ["rt-multi-thread"] }
tokio-stream = { version = "0.1.14" }
tokio-tar = { version = "0.3.1" }
tokio-util = { version = "0.7.10", features = ["compat"] }
toml = { version = "0.8.8" }
tracing = { version = "0.1.40" }
tracing-durations-export = { version = "0.3.0", features = ["plot"] }
tracing-subscriber = { version = "0.3.18" } # Default feature set for uv_build, uv activates extra features
tracing-test = { version = "0.2.5" }
tracing-tree = { version = "0.4.0" }
unicode-width = { version = "0.2.0" }
tracing-durations-export = { version = "0.2.0", features = ["plot"] }
tracing-indicatif = { version = "0.3.6" }
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
tracing-tree = { version = "0.3.0" }
unicode-width = { version = "0.1.11" }
unscanny = { version = "0.1.0" }
url = { version = "2.5.2", features = ["serde"] }
uuid = { version = "1.16.0" }
version-ranges = { version = "0.1.3", package = "astral-version-ranges" }
walkdir = { version = "2.5.0" }
which = { version = "8.0.0", features = ["regex"] }
windows = { version = "0.59.0", features = ["std", "Win32_Globalization", "Win32_System_LibraryLoader", "Win32_System_Console", "Win32_System_Kernel", "Win32_System_Diagnostics_Debug", "Win32_Storage_FileSystem", "Win32_Security", "Win32_System_Registry", "Win32_System_IO", "Win32_System_Ioctl"] }
windows-registry = { version = "0.5.0" }
wiremock = { version = "0.6.4" }
wmi = { version = "0.16.0", default-features = false }
xz2 = { version = "0.1.7" }
zeroize = { version = "1.8.1" }
zip = { version = "2.2.3", default-features = false, features = ["deflate", "zstd", "bzip2", "lzma", "xz"] }
zstd = { version = "0.13.3" }
url = { version = "2.5.0" }
urlencoding = { version = "2.1.3" }
uuid = { version = "1.7.0", default-features = false }
walkdir = { version = "2.4.0" }
which = { version = "6.0.0" }
zip = { version = "0.6.6", default-features = false, features = ["deflate"] }

# dev-dependencies
assert_cmd = { version = "2.0.16" }
assert_fs = { version = "1.1.2" }
byteorder = { version = "1.5.0" }
filetime = { version = "0.2.25" }
http-body-util = { version = "0.1.2" }
hyper = { version = "1.4.1", features = ["server", "http1"] }
hyper-util = { version = "0.1.8", features = ["tokio", "server", "http1"] }
ignore = { version = "0.4.23" }
insta = { version = "1.40.0", features = ["json", "filters", "redactions"] }
predicates = { version = "3.1.2" }
rcgen = { version = "0.14.5", features = ["crypto", "pem", "ring"], default-features = false }
rustls = { version = "0.23.29", default-features = false }
similar = { version = "2.6.0" }
temp-env = { version = "0.3.6" }
test-case = { version = "3.3.1" }
test-log = { version = "0.2.16", features = ["trace"], default-features = false }
tokio-rustls = { version = "0.26.2", default-features = false }
whoami = { version = "1.6.0" }

[patch.crates-io]
# For pyproject-toml
pep440_rs = { path = "crates/pep440-rs" }
pep508_rs = { path = "crates/pep508-rs" }

[workspace.lints.rust]
unsafe_code = "warn"

char_lit_as_u8 = "allow"
collapsible_else_if = "allow"
collapsible_if = "allow"
implicit_hasher = "allow"
map_unwrap_or = "allow"
match_same_arms = "allow"
missing_errors_doc = "allow"
missing_panics_doc = "allow"
module_name_repetitions = "allow"
must_use_candidate = "allow"
similar_names = "allow"
struct_excessive_bools = "allow"
too_many_arguments = "allow"
too_many_lines = "allow"
used_underscore_binding = "allow"
# Disallowed restriction lints
print_stdout = "warn"
print_stderr = "warn"

get_unwrap = "warn"
rc_buffer = "warn"
rc_mutex = "warn"
rest_pat_in_fully_bound_structs = "warn"
if_not_else = "allow"
use_self = "warn"

# Diagnostics are not actionable: Enable once https://github.com/rust-lang/rust-clippy/issues/13774 is resolved.
large_stack_arrays = "allow"

[profile.release]
strip = true
lto = "fat"

# This profile is meant to mimic the `release` profile as closely as
# possible, but using settings that are more beneficial for iterative
# development. That is, the `release` profile is intended for actually
# building the release, where as `profiling` is meant for building `uv`
# for running benchmarks.
#
# The main differences here are to avoid stripping debug information
# and disabling lto. This does result in a mismatch between our release
# configuration and our benchmarking configuration, which is unfortunate.
# But compile times with `lto = true` are completely untenable:
#
#     $ cargo b --profile profiling -p uv
#        Compiling uv-cli v0.0.1 (/home/andrew/astral/uv/crates/uv-cli)
#        Compiling uv v0.2.34 (/home/andrew/astral/uv/crates/uv)
#         Finished `profiling` profile [optimized + debuginfo] target(s) in 3m 47s
#
# Using `lto = "thin"` brings a massive improvement, but it's still slow:
#
#     $ cargo b --profile profiling -p uv
#        Compiling uv v0.2.34 (/home/andrew/astral/uv/crates/uv)
#         Finished `profiling` profile [optimized + debuginfo] target(s) in 53.98s
#
# But with `lto = false`:
#
#     $ cargo b --profile profiling -p uv
#        Compiling uv v0.2.34 (/home/andrew/astral/uv/crates/uv)
#         Finished `profiling` profile [optimized + debuginfo] target(s) in 30.09s
#
# We get more reasonable-ish compile times. At least, it's not enough
# time to get up and get a cup of coffee before it completes.
#
# This setup does risk that we are measuring something in benchmarks
# that we are shipping, but in order to make those two the same, we'd
# either need to make compile times way worse for development, or take
# a hit to binary size and a slight hit to runtime performance in our
# release builds.
[profile.profiling]
inherits = "release"
strip = false
debug = "full"
lto = false

# Profile for fast test execution: Skip debug info generation, and
# apply basic optimization, which speed up build and running tests.
[profile.fast-build]
inherits = "dev"
opt-level = 1
debug = 0
strip = "debuginfo"

# Profile for faster builds: Skip debug info generation, for faster
# builds of smaller binaries.
[profile.no-debug]
inherits = "dev"
debug = 0
strip = "debuginfo"

# Profile to build a minimally sized binary for uv-build
[profile.minimal-size]
inherits = "release"
opt-level = "z"
# This will still show a panic message, we only skip the unwind
panic = "abort"
codegen-units = 1
debug = true

# The profile that 'cargo dist' will build with.
[profile.dist]
inherits = "release"
lto = "thin"

# Config for 'cargo dist'
[workspace.metadata.dist]
# The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.8.0"
# CI backends to support
ci = ["github"]
# The installers to generate for each app
installers = ["shell", "powershell"]
# The archive format to use for windows builds (defaults .zip)
windows-archive = ".zip"
# The archive format to use for non-windows builds (defaults .tar.xz)
unix-archive = ".tar.gz"
# Target platforms to build apps for (Rust target-triple syntax)
targets = ["aarch64-unknown-linux-gnu", "x86_64-unknown-linux-gnu", "i686-unknown-linux-gnu", "aarch64-apple-darwin", "x86_64-apple-darwin", "aarch64-unknown-linux-musl", "x86_64-unknown-linux-musl", "i686-unknown-linux-musl", "x86_64-pc-windows-msvc", "i686-pc-windows-msvc", "armv7-unknown-linux-gnueabihf", "powerpc64-unknown-linux-gnu", "powerpc64le-unknown-linux-gnu", "s390x-unknown-linux-gnu"]
# Whether to auto-include files like READMEs and CHANGELOGs (default true)
auto-includes = false
# Whether cargo-dist should create a Github Release or use an existing draft
create-release = true
# Publish jobs to run in CI
pr-run-mode = "skip"
# Whether CI should trigger releases with dispatches instead of tag pushes
dispatch-releases = true
# Whether CI should include auto-generated code to build local artifacts
build-local-artifacts = false
# Local artifacts jobs to run in CI
local-artifacts-jobs = ["./build-binaries"]
# Publish jobs to run in CI
publish-jobs = ["./publish-pypi"]
# For: `permissions: packages: write`.
allow-dirty = ["ci"]
Dockerfile

FROM --platform=$BUILDPLATFORM ubuntu AS build
FROM --platform=$BUILDPLATFORM ubuntu as build
ENV HOME="/root"
WORKDIR $HOME

RUN apt update \
    build-essential \
    curl \
    python3-venv \
    cmake \
    && apt clean \
    && rm -rf /var/lib/apt/lists/*

RUN case "$TARGETPLATFORM" in \
    "linux/amd64") echo "x86_64-unknown-linux-musl" > rust_target.txt ;; \
    *) exit 1 ;; \
    esac

# Temporarily using nightly-2025-11-02 for bundled musl v1.2.5
# Ref: https://github.com/rust-lang/rust/pull/142682
# TODO(samypr100): Remove when toolchain updates to 1.93
COPY <<EOF rust-toolchain.toml
[toolchain]
channel = "nightly-2025-11-02"
EOF
# Update rustup whenever we bump the rust version
# COPY rust-toolchain.toml rust-toolchain.toml
COPY rust-toolchain.toml rust-toolchain.toml
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --target $(cat rust_target.txt) --profile minimal --default-toolchain none
ENV PATH="$HOME/.cargo/bin:$PATH"
# Install the toolchain then the musl target
RUN rustup toolchain install
# Installs the correct toolchain version from rust-toolchain.toml and then the musl target
RUN rustup target add $(cat rust_target.txt)

# Build
COPY crates crates
COPY ./Cargo.toml Cargo.toml
COPY ./Cargo.lock Cargo.lock
RUN case "${TARGETPLATFORM}" in \
    "linux/arm64") export JEMALLOC_SYS_WITH_LG_PAGE=16;; \
    esac && \
    cargo zigbuild --bin uv --bin uvx --target $(cat rust_target.txt) --release
RUN cp target/$(cat rust_target.txt)/release/uv /uv \
    && cp target/$(cat rust_target.txt)/release/uvx /uvx
RUN cargo zigbuild --bin uv --target $(cat rust_target.txt) --release
RUN cp target/$(cat rust_target.txt)/release/uv /uv
# TODO(konsti): Optimize binary size, with a version that also works when cross compiling
# RUN strip --strip-all /uv

FROM scratch
COPY --from=build /uv /uvx /
COPY --from=build /uv /uv
WORKDIR /io
ENTRYPOINT ["/uv"]
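As an illustrative sketch of using the resulting image (the `uv-local` tag is an arbitrary
placeholder, not part of the compare above), a project can be mounted at `/io`, the `WORKDIR`
declared in the final stage:

```shell
# Build the image, then run uv against a project mounted at /io.
docker build -t uv-local .
docker run --rm -v "$(pwd)":/io uv-local pip compile requirements.in -o requirements.txt
```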
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
MIT License

Copyright (c) 2025 Astral Software Inc.
Copyright (c) 2023 Astral Software Inc.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
This reference document has moved to the
[documentation website](https://docs.astral.sh/uv/pip/compatibility/).
README.md

[PyPI](https://pypi.python.org/pypi/uv) · [CI](https://github.com/astral-sh/uv/actions) ·
[Discord](https://discord.gg/astral-sh)

An extremely fast Python package and project manager, written in Rust.
An extremely fast Python package installer and resolver, written in Rust. Designed as a drop-in
replacement for `pip` and `pip-compile`.

<p align="center">
  <picture align="center">
    <source media="(prefers-color-scheme: dark)" srcset="https://github.com/astral-sh/uv/assets/1309177/03aa9163-1c79-4a87-a31d-7a9311ed9310">
    <source media="(prefers-color-scheme: light)" srcset="https://github.com/astral-sh/uv/assets/1309177/629e59c0-9c6e-4013-9ad4-adb2bcf5080d">
    <img alt="Shows a bar chart with benchmark results." src="https://github.com/astral-sh/uv/assets/1309177/629e59c0-9c6e-4013-9ad4-adb2bcf5080d">
  </picture>
</p>

<p align="center">
  <i>Installing <a href="https://trio.readthedocs.io/">Trio</a>'s dependencies with a warm cache.</i>
</p>

uv is backed by [Astral](https://astral.sh), the creators of [Ruff](https://github.com/astral-sh/ruff).

## Highlights

- 🚀 A single tool to replace `pip`, `pip-tools`, `pipx`, `poetry`, `pyenv`, `twine`, `virtualenv`,
  and more.
- ⚡️ [10-100x faster](https://github.com/astral-sh/uv/blob/main/BENCHMARKS.md) than `pip`.
- 🗂️ Provides [comprehensive project management](#projects), with a
  [universal lockfile](https://docs.astral.sh/uv/concepts/projects/layout#the-lockfile).
- ❇️ [Runs scripts](#scripts), with support for
  [inline dependency metadata](https://docs.astral.sh/uv/guides/scripts#declaring-script-dependencies).
- 🐍 [Installs and manages](#python-versions) Python versions.
- 🛠️ [Runs and installs](#tools) tools published as Python packages.
- 🔩 Includes a [pip-compatible interface](#the-pip-interface) for a performance boost with a
  familiar CLI.
- 🏢 Supports Cargo-style [workspaces](https://docs.astral.sh/uv/concepts/projects/workspaces) for
  scalable projects.
- 💾 Disk-space efficient, with a [global cache](https://docs.astral.sh/uv/concepts/cache) for
  dependency deduplication.
- ⏬ Installable without Rust or Python via `curl` or `pip`.
- 🖥️ Supports macOS, Linux, and Windows.
- ⚖️ Drop-in replacement for common `pip`, `pip-tools`, and `virtualenv` commands.
- ⚡️ [10-100x faster](https://github.com/astral-sh/uv/blob/main/BENCHMARKS.md) than `pip`
  and `pip-tools` (`pip-compile` and `pip-sync`).
- 💾 Disk-space efficient, with a global cache for dependency deduplication.
- 🐍 Installable via `curl`, `pip`, `pipx`, etc. uv is a static binary that can be installed
  without Rust or Python.
- 🧪 Tested at-scale against the top 10,000 PyPI packages.
- 🖥️ Support for macOS, Linux, and Windows.
- 🧰 Advanced features such as [dependency version overrides](#dependency-overrides) and
  [alternative resolution strategies](#resolution-strategy).
- ⁉️ Best-in-class error messages with a conflict-tracking resolver.
- 🤝 Support for a wide range of advanced `pip` features, including editable installs, Git
  dependencies, direct URL dependencies, local dependencies, constraints, source distributions,
  HTML and JSON indexes, and more.

uv is backed by [Astral](https://astral.sh), the creators of
[Ruff](https://github.com/astral-sh/ruff) and [ty](https://github.com/astral-sh/ty).

## Getting Started

## Installation

Install uv with our standalone installers, or from [PyPI](https://pypi.org/project/uv/):
Install uv with our standalone installers:

```shell
# On macOS and Linux.
curl -LsSf https://astral.sh/uv/install.sh | sh
```

```shell
# On Windows.
powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/install.ps1 | iex"
irm https://astral.sh/uv/install.ps1 | iex
```

Or, from [PyPI](https://pypi.org/project/uv/):

```shell
# With pip.
pip install uv
```

```shell
# Or pipx.
# With pipx.
pipx install uv
```

If installed via the standalone installer, uv can update itself to the latest version:
To create a virtual environment:

```shell
uv self update
uv venv  # Create a virtual environment at .venv.
```

See the [installation documentation](https://docs.astral.sh/uv/getting-started/installation/) for
details and alternative installation methods.

To activate the virtual environment:

## Documentation

```shell
# On macOS and Linux.
source .venv/bin/activate
```

uv's documentation is available at [docs.astral.sh/uv](https://docs.astral.sh/uv).

Additionally, the command line reference documentation can be viewed with `uv help`.

## Features

### Projects

uv manages project dependencies and environments, with support for lockfiles, workspaces, and more,
similar to `rye` or `poetry`:

```console
$ uv init example
Initialized project `example` at `/home/user/example`

$ cd example

$ uv add ruff
Creating virtual environment at: .venv
Resolved 2 packages in 170ms
   Built example @ file:///home/user/example
Prepared 2 packages in 627ms
Installed 2 packages in 1ms
 + example==0.1.0 (from file:///home/user/example)
 + ruff==0.5.0

$ uv run ruff check
All checks passed!

$ uv lock
Resolved 2 packages in 0.33ms

$ uv sync
Resolved 2 packages in 0.70ms
Audited 1 package in 0.02ms
```

```shell
# On Windows.
.\.venv\Scripts\activate.ps1
```

See the [project documentation](https://docs.astral.sh/uv/guides/projects/) to get started.

To install a package into the virtual environment:

uv also supports building and publishing projects, even if they're not managed with uv. See the
[publish guide](https://docs.astral.sh/uv/guides/publish/) to learn more.

### Scripts

uv manages dependencies and environments for single-file scripts.

Create a new script and add inline metadata declaring its dependencies:

```console
$ echo 'import requests; print(requests.get("https://astral.sh"))' > example.py

$ uv add --script example.py requests
Updated `example.py`
```

```shell
uv pip install flask                # Install Flask.
uv pip install -r requirements.txt  # Install from a requirements.txt file.
uv pip install -e .                 # Install the current project in editable mode.
uv pip install "package @ ."        # Install the current project from disk
```

Then, run the script in an isolated virtual environment:
To generate a set of locked dependencies from an input file:

```console
$ uv run example.py
Reading inline script metadata from: example.py
Installed 5 packages in 12ms
<Response [200]>
```

```shell
uv pip compile pyproject.toml -o requirements.txt   # Read a pyproject.toml file.
uv pip compile requirements.in -o requirements.txt  # Read a requirements.in file.
```

See the [scripts documentation](https://docs.astral.sh/uv/guides/scripts/) to get started.

To sync a set of locked dependencies with the virtual environment:

### Tools

uv executes and installs command-line tools provided by Python packages, similar to `pipx`.

Run a tool in an ephemeral environment using `uvx` (an alias for `uv tool run`):

```console
$ uvx pycowsay 'hello world!'
Resolved 1 package in 167ms
Installed 1 package in 9ms
 + pycowsay==0.0.0.2
  """

  ------------
< hello world! >
  ------------
   \   ^__^
    \  (oo)\_______
       (__)\       )\/\
           ||----w |
           ||     ||
```

```shell
uv pip sync requirements.txt  # Install from a requirements.txt file.
```

Install a tool with `uv tool install`:

uv's `pip-install` and `pip-compile` commands support many of the same command-line arguments
as existing tools, including `-r requirements.txt`, `-c constraints.txt`, `-e .` (for editable
installs), `--index-url`, and more.
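For illustration (assembled from the flags listed above, not an excerpt from either README version),
those arguments compose in the familiar pip style; the index URL shown is simply PyPI's default:

```shell
# Constraints plus a requirements file.
uv pip install -r requirements.txt -c constraints.txt

# Editable install of the current project against an explicit index.
uv pip install -e . --index-url https://pypi.org/simple
```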
```console
$ uv tool install ruff
Resolved 1 package in 6ms
Installed 1 package in 2ms
 + ruff==0.5.0
Installed 1 executable: ruff

$ ruff --version
ruff 0.5.0
```

## Limitations

uv does not support the entire `pip` feature set. Namely, uv does not (and does not plan to)
support the following `pip` features:

- `.egg` dependencies
- Editable installs for Git and direct URL dependencies (editable installs _are_ supported for local
  dependencies)

On the other hand, uv plans to (but does not currently) support:

- [Hash-checking mode](https://github.com/astral-sh/uv/issues/474)
- [URL requirements without package names](https://github.com/astral-sh/uv/issues/313)
  (e.g., `https://...` instead of `package @ https://...`)

Like `pip-compile`, uv generates a platform-specific `requirements.txt` file (unlike, e.g.,
`poetry` and `pdm`, which generate platform-agnostic `poetry.lock` and `pdm.lock` files). As such,
uv's `requirements.txt` files may not be portable across platforms and Python versions.

## Roadmap

uv is an extremely fast Python package resolver and installer, designed as a drop-in
replacement for `pip`, `pip-tools` (`pip-compile` and `pip-sync`), and `virtualenv`.

uv represents an intermediary goal in our pursuit of a ["Cargo for Python"](https://blog.rust-lang.org/2016/05/05/cargo-pillars.html#pillars-of-cargo):
a comprehensive project and package manager that is extremely fast, reliable, and easy to use.

Think: a single binary that bootstraps your Python installation and gives you everything you need to
be productive with Python, bundling not only `pip`, `pip-tools`, and `virtualenv`, but also `pipx`,
`tox`, `poetry`, `pyenv`, `ruff`, and more.

Our goal is to evolve uv into such a tool.

In the meantime, though, the narrower `pip-tools` scope allows us to solve the low-level problems
involved in building such a tool (like package installation) while shipping something immediately
useful with a minimal barrier to adoption.

## Advanced Usage

### Python discovery

uv itself does not depend on Python, but it does need to locate a Python environment to (1)
install dependencies into the environment and (2) build source distributions.

When running `pip sync` or `pip install`, uv will search for a virtual environment in the
following order:

- An activated virtual environment based on the `VIRTUAL_ENV` environment variable.
- An activated Conda environment based on the `CONDA_PREFIX` environment variable.
- A virtual environment at `.venv` in the current directory, or in the nearest parent directory.

If no virtual environment is found, uv will prompt the user to create one in the current
directory via `uv venv`.

When running `pip compile`, uv does not _require_ a virtual environment and will search for a
Python interpreter in the following order:

- An activated virtual environment based on the `VIRTUAL_ENV` environment variable.
- An activated Conda environment based on the `CONDA_PREFIX` environment variable.
- A virtual environment at `.venv` in the current directory, or in the nearest parent directory.
- The Python interpreter available as `python3` on macOS and Linux, or `python.exe` on Windows.

If a `--python-version` is provided to `pip compile` (e.g., `--python-version=3.7`), uv will
search for a Python interpreter matching that version in the following order:

- An activated virtual environment based on the `VIRTUAL_ENV` environment variable.
- An activated Conda environment based on the `CONDA_PREFIX` environment variable.
- A virtual environment at `.venv` in the current directory, or in the nearest parent directory.
- The Python interpreter available as, e.g., `python3.7` on macOS and Linux. On Windows, uv
  will use the same mechanism as `py --list-paths` to discover all available Python interpreters,
  and will select the first interpreter matching the requested version.
- The Python interpreter available as `python3` on macOS and Linux, or `python.exe` on Windows.

Since uv has no dependency on Python, it can even install into virtual environments other than
its own. For example, setting `VIRTUAL_ENV=/path/to/venv` will cause uv to install into
`/path/to/venv`, no matter where uv is installed.
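As a concrete sketch of that override (the venv path is the same placeholder used above):

```shell
# Install into an arbitrary virtual environment, regardless of where uv itself lives.
VIRTUAL_ENV=/path/to/venv uv pip install requests
```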
### Dependency caching

uv uses aggressive caching to avoid re-downloading (and re-building) dependencies that have
already been accessed in prior runs.

The specifics of uv's caching semantics vary based on the nature of the dependency:

- **For registry dependencies** (like those downloaded from PyPI), uv respects HTTP caching headers.
- **For direct URL dependencies**, uv respects HTTP caching headers, and also caches based on
  the URL itself.
- **For Git dependencies**, uv caches based on the fully-resolved Git commit hash. As such,
  `uv pip compile` will pin Git dependencies to a specific commit hash when writing the resolved
  dependency set.
- **For local dependencies**, uv caches based on the last-modified time of the `setup.py` or
  `pyproject.toml` file.

If you're running into caching issues, uv includes a few escape hatches (shown together below):

- To force uv to revalidate cached data for all dependencies, run `uv pip install --refresh ...`.
- To force uv to revalidate cached data for a specific dependency, run, e.g.,
  `uv pip install --refresh-package flask ...`.
- To force uv to ignore existing installed versions, run `uv pip install --reinstall ...`.
- To clear the global cache entirely, run `uv clean`.
|
||||
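For example, to revalidate the cache for a single package while leaving the rest untouched (the
package and requirements file names follow the examples in this document):

```console
$ uv pip install --refresh-package flask -r requirements.txt
```
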
### Resolution strategy

By default, uv follows the standard Python dependency resolution strategy of preferring the
latest compatible version of each package. For example, `uv pip install flask>=2.0.0` will
install the latest version of Flask (at time of writing: `3.0.0`).

However, uv's resolution strategy can be configured to prefer the _lowest_ compatible version of
each package (`--resolution=lowest`), or even the lowest compatible version of any _direct_
dependencies (`--resolution=lowest-direct`), both of which can be useful for library authors looking
to test their packages against the oldest supported versions of their dependencies.

For example, given the following `requirements.in` file:

```text
flask>=2.0.0
```

Running `uv pip compile requirements.in` would produce the following `requirements.txt` file:

```text
# This file was autogenerated by uv v0.0.1 via the following command:
#    uv pip compile requirements.in
blinker==1.7.0
    # via flask
click==8.1.7
    # via flask
flask==3.0.0
itsdangerous==2.1.2
    # via flask
jinja2==3.1.2
    # via flask
markupsafe==2.1.3
    # via
    #   jinja2
    #   werkzeug
werkzeug==3.0.1
    # via flask
```

However, `uv pip compile --resolution=lowest requirements.in` would instead produce:

```text
# This file was autogenerated by uv v0.0.1 via the following command:
#    uv pip compile requirements.in --resolution=lowest
click==7.1.2
    # via flask
flask==2.0.0
itsdangerous==2.0.0
    # via flask
jinja2==3.0.0
    # via flask
markupsafe==2.0.0
    # via jinja2
werkzeug==2.0.0
    # via flask
```

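The `--resolution=lowest-direct` strategy is invoked the same way; the exact pins in the resulting
output will depend on the state of the package index:

```console
$ uv pip compile requirements.in --resolution=lowest-direct
```
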
### Pre-release handling

By default, uv will accept pre-release versions during dependency resolution in two cases:

1. If the package is a direct dependency, and its version markers include a pre-release specifier
   (e.g., `flask>=2.0.0rc1`).
1. If _all_ published versions of a package are pre-releases.

If dependency resolution fails due to a transitive pre-release, uv will prompt the user to
re-run with `--prerelease=allow`, to allow pre-releases for all dependencies.

Alternatively, you can add the transitive dependency to your `requirements.in` file with a
pre-release specifier (e.g., `flask>=2.0.0rc1`) to opt in to pre-release support for that specific
dependency.

Pre-releases are [notoriously difficult](https://pubgrub-rs-guide.netlify.app/limitations/prerelease_versions)
to model, and are a frequent source of bugs in other packaging tools. uv's pre-release handling
is _intentionally_ limited and _intentionally_ requires user intervention to opt in to pre-releases
to ensure correctness, though pre-release handling will be revisited in future releases.

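As an illustration (the specifier mirrors the example above), opting a single direct dependency
into pre-releases looks like this in `requirements.in`:

```text
flask>=2.0.0rc1
```
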
### Dependency overrides

Historically, `pip` has supported "constraints" (`-c constraints.txt`), which allows users to
narrow the set of acceptable versions for a given package.

uv supports constraints, but also takes this concept further by allowing users to _override_ the
acceptable versions of a package across the dependency tree via overrides (`-o overrides.txt`).

In short, overrides allow the user to lie to the resolver by overriding the declared dependencies
of a package. Overrides are a useful last resort for cases in which the user knows that a
dependency is compatible with a newer version of a package than the package declares, but the
package has not yet been updated to declare that compatibility.

For example, if a transitive dependency declares `pydantic>=1.0,<2.0`, but the user knows that
the package is compatible with `pydantic>=2.0`, the user can override the declared dependency
with `pydantic>=2.0,<3` to allow the resolver to continue.

While constraints are purely _additive_, and thus cannot _expand_ the set of acceptable versions for
a package, overrides _can_ expand the set of acceptable versions for a package, providing an escape
hatch for erroneous upper version bounds.

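Concretely, for the hypothetical `pydantic` case above, the override file and a sketch of the
invocation (shown here with the long-form `--override` flag) might look like:

```text
# overrides.txt — assumes the dependent package really does work with pydantic 2.x
pydantic>=2.0,<3
```

```console
$ uv pip compile requirements.in --override overrides.txt
```
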
### Multi-version resolution

uv's `pip compile` command produces a resolution that's known to be compatible with the
current platform and Python version. Unlike Poetry, PDM, and other package managers, uv does
not yet produce a machine-agnostic lockfile.

However, uv _does_ support resolving for alternate Python versions via the `--python-version`
command line argument. For example, if you're running uv on Python 3.9, but want to resolve for
Python 3.8, you can run `uv pip compile --python-version=3.8 requirements.in` to produce a
Python 3.8-compatible resolution.

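For example, resolving for Python 3.8 from a newer interpreter (the output file name is
illustrative):

```console
$ uv pip compile --python-version=3.8 requirements.in --output-file requirements-py38.txt
```
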
### Python versions

uv installs Python and allows quickly switching between versions.

Install multiple Python versions:

```console
$ uv python install 3.12 3.13 3.14
Installed 3 versions in 972ms
 + cpython-3.12.12-macos-aarch64-none (python3.12)
 + cpython-3.13.9-macos-aarch64-none (python3.13)
 + cpython-3.14.0-macos-aarch64-none (python3.14)
```

Download Python versions as needed:

```console
$ uv venv --python 3.12.0
Using Python 3.12.0
Creating virtual environment at: .venv
Activate with: source .venv/bin/activate

$ uv run --python pypy@3.8 -- python --version
Python 3.8.16 (a9dbdca6fc3286b0addd2240f11d97d8e8de187a, Dec 29 2022, 11:45:30)
[PyPy 7.3.11 with GCC Apple LLVM 13.1.6 (clang-1316.0.21.2.5)] on darwin
Type "help", "copyright", "credits" or "license" for more information.
>>>>
```

Use a specific Python version in the current directory:

```console
$ uv python pin 3.11
Pinned `.python-version` to `3.11`
```

See the [Python installation documentation](https://docs.astral.sh/uv/guides/install-python/) to get
started.

### The pip interface

uv provides a drop-in replacement for common `pip`, `pip-tools`, and `virtualenv` commands.

uv extends their interfaces with advanced features, such as dependency version overrides,
platform-independent resolutions, reproducible resolutions, alternative resolution strategies, and
more.

Migrate to uv without changing your existing workflows — and experience a 10-100x speedup — with the
`uv pip` interface.

Compile requirements into a platform-independent requirements file:

```console
$ uv pip compile docs/requirements.in \
    --universal \
    --output-file docs/requirements.txt
Resolved 43 packages in 12ms
```

Create a virtual environment:

```console
$ uv venv
Using Python 3.12.3
Creating virtual environment at: .venv
Activate with: source .venv/bin/activate
```

Install the locked requirements:

```console
$ uv pip sync docs/requirements.txt
Resolved 43 packages in 11ms
Installed 43 packages in 208ms
 + babel==2.15.0
 + black==24.4.2
 + certifi==2024.7.4
 ...
```

See the [pip interface documentation](https://docs.astral.sh/uv/pip/index/) to get started.

See the [tools documentation](https://docs.astral.sh/uv/guides/tools/) to get started.

## Contributing

We are passionate about supporting contributors of all levels of experience and would love to see
you get involved in the project. See the
[contributing guide](https://github.com/astral-sh/uv/blob/main/CONTRIBUTING.md) to get started.

## Platform support

uv has Tier 1 support for the following platforms:

- macOS (Apple Silicon)
- macOS (x86_64)
- Linux (x86_64)
- Windows (x86_64)

uv is continuously built, tested, and developed against its Tier 1 platforms. Inspired by the
Rust project, Tier 1 can be thought of as ["guaranteed to work"](https://doc.rust-lang.org/beta/rustc/platform-support.html).

uv has Tier 2 support (["guaranteed to build"](https://doc.rust-lang.org/beta/rustc/platform-support.html)) for the following platforms:

- Linux (PPC64)
- Linux (PPC64LE)
- Linux (aarch64)
- Linux (armv7)
- Linux (i686)
- Linux (s390x)

uv ships pre-built wheels to [PyPI](https://pypi.org/project/uv/) for its Tier 1 and
Tier 2 platforms. However, while Tier 2 platforms are continuously built, they are not continuously
tested or developed against, and so stability may vary in practice.

Beyond the Tier 1 and Tier 2 platforms, uv is known to build on i686 Windows, and known _not_
to build on aarch64 Windows, but does not consider either platform to be supported at this time.

uv supports and is tested against Python 3.8, 3.9, 3.10, 3.11, and 3.12.

## FAQ

#### How do you pronounce uv?

It's pronounced as "you - vee" ([`/juː viː/`](https://en.wikipedia.org/wiki/Help:IPA/English#Key))

#### How should I stylize uv?

Just "uv", please. See the [style guide](./STYLE.md#styling-uv) for details.

#### What platforms does uv support?

See uv's [platform support](https://docs.astral.sh/uv/reference/platforms/) document.

#### Is uv ready for production?

Yes, uv is stable and widely used in production. See uv's
[versioning policy](https://docs.astral.sh/uv/reference/versioning/) document for details.

## Acknowledgements

uv's dependency resolver uses [PubGrub](https://github.com/pubgrub-rs/pubgrub) under the hood. We're
grateful to the PubGrub maintainers, especially [Jacob Finkelman](https://github.com/Eh2406), for
their support.

uv's Git implementation is based on [Cargo](https://github.com/rust-lang/cargo).

Some of uv's optimizations are inspired by the great work we've seen in [pnpm](https://pnpm.io/),
[Orogene](https://github.com/orogene/orogene), and [Bun](https://github.com/oven-sh/bun). We've also
learned a lot from Nathaniel J. Smith's [Posy](https://github.com/njsmith/posy) and adapted its
[trampoline](https://github.com/njsmith/posy/tree/main/src/trampolines/windows-trampolines/posy-trampoline)
for Windows support.

## License

uv is licensed under either of

- Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
  <https://www.apache.org/licenses/LICENSE-2.0>)
- MIT license ([LICENSE-MIT](LICENSE-MIT) or <https://opensource.org/licenses/MIT>)

at your option.

Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in uv
by you, as defined in the Apache-2.0 license, shall be dually licensed as above, without any
additional terms or conditions.

# Security policy

## Scope of security vulnerabilities

uv is a Python package manager. Due to the design of the Python packaging ecosystem and the dynamic
nature of Python itself, there are many cases where uv can execute arbitrary code. For example:

- uv invokes Python interpreters on the system to retrieve metadata
- uv builds source distributions as described by PEP 517
- uv may build packages from the requested package indexes

These are not considered vulnerabilities in uv. If you think uv's stance in these areas can be
hardened, please file an issue for a new feature.

## Reporting a vulnerability

If you have found a possible vulnerability that is not excluded by the above
[scope](#scope-of-security-vulnerabilities), please email `security at astral dot sh`.

## Bug bounties

While we sincerely appreciate and encourage reports of suspected security problems, please note that
Astral does not currently run any bug bounty programs.

## Vulnerability disclosures

Critical vulnerabilities will be disclosed via GitHub's
[security advisory](https://github.com/astral-sh/uv/security) system.

# Style guide

_The following is a work-in-progress style guide for our user-facing messaging in the CLI output and
documentation_.

## General

1. Use of "e.g." and "i.e." should always be wrapped in commas, e.g., as shown here.
1. Em-dashes are okay, but not recommended when using monospace fonts. Use "—", not "--" or "-".
1. Always wrap em-dashes in spaces, e.g., "hello — world" not "hello—world".
1. Hyphenate compound words, e.g., use "platform-specific" not "platform specific".
1. Use backticks to escape: commands, code expressions, package names, and file paths.
1. Use less than and greater than symbols to wrap bare URLs, e.g., `<https://astral.sh>` (unless it
   is an example; then, use backticks).
1. Avoid bare URLs outside of reference documentation, prefer labels, e.g., `[name](url)`.
1. If a message ends with a single relevant value, precede it with a colon, e.g.,
   `This is the value: value`. If the value is a literal, wrap it in backticks.
1. Markdown files should be wrapped at 100 characters.
1. Use a space, not an equals sign, for command-line arguments with a value, e.g.,
   `--resolution lowest`, not `--resolution=lowest`.

## Styling uv

Just uv, please.

1. Do not escape with backticks, e.g., `uv`, unless referring specifically to the `uv` executable.
1. Do not capitalize, e.g., "Uv", even at the beginning of a sentence.
1. Do not uppercase, e.g., "UV", unless referring to an environment variable, e.g., `UV_PYTHON`.

## Terminology

1. Use "lockfile" not "lock file".
2. Use "pre-release", not "prerelease" (except in code, in which case: use `Prerelease`, not
   `PreRelease`; and `prerelease`, not `pre_release`).

## Documentation

1. Use periods at the end of all sentences, including lists, unless they enumerate single items.
1. Avoid language that patronizes the reader, e.g., "simply do this".
1. Only refer to "the user" in internal or contributor documentation.
1. Avoid "we" in favor of "uv" or imperative language.

### Sections

The documentation is divided into:

1. Guides
2. Concepts
3. Reference documentation

#### Guides

1. Should assume no previous knowledge about uv.
1. May assume basic knowledge of the domain.
1. Should refer to relevant concept documentation.
1. Should have a clear flow.
1. Should be followed by a clear call to action.
1. Should cover the basic behavior needed to get started.
1. Should not cover behavior in detail.
1. Should not enumerate all possibilities.
1. Should avoid linking to reference documentation unless not covered in a concept document.
1. May generally ignore platform-specific behavior.
1. Should be written from the second-person point of view.
1. Should use the imperative voice.

#### Concepts

1. Should cover behavior in detail.
1. Should not enumerate all possibilities.
1. Should cover the most common configuration.
1. Should refer to the relevant reference documentation.
1. Should discuss platform-specific behavior.
1. Should be written from the third-person point of view, not second-person (i.e., avoid "you").
1. Should not use the imperative voice.

#### Reference documentation

1. Should enumerate all options.
1. Should generally be generated from documentation in the code.
1. Should be written from the third-person point of view, not second-person (i.e., avoid "you").
1. Should not use the imperative voice.

### Code blocks

1. All code blocks should have a language marker.
1. When using `console` syntax, use `$` to indicate commands — everything else is output.
1. Never use the `bash` syntax when displaying command output.
1. Prefer `console` with `$`-prefixed commands over `bash`.
1. Command output should rarely be included — it's hard to keep up-to-date.
1. Use `title` for example files, e.g., `pyproject.toml`, `Dockerfile`, or `example.py`.
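
For instance, a documentation snippet following these rules might look like the following (the
command, file name, and project name are illustrative):

````markdown
```console
$ uv pip install -r requirements.txt
```

```toml title="pyproject.toml"
[project]
name = "example"
```
````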

## CLI

1. Do not use periods at the end of sentences :), unless the message spans more than a single
   sentence.
1. May use the second-person point of view, e.g., "Did you mean...?".

### Colors and style

1. All CLI output must be interpretable and understandable _without_ the use of color and other
   styling. (For example: even if a command is rendered in green, wrap it in backticks.)
1. `NO_COLOR` must be respected when using any colors or styling.
1. `UV_NO_PROGRESS` must be respected when using progress-styling like bars or spinners.
1. In general, use:
   - Green for success.
   - Red for error.
   - Yellow for warning.
   - Cyan for hints.
   - Cyan for file paths.
   - Cyan for important user-facing literals (e.g., a package name in a message).
   - Green for commands.

### Logging

1. `warn`, `info`, `debug`, and `trace` logs are all shown with the `--verbose` flag.
   - Note that the displayed level is controlled with `RUST_LOG`.
1. All logging should be to stderr.

### Output

1. Text can be written to stdout if it is "data" that could be piped to another program.

### Warnings

1. `warn_user` and `warn_user_once` are shown without the `--verbose` flag.
   - These methods should be preferred over tracing warnings when the warning is actionable.
   - Deprecation warnings should use these methods.
1. Deprecation warnings must be actionable.

### Hints

1. Errors may be followed by hints suggesting a solution.
1. Hints should be separated from errors by a blank line.
1. Hints should be stylized as `hint: <content>`.

```toml title="_typos.toml"
[files]
extend-exclude = [
  "**/snapshots/",
  "test/ecosystem/**",
  "test/requirements/**/*.in",
  "crates/uv-build-frontend/src/pipreqs/mapping",
]
ignore-hidden = false

[default]
extend-ignore-re = [
  "FRiENDlY-\\.\\.\\.-_-BARd",
  "FrIeNdLy-\\._\\.-bArD",
  "I borken you cache",
  "eb1ba5f5",
  "e8208120cae3ba69",
  "github_pat_[0-9a-zA-Z_]+",
  "LICENSEs",
  "astroid",
  "**/resources/**/*",
  "**/snapshots/**/*",
  "scripts/**/*.in",
]

[default.extend-identifiers]

[default.extend-words]
BA = "BA" # acronym for "Bad Allowed", used in testing.
Nd = "Nd" # secret codeword used by friendly bards
borken = "borken" # the word is borken :(
seeked = "seeked" # special term used for streams
```

(Figure: benchmark bar chart, x-axis in seconds: uv 0.06s, poetry 0.99s, pdm 1.90s, pip-sync 4.63s;
dark- and light-mode SVG variants.)

# Changelog 0.3.x

## 0.3.0

This release introduces the uv [project](https://docs.astral.sh/uv/guides/projects/),
[tool](https://docs.astral.sh/uv/guides/tools/),
[script](https://docs.astral.sh/uv/guides/scripts/), and
[python](https://docs.astral.sh/uv/guides/install-python/) interfaces. If you've been following uv's
development, you've probably seen these new commands behind a preview flag. Now, the interfaces are
stable and ready for production use.

These features are all documented in [new, comprehensive documentation](https://docs.astral.sh/uv/).

This release also stabilizes preview functionality in `uv venv`:

- `uv venv --python <version>` will
  [automatically download](https://docs.astral.sh/uv/concepts/python-versions/#requesting-a-version)
  the Python version if required, as shown in the sketch below
- `uv venv` will read the required Python version from the `.python-version` file or
  `pyproject.toml`
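
A minimal sketch of the stabilized behavior (the Python version is chosen arbitrarily):

```console
$ uv venv --python 3.12
```
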
|
||||
|
||||
The `uv pip` interface should not be affected by any breaking changes.
|
||||
|
||||
Note the following changelog entries does not include all the new features since they were added
|
||||
incrementally as preview features. See the
|
||||
[feature page](https://docs.astral.sh/uv/getting-started/features/) in the documentation for a
|
||||
comprehensive listing, or read the [blog post](https://astral.sh/blog/uv-unified-python-packaging)
|
||||
for more context on the new features.
|
||||
|
||||
### Breaking changes
|
||||
|
||||
- Migrate to XDG and Linux strategy for macOS directories
|
||||
([#5806](https://github.com/astral-sh/uv/pull/5806))
|
||||
- Move concurrency settings to top-level ([#4257](https://github.com/astral-sh/uv/pull/4257))
|
||||
- Apply system Python filtering to executable name requests
|
||||
([#4309](https://github.com/astral-sh/uv/pull/4309))
|
||||
- Remove `--legacy-setup-py` command-line argument
|
||||
([#4255](https://github.com/astral-sh/uv/pull/4255))
|
||||
- Stabilize preview features ([#6166](https://github.com/astral-sh/uv/pull/6166))
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add 32-bit Windows target ([#6252](https://github.com/astral-sh/uv/pull/6252))
|
||||
- Add support for `python_version in ...` markers
|
||||
([#6172](https://github.com/astral-sh/uv/pull/6172))
|
||||
- Allow user to constrain supported lock environments
|
||||
([#6210](https://github.com/astral-sh/uv/pull/6210))
|
||||
- Lift requirement that .egg-info filenames must include version
|
||||
([#6179](https://github.com/astral-sh/uv/pull/6179))
|
||||
- Change "any of" to "all of" in error messages ([#6222](https://github.com/astral-sh/uv/pull/6222))
|
||||
- Collapse redundant dependency clauses enumerating available versions
|
||||
([#6160](https://github.com/astral-sh/uv/pull/6160))
|
||||
- Collapse unavailable packages in resolver errors
|
||||
([#6154](https://github.com/astral-sh/uv/pull/6154))
|
||||
- Fix messages for unavailable packages when range is plural
|
||||
([#6221](https://github.com/astral-sh/uv/pull/6221))
|
||||
- Improve resolver error messages when `--offline` is used
|
||||
([#6156](https://github.com/astral-sh/uv/pull/6156))
|
||||
- Avoid overwriting dependencies with different markers in `uv add`
|
||||
([#6010](https://github.com/astral-sh/uv/pull/6010))
|
||||
- Simplify available package version ranges when the name includes markers or extras
|
||||
([#6162](https://github.com/astral-sh/uv/pull/6162))
|
||||
- Simplify version ranges reported for unavailable packages
|
||||
([#6155](https://github.com/astral-sh/uv/pull/6155))
|
||||
- Rename `environment-markers` to `resolution-markers`
|
||||
([#6240](https://github.com/astral-sh/uv/pull/6240))
|
||||
- Support `uv add -r requirements.txt` ([#6005](https://github.com/astral-sh/uv/pull/6005))
|
||||
|
||||
### CLI
|
||||
|
||||
- Hide global options in `uv generate-shell-completion`
|
||||
([#6170](https://github.com/astral-sh/uv/pull/6170))
|
||||
- Show generate-shell-completion command in `uv help`
|
||||
([#6180](https://github.com/astral-sh/uv/pull/6180))
|
||||
- Special-case reinstalls in environment update summaries
|
||||
([#6243](https://github.com/astral-sh/uv/pull/6243))
|
||||
- Add output when `uv add` and `uv remove` update scripts
|
||||
([#6231](https://github.com/astral-sh/uv/pull/6231))
|
||||
- Add support for `package@latest` in `tool run`
|
||||
([#6138](https://github.com/astral-sh/uv/pull/6138))
|
||||
- Show `python find` output with `-q` ([#6256](https://github.com/astral-sh/uv/pull/6256))
|
||||
- Warn when `--upgrade` is passed to `tool run` ([#6140](https://github.com/astral-sh/uv/pull/6140))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Allow customizing the tool install directory with `UV_TOOL_BIN_DIR`
|
||||
([#6207](https://github.com/astral-sh/uv/pull/6207))
|
||||
|
||||
### Performance
|
||||
|
||||
- Use `FxHash` in `uv-auth` ([#6149](https://github.com/astral-sh/uv/pull/6149))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid panicking when the resolver thread encounters a closed channel
|
||||
([#6182](https://github.com/astral-sh/uv/pull/6182))
|
||||
- Respect release-only semantics of `python_full_version` when constructing markers
|
||||
([#6171](https://github.com/astral-sh/uv/pull/6171))
|
||||
- Tolerate missing `[project]` table in `uv venv`
|
||||
([#6178](https://github.com/astral-sh/uv/pull/6178))
|
||||
- Avoid using workspace `lock_path` as relative root
|
||||
([#6157](https://github.com/astral-sh/uv/pull/6157))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Preview changes are now included in the standard changelog
|
||||
([#6259](https://github.com/astral-sh/uv/pull/6259))
|
||||
- Document dynamic metadata behavior for cache ([#5993](https://github.com/astral-sh/uv/pull/5993))
|
||||
- Document the effect of ordering on package priority
|
||||
([#6211](https://github.com/astral-sh/uv/pull/6211))
|
||||
- Make some edits to the workspace concept documentation
|
||||
([#6223](https://github.com/astral-sh/uv/pull/6223))
|
||||
- Update environment variables doc ([#5994](https://github.com/astral-sh/uv/pull/5994))
|
||||
- Disable collapsible navigation in the documentation
|
||||
([#5674](https://github.com/astral-sh/uv/pull/5674))
|
||||
- Document `uv add` and `uv remove` behavior with markers
|
||||
([#6163](https://github.com/astral-sh/uv/pull/6163))
|
||||
- Document the Python installation directory ([#6227](https://github.com/astral-sh/uv/pull/6227))
|
||||
- Document the `uv.pip` section semantics ([#6225](https://github.com/astral-sh/uv/pull/6225))
|
||||
- Document the cache directory ([#6229](https://github.com/astral-sh/uv/pull/6229))
|
||||
- Document the tools directory ([#6228](https://github.com/astral-sh/uv/pull/6228))
|
||||
- Document yanked packages caveat during sync ([#6219](https://github.com/astral-sh/uv/pull/6219))
|
||||
- Link to persistent configuration options in Python versions document
|
||||
([#6226](https://github.com/astral-sh/uv/pull/6226))
|
||||
- Link to the projects concept from the dependencies concept
|
||||
([#6224](https://github.com/astral-sh/uv/pull/6224))
|
||||
- Improvements to the Docker installation guide ([#6216](https://github.com/astral-sh/uv/pull/6216))
|
||||
- Increase the size of navigation entries ([#6233](https://github.com/astral-sh/uv/pull/6233))
|
||||
- Install `ca-certificates` in docker and use pipefail
|
||||
([#6208](https://github.com/astral-sh/uv/pull/6208))
|
||||
- Add script support to feature highlights in index
|
||||
([#6251](https://github.com/astral-sh/uv/pull/6251))
|
||||
- Show `uv generate-shell-completion` in CLI documentation reference
|
||||
([#6146](https://github.com/astral-sh/uv/pull/6146))
|
||||
- Update Docker guide for projects ([#6217](https://github.com/astral-sh/uv/pull/6217))
|
||||
- Use `uv add --script` in guide ([#6215](https://github.com/astral-sh/uv/pull/6215))
|
||||
- Show pinned version example on in GitHub Actions integration guide
|
||||
([#6234](https://github.com/astral-sh/uv/pull/6234))
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
## 0.3.1
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add `--with-editable` support to `uv run` ([#6262](https://github.com/astral-sh/uv/pull/6262))
|
||||
- Respect `.python-version` files and `pyproject.toml` in `uv python find`
|
||||
([#6369](https://github.com/astral-sh/uv/pull/6369))
|
||||
- Allow manylinux compatibility override via `_manylinux` module
|
||||
([#6039](https://github.com/astral-sh/uv/pull/6039))
|
||||
|
||||
### CLI
|
||||
|
||||
- Avoid treating `uv add -r` as `--raw-sources` ([#6287](https://github.com/astral-sh/uv/pull/6287))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Always invoke found interpreter when `uv run python` is used
|
||||
([#6363](https://github.com/astral-sh/uv/pull/6363))
|
||||
- Avoid adding extra newline for script with non-empty prelude
|
||||
([#6366](https://github.com/astral-sh/uv/pull/6366))
|
||||
- Fix metadata cache instability for lockfile ([#6332](https://github.com/astral-sh/uv/pull/6332))
|
||||
- Handle Ctrl-C properly in `uvx` invocations ([#6346](https://github.com/astral-sh/uv/pull/6346))
|
||||
- Ignore workspace discovery errors with `--no-workspace`
|
||||
([#6328](https://github.com/astral-sh/uv/pull/6328))
|
||||
- Invalidate `uv.lock` when virtual `dev-dependencies` change
|
||||
([#6291](https://github.com/astral-sh/uv/pull/6291))
|
||||
- Make cache robust to removed archives ([#6284](https://github.com/astral-sh/uv/pull/6284))
|
||||
- Preserve Git username for SSH dependencies ([#6335](https://github.com/astral-sh/uv/pull/6335))
|
||||
- Respect `--no-build-isolation` in `uv add` ([#6368](https://github.com/astral-sh/uv/pull/6368))
|
||||
- Respect `.python-version` files in `uv run` outside projects
|
||||
([#6361](https://github.com/astral-sh/uv/pull/6361))
|
||||
- Use `sys_executable` for `uv run` invocations ([#6354](https://github.com/astral-sh/uv/pull/6354))
|
||||
- Use atomic write for `pip compile` output ([#6274](https://github.com/astral-sh/uv/pull/6274))
|
||||
- Use consistent logic for deserializing short revisions
|
||||
([#6341](https://github.com/astral-sh/uv/pull/6341))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Remove the preview default value of `python-preference`
|
||||
([#6301](https://github.com/astral-sh/uv/pull/6301))
|
||||
- Update env vars doc about `XDG_*` variables on macOS
|
||||
([#6337](https://github.com/astral-sh/uv/pull/6337))
|
||||
|
||||
## 0.3.2
|
||||
|
||||
### Configuration
|
||||
|
||||
- Add support for configuring `python-downloads` with `UV_PYTHON_DOWNLOADS`
|
||||
([#6436](https://github.com/astral-sh/uv/pull/6436))
|
||||
- Add support for configuring the `python-preference` with `UV_PYTHON_PREFERENCE`
|
||||
([#6432](https://github.com/astral-sh/uv/pull/6432))
|
||||
- Deny invalid members in workspace schema ([#6450](https://github.com/astral-sh/uv/pull/6450))
|
||||
|
||||
### Performance
|
||||
|
||||
- Stop streaming wheels when `METADATA` is discovered (if range requests aren't supported)
|
||||
([#6470](https://github.com/astral-sh/uv/pull/6470))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Remove URI type from JSON Schema ([#6449](https://github.com/astral-sh/uv/pull/6449))
|
||||
- Fix retrieval of credentials for URLs from cache
|
||||
([#6452](https://github.com/astral-sh/uv/pull/6452))
|
||||
- Restore `cache` suffix on Windows cache path ([#6482](https://github.com/astral-sh/uv/pull/6482))
|
||||
- Treat `.pyw` files as scripts in `uv run` on Windows
|
||||
([#6453](https://github.com/astral-sh/uv/pull/6453))
|
||||
- Treat invalid extras as `false` in marker evaluation
|
||||
([#6395](https://github.com/astral-sh/uv/pull/6395))
|
||||
- Avoid overwriting symlinks in `pip compile` output
|
||||
([#6487](https://github.com/astral-sh/uv/pull/6487))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add `uv run` hint to the `uvx` guide ([#6454](https://github.com/astral-sh/uv/pull/6454))
|
||||
- Add a guide for using uv with FastAPI ([#6401](https://github.com/astral-sh/uv/pull/6401))
|
||||
- Add tip for using `managed = false` to disable project management
|
||||
([#6465](https://github.com/astral-sh/uv/pull/6465))
|
||||
- Clarify the `uv tool run`, `uvx`, and `uv run` relationships
|
||||
([#6455](https://github.com/astral-sh/uv/pull/6455))
|
||||
- Fix references to `--python-downloads` (it is `--no-python-downloads`)
|
||||
([#6439](https://github.com/astral-sh/uv/pull/6439))
|
||||
- Further clarifications to the tools documentation
|
||||
([#6474](https://github.com/astral-sh/uv/pull/6474))
|
||||
- Update docs dockerfile (bullseye -> bookworm) ([#6441](https://github.com/astral-sh/uv/pull/6441))
|
||||
- Update the installation documentation page ([#6468](https://github.com/astral-sh/uv/pull/6468))
|
||||
- Update pip compatibility pages to mention configuration files support
|
||||
([#6410](https://github.com/astral-sh/uv/pull/6410))
|
||||
- Add `uv run` docs for gui scripts ([#6478](https://github.com/astral-sh/uv/pull/6478))
|
||||
|
||||
## 0.3.3
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add `uv sync --no-install-project` to skip installation of the project
|
||||
([#6538](https://github.com/astral-sh/uv/pull/6538))
|
||||
- Add `uv sync --no-install-workspace` to skip installation of all workspace members
|
||||
([#6539](https://github.com/astral-sh/uv/pull/6539))
|
||||
- Add `uv sync --no-install-package` to skip installation of specific packages
|
||||
([#6540](https://github.com/astral-sh/uv/pull/6540))
|
||||
- Show previous version in self update message ([#6473](https://github.com/astral-sh/uv/pull/6473))
|
||||
|
||||
### CLI
|
||||
|
||||
- Add `--no-project` alias for `uv python pin --no-workspace`
|
||||
([#6514](https://github.com/astral-sh/uv/pull/6514))
|
||||
- Ignore `.python-version` files in `uv venv` with `--no-config`
|
||||
([#6513](https://github.com/astral-sh/uv/pull/6513))
|
||||
- Include virtual environment interpreters in `uv python find`
|
||||
([#6521](https://github.com/astral-sh/uv/pull/6521))
|
||||
- Respect `-` as stdin channel for `uv run` ([#6481](https://github.com/astral-sh/uv/pull/6481))
|
||||
- Revert changes to pyproject.toml when sync fails during `uv add`
|
||||
([#6526](https://github.com/astral-sh/uv/pull/6526))
|
||||
|
||||
### Configuration
|
||||
|
||||
- Add `UV_COMPILE_BYTECODE` environment variable
|
||||
([#6530](https://github.com/astral-sh/uv/pull/6530))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Set `VIRTUAL_ENV` for `uv run` invocations ([#6543](https://github.com/astral-sh/uv/pull/6543))
|
||||
- Ignore errors in workspace discovery with `--no-project`
|
||||
([#6554](https://github.com/astral-sh/uv/pull/6554))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add documentation for `uv python find` ([#6527](https://github.com/astral-sh/uv/pull/6527))
|
||||
- Add uv tool install example in Docker ([#6547](https://github.com/astral-sh/uv/pull/6547))
|
||||
- Document why we do lower bounds ([#6516](https://github.com/astral-sh/uv/pull/6516))
|
||||
- Fix to miss string termination in PowerShell commands for shell autocompletion documentation
|
||||
([#6491](https://github.com/astral-sh/uv/pull/6491))
|
||||
- Fix incorrect workspace members keyword ([#6502](https://github.com/astral-sh/uv/pull/6502))
|
||||
- Use proper environment variables for Windows ([#6433](https://github.com/astral-sh/uv/pull/6433))
|
||||
- Improve caveat in `uvx` note ([#6546](https://github.com/astral-sh/uv/pull/6546))
|
||||
|
||||
## 0.3.4
|
||||
|
||||
### CLI
|
||||
|
||||
- Show `--editable` on the `uv add` CLI ([#6608](https://github.com/astral-sh/uv/pull/6608))
|
||||
- Add `--refresh` to `tool run` warning for `--with` dependencies
|
||||
([#6609](https://github.com/astral-sh/uv/pull/6609))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Allow per dependency build isolation for `setup.py`-based projects ([#6517](https://github.com/astral-sh/uv/pull/6517))
- Avoid un-strict syncing by default for build isolation ([#6606](https://github.com/astral-sh/uv/pull/6606))
- Respect `--no-build-isolation-package` in `uv sync` ([#6605](https://github.com/astral-sh/uv/pull/6605))
- Respect extras and markers on virtual dev dependencies ([#6620](https://github.com/astral-sh/uv/pull/6620))
- Support PEP 723 scripts in GUI files ([#6611](https://github.com/astral-sh/uv/pull/6611))
- Update lockfile after setting minimum bounds in `uv add` ([#6618](https://github.com/astral-sh/uv/pull/6618))
- Use relative paths for `--find-links` and local registries ([#6566](https://github.com/astral-sh/uv/pull/6566))
- Use separate types to represent raw vs. resolver markers ([#6646](https://github.com/astral-sh/uv/pull/6646))
- Parse wheel `WHEEL` and `METADATA` files as email messages ([#6616](https://github.com/astral-sh/uv/pull/6616))
- Support unquoted hrefs in `--find-links` and other HTML sources ([#6622](https://github.com/astral-sh/uv/pull/6622))
- Don't canonicalize paths to user requirements ([#6560](https://github.com/astral-sh/uv/pull/6560))

### Documentation

- Add FastAPI guide to overview ([#6603](https://github.com/astral-sh/uv/pull/6603))
- Add docs for disabling build isolation with `uv sync` ([#6607](https://github.com/astral-sh/uv/pull/6607))
- Add example of reading script from stdin using echo ([#6567](https://github.com/astral-sh/uv/pull/6567))
- Add tip to use intermediate layers in Docker builds ([#6650](https://github.com/astral-sh/uv/pull/6650))
- Clarify need to include `pyproject.toml` with `--no-install-project` ([#6581](https://github.com/astral-sh/uv/pull/6581))
- Move `WORKDIR` directive in Docker examples ([#6652](https://github.com/astral-sh/uv/pull/6652))
- Remove duplicate `WORKDIR` directive in Docker example ([#6651](https://github.com/astral-sh/uv/pull/6651))

## 0.3.5

### Enhancements

- Add support for `--allow-insecure-host` (aliased to `--trusted-host`) ([#6591](https://github.com/astral-sh/uv/pull/6591))
- Read requirements from `requires.txt` when available ([#6655](https://github.com/astral-sh/uv/pull/6655))
- Respect `tool.uv.environments` in `pip compile --universal` ([#6663](https://github.com/astral-sh/uv/pull/6663))
- Use relative paths by default in `uv add` ([#6686](https://github.com/astral-sh/uv/pull/6686))
- Improve messages for empty solves and installs ([#6588](https://github.com/astral-sh/uv/pull/6588))

### Bug fixes

- Avoid reusing state across tool upgrades ([#6660](https://github.com/astral-sh/uv/pull/6660))
- Detect musl and error for musl Python builds ([#6643](https://github.com/astral-sh/uv/pull/6643))
- Ignore `send` errors in installer ([#6667](https://github.com/astral-sh/uv/pull/6667))

### Documentation

- Add development section to Docker guide and reference new example project ([#6666](https://github.com/astral-sh/uv/pull/6666))
- Add docs for `constraint-dependencies` and `override-dependencies` ([#6596](https://github.com/astral-sh/uv/pull/6596))
- Clarify package priority order in pip compatibility guide ([#6619](https://github.com/astral-sh/uv/pull/6619))
- Fix docs for disabling build isolation with `uv sync` ([#6674](https://github.com/astral-sh/uv/pull/6674))
- Improve consistency of directory lookup instructions in Docker ([#6665](https://github.com/astral-sh/uv/pull/6665))
- Improve lockfile concept documentation, add coverage for upgrades ([#6698](https://github.com/astral-sh/uv/pull/6698))
- Shift the order of some of the Docker guide content ([#6664](https://github.com/astral-sh/uv/pull/6664))
- Use `python` to highlight requirements and use more content tabs ([#6549](https://github.com/astral-sh/uv/pull/6549))

1331 changelogs/0.4.x.md: File diff suppressed because it is too large
1662 changelogs/0.5.x.md: File diff suppressed because it is too large

@ -1,836 +0,0 @@
# Changelog 0.6.x

## 0.6.0

There have been 31 releases and 1135 pull requests since [0.5.0](https://github.com/astral-sh/uv/releases/tag/0.5.0), our last release with breaking changes. As before, we've accumulated various changes that improve correctness and user experience, but could break some workflows. This release contains those changes; many have been marked as breaking out of an abundance of caution. We expect most users to be able to upgrade without making changes.

### Breaking changes

- **Create `main.py` instead of `hello.py` in `uv init`** ([#10369](https://github.com/astral-sh/uv/pull/10369))

  Previously, `uv init` created a `hello.py` sample file. Now, `uv init` will create `main.py` instead, which aligns with expectations from user feedback. The `--bare` option can be used to avoid creating the file altogether.
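
  For illustration, a minimal sketch of the new default (file listings are abbreviated and may vary by uv version):

  ```console
  $ uv init example
  Initialized project `example` at `./example`
  $ ls example
  main.py  pyproject.toml  README.md
  $ uv init --bare bare-example
  Initialized project `bare-example` at `./bare-example`
  $ ls bare-example
  pyproject.toml
  ```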

- **Respect `UV_PYTHON` in `uv python install`** ([#11487](https://github.com/astral-sh/uv/pull/11487))

  Previously, `uv python install` did not read this environment variable; now it does. We believe this matches user expectations; however, it will take priority over `.python-version` files, which could be considered breaking.

- **Set `UV` to the uv executable path** ([#11326](https://github.com/astral-sh/uv/pull/11326))

  When uv spawns a subprocess, it will now have the `UV` environment variable set to the `uv` binary path. This change is breaking if you are setting the `UV` environment variable yourself, as we will overwrite its value.

  Additionally, this change requires marking the uv Rust entrypoint (`uv::main`) as `unsafe` to avoid unsoundness; this is only relevant if you are invoking uv using Rust. See the [Rust documentation](https://doc.rust-lang.org/std/env/fn.set_var.html#safety) for details about the safety of updating a process' environment.
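
  A minimal way to observe the new variable from a spawned process (the printed path is illustrative and depends on your installation):

  ```console
  $ uv run python -c "import os; print(os.environ.get('UV'))"
  /home/user/.local/bin/uv
  ```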

- **Error on non-existent extras, e.g., in `uv sync`** ([#11426](https://github.com/astral-sh/uv/pull/11426))

  Previously, uv would silently ignore non-existent extras requested on the command line (e.g., via `uv sync --extra foo`). This is _generally_ correct behavior when resolving requests for package extras, because an extra may be present on one compatible version of a package but not another. However, this flexibility doesn't need to apply to the local project, and it's less surprising to error here.
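
  For example (the exact error text shown here is illustrative):

  ```console
  $ uv sync --extra foo
  error: extra `foo` is not defined in the project's `optional-dependencies`
  ```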

- **Error on missing dependency groups when `--frozen` is provided** ([#11499](https://github.com/astral-sh/uv/pull/11499))

  Previously, uv would not validate that the requested dependency groups were present in the lockfile when the `--frozen` flag was used. Now, an error will be raised if a requested dependency group is not present.

- **Change `-p` to a `--python` alias in `uv pip compile`** ([#11486](https://github.com/astral-sh/uv/pull/11486))

  In `uv pip compile`, `-p` was an alias for `--python-version`, while everywhere else in uv's interface it is an alias for `--python`. Additionally, `uv pip compile` did not respect the `UV_PYTHON` environment variable. Now, the semantics of this flag have been updated for parity with the rest of the CLI.

  However, `--python-version` is unique: if we cannot find an interpreter with the given version, we will not fail. Instead, we'll use an alternative interpreter and override its version tags with the requested version during package resolution. This behavior is retained here for backwards compatibility: `--python <version>` / `-p <version>` will not fail if the version cannot be found. However, if a specific interpreter is requested, e.g., with `--python <path>` or `--python pypy`, and it cannot be found, uv will exit with an error.

  The breaking changes here are that `UV_PYTHON` is respected and `--python <version>` will no longer fail if the version cannot be found.
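
  A brief sketch of the updated flag semantics (file names are placeholders):

  ```console
  # Request by version: does not fail if no 3.12 interpreter is found; the
  # requested version overrides the interpreter's version tags during resolution.
  $ uv pip compile -p 3.12 requirements.in -o requirements.txt

  # Request a specific interpreter: errors if it cannot be found.
  $ uv pip compile --python /usr/bin/python3.12 requirements.in -o requirements.txt
  ```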

- **Bump `alpine` default tag to 3.21 for derived Docker images** ([#11157](https://github.com/astral-sh/uv/pull/11157))

  Alpine 3.21 was released in Dec 2024 and is used in the official Alpine-based Python images. Our `uv:python3.x-alpine` images have been using 3.21 since uv v0.5.8. However, now the `uv:alpine` image will use 3.21 instead of 3.20 and `uv:alpine3.20` will no longer be updated.

- **Use files instead of junctions on Windows** ([#11269](https://github.com/astral-sh/uv/pull/11269))

  Previously, we used junctions for atomic replacement of cache entries on Windows. Now, we use a file with a pointer to the cache entry instead. This resolves various edge-case behaviors with junctions. These files are only intended to be consumed by uv and the cache version has been bumped. We do not think this change will affect workflows.

### Stabilizations

- **`uv publish` is no longer in preview** ([#11032](https://github.com/astral-sh/uv/pull/11032))

  This does not come with any behavior changes. You will no longer see an experimental warning when using `uv publish`. See the linked pull request for a report on the stabilization.

### Enhancements

- Support `--active` for PEP 723 script environments ([#11433](https://github.com/astral-sh/uv/pull/11433))
- Add `revision` to the lockfile to allow backwards-compatible metadata changes ([#11500](https://github.com/astral-sh/uv/pull/11500))

### Bug fixes

- Avoid reading metadata from `.egg-info` files ([#11395](https://github.com/astral-sh/uv/pull/11395))
- Include archive bucket version in archive pointers ([#11306](https://github.com/astral-sh/uv/pull/11306))
- Omit lockfile version when additional fields are dynamic ([#11468](https://github.com/astral-sh/uv/pull/11468))
- Respect executable name in `uvx --from tool@latest` ([#11465](https://github.com/astral-sh/uv/pull/11465))

### Documentation

- The `CHANGELOG.md` is now split into separate files for each "major" version to fix rendering ([#11510](https://github.com/astral-sh/uv/pull/11510))

## 0.6.1

### Enhancements

- Allow users to mark platforms as "required" for wheel coverage ([#10067](https://github.com/astral-sh/uv/pull/10067))
- Warn for builds in non-build and workspace root pyproject.toml ([#11394](https://github.com/astral-sh/uv/pull/11394))

### Bug fixes

- Add `--all` to `uvx --reinstall` message ([#11535](https://github.com/astral-sh/uv/pull/11535))
- Fall back to `GET` on HTTP 400 when attempting to use range requests for wheel download ([#11539](https://github.com/astral-sh/uv/pull/11539))
- Prefer local variants in preference selection ([#11546](https://github.com/astral-sh/uv/pull/11546))
- Respect verbatim executable name in `uvx` ([#11524](https://github.com/astral-sh/uv/pull/11524))

### Documentation

- Add documentation for required environments ([#11542](https://github.com/astral-sh/uv/pull/11542))
- Note that `main.py` used to be `hello.py` ([#11519](https://github.com/astral-sh/uv/pull/11519))

## 0.6.2

### Enhancements

- Add support for constraining build dependencies with `tool.uv.build-constraint-dependencies` ([#11585](https://github.com/astral-sh/uv/pull/11585))
- Sort dependency group keys when adding new group ([#11591](https://github.com/astral-sh/uv/pull/11591))

### Performance

- Use an `Arc` for index URLs ([#11586](https://github.com/astral-sh/uv/pull/11586))

### Bug fixes

- Allow use of x86-64 Python on ARM Windows ([#11625](https://github.com/astral-sh/uv/pull/11625))
- Fix an issue where conflict markers could instigate a very large lock file ([#11293](https://github.com/astral-sh/uv/pull/11293))
- Fix duplicate packages with multiple conflicting extras declared ([#11513](https://github.com/astral-sh/uv/pull/11513))
- Respect color settings for log messages ([#11604](https://github.com/astral-sh/uv/pull/11604))
- Eagerly reject unsupported Git schemes ([#11514](https://github.com/astral-sh/uv/pull/11514))

### Documentation

- Add documentation for specifying Python versions in tool commands ([#11598](https://github.com/astral-sh/uv/pull/11598))

## 0.6.3

### Enhancements

- Allow quotes around command-line options in `requirements.txt` files ([#11644](https://github.com/astral-sh/uv/pull/11644))
- Initialize PEP 723 script in `uv lock --script` ([#11717](https://github.com/astral-sh/uv/pull/11717))

### Configuration

- Accept multiple `.env` files in `UV_ENV_FILE` ([#11665](https://github.com/astral-sh/uv/pull/11665))
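
A quick sketch, assuming space-separated paths (file names are placeholders):

```console
# Load environment variables from both files for the invoked command.
$ UV_ENV_FILE=".env .env.local" uv run python script.py
```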

### Performance

- Reduce overhead in converting resolutions ([#11660](https://github.com/astral-sh/uv/pull/11660))
- Use `SmallString` on `Hashes` ([#11756](https://github.com/astral-sh/uv/pull/11756))
- Use a `Box` for `Yanked` on `File` ([#11755](https://github.com/astral-sh/uv/pull/11755))
- Use a `SmallString` for the `Yanked` enum ([#11715](https://github.com/astral-sh/uv/pull/11715))
- Use boxed slices for hash vector ([#11714](https://github.com/astral-sh/uv/pull/11714))
- Use install concurrency for bytecode compilation too ([#11615](https://github.com/astral-sh/uv/pull/11615))

### Bug fixes

- Avoid installing duplicate dependencies across conflicting groups ([#11653](https://github.com/astral-sh/uv/pull/11653))
- Check subdirectory existence after cache heal ([#11719](https://github.com/astral-sh/uv/pull/11719))
- Include uppercase platforms for Windows wheels ([#11681](https://github.com/astral-sh/uv/pull/11681))
- Respect existing PEP 723 script settings in `uv add` ([#11716](https://github.com/astral-sh/uv/pull/11716))
- Reuse refined interpreter to create tool environment ([#11680](https://github.com/astral-sh/uv/pull/11680))
- Skip removed directories during bytecode compilation ([#11633](https://github.com/astral-sh/uv/pull/11633))
- Support conflict markers in `uv export` ([#11643](https://github.com/astral-sh/uv/pull/11643))
- Treat lockfile as outdated if (empty) extras are added ([#11702](https://github.com/astral-sh/uv/pull/11702))
- Display path separators as backslashes on Windows ([#11667](https://github.com/astral-sh/uv/pull/11667))
- Display the built file name instead of the canonicalized name in `uv build` ([#11593](https://github.com/astral-sh/uv/pull/11593))
- Fix message when there are no buildable packages ([#11722](https://github.com/astral-sh/uv/pull/11722))
- Re-allow HTTP schemes for Git dependencies ([#11687](https://github.com/astral-sh/uv/pull/11687))

### Documentation

- Add anchor links to arguments and options in the CLI reference ([#11754](https://github.com/astral-sh/uv/pull/11754))
- Add link to environment marker specification ([#11748](https://github.com/astral-sh/uv/pull/11748))
- Fix a missing closing bracket in the `cache-keys` setting ([#11669](https://github.com/astral-sh/uv/pull/11669))
- Remove the last edited date from documentation pages ([#11753](https://github.com/astral-sh/uv/pull/11753))
- Fix readme typo ([#11742](https://github.com/astral-sh/uv/pull/11742))

## 0.6.4

### Enhancements

- Upgrade pypy3.10 to v7.3.19 ([#11814](https://github.com/astral-sh/uv/pull/11814))
- Allow configuring log verbosity from the CLI (i.e., `-vvv`) ([#11758](https://github.com/astral-sh/uv/pull/11758))
- Warn when duplicate index names are found in a single file ([#11824](https://github.com/astral-sh/uv/pull/11824))

### Bug fixes

- Always store registry index on resolution packages ([#11815](https://github.com/astral-sh/uv/pull/11815))
- Avoid error on relative paths in `uv tool uninstall` ([#11889](https://github.com/astral-sh/uv/pull/11889))
- Avoid silently dropping errors in directory enumeration ([#11890](https://github.com/astral-sh/uv/pull/11890))
- Disable interactive git terminal prompts during fetches ([#11744](https://github.com/astral-sh/uv/pull/11744))
- Discover Windows registry (PEP 514) Python versions across 32/64-bit ([#11801](https://github.com/astral-sh/uv/pull/11801))
- Don't panic on Ctrl-C in confirm prompt ([#11706](https://github.com/astral-sh/uv/pull/11706))
- Fix non-directory in workspace on Windows ([#11833](https://github.com/astral-sh/uv/pull/11833))
- Make interpreter caching robust to OS upgrades ([#11875](https://github.com/astral-sh/uv/pull/11875))
- Respect `include-system-site-packages` in layered environments ([#11873](https://github.com/astral-sh/uv/pull/11873))
- Suggest `uv tool update-shell` in PowerShell ([#11846](https://github.com/astral-sh/uv/pull/11846))
- Update code page to `65001` before setting environment variables in virtual environments ([#11831](https://github.com/astral-sh/uv/pull/11831))
- Use hash instead of full wheel name in wheels bucket ([#11738](https://github.com/astral-sh/uv/pull/11738))
- Fix version string truncation while generating cache_key ([#11830](https://github.com/astral-sh/uv/pull/11830))
- Explicitly handle Ctrl-C in confirmation prompt instead of using a signal handler ([#11897](https://github.com/astral-sh/uv/pull/11897))

### Performance

- Avoid cloning to string when creating cache path ([#11772](https://github.com/astral-sh/uv/pull/11772))
- Avoid redundant clones in version containment check ([#11767](https://github.com/astral-sh/uv/pull/11767))
- Avoid string allocation when enumerating tool names ([#11910](https://github.com/astral-sh/uv/pull/11910))
- Avoid using owned `String` for package name constructors ([#11768](https://github.com/astral-sh/uv/pull/11768))
- Avoid using owned `String` in deserializers ([#11764](https://github.com/astral-sh/uv/pull/11764))
- Migrate to `zlib-rs` (again) ([#11894](https://github.com/astral-sh/uv/pull/11894))
- Remove unnecessary clones when adding package names ([#11771](https://github.com/astral-sh/uv/pull/11771))
- Skip unquote allocation for non-quoted strings ([#11813](https://github.com/astral-sh/uv/pull/11813))
- Use `SmallString` for filenames and URLs ([#11765](https://github.com/astral-sh/uv/pull/11765))
- Use a boxed slice for version specifiers ([#11766](https://github.com/astral-sh/uv/pull/11766))
- Use matches over contains for extra value parsing ([#11770](https://github.com/astral-sh/uv/pull/11770))

### Documentation

- Avoid fallback to PyPI in mixed CPU/CUDA example ([#11115](https://github.com/astral-sh/uv/pull/11115))
- Clarify that setting cache-keys overrides defaults ([#11895](https://github.com/astral-sh/uv/pull/11895))
- Document our MSRV policy ([#11898](https://github.com/astral-sh/uv/pull/11898))
- Fix reference to macOS cache path ([#11845](https://github.com/astral-sh/uv/pull/11845))
- Fix typo in `no_default_groups` documentation and changelog ([#11928](https://github.com/astral-sh/uv/pull/11928))
- Update the "Locking and syncing" page ([#11647](https://github.com/astral-sh/uv/pull/11647))
- Update alternative indexes documentation to use new interface ([#10826](https://github.com/astral-sh/uv/pull/10826))

## 0.6.5

### Enhancements

- Allow `--constraints` and `--overrides` in `uvx` ([#10207](https://github.com/astral-sh/uv/pull/10207))
- Allow overrides in `satisfies` check for `uv tool run` ([#11994](https://github.com/astral-sh/uv/pull/11994))
- Allow users to set `package = true` on `tool.uv.sources` ([#12014](https://github.com/astral-sh/uv/pull/12014))
- Add support for Windows legacy scripts via `uv run` ([#11888](https://github.com/astral-sh/uv/pull/11888))
- Return error when running uvx with a `.py` script ([#11623](https://github.com/astral-sh/uv/pull/11623))
- Warn user on use of `uvx run` ([#11992](https://github.com/astral-sh/uv/pull/11992))

### Configuration

- Add `NO_BUILD` and `NO_BUILD_PACKAGE` environment variables ([#11968](https://github.com/astral-sh/uv/pull/11968))

### Performance

- Allow overrides in all satisfies checks ([#11995](https://github.com/astral-sh/uv/pull/11995))
- Respect markers on constraints when validating current environment ([#11976](https://github.com/astral-sh/uv/pull/11976))

### Bug fixes

- Compare major-minor specifiers when filtering interpreters ([#11952](https://github.com/astral-sh/uv/pull/11952))
- Fix system site packages detection default ([#11956](https://github.com/astral-sh/uv/pull/11956))
- Invalidate lockfile when empty dependency groups are added or removed ([#12010](https://github.com/astral-sh/uv/pull/12010))
- Remove prepended sys.path ([#11954](https://github.com/astral-sh/uv/pull/11954))
- Fix PyPy Python version label ([#11965](https://github.com/astral-sh/uv/pull/11965))
- Fix error message suggesting `--user` instead of `--username` ([#11947](https://github.com/astral-sh/uv/pull/11947))

### Preview

- Move the uv build backend into a separate, minimal `uv_build` package ([#11446](https://github.com/astral-sh/uv/pull/11446))

## 0.6.6

### Python

- Add support for dynamic musl Python distributions on x86-64 Linux ([#12121](https://github.com/astral-sh/uv/pull/12121))
- Allow the experimental JIT to be enabled at runtime on Python 3.13 and 3.14 on Linux
- Upgrade the build toolchain to LLVM 20, improving performance

See the [`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250311) for more details.

### Enhancements

- Add `--marker` flag to `uv add` ([#12012](https://github.com/astral-sh/uv/pull/12012))
- Allow overriding module name for uv build backend ([#11884](https://github.com/astral-sh/uv/pull/11884))
- Sync latest Python releases ([#12120](https://github.com/astral-sh/uv/pull/12120))
- Use 'Upload' instead of 'Download' in publish reporter ([#12029](https://github.com/astral-sh/uv/pull/12029))
- Add `[index].authenticate` allowing authentication to be required on an index ([#11896](https://github.com/astral-sh/uv/pull/11896))
- Add support for Windows legacy scripts in `uv tool run` ([#12079](https://github.com/astral-sh/uv/pull/12079))
- Propagate conflicting dependency groups when using `include-group` ([#12005](https://github.com/astral-sh/uv/pull/12005))
- Show ambiguous requirements when `uv add` failed ([#12106](https://github.com/astral-sh/uv/pull/12106))

### Performance

- Cache workspace discovery ([#12096](https://github.com/astral-sh/uv/pull/12096))
- Insert dependencies into fork state prior to fetching metadata ([#12057](https://github.com/astral-sh/uv/pull/12057))
- Remove some allocations from `uv-auth` ([#12077](https://github.com/astral-sh/uv/pull/12077))

### Bug fixes

- Avoid considering `PATH` updated when the `export` is commented in the shellrc ([#12043](https://github.com/astral-sh/uv/pull/12043))
- Fix `uv publish` retry on network failures ([#12041](https://github.com/astral-sh/uv/pull/12041))
- Use a sized stream in `uv publish` to comply with WSGI PyPI server constraints ([#12111](https://github.com/astral-sh/uv/pull/12111))
- Fix `uv python install --reinstall` when the version was not previously installed ([#12124](https://github.com/astral-sh/uv/pull/12124))

### Preview features

- Fix `uv_build` invocation ([#12058](https://github.com/astral-sh/uv/pull/12058))

### Documentation

- Quote version string in `python-versions.md` ([#12112](https://github.com/astral-sh/uv/pull/12112))
- Fix tool concept page headings ([#12053](https://github.com/astral-sh/uv/pull/12053))
- Update the `[index].authenticate` docs ([#12102](https://github.com/astral-sh/uv/pull/12102))
- Update versioning policy ([#11666](https://github.com/astral-sh/uv/pull/11666))

## 0.6.7

### Python

- Add CPython 3.14.0a6
- Fix regression where extension modules would use wrong `CXX` compiler on Linux
- Enable FTS3 enhanced query syntax for SQLite

See the [`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250317) for more details.

### Enhancements

- Add support for `-c` constraints in `uv add` ([#12209](https://github.com/astral-sh/uv/pull/12209))
- Add support for `--global` default version in `uv python pin` ([#12115](https://github.com/astral-sh/uv/pull/12115))
- Always reinstall local source trees passed to `uv pip install` ([#12176](https://github.com/astral-sh/uv/pull/12176))
- Render token claims on publish permission error ([#12135](https://github.com/astral-sh/uv/pull/12135))
- Add pip-compatible `--group` flag to `uv pip install` and `uv pip compile` ([#11686](https://github.com/astral-sh/uv/pull/11686))

### Preview features

- Avoid creating duplicate directory entries in built wheels ([#12206](https://github.com/astral-sh/uv/pull/12206))
- Allow overriding module names for editable builds ([#12137](https://github.com/astral-sh/uv/pull/12137))

### Performance

- Avoid replicating core-metadata field on `File` struct ([#12159](https://github.com/astral-sh/uv/pull/12159))

### Bug fixes

- Add `src` to default cache keys ([#12062](https://github.com/astral-sh/uv/pull/12062))
- Discard insufficient fork markers ([#10682](https://github.com/astral-sh/uv/pull/10682))
- Ensure `python pin --global` creates parent directories if missing ([#12180](https://github.com/astral-sh/uv/pull/12180))
- Fix GraalPy abi tag parsing and discovery ([#12154](https://github.com/astral-sh/uv/pull/12154))
- Remove extraneous script packages in `uv sync --script` ([#12158](https://github.com/astral-sh/uv/pull/12158))
- Remove redundant `activate.bat` output ([#12160](https://github.com/astral-sh/uv/pull/12160))
- Avoid subsequent index hint when no versions are available on the first index ([#9332](https://github.com/astral-sh/uv/pull/9332))
- Error on lockfiles with incoherent wheel versions ([#12235](https://github.com/astral-sh/uv/pull/12235))

### Rust API

- Update `BaseClientBuilder` to accept custom proxies ([#12232](https://github.com/astral-sh/uv/pull/12232))

### Documentation

- Make testpypi index explicit in example snippet ([#12148](https://github.com/astral-sh/uv/pull/12148))
- Reverse and format the archived changelogs ([#12099](https://github.com/astral-sh/uv/pull/12099))
- Use consistent commas around i.e. and e.g. ([#12157](https://github.com/astral-sh/uv/pull/12157))
- Fix typos in MRE docs ([#12198](https://github.com/astral-sh/uv/pull/12198))
- Fix double space typo ([#12171](https://github.com/astral-sh/uv/pull/12171))

## 0.6.8

### Enhancements

- Add support for enabling all groups by default with `default-groups = "all"` ([#12289](https://github.com/astral-sh/uv/pull/12289))
- Add simpler `--managed-python` and `--no-managed-python` flags for toggling Python preferences ([#12246](https://github.com/astral-sh/uv/pull/12246))

### Performance

- Avoid allocations for default cache keys ([#12063](https://github.com/astral-sh/uv/pull/12063))

### Bug fixes

- Allow local version mismatches when validating lockfile ([#12285](https://github.com/astral-sh/uv/pull/12285))
- Allow owned string when deserializing `requires-python` ([#12278](https://github.com/astral-sh/uv/pull/12278))
- Make cache errors non-fatal in `Planner::build` ([#12281](https://github.com/astral-sh/uv/pull/12281))

## 0.6.9

### Enhancements

- Use `keyring --mode creds` when `authenticate = "always"` ([#12316](https://github.com/astral-sh/uv/pull/12316))
- Fail with specific error message when no password is present and `authenticate = "always"` ([#12313](https://github.com/astral-sh/uv/pull/12313))

### Bug fixes

- Add boolish value parser for `UV_MANAGED_PYTHON` flags ([#12345](https://github.com/astral-sh/uv/pull/12345))
- Make deserialization non-fatal when assessing source tree revisions ([#12319](https://github.com/astral-sh/uv/pull/12319))
- Use resolver-returned wheel over alternate cached wheel ([#12301](https://github.com/astral-sh/uv/pull/12301))

### Documentation

- Add experimental `--torch-backend` to the PyTorch guide ([#12317](https://github.com/astral-sh/uv/pull/12317))
- Fix `#keyring-provider` references in alternative index docs ([#12315](https://github.com/astral-sh/uv/pull/12315))
- Fix `--directory` path in examples ([#12165](https://github.com/astral-sh/uv/pull/12165))

### Preview changes

- Automatically infer the PyTorch index via `--torch-backend=auto` ([#12070](https://github.com/astral-sh/uv/pull/12070))

## 0.6.10

### Enhancements

- Add `uv sync --check` flag (see the example after this list) ([#12342](https://github.com/astral-sh/uv/pull/12342))
- Add support for Python version requests in `uv python list` ([#12375](https://github.com/astral-sh/uv/pull/12375))
- Support `.env` files in `uv tool run` ([#12386](https://github.com/astral-sh/uv/pull/12386))
- Support `python find --script` ([#11891](https://github.com/astral-sh/uv/pull/11891))
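
A minimal sketch of the new `--check` flag (the described exit behavior is illustrative):

```console
# Verify that the environment is up to date without modifying it; the command
# exits with a non-zero status if a sync would make changes.
$ uv sync --check
```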

### Preview features

- Check all compatible torch indexes when `--torch-backend` is enabled ([#12385](https://github.com/astral-sh/uv/pull/12385))

### Performance

- Use a boxed slice for extras and groups ([#12391](https://github.com/astral-sh/uv/pull/12391))
- Use small string for index name type ([#12355](https://github.com/astral-sh/uv/pull/12355))

### Bug fixes

- Allow virtual packages with `--no-build` ([#12314](https://github.com/astral-sh/uv/pull/12314))
- Ignore `--find-links` entries for pinned indexes ([#12396](https://github.com/astral-sh/uv/pull/12396))
- Omit wheels from lockfile based on `--exclude-newer` ([#12299](https://github.com/astral-sh/uv/pull/12299))
- Retain end-of-line comment position when adding dependency ([#12360](https://github.com/astral-sh/uv/pull/12360))
- Omit fragment when querying for wheels in Simple HTML API ([#12384](https://github.com/astral-sh/uv/pull/12384))
- Error on missing argument in `requirements.txt` ([#12354](https://github.com/astral-sh/uv/pull/12354))
- Support modules with different casing in build backend ([#12240](https://github.com/astral-sh/uv/pull/12240))
- Add authentication policy support for `pip` commands ([#12470](https://github.com/astral-sh/uv/pull/12470))

## 0.6.11

### Enhancements

- Add dependents ("via ..." comments) in `uv export` command ([#12350](https://github.com/astral-sh/uv/pull/12350))
- Bump least-recent non-EOL macOS version to 13.0 ([#12518](https://github.com/astral-sh/uv/pull/12518))
- Support `--find-links`-style "flat" indexes in `[[tool.uv.index]]` ([#12407](https://github.com/astral-sh/uv/pull/12407))
- Distinguish between `-q` and `-qq` ([#12300](https://github.com/astral-sh/uv/pull/12300))

### Configuration

- Support `UV_PROJECT` environment variable to set the project directory ([#12327](https://github.com/astral-sh/uv/pull/12327))

### Performance

- Use a boxed slice for various requirement types ([#12514](https://github.com/astral-sh/uv/pull/12514))

### Bug fixes

- Add a newline after metadata when initializing scripts with other metadata blocks ([#12501](https://github.com/astral-sh/uv/pull/12501))
- Avoid writing empty `requires-python` to script blocks ([#12517](https://github.com/astral-sh/uv/pull/12517))
- Respect build constraints in `uv sync` ([#12502](https://github.com/astral-sh/uv/pull/12502))
- Respect transitive dependencies in `uv tree --only-group` ([#12560](https://github.com/astral-sh/uv/pull/12560))

## 0.6.12

### Enhancements

- Report the queried executable path in `uv python list` ([#12628](https://github.com/astral-sh/uv/pull/12628))
- Improve archive unpack error messages ([#12627](https://github.com/astral-sh/uv/pull/12627))

### Bug fixes

- Respect `authenticate` when using `explicit = true` ([#12631](https://github.com/astral-sh/uv/pull/12631))
- Normalize extra and group names in `uv add` and `uv remove` ([#12586](https://github.com/astral-sh/uv/pull/12586))
- Enforce CRC-32 checks when unpacking archives ([#12623](https://github.com/astral-sh/uv/pull/12623))
- Fix parsing of `python-platform` in settings files ([#12592](https://github.com/astral-sh/uv/pull/12592))

### Documentation

- Add note about `uv build` to `package = false` ([#12608](https://github.com/astral-sh/uv/pull/12608))
- Add index fallback note to `authenticate = always` documentation ([#12498](https://github.com/astral-sh/uv/pull/12498))
- Fix invalid 'kind' reference in flat index docs ([#12583](https://github.com/astral-sh/uv/pull/12583))

## 0.6.13

### Enhancements

- Add `--show-version` to `uv python find` ([#12376](https://github.com/astral-sh/uv/pull/12376))
- Remove `--no-config` warning from `uv pip compile` and `uv pip sync` ([#12642](https://github.com/astral-sh/uv/pull/12642))
- Skip repeated directories in `PATH` when searching for Python interpreters ([#12367](https://github.com/astral-sh/uv/pull/12367))
- Unset `SCRIPT_PATH` in relocatable activation script ([#12672](https://github.com/astral-sh/uv/pull/12672))
- Add `UV_PYTHON_DOWNLOADS_JSON_URL` to set custom managed Python sources ([#10939](https://github.com/astral-sh/uv/pull/10939))
- Reject `pyproject.toml` files in `uv pip compile -o` ([#12673](https://github.com/astral-sh/uv/pull/12673))
- Respect the `--offline` flag for Git operations ([#12619](https://github.com/astral-sh/uv/pull/12619))

### Bug fixes

- Warn instead of error if CRC appears to be missing ([#12722](https://github.com/astral-sh/uv/pull/12722))
- Avoid infinite loop in `uv export` with conflicts ([#12726](https://github.com/astral-sh/uv/pull/12726))

### Rust API

- Update MSRV to 1.84 ([#12670](https://github.com/astral-sh/uv/pull/12670))

## 0.6.14

### Python versions

The following Python versions have been added:

- CPython 3.13.3
- CPython 3.12.10
- CPython 3.11.12
- CPython 3.10.17
- CPython 3.9.22

See the [`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250409) for more details.

### Enhancements

- Add `uv-build` and `uv_build` aliases to `uv init --build-backend` ([#12776](https://github.com/astral-sh/uv/pull/12776))
- Emit dedicated error message for Conda `environment.yml` files ([#12669](https://github.com/astral-sh/uv/pull/12669))

### Preview features

- Build backend: check that the module directory exists for sdist builds ([#12779](https://github.com/astral-sh/uv/pull/12779))
- Build backend: fix sdists with long directories ([#12764](https://github.com/astral-sh/uv/pull/12764))

### Performance

- Avoid querying GitHub on repeated install invocations ([#12767](https://github.com/astral-sh/uv/pull/12767))

### Bug fixes

- Error when `tool.uv.sources` is set in system-level configuration file ([#12757](https://github.com/astral-sh/uv/pull/12757))
- Split workspace members onto their own lines in `uv init` ([#12756](https://github.com/astral-sh/uv/pull/12756))

### Documentation

- Add lockfile note about PEP 751 ([#12732](https://github.com/astral-sh/uv/pull/12732))
- Extend the reference documentation for `uv pip sync` ([#12683](https://github.com/astral-sh/uv/pull/12683))
- Fix mismatched pip interface header / nav titles ([#12640](https://github.com/astral-sh/uv/pull/12640))

## 0.6.15

This release includes preliminary support for the `pylock.toml` file format, as standardized in [PEP 751](https://peps.python.org/pep-0751/). `pylock.toml` is an alternative resolution output format intended to replace `requirements.txt` (e.g., in the context of `uv pip compile`, whereby a "locked" `requirements.txt` file is generated from a set of input requirements). `pylock.toml` is standardized and tool-agnostic, such that in the future, `pylock.toml` files generated by uv could be installed by other tools, and vice versa.

As of this release, `pylock.toml` is supported in the following commands:

- To export a `uv.lock` to the `pylock.toml` format, run: `uv export -o pylock.toml`
- To generate a `pylock.toml` file from a set of requirements, run: `uv pip compile -o pylock.toml requirements.in`
- To install from a `pylock.toml` file, run: `uv pip sync pylock.toml` or `uv pip install -r pylock.toml`
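
Putting these together, a minimal end-to-end sketch (`requirements.in` is a placeholder input file):

```console
# Generate a PEP 751 lockfile from a set of input requirements...
$ uv pip compile -o pylock.toml requirements.in
# ...or export an existing uv.lock to the same format.
$ uv export -o pylock.toml
# Install from the lockfile.
$ uv pip sync pylock.toml
```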

### Enhancements

- Add PEP 751 support to `uv pip compile` ([#13019](https://github.com/astral-sh/uv/pull/13019))
- Add `uv export` support for PEP 751 ([#12955](https://github.com/astral-sh/uv/pull/12955))
- Accept `requirements.txt` (verbatim) as a format on the CLI ([#12957](https://github.com/astral-sh/uv/pull/12957))
- Add `UV_NO_EDITABLE` environment variable to set `--no-editable` on all invocations ([#12773](https://github.com/astral-sh/uv/pull/12773))
- Add `pylock.toml` to `uv pip install` and `uv pip sync` ([#12992](https://github.com/astral-sh/uv/pull/12992))
- Add a brief sleep before sending `SIGINT` to child processes ([#13018](https://github.com/astral-sh/uv/pull/13018))
- Add upload time to `uv.lock` ([#12968](https://github.com/astral-sh/uv/pull/12968))
- Allow updating Git sources by name ([#12897](https://github.com/astral-sh/uv/pull/12897))
- Cache `which git` in `uv init` ([#12893](https://github.com/astral-sh/uv/pull/12893))
- Enable `--dry-run` with `--locked` / `--frozen` for `uv sync` ([#12778](https://github.com/astral-sh/uv/pull/12778))
- Infer output type in `uv export` ([#12958](https://github.com/astral-sh/uv/pull/12958))
- Make `uv init` resilient against broken git ([#12895](https://github.com/astral-sh/uv/pull/12895))
- Respect build constraints for `uv run --with` dependencies ([#12882](https://github.com/astral-sh/uv/pull/12882))
- Split `UV_INDEX` on all whitespace ([#12820](https://github.com/astral-sh/uv/pull/12820))
- Support build constraints in `uv tool` and PEP 723 scripts ([#12842](https://github.com/astral-sh/uv/pull/12842))
- Use suffix from `uvx` binary when searching for uv binary ([#12923](https://github.com/astral-sh/uv/pull/12923))
- Update version formatting to use cyan color ([#12943](https://github.com/astral-sh/uv/pull/12943))
- Add debug logs for version file search ([#12951](https://github.com/astral-sh/uv/pull/12951))
- Fix `SourceNotAllowed` error message during Python discovery ([#13012](https://github.com/astral-sh/uv/pull/13012))
- Obfuscate password in credentials debug messages ([#12944](https://github.com/astral-sh/uv/pull/12944))
- Obfuscate possible tokens in URL logs ([#12969](https://github.com/astral-sh/uv/pull/12969))
- Validate that PEP 751 entries don't include multiple sources ([#12993](https://github.com/astral-sh/uv/pull/12993))

### Preview features

- Build backend: add reference docs and schema ([#12803](https://github.com/astral-sh/uv/pull/12803))

### Bug fixes

- Align supported `config-settings` with example in docs ([#12947](https://github.com/astral-sh/uv/pull/12947))
- Ensure virtual environment is compatible with interpreter on sync ([#12884](https://github.com/astral-sh/uv/pull/12884))
- Fix `PythonDownloadRequest` parsing for partial keys ([#12925](https://github.com/astral-sh/uv/pull/12925))
- Fix pre-release exclusive comparison operator in `uv-pep440` ([#12836](https://github.com/astral-sh/uv/pull/12836))
- Forward additional signals to the child process in `uv run` ([#13017](https://github.com/astral-sh/uv/pull/13017))
- Omit PEP 751 version for source trees ([#13030](https://github.com/astral-sh/uv/pull/13030))
- Patch `CC` and `CCX` entries in sysconfig for cross-compiled `aarch64` Python distributions ([#12239](https://github.com/astral-sh/uv/pull/12239))
- Properly handle authentication for HTTP 302 redirect URLs ([#12920](https://github.com/astral-sh/uv/pull/12920))
- Set 4MB stack size for all threads, introduce `UV_STACK_SIZE` ([#12839](https://github.com/astral-sh/uv/pull/12839))
- Show PyPy downloads during `uv python list` ([#12915](https://github.com/astral-sh/uv/pull/12915))
- Add `subdirectory` to Direct URL for local directories ([#12971](https://github.com/astral-sh/uv/pull/12971))
- Prefer stable releases over pre-releases in `uv python install` ([#12194](https://github.com/astral-sh/uv/pull/12194))
- Write requested Python variant to pin file in `uv init` ([#12870](https://github.com/astral-sh/uv/pull/12870))

### Documentation

- Fix CLI reference with code block ([#12807](https://github.com/astral-sh/uv/pull/12807))
- Fix lockfile note ([#12793](https://github.com/astral-sh/uv/pull/12793))
- Fix typo in a reference ([#12858](https://github.com/astral-sh/uv/pull/12858))
- Improve docs for `uv python list --only-downloads` and `--only-installed` ([#12916](https://github.com/astral-sh/uv/pull/12916))
- Update note on lack of musl distributions to ARM-only ([#12825](https://github.com/astral-sh/uv/pull/12825))
- Add section on shebangs for scripts ([#11553](https://github.com/astral-sh/uv/pull/11553))
- Display aliases for long and short args in the CLI reference ([#12824](https://github.com/astral-sh/uv/pull/12824))
- Fix highlight line in explicit index documentation ([#12887](https://github.com/astral-sh/uv/pull/12887))
- Add explicit source (matching PyTorch guide) ([#12844](https://github.com/astral-sh/uv/pull/12844))
- Fix link to issue ([#12823](https://github.com/astral-sh/uv/pull/12823))
- Fix grammatical error in FastAPI guide ([#12908](https://github.com/astral-sh/uv/pull/12908))
- Add `--locked` to `uv sync` in GitHub Actions guide ([#12819](https://github.com/astral-sh/uv/pull/12819))
- Improve formatting for `"all"` `default-groups` setting documentation ([#12963](https://github.com/astral-sh/uv/pull/12963))
- Replace `--frozen` with `--locked` in Docker integration guide ([#12818](https://github.com/astral-sh/uv/pull/12818))

## 0.6.16

### Bug fixes

- Revert "Properly handle authentication for 302 redirect URLs" ([#13041](https://github.com/astral-sh/uv/pull/13041))

## 0.6.17

### Preview features

- Add PyTorch v2.7.0 to GPU backend ([#13072](https://github.com/astral-sh/uv/pull/13072))

### Bug fixes

- Avoid panic for invalid Python versions ([#13077](https://github.com/astral-sh/uv/pull/13077))
- Block scripts from overwriting `python` ([#13051](https://github.com/astral-sh/uv/pull/13051))
- Check distribution names to handle invalid redirects ([#12917](https://github.com/astral-sh/uv/pull/12917))
- Check for mismatched package and distribution names on resolver thread ([#13088](https://github.com/astral-sh/uv/pull/13088))
- Fix panic with invalid last character in PEP 508 name ([#13105](https://github.com/astral-sh/uv/pull/13105))
- Reject `requires-python` even if not listed on the index page ([#13086](https://github.com/astral-sh/uv/pull/13086))

@ -1,995 +0,0 @@
# Changelog 0.7.x

## 0.7.0

This release contains various changes that improve correctness and user experience, but could break some workflows; many changes have been marked as breaking out of an abundance of caution. We expect most users to be able to upgrade without making changes.

### Breaking changes

- **Update `uv version` to display and update project versions ([#12349](https://github.com/astral-sh/uv/pull/12349))**

  Previously, `uv version` displayed uv's version. Now, `uv version` will display or update the project's version. This interface was [heavily requested](https://github.com/astral-sh/uv/issues/6298) and, after much consideration, we decided that transitioning the top-level command was the best option.

  Here's a brief example:

  ```console
  $ uv init example
  Initialized project `example` at `./example`
  $ cd example
  $ uv version
  example 0.1.0
  $ uv version --bump major
  example 0.1.0 => 1.0.0
  $ uv version --short
  1.0.0
  ```

  If used outside of a project, uv will still fall back to showing its own version:

  ```console
  $ uv version
  warning: failed to read project: No `pyproject.toml` found in current directory or any parent directory
  running `uv self version` for compatibility with old `uv version` command.
  this fallback will be removed soon, pass `--preview` to make this an error.

  uv 0.7.0 (4433f41c9 2025-04-29)
  ```

  As described in the warning, `--preview` can be used to error instead:

  ```console
  $ uv version --preview
  error: No `pyproject.toml` found in current directory or any parent directory
  ```

  The previous functionality of `uv version` was moved to `uv self version`.

- **Avoid fallback to subsequent indexes on authentication failure ([#12805](https://github.com/astral-sh/uv/pull/12805))**

  When using the `first-index` strategy (the default), uv will stop searching indexes for a package once it is found on a single index. Previously, uv considered a package as "missing" from an index during authentication failures, such as an HTTP 401 or HTTP 403 (normally, missing packages are represented by an HTTP 404). This behavior was motivated by unusual responses from some package indexes, but reduces the safety of uv's index strategy when authentication fails. Now, uv will consider an authentication failure as a stop-point when searching for a package across indexes. The `index.ignore-error-codes` option can be used to recover the existing behavior, e.g.:

  ```toml
  [[tool.uv.index]]
  name = "pytorch"
  url = "https://download.pytorch.org/whl/cpu"
  ignore-error-codes = [401, 403]
  ```

  Since PyTorch's indexes always return an HTTP 403 for missing packages, uv special-cases indexes on the `pytorch.org` domain to ignore that error code by default.

- **Require the command in `uvx <name>` to be available in the Python environment ([#11603](https://github.com/astral-sh/uv/pull/11603))**

  Previously, `uvx` would attempt to execute a command even if it was not provided by a Python package. For example, if we presume `foo` is an empty Python package which provides no command, `uvx foo` would invoke the `foo` command on the `PATH` (if present). Now, uv will error early if the `foo` executable is not provided by the requested Python package. This check is not enforced when `--from` is used, so patterns like `uvx --from foo bash -c "..."` are still valid. uv also still allows `uvx foo` where the `foo` executable is provided by a dependency of `foo` instead of `foo` itself, as this is fairly common for packages which depend on a dedicated package for their command-line interface.
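
  A quick sketch of the distinction, using the hypothetical `foo` package from above:

  ```console
  # Errors early if the `foo` package does not provide a `foo` executable.
  $ uvx foo
  # Not enforced when --from is used; this pattern remains valid.
  $ uvx --from foo bash -c "..."
  ```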

- **Use index URL instead of package URL for keyring credential lookups ([#12651](https://github.com/astral-sh/uv/pull/12651))**

  When determining credentials for querying a package URL, uv previously sent the full URL to the `keyring` command. However, some keyring plugins expect to receive the _index URL_ (which is usually a parent of the package URL). Now, uv requests credentials for the index URL instead. This behavior matches `pip`.

- **Remove `--version` from subcommands ([#13108](https://github.com/astral-sh/uv/pull/13108))**

  Previously, uv allowed the `--version` flag on arbitrary subcommands, e.g., `uv run --version`. However, the `--version` flag is useful for other operations since uv is a package manager. Consequently, we've removed the `--version` flag from subcommands; it is only available as `uv --version`.

- **Omit Python 3.7 downloads from managed versions ([#13022](https://github.com/astral-sh/uv/pull/13022))**

  Python 3.7 is EOL and not formally supported by uv; however, Python 3.7 was previously available for download on a subset of platforms.

- **Reject non-PEP 751 TOML files in install, compile, and export commands ([#13120](https://github.com/astral-sh/uv/pull/13120), [#13119](https://github.com/astral-sh/uv/pull/13119))**

  Previously, uv treated arbitrary `.toml` files passed to commands (e.g., `uv pip install -r foo.toml` or `uv pip compile -o foo.toml`) as `requirements.txt`-formatted files. Now, uv will error instead. If using PEP 751 lockfiles, use the standardized format for custom names instead, e.g., `pylock.foo.toml`.
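
  For instance (`foo.toml` stands in for an arbitrary non-standard name, and `requirements.in` is a placeholder):

  ```console
  # Now rejected: an arbitrary .toml output is no longer treated as a requirements.txt-formatted file.
  $ uv pip compile -o foo.toml requirements.in
  # Use the standardized PEP 751 naming scheme for custom names instead.
  $ uv pip compile -o pylock.foo.toml requirements.in
  ```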

- **Ignore arbitrary Python requests in version files ([#12909](https://github.com/astral-sh/uv/pull/12909))**

  uv allows arbitrary strings to be used for Python version requests, in which case they are treated as an executable name to search for in the `PATH`. However, using this form of request in `.python-version` files is non-standard and conflicts with `pyenv-virtualenv`, which writes environment names to `.python-version` files. In this release, uv will now ignore requests that are arbitrary strings when found in `.python-version` files.

- **Error on unknown dependency object specifiers ([#12811](https://github.com/astral-sh/uv/pull/12811))**

  The `[dependency-groups]` entries can include "object specifiers", e.g. `set-phasers-to = ...` in:

  ```toml
  [dependency-groups]
  foo = ["pyparsing"]
  bar = [{set-phasers-to = "stun"}]
  ```

  However, the only currently spec-compliant object specifier is `include-group`. Previously, uv would ignore unknown object specifiers. Now, uv will error.

- **Make `--frozen` and `--no-sources` conflicting options ([#12671](https://github.com/astral-sh/uv/pull/12671))**

  Using `--no-sources` always requires a new resolution, and `--frozen` will always fail when used with it. Now, this conflict is encoded in the CLI options for clarity.
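
  For example (the exact conflict message is illustrative):

  ```console
  $ uv sync --frozen --no-sources
  error: the argument '--frozen' cannot be used with '--no-sources'
  ```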
|
||||
|
||||
- **Treat empty `UV_PYTHON_INSTALL_DIR` and `UV_TOOL_DIR` as unset
|
||||
([#12907](https://github.com/astral-sh/uv/pull/12907),
|
||||
[#12905](https://github.com/astral-sh/uv/pull/12905))**
|
||||
|
||||
Previously, these variables were treated as set to the current working directory when set to an
|
||||
empty string. Now, uv will ignore these variables when empty. This matches uv's behavior for other
|
||||
environment variables which configure directories.
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Disallow mixing requirements across PyTorch indexes
|
||||
([#13179](https://github.com/astral-sh/uv/pull/13179))
|
||||
- Add optional managed Python archive download cache
|
||||
([#12175](https://github.com/astral-sh/uv/pull/12175))
|
||||
- Add `poetry-core` as a `uv init` build backend option
|
||||
([#12781](https://github.com/astral-sh/uv/pull/12781))
|
||||
- Show tag hints when failing to find a compatible wheel in `pylock.toml`
|
||||
([#13136](https://github.com/astral-sh/uv/pull/13136))
|
||||
- Report Python versions in `pyvenv.cfg` version mismatch
|
||||
([#13027](https://github.com/astral-sh/uv/pull/13027))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Avoid erroring on omitted wheel-only packages in `pylock.toml`
|
||||
([#13132](https://github.com/astral-sh/uv/pull/13132))
|
||||
- Fix display name for `uvx --version` ([#13109](https://github.com/astral-sh/uv/pull/13109))
|
||||
- Restore handling of authentication when encountering redirects
|
||||
([#13050](https://github.com/astral-sh/uv/pull/13050))
|
||||
- Respect build options (`--no-binary` et al) in `pylock.toml`
|
||||
([#13134](https://github.com/astral-sh/uv/pull/13134))
|
||||
- Use `upload-time` rather than `upload_time` in `uv.lock`
|
||||
([#13176](https://github.com/astral-sh/uv/pull/13176))
|
||||
|
||||
### Documentation
|
||||
|
||||
- Changed `fish` completions append `>>` to overwrite `>`
|
||||
([#13130](https://github.com/astral-sh/uv/pull/13130))
|
||||
- Add `pylock.toml` mentions where relevant ([#13115](https://github.com/astral-sh/uv/pull/13115))
|
||||
- Add ROCm example to the PyTorch guide ([#13200](https://github.com/astral-sh/uv/pull/13200))
|
||||
- Upgrade PyTorch guide to CUDA 12.8 and PyTorch 2.7
|
||||
([#13199](https://github.com/astral-sh/uv/pull/13199))
|
||||
|
||||
## 0.7.1
|
||||
|
||||
### Enhancement
|
||||
|
||||
- Add support for BLAKE2b-256 ([#13204](https://github.com/astral-sh/uv/pull/13204))
|
||||
|
||||
### Bugfix
|
||||
|
||||
- Revert fix handling of authentication when encountering redirects
|
||||
([#13215](https://github.com/astral-sh/uv/pull/13215))
|
||||
|
||||
## 0.7.2
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Improve trace log for retryable errors ([#13228](https://github.com/astral-sh/uv/pull/13228))
|
||||
- Use "error" instead of "warning" for self-update message
|
||||
([#13229](https://github.com/astral-sh/uv/pull/13229))
|
||||
- Error when `uv version` is used with project-specific flags but no project is found
|
||||
([#13203](https://github.com/astral-sh/uv/pull/13203))
|
||||
|
||||
### Bug fixes
|
||||
|
||||
- Fix incorrect virtual environment invalidation for pre-release Python versions
|
||||
([#13234](https://github.com/astral-sh/uv/pull/13234))
|
||||
- Fix patching of `clang` in managed Python sysconfig
|
||||
([#13237](https://github.com/astral-sh/uv/pull/13237))
|
||||
- Respect `--project` in `uv version` ([#13230](https://github.com/astral-sh/uv/pull/13230))
|
||||
|
||||

## 0.7.3

### Enhancements

- Add `--dry-run` support to `uv self update` ([#9829](https://github.com/astral-sh/uv/pull/9829))
- Add `--show-with` to `uv tool list` to list packages included by `--with` ([#13264](https://github.com/astral-sh/uv/pull/13264))
- De-duplicate fetched index URLs ([#13205](https://github.com/astral-sh/uv/pull/13205))
- Support more zip compression formats: bzip2, lzma, xz, zstd ([#13285](https://github.com/astral-sh/uv/pull/13285))
- Add support for downloading GraalPy ([#13172](https://github.com/astral-sh/uv/pull/13172))
- Improve error message when a virtual environment Python symlink is broken ([#12168](https://github.com/astral-sh/uv/pull/12168))
- Use `fs_err` for paths in symlinking errors ([#13303](https://github.com/astral-sh/uv/pull/13303))
- Minify and embed managed Python JSON at compile time ([#12967](https://github.com/astral-sh/uv/pull/12967))

### Preview features

- Build backend: Make preview default and add configuration docs ([#12804](https://github.com/astral-sh/uv/pull/12804))
- Build backend: Allow escaping in globs ([#13313](https://github.com/astral-sh/uv/pull/13313))
- Build backend: Make builds reproducible across operating systems ([#13171](https://github.com/astral-sh/uv/pull/13171))

### Configuration

- Add `python-downloads-json-url` option for `uv.toml` to configure custom Python installations via JSON URL ([#12974](https://github.com/astral-sh/uv/pull/12974))

### Bug fixes

- Check nested IO errors for retries ([#13260](https://github.com/astral-sh/uv/pull/13260))
- Accept `musllinux_1_0` as a valid platform tag ([#13289](https://github.com/astral-sh/uv/pull/13289))
- Fix discovery of pre-release managed Python versions in range requests ([#13330](https://github.com/astral-sh/uv/pull/13330))
- Respect locked script preferences in `uv run --with` ([#13283](https://github.com/astral-sh/uv/pull/13283))
- Retry streaming downloads on broken pipe errors ([#13281](https://github.com/astral-sh/uv/pull/13281))
- Treat already-installed base environment packages as preferences in `uv run --with` ([#13284](https://github.com/astral-sh/uv/pull/13284))
- Avoid enumerating sources in errors for path Python requests ([#13335](https://github.com/astral-sh/uv/pull/13335))
- Avoid re-creating virtual environment with `--no-sync` ([#13287](https://github.com/astral-sh/uv/pull/13287))

### Documentation

- Remove outdated description of index strategy ([#13326](https://github.com/astral-sh/uv/pull/13326))
- Update "Viewing the version" docs ([#13241](https://github.com/astral-sh/uv/pull/13241))

## 0.7.4

### Enhancements

- Add more context to external errors ([#13351](https://github.com/astral-sh/uv/pull/13351))
- Align indentation of long arguments ([#13394](https://github.com/astral-sh/uv/pull/13394))
- Preserve order of dependencies which are sorted naively ([#13334](https://github.com/astral-sh/uv/pull/13334))
- Align progress bars by largest name length ([#13266](https://github.com/astral-sh/uv/pull/13266))
- Reinstall local packages in `uv add` ([#13462](https://github.com/astral-sh/uv/pull/13462))
- Rename `--raw-sources` to `--raw` ([#13348](https://github.com/astral-sh/uv/pull/13348))
- Show 'Downgraded' when `self update` is used to install an older version ([#13340](https://github.com/astral-sh/uv/pull/13340))
- Suggest `uv self update` if required uv version is newer ([#13305](https://github.com/astral-sh/uv/pull/13305))
- Add 3.14 beta images to uv Docker images ([#13390](https://github.com/astral-sh/uv/pull/13390))
- Add comma after "i.e." in Conda environment error ([#13423](https://github.com/astral-sh/uv/pull/13423))
- Be more precise in unpinned packages warning ([#13426](https://github.com/astral-sh/uv/pull/13426))
- Fix detection of sorted dependencies when include-group is used ([#13354](https://github.com/astral-sh/uv/pull/13354))
- Fix display of HTTP responses in trace logs for retry of errors ([#13339](https://github.com/astral-sh/uv/pull/13339))
- Log skip reasons during Python installation key interpreter match checks ([#13472](https://github.com/astral-sh/uv/pull/13472))
- Redact credentials when displaying URLs ([#13333](https://github.com/astral-sh/uv/pull/13333))

### Bug fixes

- Avoid erroring on `pylock.toml` dependency entries ([#13384](https://github.com/astral-sh/uv/pull/13384))
- Avoid panics for cannot-be-a-base URLs ([#13406](https://github.com/astral-sh/uv/pull/13406))
- Ensure cached realm credentials are applied if no password is found for index URL ([#13463](https://github.com/astral-sh/uv/pull/13463))
- Fix `.tgz` parsing to respect true extension ([#13382](https://github.com/astral-sh/uv/pull/13382))
- Fix double self-dependency ([#13366](https://github.com/astral-sh/uv/pull/13366))
- Reject `pylock.toml` in `uv add -r` ([#13421](https://github.com/astral-sh/uv/pull/13421))
- Retain dot-separated wheel tags during cache prune ([#13379](https://github.com/astral-sh/uv/pull/13379))
- Retain trailing comments after PEP 723 metadata block ([#13460](https://github.com/astral-sh/uv/pull/13460))

### Documentation

- Use "export" instead of "install" in `uv export` arguments ([#13430](https://github.com/astral-sh/uv/pull/13430))
- Remove extra newline ([#13461](https://github.com/astral-sh/uv/pull/13461))

### Preview features

- Build backend: Normalize glob paths ([#13465](https://github.com/astral-sh/uv/pull/13465))

## 0.7.5

### Bug fixes

- Support case-sensitive module discovery in the build backend ([#13468](https://github.com/astral-sh/uv/pull/13468))
- Bump Simple cache bucket to v16 ([#13498](https://github.com/astral-sh/uv/pull/13498))
- Don't error when the script is too short for the buffer ([#13488](https://github.com/astral-sh/uv/pull/13488))
- Add missing word in "script not supported" error ([#13483](https://github.com/astral-sh/uv/pull/13483))

## 0.7.6

### Python

- Add Python 3.14 on musl
- Add free-threaded Python on musl
- Add Python 3.14.0a7
- Statically link `libpython` into the interpreter on Linux for a significant performance boost

See the
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250517)
for more details.

### Enhancements

- Improve compatibility of `VIRTUAL_ENV_PROMPT` value ([#13501](https://github.com/astral-sh/uv/pull/13501))
- Bump MSRV to 1.85 and Edition 2024 ([#13516](https://github.com/astral-sh/uv/pull/13516))

### Bug fixes

- Respect default extras in `uv remove` ([#13380](https://github.com/astral-sh/uv/pull/13380))

### Documentation

- Fix PowerShell code blocks ([#13511](https://github.com/astral-sh/uv/pull/13511))

## 0.7.7

### Python

- Work around third-party packages that (incorrectly) assume the interpreter is dynamically linking libpython
- Allow the experimental JIT to be enabled at runtime on Python 3.13 and 3.14 on aarch64 macOS (Apple Silicon)

See the
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250521)
for more details.

### Bug fixes

- Make `uv version` lock and sync ([#13317](https://github.com/astral-sh/uv/pull/13317))
- Fix references to `ldd` in diagnostics to correctly refer to `ld.so` ([#13552](https://github.com/astral-sh/uv/pull/13552))

### Documentation

- Clarify adding SSH Git dependencies ([#13534](https://github.com/astral-sh/uv/pull/13534))

## 0.7.8

### Python

We are reverting most of our Python changes from `uv 0.7.6` and `uv 0.7.7` due to a miscompilation
that makes the Python interpreter behave incorrectly, resulting in spurious type errors involving
`str`. This issue seems to be isolated to x86_64 Linux, and affected at least Python 3.12, 3.13, and
3.14.

The following changes that were introduced in those versions of uv are temporarily being reverted
while we test and deploy a proper fix for the miscompilation:

- Add Python 3.14 on musl
- Add free-threaded Python on musl
- Add Python 3.14.0a7
- Statically link `libpython` into the interpreter on Linux for a significant performance boost

See [the issue for details](https://github.com/astral-sh/uv/issues/13610).

### Documentation

- Remove misleading line in pin documentation ([#13611](https://github.com/astral-sh/uv/pull/13611))

## 0.7.9

### Python

The changes reverted in [0.7.8](#078) have been restored.

See the
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250529)
for more details.

### Enhancements

- Improve obfuscation of credentials in URLs ([#13560](https://github.com/astral-sh/uv/pull/13560))
- Allow running non-default Python implementations via `uvx` ([#13583](https://github.com/astral-sh/uv/pull/13583))
- Add `uvw` as alias for `uv` without console window on Windows ([#11786](https://github.com/astral-sh/uv/pull/11786))
- Allow discovery of x86-64 managed Python builds on macOS ([#13722](https://github.com/astral-sh/uv/pull/13722))
- Differentiate between implicit vs explicit architecture requests ([#13723](https://github.com/astral-sh/uv/pull/13723))
- Implement ordering for Python architectures to prefer native installations ([#13709](https://github.com/astral-sh/uv/pull/13709))
- Only show the first match per platform (and architecture) by default in `uv python list` ([#13721](https://github.com/astral-sh/uv/pull/13721))
- Write the path of the parent environment to an `extends-environment` key in the `pyvenv.cfg` file of an ephemeral environment ([#13598](https://github.com/astral-sh/uv/pull/13598))
- Improve the error message when libc cannot be found, e.g., when using distroless containers ([#13549](https://github.com/astral-sh/uv/pull/13549))

### Performance

- Avoid rendering info log level ([#13642](https://github.com/astral-sh/uv/pull/13642))
- Improve performance of `uv-python` crate's manylinux submodule ([#11131](https://github.com/astral-sh/uv/pull/11131))
- Optimize `Version` display ([#13643](https://github.com/astral-sh/uv/pull/13643))
- Reduce number of reference-checks for `uv cache clean` ([#13669](https://github.com/astral-sh/uv/pull/13669))

### Bug fixes

- Avoid reinstalling dependency group members with `--all-packages` ([#13678](https://github.com/astral-sh/uv/pull/13678))
- Don't fail direct URL hash checking with dependency metadata ([#13736](https://github.com/astral-sh/uv/pull/13736))
- Exit early on `self update` if global `--offline` is set ([#13663](https://github.com/astral-sh/uv/pull/13663))
- Fix cases where the uv lock is incorrectly marked as out of date ([#13635](https://github.com/astral-sh/uv/pull/13635))
- Include pre-release versions in `uv python install --reinstall` ([#13645](https://github.com/astral-sh/uv/pull/13645))
- Set `LC_ALL=C` for git when checking git worktree ([#13637](https://github.com/astral-sh/uv/pull/13637))
- Avoid rejecting Windows paths for remote Python download JSON targets ([#13625](https://github.com/astral-sh/uv/pull/13625))

### Preview

- Add `uv add --bounds` to configure version constraints ([#12946](https://github.com/astral-sh/uv/pull/12946))

### Documentation

- Add documentation about Python versions to Tools concept page ([#7673](https://github.com/astral-sh/uv/pull/7673))
- Add example of enabling Dependabot ([#13692](https://github.com/astral-sh/uv/pull/13692))
- Fix `exclude-newer` date format for persistent configuration files ([#13706](https://github.com/astral-sh/uv/pull/13706))
- Quote versions variables in GitLab documentation ([#13679](https://github.com/astral-sh/uv/pull/13679))
- Update Dependabot support status ([#13690](https://github.com/astral-sh/uv/pull/13690))
- Explicitly specify adding a new repo entry to the `repos` list in `.pre-commit-config.yaml` ([#10243](https://github.com/astral-sh/uv/pull/10243))
- Add integration with marimo guide ([#13691](https://github.com/astral-sh/uv/pull/13691))
- Add pronunciation to README ([#5336](https://github.com/astral-sh/uv/pull/5336))

## 0.7.10

### Enhancements

- Add `--show-extras` to `uv tool list` ([#13783](https://github.com/astral-sh/uv/pull/13783))
- Add dynamically generated sysconfig replacement mappings ([#13441](https://github.com/astral-sh/uv/pull/13441))
- Add data locations to install wheel logs ([#13797](https://github.com/astral-sh/uv/pull/13797))

### Bug fixes

- Avoid redaction of placeholder `git` username when using SSH authentication ([#13799](https://github.com/astral-sh/uv/pull/13799))
- Propagate credentials to files on devpi indexes ending in `/+simple` ([#13743](https://github.com/astral-sh/uv/pull/13743))
- Restore retention of credentials for direct URLs in `uv export` ([#13809](https://github.com/astral-sh/uv/pull/13809))

## 0.7.11

### Python

- Add Python 3.14.0b1
- Add Python 3.13.4
- Add Python 3.12.11
- Add Python 3.11.13
- Add Python 3.10.18
- Add Python 3.9.23

### Enhancements

- Add Pyodide support ([#12731](https://github.com/astral-sh/uv/pull/12731))
- Better error message for version specifier with missing operator ([#13803](https://github.com/astral-sh/uv/pull/13803))

### Bug fixes

- Downgrade `reqwest` and `hyper-util` to resolve connection reset errors over IPv6 ([#13835](https://github.com/astral-sh/uv/pull/13835))
- Prefer the `uv` binary's version when checking whether it's up to date ([#13840](https://github.com/astral-sh/uv/pull/13840))

### Documentation

- Use "terminal driver" instead of "shell" in `SIGINT` docs ([#13787](https://github.com/astral-sh/uv/pull/13787))

## 0.7.12

### Enhancements

- Add `uv python pin --rm` to remove `.python-version` pins ([#13860](https://github.com/astral-sh/uv/pull/13860))
- Don't hint at versions removed by `exclude-newer` ([#13884](https://github.com/astral-sh/uv/pull/13884))
- Add hint to use `tool.uv.environments` on resolution error ([#13455](https://github.com/astral-sh/uv/pull/13455))
- Add hint to use `tool.uv.required-environments` on resolution error ([#13575](https://github.com/astral-sh/uv/pull/13575)) (a configuration sketch for both settings follows this list)
- Improve `python pin` error messages ([#13862](https://github.com/astral-sh/uv/pull/13862))
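
These two hints point at settings under `[tool.uv]` in `pyproject.toml`. A rough illustration follows; the environment marker strings are placeholders, not taken from the release notes:

```toml
[tool.uv]
# Limit universal resolution to the environments this project actually targets.
environments = [
    "sys_platform == 'linux'",
    "sys_platform == 'darwin'",
]
# Require that resolution also covers these platforms, erroring if it cannot.
required-environments = [
    "sys_platform == 'darwin' and platform_machine == 'x86_64'",
]
```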

### Bug fixes

- Lock environments during `uv sync`, `uv add` and `uv remove` to prevent race conditions ([#13869](https://github.com/astral-sh/uv/pull/13869))
- Add `--no-editable` to `uv export` for `pylock.toml` ([#13852](https://github.com/astral-sh/uv/pull/13852))

### Documentation

- List `.gitignore` in project init files ([#13855](https://github.com/astral-sh/uv/pull/13855))
- Move the pip interface documentation into the concepts section ([#13841](https://github.com/astral-sh/uv/pull/13841))
- Remove the configuration section in favor of concepts / reference ([#13842](https://github.com/astral-sh/uv/pull/13842))
- Update Git and GitHub Actions docs to mention `gh auth login` ([#13850](https://github.com/astral-sh/uv/pull/13850))

### Preview

- Fix directory glob traversal fallback preventing exclusion of all files ([#13882](https://github.com/astral-sh/uv/pull/13882))

## 0.7.13

### Python

- Add Python 3.14.0b2
- Add Python 3.13.5
- Fix stability of `uuid.getnode` on 3.13

See the
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250612)
for more details.

### Enhancements

- Download versions in `uv python pin` if not found ([#13946](https://github.com/astral-sh/uv/pull/13946))
- Use TTY detection to determine if SIGINT forwarding is enabled ([#13925](https://github.com/astral-sh/uv/pull/13925))
- Avoid fetching an exact, cached Git commit, even if it isn't locked ([#13748](https://github.com/astral-sh/uv/pull/13748))
- Add `zstd` and `deflate` to `Accept-Encoding` ([#13982](https://github.com/astral-sh/uv/pull/13982))
- Build binaries for riscv64 ([#12688](https://github.com/astral-sh/uv/pull/12688))

### Bug fixes

- Check if relative URL is valid directory before treating as index ([#13917](https://github.com/astral-sh/uv/pull/13917))
- Ignore Python discovery errors during `uv python pin` ([#13944](https://github.com/astral-sh/uv/pull/13944))
- Do not allow `uv add --group ... --script` ([#13997](https://github.com/astral-sh/uv/pull/13997))

### Preview changes

- Build backend: Support namespace packages ([#13833](https://github.com/astral-sh/uv/pull/13833))

### Documentation

- Add 3.14 to the supported platform reference ([#13990](https://github.com/astral-sh/uv/pull/13990))
- Add an `llms.txt` to uv ([#13929](https://github.com/astral-sh/uv/pull/13929))
- Add supported macOS version to the platform reference ([#13993](https://github.com/astral-sh/uv/pull/13993))
- Update platform support reference to include Python implementation list ([#13991](https://github.com/astral-sh/uv/pull/13991))
- Update pytorch.md ([#13899](https://github.com/astral-sh/uv/pull/13899))
- Update the CLI help and reference to include references to the Python bin directory ([#13978](https://github.com/astral-sh/uv/pull/13978))

## 0.7.14

### Enhancements

- Add XPU to `--torch-backend` ([#14172](https://github.com/astral-sh/uv/pull/14172))
- Add ROCm backends to `--torch-backend` ([#14120](https://github.com/astral-sh/uv/pull/14120))
- Remove preview label from `--torch-backend` ([#14119](https://github.com/astral-sh/uv/pull/14119))
- Add `[tool.uv.dependency-groups].mygroup.requires-python` ([#13735](https://github.com/astral-sh/uv/pull/13735)) (see the sketch after this list)
- Add auto-detection for AMD GPUs ([#14176](https://github.com/astral-sh/uv/pull/14176))
- Show retries for HTTP status code errors ([#13897](https://github.com/astral-sh/uv/pull/13897))
- Support transparent Python patch version upgrades ([#13954](https://github.com/astral-sh/uv/pull/13954))
- Warn on empty index directory ([#13940](https://github.com/astral-sh/uv/pull/13940))
- Publish to DockerHub ([#14088](https://github.com/astral-sh/uv/pull/14088))
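
A minimal sketch of the new `[tool.uv.dependency-groups]` setting named above; the group name and Python bound are illustrative:

```toml
[dependency-groups]
dev = ["pytest"]

[tool.uv.dependency-groups]
# Only the `dev` group requires a newer interpreter than the project itself.
dev = { requires-python = ">=3.12" }
```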

### Performance

- Make cold resolves about 10% faster ([#14035](https://github.com/astral-sh/uv/pull/14035))

### Bug fixes

- Don't use walrus operator in interpreter query script ([#14108](https://github.com/astral-sh/uv/pull/14108))
- Fix handling of changes to `requires-python` ([#14076](https://github.com/astral-sh/uv/pull/14076))
- Fix implied `platform_machine` marker for `win_amd64` platform tag ([#14041](https://github.com/astral-sh/uv/pull/14041))
- Only update existing symlink directories on preview uninstall ([#14179](https://github.com/astral-sh/uv/pull/14179))
- Serialize Python requests for tools as canonicalized strings ([#14109](https://github.com/astral-sh/uv/pull/14109))
- Support netrc and same-origin credential propagation on index redirects ([#14126](https://github.com/astral-sh/uv/pull/14126))
- Support reading `dependency-groups` from `pyproject.toml` files with no `[project]` table ([#13742](https://github.com/astral-sh/uv/pull/13742))
- Handle an existing shebang in `uv init --script` ([#14141](https://github.com/astral-sh/uv/pull/14141))
- Prevent concurrent updates of the environment in `uv run` ([#14153](https://github.com/astral-sh/uv/pull/14153))
- Filter managed Python distributions by platform before querying when included in request ([#13936](https://github.com/astral-sh/uv/pull/13936))

### Documentation

- Replace cuda124 with cuda128 ([#14168](https://github.com/astral-sh/uv/pull/14168))
- Document the way member sources shadow workspace sources ([#14136](https://github.com/astral-sh/uv/pull/14136))
- Sync documented PyTorch integration index for CUDA and ROCm versions from PyTorch website ([#14100](https://github.com/astral-sh/uv/pull/14100))

## 0.7.15

### Enhancements

- Consistently use `Ordering::Relaxed` for standalone atomic use cases ([#14190](https://github.com/astral-sh/uv/pull/14190))
- Warn on ambiguous relative paths for `--index` ([#14152](https://github.com/astral-sh/uv/pull/14152))
- Skip GitHub fast path when rate-limited ([#13033](https://github.com/astral-sh/uv/pull/13033))
- Preserve newlines in `schema.json` descriptions ([#13693](https://github.com/astral-sh/uv/pull/13693))

### Bug fixes

- Add check for using minor version link when creating a venv on Windows ([#14252](https://github.com/astral-sh/uv/pull/14252))
- Strip query parameters when parsing source URL ([#14224](https://github.com/astral-sh/uv/pull/14224))

### Documentation

- Add a link to PyPI FAQ to clarify what per-project token is ([#14242](https://github.com/astral-sh/uv/pull/14242))

### Preview features

- Allow symlinks in the build backend ([#14212](https://github.com/astral-sh/uv/pull/14212))

## 0.7.16

### Python

- Add Python 3.14.0b3

See the
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250626)
for more details.

### Enhancements

- Include path or URL when failing to convert in lockfile ([#14292](https://github.com/astral-sh/uv/pull/14292))
- Warn when `~=` is used as a Python version specifier without a patch version ([#14008](https://github.com/astral-sh/uv/pull/14008))

### Preview features

- Ensure preview default Python installs are upgradeable ([#14261](https://github.com/astral-sh/uv/pull/14261))

### Performance

- Share workspace cache between lock and sync operations ([#14321](https://github.com/astral-sh/uv/pull/14321))

### Bug fixes

- Allow local indexes to reference remote files ([#14294](https://github.com/astral-sh/uv/pull/14294))
- Avoid rendering desugared prefix matches in error messages ([#14195](https://github.com/astral-sh/uv/pull/14195))
- Avoid using path URL for workspace Git dependencies in `requirements.txt` ([#14288](https://github.com/astral-sh/uv/pull/14288))
- Normalize index URLs to remove trailing slash ([#14245](https://github.com/astral-sh/uv/pull/14245))
- Respect URL-encoded credentials in redirect location ([#14315](https://github.com/astral-sh/uv/pull/14315))
- Lock the source tree when running setuptools, to protect concurrent builds ([#14174](https://github.com/astral-sh/uv/pull/14174))

### Documentation

- Note that GCP Artifact Registry download URLs must have `/simple` component ([#14251](https://github.com/astral-sh/uv/pull/14251))

## 0.7.17

### Bug fixes

- Apply build constraints when resolving `--with` dependencies ([#14340](https://github.com/astral-sh/uv/pull/14340))
- Drop trailing slashes when converting index URL from URL ([#14346](https://github.com/astral-sh/uv/pull/14346))
- Ignore `UV_PYTHON_CACHE_DIR` when empty ([#14336](https://github.com/astral-sh/uv/pull/14336))
- Fix error message ordering for `pyvenv.cfg` version conflict ([#14329](https://github.com/astral-sh/uv/pull/14329))

## 0.7.18

### Python

- Add arm64 Windows Python 3.11, 3.12, 3.13, and 3.14. These are not downloaded by default, since
  x86-64 Python has broader ecosystem support on Windows. However, they can be requested with
  `cpython-<version>-windows-aarch64`.

See the
[python-build-standalone release](https://github.com/astral-sh/python-build-standalone/releases/tag/20250630)
for more details.

### Enhancements

- Keep track of retries in `ManagedPythonDownload::fetch_with_retry` ([#14378](https://github.com/astral-sh/uv/pull/14378))
- Reuse build (virtual) environments across resolution and installation ([#14338](https://github.com/astral-sh/uv/pull/14338))
- Improve trace message for cached Python interpreter query ([#14328](https://github.com/astral-sh/uv/pull/14328))
- Use parsed URLs for conflicting URL error message ([#14380](https://github.com/astral-sh/uv/pull/14380))

### Preview features

- Ignore invalid build backend settings when not building ([#14372](https://github.com/astral-sh/uv/pull/14372))

### Bug fixes

- Fix equals-star and tilde-equals with `python_version` and `python_full_version` ([#14271](https://github.com/astral-sh/uv/pull/14271))
- Include the canonical path in the interpreter query cache key ([#14331](https://github.com/astral-sh/uv/pull/14331))
- Only drop build directories on program exit ([#14304](https://github.com/astral-sh/uv/pull/14304))
- Error instead of panic on conflict between global and subcommand flags ([#14368](https://github.com/astral-sh/uv/pull/14368))
- Consistently normalize trailing slashes on URLs with no path segments ([#14349](https://github.com/astral-sh/uv/pull/14349))

### Documentation

- Add instructions for publishing to JFrog's Artifactory ([#14253](https://github.com/astral-sh/uv/pull/14253))
- Edits to the build backend documentation ([#14376](https://github.com/astral-sh/uv/pull/14376))

## 0.7.19

The **[uv build backend](https://docs.astral.sh/uv/concepts/build-backend/) is now stable**, and
considered ready for production use.

The uv build backend is a great choice for pure Python projects. It has reasonable defaults, with
the goal of requiring zero configuration for most users, but provides flexible configuration to
accommodate most Python project structures. It integrates tightly with uv, to improve messaging and
user experience. It validates project metadata and structures, preventing common mistakes. And,
finally, it's very fast — `uv sync` on a new project (from `uv init`) is 10-30x faster than with
other build backends.

To use uv as a build backend in an existing project, add `uv_build` to the `[build-system]` section
in your `pyproject.toml`:

```toml
[build-system]
requires = ["uv_build>=0.7.19,<0.8.0"]
build-backend = "uv_build"
```

In a future release, it will replace `hatchling` as the default in `uv init`. As before, uv will
remain compatible with all standards-compliant build backends.
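
For a new project, a complete (if minimal) `pyproject.toml` using the uv build backend might look like the sketch below. The project metadata is illustrative rather than taken from the release notes, and, as I understand the defaults, the backend expects the package under `src/` (e.g. `src/example_package/__init__.py`):

```toml
[project]
name = "example-package"   # illustrative name
version = "0.1.0"
requires-python = ">=3.9"
dependencies = []

[build-system]
requires = ["uv_build>=0.7.19,<0.8.0"]
build-backend = "uv_build"
```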

### Python

- Add PGO distributions of Python for aarch64 Linux, which are optimized for better performance

See the
[python-build-standalone release](https://github.com/astral-sh/python-build-standalone/releases/tag/20250702)
for more details.

### Enhancements

- Ignore Python patch version for `--universal` pip compile ([#14405](https://github.com/astral-sh/uv/pull/14405))
- Update the tilde version specifier warning to include more context ([#14335](https://github.com/astral-sh/uv/pull/14335))
- Clarify behavior and hint on tool install when no executables are available ([#14423](https://github.com/astral-sh/uv/pull/14423))

### Bug fixes

- Make project and interpreter lock acquisition non-fatal ([#14404](https://github.com/astral-sh/uv/pull/14404))
- Include `sys.prefix` in cached environment keys to avoid `--with` collisions across projects ([#14403](https://github.com/astral-sh/uv/pull/14403))

### Documentation

- Add a migration guide from pip to uv projects ([#12382](https://github.com/astral-sh/uv/pull/12382))

## 0.7.20

### Python

- Add Python 3.14.0b4
- Add zstd support to Python 3.14 on Unix (it was already available on Windows)
- Add PyPy 7.3.20 (for Python 3.11.13)

See the [PyPy](https://pypy.org/posts/2025/07/pypy-v7320-release.html) and
[`python-build-standalone`](https://github.com/astral-sh/python-build-standalone/releases/tag/20250708)
release notes for more details.

### Enhancements

- Add `--workspace` flag to `uv add` ([#14496](https://github.com/astral-sh/uv/pull/14496))
- Add auto-detection for Intel GPUs ([#14386](https://github.com/astral-sh/uv/pull/14386))
- Drop trailing arguments when writing shebangs ([#14519](https://github.com/astral-sh/uv/pull/14519))
- Add debug message when skipping Python downloads ([#14509](https://github.com/astral-sh/uv/pull/14509))
- Add support for declaring multiple modules in namespace packages ([#14460](https://github.com/astral-sh/uv/pull/14460))

### Bug fixes

- Revert normalization of trailing slashes on index URLs ([#14511](https://github.com/astral-sh/uv/pull/14511))
- Fix forced resolution with all extras in `uv version` ([#14434](https://github.com/astral-sh/uv/pull/14434))
- Fix handling of pre-releases in preferences ([#14498](https://github.com/astral-sh/uv/pull/14498))
- Remove transparent variants in `uv-extract` to enable retries ([#14450](https://github.com/astral-sh/uv/pull/14450))

### Rust API

- Add method to get packages involved in a `NoSolutionError` ([#14457](https://github.com/astral-sh/uv/pull/14457))
- Make `ErrorTree` for `NoSolutionError` public ([#14444](https://github.com/astral-sh/uv/pull/14444))

### Documentation

- Finish incomplete sentence in pip migration guide ([#14432](https://github.com/astral-sh/uv/pull/14432))
- Remove `cache-dependency-glob` examples for `setup-uv` ([#14493](https://github.com/astral-sh/uv/pull/14493))
- Remove `uv pip sync` suggestion with `pyproject.toml` ([#14510](https://github.com/astral-sh/uv/pull/14510))
- Update documentation for GitHub to use `setup-uv@v6` ([#14490](https://github.com/astral-sh/uv/pull/14490))

## 0.7.21

### Python

- Restore the SQLite `fts4`, `fts5`, `rtree`, and `geopoly` extensions on macOS and Linux

See the
[`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250712)
for more details.

### Enhancements

- Add `--python-platform` to `uv sync` ([#14320](https://github.com/astral-sh/uv/pull/14320))
- Support pre-releases in `uv version --bump` ([#13578](https://github.com/astral-sh/uv/pull/13578))
- Add `-w` shorthand for `--with` ([#14530](https://github.com/astral-sh/uv/pull/14530))
- Add an exception handler on Windows to display information on crash ([#14582](https://github.com/astral-sh/uv/pull/14582))
- Add hint when Python downloads are disabled ([#14522](https://github.com/astral-sh/uv/pull/14522))
- Add `UV_HTTP_RETRIES` to customize retry counts ([#14544](https://github.com/astral-sh/uv/pull/14544))
- Follow leaf symlinks matched by globs in `cache-key` ([#13438](https://github.com/astral-sh/uv/pull/13438))
- Support parent path components (`..`) in globs in `cache-key` ([#13469](https://github.com/astral-sh/uv/pull/13469)) (a sketch follows this list)
- Improve `cache-key` performance ([#13469](https://github.com/astral-sh/uv/pull/13469))
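
As a rough illustration of how these glob entries are configured (the setting is `cache-keys` under `[tool.uv]`; the paths below are placeholders, not from the release notes):

```toml
[tool.uv]
cache-keys = [
    { file = "pyproject.toml" },
    # Globs may now traverse into a parent/sibling directory and follow leaf symlinks.
    { file = "../shared/**/*.py" },
]
```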

### Preview features

- Add `uv sync --output-format json` ([#13689](https://github.com/astral-sh/uv/pull/13689))

### Bug fixes

- Do not re-resolve with a new Python version in `uv tool` if it is incompatible with `--python` ([#14606](https://github.com/astral-sh/uv/pull/14606))

### Documentation

- Document how to nest dependency groups with `include-group` ([#14539](https://github.com/astral-sh/uv/pull/14539))
- Fix repeated word in Pyodide doc ([#14554](https://github.com/astral-sh/uv/pull/14554))
- Update CONTRIBUTING.md with instructions to format Markdown files via Docker ([#14246](https://github.com/astral-sh/uv/pull/14246))
- Fix version number for `setup-python` ([#14533](https://github.com/astral-sh/uv/pull/14533))

## 0.7.22

### Python

- Upgrade GraalPy to 24.2.2

See the [GraalPy release notes](https://github.com/oracle/graalpython/releases/tag/graal-24.2.2) for
more details.

### Configuration

- Add `UV_COMPILE_BYTECODE_TIMEOUT` environment variable ([#14369](https://github.com/astral-sh/uv/pull/14369))
- Allow users to override index `cache-control` headers ([#14620](https://github.com/astral-sh/uv/pull/14620))
- Add `UV_LIBC` to override libc selection in multi-libc environment ([#14646](https://github.com/astral-sh/uv/pull/14646))

### Bug fixes

- Fix `--all-arches` when paired with `--only-downloads` ([#14629](https://github.com/astral-sh/uv/pull/14629))
- Skip Windows Python interpreters that return a broken MSIX package code ([#14636](https://github.com/astral-sh/uv/pull/14636))
- Warn on invalid `uv.toml` when provided via direct path ([#14653](https://github.com/astral-sh/uv/pull/14653))
- Improve async signal safety in Windows exception handler ([#14619](https://github.com/astral-sh/uv/pull/14619))

### Documentation

- Mention the `revision` in the lockfile versioning doc ([#14634](https://github.com/astral-sh/uv/pull/14634))
- Move "Conflicting dependencies" to the "Resolution" page ([#14633](https://github.com/astral-sh/uv/pull/14633))
- Rename "Dependency specifiers" section to exclude PEP 508 reference ([#14631](https://github.com/astral-sh/uv/pull/14631))
- Suggest `uv cache clean` prior to `--reinstall` ([#14659](https://github.com/astral-sh/uv/pull/14659))

### Preview features

- Make preview Python registration on Windows non-fatal ([#14614](https://github.com/astral-sh/uv/pull/14614))
- Update preview installation of Python executables to be non-fatal ([#14612](https://github.com/astral-sh/uv/pull/14612))
- Add `uv python update-shell` ([#14627](https://github.com/astral-sh/uv/pull/14627))

changelogs/0.8.x.md: diff suppressed because it is too large.

clippy.toml:

@ -1,70 +1,4 @@
doc-valid-idents = [
    "PyPI",
    "PubGrub",
    "PyPy",
    "CPython",
    "GraalPy",
    "ReFS",
    "PyTorch",
    "ROCm",
    "XPU",
    "PowerShell",
    ".." # Include the defaults
]

disallowed-types = [
    "std::fs::DirEntry",
    "std::fs::File",
    "std::fs::OpenOptions",
    "std::fs::ReadDir",
    "tokio::fs::DirBuilder",
    "tokio::fs::DirEntry",
    "tokio::fs::File",
    "tokio::fs::OpenOptions",
    "tokio::fs::ReadDir",
]

disallowed-methods = [
    "std::fs::canonicalize",
    "std::fs::copy",
    "std::fs::create_dir",
    "std::fs::create_dir_all",
    "std::fs::hard_link",
    "std::fs::metadata",
    "std::fs::read",
    "std::fs::read_dir",
    "std::fs::read_link",
    "std::fs::read_to_string",
    "std::fs::remove_dir",
    "std::fs::remove_dir_all",
    "std::fs::remove_file",
    "std::fs::rename",
    "std::fs::set_permissions",
    "std::fs::soft_link",
    "std::fs::symlink_metadata",
    "std::fs::write",
    "tokio::fs::canonicalize",
    "tokio::fs::copy",
    "tokio::fs::create_dir",
    "tokio::fs::create_dir_all",
    "tokio::fs::hard_link",
    "tokio::fs::metadata",
    "tokio::fs::read",
    "tokio::fs::read_dir",
    "tokio::fs::read_link",
    "tokio::fs::read_to_string",
    "tokio::fs::remove_dir",
    "tokio::fs::remove_dir_all",
    "tokio::fs::remove_file",
    "tokio::fs::rename",
    "tokio::fs::set_permissions",
    "tokio::fs::symlink_metadata",
    "tokio::fs::try_exists",
    "tokio::fs::write",
    { path = "std::os::unix::fs::symlink", allow-invalid = true },
    { path = "std::os::windows::fs::symlink_dir", allow-invalid = true },
    { path = "std::os::windows::fs::symlink_file", allow-invalid = true },
    { path = "tokio::fs::symlink", allow-invalid = true },
    { path = "tokio::fs::symlink_dir", allow-invalid = true },
    { path = "tokio::fs::symlink_file", allow-invalid = true },
]

@ -1,52 +1,57 @@
# Crates

## [uv-bench](./uv-bench)
## [bench](./bench)

Functionality for benchmarking uv.

## [uv-cache-key](./uv-cache-key)
## [cache-key](./cache-key)

Generic functionality for caching paths, URLs, and other resources across platforms.

## [uv-distribution-filename](./uv-distribution-filename)
## [distribution-filename](./distribution-filename)

Parse built distribution (wheel) and source distribution (sdist) filenames to extract structured metadata.

## [uv-distribution-types](./uv-distribution-types)
## [distribution-types](./distribution-types)

Abstractions for representing built distributions (wheels) and source distributions (sdists), and the sources from which they can be downloaded.

## [uv-install-wheel-rs](./uv-install-wheel)
## [gourgeist](./gourgeist)

Install built distributions (wheels) into a virtual environment.
A `venv` replacement to create virtual environments in Rust.

## [uv-once-map](./uv-once-map)
## [install-wheel-rs](./install-wheel-rs)

Install built distributions (wheels) into a virtual environment.

## [once-map](./once-map)

A [`waitmap`](https://github.com/withoutboats/waitmap)-like concurrent hash map for executing tasks exactly once.

## [uv-pep440-rs](./uv-pep440)
## [pep440-rs](./pep440-rs)

Utilities for interacting with Python version numbers and specifiers.

## [uv-pep508-rs](./uv-pep508)
## [pep508-rs](./pep508-rs)

Utilities for parsing and evaluating [dependency specifiers](https://packaging.python.org/en/latest/specifications/dependency-specifiers/), previously known as [PEP 508](https://peps.python.org/pep-0508/).
Utilities for interacting with [PEP 508](https://peps.python.org/pep-0508/) dependency specifiers.

## [uv-platform-tags](./uv-platform-tags)
## [platform-host](./platform-host)

Functionality for parsing and inferring Python platform tags as per [PEP 425](https://peps.python.org/pep-0425/).
Functionality for detecting the current platform (operating system, architecture, etc.).

## [uv-cli](./uv-cli)
## [platform-tags](./platform-tags)

Functionality for parsing and inferring Python platform tags as per [PEP 425](https://peps.python.org/pep-0425/).

## [uv](./uv)

Command-line interface for the uv package manager.

## [uv-build-frontend](./uv-build-frontend)
## [uv-build](./uv-build)

A [PEP 517](https://www.python.org/dev/peps/pep-0517/)-compatible build frontend for uv.

@ -65,12 +70,12 @@ Development utilities for uv.

## [uv-dispatch](./uv-dispatch)

A centralized `struct` for resolving and building source distributions in isolated environments.
Implements the traits defined in `uv-types`.
Implements the traits defined in `uv-traits`.

## [uv-distribution](./uv-distribution)

Client for interacting with built distributions (wheels) and source distributions (sdists). Capable of fetching metadata, distribution contents, etc.

## [uv-extract](./uv-extract)

@ -88,7 +93,7 @@ Functionality for interacting with Git repositories.

Functionality for installing Python packages into a virtual environment.

## [uv-python](./uv-python)
## [uv-interpreter](./uv-interpreter)

Functionality for detecting and leveraging the current Python interpreter.

@ -96,38 +101,26 @@ Functionality for detecting and leveraging the current Python interpreter.

Normalize package and extra names as per Python specifications.

## [uv-requirements](./uv-requirements)
## [uv-package](./uv-package)

Utilities for reading package requirements from `pyproject.toml` and `requirements.txt` files.
Types and functionality for working with Python packages, e.g., parsing wheel files.

## [uv-resolver](./uv-resolver)

Functionality for resolving Python packages and their dependencies.

## [uv-shell](./uv-shell)

Utilities for detecting and manipulating shell environments.

## [uv-types](./uv-types)
## [uv-traits](./uv-traits)

Shared traits for uv, to avoid circular dependencies.

## [uv-pypi-types](./uv-pypi-types)
## [pypi-types](./pypi-types)

General-purpose type definitions for types used in PyPI-compatible APIs.

## [uv-virtualenv](./uv-virtualenv)

A `venv` replacement to create virtual environments in Rust.

## [uv-warnings](./uv-warnings)

User-facing warnings for uv.

## [uv-workspace](./uv-workspace)

Workspace abstractions for uv.

## [uv-requirements-txt](./uv-requirements-txt)
## [requirements-txt](./requirements-txt)

Functionality for parsing `requirements.txt` files.

@ -0,0 +1,29 @@
[package]
name = "bench"
version = "0.0.0"
description = "uv Micro-benchmarks"
publish = false
authors = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
license = { workspace = true }

[lints]
workspace = true

[lib]
bench = false

[[bench]]
name = "distribution-filename"
path = "benches/distribution_filename.rs"
harness = false

[dependencies]
distribution-filename = { path = "../distribution-filename" }
platform-tags = { path = "../platform-tags" }

criterion = { version = "0.5.1", default-features = false }

@ -0,0 +1,162 @@
use {distribution_filename::WheelFilename, platform_tags::Tags};

use bench::criterion::{
    criterion_group, criterion_main, measurement::WallTime, BenchmarkId, Criterion, Throughput,
};

/// A set of platform tags extracted from burntsushi's Archlinux workstation.
/// We could just re-create these via `Tags::from_env`, but those might differ
/// depending on the platform. This way, we always use the same data. It also
/// lets us assert tag compatibility regardless of where the benchmarks run.
const PLATFORM_TAGS: &[(&str, &str, &str)] = include!("../inputs/platform_tags.rs");

/// A set of wheel names used in the benchmarks below. We pick short and long
/// names, as well as compatible and incompatible (with `PLATFORM_TAGS`) names.
///
/// The tuple is (name, filename, compatible) where `name` is a descriptive
/// name for humans used in the benchmark definition. And `filename` is the
/// actual wheel filename we want to benchmark operation on. And `compatible`
/// indicates whether the tags in the wheel filename are expected to be
/// compatible with the tags in `PLATFORM_TAGS`.
const WHEEL_NAMES: &[(&str, &str, bool)] = &[
    // This tests a case with a very short name that is *not* compatible
    // with PLATFORM_TAGS. It only uses one tag for each component (one
    // Python version, one ABI and one platform).
    (
        "flyte-short-incompatible",
        "hypothesis-4.24.5-py2-none-any.whl",
        false,
    ),
    // This tests a case with a very short name that *is* compatible with
    // PLATFORM_TAGS. It only uses one tag for each component (one Python
    // version, one ABI and one platform).
    (
        "flyte-short-compatible",
        "ipython-2.1.0-py3-none-any.whl",
        true,
    ),
    // This tests a case with a long name that is *not* compatible. That
    // is, all platform tags need to be checked against the tags in the
    // wheel filename. This is essentially the worst possible practical
    // case.
    (
        "flyte-long-incompatible",
        "protobuf-3.5.2.post1-cp36-cp36m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl",
        false,
    ),
    // This tests a case with a long name that *is* compatible. We
    // expect this to be (on average) quicker because the compatibility
    // check stops as soon as a positive match is found. (Whereas the
    // incompatible case needs to check all tags.)
    (
        "flyte-long-compatible",
        "coverage-6.6.0b1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
        true,
    ),
];

/// A list of names that are candidates for wheel filenames but will ultimately
/// fail to parse.
const INVALID_WHEEL_NAMES: &[(&str, &str)] = &[
    ("flyte-short-extension", "mock-5.1.0.tar.gz"),
    (
        "flyte-long-extension",
        "Pillow-5.4.0.dev0-py3.7-macosx-10.13-x86_64.egg",
    ),
];

/// Benchmarks the construction of platform tags.
///
/// This only happens ~once per program startup. Originally, construction was
/// trivial. But to speed up `WheelFilename::is_compatible`, we added some
/// extra processing. We thus expect construction to become slower, but we
/// write a benchmark to ensure it is still "reasonable."
fn benchmark_build_platform_tags(c: &mut Criterion<WallTime>) {
    let tags: Vec<(String, String, String)> = PLATFORM_TAGS
        .iter()
        .map(|&(py, abi, plat)| (py.to_string(), abi.to_string(), plat.to_string()))
        .collect();

    let mut group = c.benchmark_group("build_platform_tags");
    group.bench_function(BenchmarkId::from_parameter("burntsushi-archlinux"), |b| {
        b.iter(|| std::hint::black_box(Tags::new(tags.clone())));
    });
    group.finish();
}

/// Benchmarks `WheelFilename::from_str`. This has been observed to take some
/// non-trivial time in profiling (although, at time of writing, not as much
/// as tag compatibility). In the process of optimizing tag compatibility,
/// we tweaked wheel filename parsing. This benchmark was therefore added to
/// ensure we didn't regress here.
fn benchmark_wheelname_parsing(c: &mut Criterion<WallTime>) {
    let mut group = c.benchmark_group("wheelname_parsing");
    for (name, filename, _) in WHEEL_NAMES.iter().copied() {
        let len = u64::try_from(filename.len()).expect("length fits in u64");
        group.throughput(Throughput::Bytes(len));
        group.bench_function(BenchmarkId::from_parameter(name), |b| {
            b.iter(|| {
                filename
                    .parse::<WheelFilename>()
                    .expect("valid wheel filename");
            });
        });
    }
    group.finish();
}

/// Benchmarks `WheelFilename::from_str` when it fails. This routine is called
/// on every filename in a package's metadata. A non-trivial portion of which
/// are not wheel filenames. Ensuring that the error path is fast is thus
/// probably a good idea.
fn benchmark_wheelname_parsing_failure(c: &mut Criterion<WallTime>) {
    let mut group = c.benchmark_group("wheelname_parsing_failure");
    for (name, filename) in INVALID_WHEEL_NAMES.iter().copied() {
        let len = u64::try_from(filename.len()).expect("length fits in u64");
        group.throughput(Throughput::Bytes(len));
        group.bench_function(BenchmarkId::from_parameter(name), |b| {
            b.iter(|| {
                filename
                    .parse::<WheelFilename>()
                    .expect_err("invalid wheel filename");
            });
        });
    }
    group.finish();
}

/// Benchmarks the `WheelFilename::is_compatible` routine. This was revealed
/// to be the #1 bottleneck in the resolver. The main issue was that the
/// set of platform tags (generated once) is quite large, and the original
/// implementation did an exhaustive search over each of them for each tag in
/// the wheel filename.
fn benchmark_wheelname_tag_compatibility(c: &mut Criterion<WallTime>) {
    let tags: Vec<(String, String, String)> = PLATFORM_TAGS
        .iter()
        .map(|&(py, abi, plat)| (py.to_string(), abi.to_string(), plat.to_string()))
        .collect();
    let tags = Tags::new(tags);

    let mut group = c.benchmark_group("wheelname_tag_compatibility");
    for (name, filename, expected) in WHEEL_NAMES.iter().copied() {
        let wheelname: WheelFilename = filename.parse().expect("valid wheel filename");
        let len = u64::try_from(filename.len()).expect("length fits in u64");
        group.throughput(Throughput::Bytes(len));
        group.bench_function(BenchmarkId::from_parameter(name), |b| {
            b.iter(|| {
                assert_eq!(expected, wheelname.is_compatible(&tags));
            });
        });
    }
    group.finish();
}

criterion_group!(
    distribution_filename,
    benchmark_build_platform_tags,
    benchmark_wheelname_parsing,
    benchmark_wheelname_parsing_failure,
    benchmark_wheelname_tag_compatibility,
);
criterion_main!(distribution_filename);

@ -0,0 +1,7 @@
pub mod criterion {
    //! This module re-exports the criterion API unconditionally for now. It's
    //! intended that in the future this be a way to switch the backend to
    //! something else (like codspeed).

    pub use criterion::*;
}

@ -1,25 +1,19 @@
[package]
name = "uv-cache-key"
version = "0.0.8"
description = "This is an internal component crate of uv"
name = "cache-key"
version = "0.0.1"
description = "Generic functionality for caching paths, URLs, and other resources across platforms."
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

[lib]
doctest = false

[lints]
workspace = true

[dependencies]
uv-redacted = { workspace = true }

hex = { workspace = true }
memchr = { workspace = true }
percent-encoding = { workspace = true }
seahash = { workspace = true }
url = { workspace = true }

@ -2,8 +2,8 @@ use std::borrow::Cow;
use std::collections::{BTreeMap, BTreeSet};
use std::hash::{Hash, Hasher};
use std::num::{
    NonZeroI8, NonZeroI16, NonZeroI32, NonZeroI64, NonZeroI128, NonZeroU8, NonZeroU16, NonZeroU32,
    NonZeroU64, NonZeroU128,
    NonZeroI128, NonZeroI16, NonZeroI32, NonZeroI64, NonZeroI8, NonZeroU128, NonZeroU16,
    NonZeroU32, NonZeroU64, NonZeroU8,
};
use std::path::{Path, PathBuf};

@ -0,0 +1,238 @@
use std::fmt::{Debug, Formatter};
use std::hash::{Hash, Hasher};
use std::ops::Deref;

use url::Url;

use crate::cache_key::{CacheKey, CacheKeyHasher};

/// A wrapper around `Url` which represents a "canonical" version of an original URL.
///
/// A "canonical" url is only intended for internal comparison purposes. It's to help paper over
/// mistakes such as depending on `github.com/foo/bar` vs. `github.com/foo/bar.git`.
///
/// This is **only** for internal purposes and provides no means to actually read the underlying
/// string value of the `Url` it contains. This is intentional, because all fetching should still
/// happen within the context of the original URL.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
pub struct CanonicalUrl(Url);

impl CanonicalUrl {
    pub fn new(url: &Url) -> CanonicalUrl {
        let mut url = url.clone();

        // Strip a trailing slash.
        if url.path().ends_with('/') {
            url.path_segments_mut().unwrap().pop_if_empty();
        }

        // For GitHub URLs specifically, just lower-case everything. GitHub
        // treats both the same, but they hash differently, and we're gonna be
        // hashing them. This wants a more general solution, and also we're
        // almost certainly not using the same case conversion rules that GitHub
        // does. (See issue #84)
        if url.host_str() == Some("github.com") {
            url.set_scheme(url.scheme().to_lowercase().as_str())
                .unwrap();
            let path = url.path().to_lowercase();
            url.set_path(&path);
        }

        // Repos can generally be accessed with or without `.git` extension.
        if let Some((prefix, suffix)) = url.path().rsplit_once('@') {
            // Ex) `git+https://github.com/pypa/sample-namespace-packages.git@2.0.0`
            let needs_chopping = std::path::Path::new(prefix)
                .extension()
                .is_some_and(|ext| ext.eq_ignore_ascii_case("git"));
            if needs_chopping {
                let prefix = &prefix[..prefix.len() - 4];
                url.set_path(&format!("{prefix}@{suffix}"));
            }
        } else {
            // Ex) `git+https://github.com/pypa/sample-namespace-packages.git`
            let needs_chopping = std::path::Path::new(url.path())
                .extension()
                .is_some_and(|ext| ext.eq_ignore_ascii_case("git"));
            if needs_chopping {
                let last = {
                    let last = url.path_segments().unwrap().next_back().unwrap();
                    last[..last.len() - 4].to_owned()
                };
                url.path_segments_mut().unwrap().pop().push(&last);
            }
        }

        CanonicalUrl(url)
    }

    pub fn parse(url: &str) -> Result<Self, url::ParseError> {
        Ok(Self::new(&Url::parse(url)?))
    }
}

impl CacheKey for CanonicalUrl {
    fn cache_key(&self, state: &mut CacheKeyHasher) {
        // `as_str` gives the serialisation of a url (which has a spec) and so insulates against
        // possible changes in how the URL crate does hashing.
        self.0.as_str().cache_key(state);
    }
}

impl Hash for CanonicalUrl {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // `as_str` gives the serialisation of a url (which has a spec) and so insulates against
        // possible changes in how the URL crate does hashing.
        self.0.as_str().hash(state);
    }
}

impl std::fmt::Display for CanonicalUrl {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(&self.0, f)
    }
}

/// Like [`CanonicalUrl`], but attempts to represent an underlying source repository, abstracting
/// away details like the specific commit or branch, or the subdirectory to build within the
/// repository.
///
/// For example, `https://github.com/pypa/package.git#subdirectory=pkg_a` and
/// `https://github.com/pypa/package.git#subdirectory=pkg_b` would map to different
/// [`CanonicalUrl`] values, but the same [`RepositoryUrl`], since they map to the same
/// resource.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
pub struct RepositoryUrl(Url);

impl RepositoryUrl {
    pub fn new(url: &Url) -> RepositoryUrl {
        let mut url = CanonicalUrl::new(url).0;

        // If a Git URL ends in a reference (like a branch, tag, or commit), remove it.
        if url.scheme().starts_with("git+") {
            if let Some((prefix, _)) = url.as_str().rsplit_once('@') {
                url = prefix.parse().unwrap();
            }
        }

        // Drop any fragments and query parameters.
        url.set_fragment(None);
        url.set_query(None);

        RepositoryUrl(url)
    }

    pub fn parse(url: &str) -> Result<Self, url::ParseError> {
        Ok(Self::new(&Url::parse(url)?))
    }
}

impl CacheKey for RepositoryUrl {
    fn cache_key(&self, state: &mut CacheKeyHasher) {
        // `as_str` gives the serialisation of a url (which has a spec) and so insulates against
        // possible changes in how the URL crate does hashing.
        self.0.as_str().cache_key(state);
    }
}

impl Hash for RepositoryUrl {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // `as_str` gives the serialisation of a url (which has a spec) and so insulates against
        // possible changes in how the URL crate does hashing.
        self.0.as_str().hash(state);
    }
}

impl Deref for RepositoryUrl {
    type Target = Url;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn canonical_url() -> Result<(), url::ParseError> {
        // Two URLs should be considered equal regardless of the `.git` suffix.
        assert_eq!(
            CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
            CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages")?,
        );

        // Two URLs should be considered equal regardless of the `.git` suffix.
        assert_eq!(
            CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git@2.0.0")?,
            CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages@2.0.0")?,
        );

        // Two URLs should be _not_ considered equal if they point to different repositories.
        assert_ne!(
            CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
CanonicalUrl::parse("git+https://github.com/pypa/sample-packages.git")?,
|
||||
);
|
||||
|
||||
// Two URLs should _not_ be considered equal if they request different subdirectories.
|
||||
assert_ne!(
|
||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git#subdirectory=pkg_resources/pkg_a")?,
|
||||
CanonicalUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git#subdirectory=pkg_resources/pkg_b")?,
|
||||
);
|
||||
|
||||
// Two URLs should _not_ be considered equal if they request different commit tags.
|
||||
assert_ne!(
|
||||
CanonicalUrl::parse(
|
||||
"git+https://github.com/pypa/sample-namespace-packages.git@v1.0.0"
|
||||
)?,
|
||||
CanonicalUrl::parse(
|
||||
"git+https://github.com/pypa/sample-namespace-packages.git@v2.0.0"
|
||||
)?,
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn repository_url() -> Result<(), url::ParseError> {
|
||||
// Two URLs should be considered equal regardless of the `.git` suffix.
|
||||
assert_eq!(
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages")?,
|
||||
);
|
||||
|
||||
// Two URLs should be considered equal regardless of the `.git` suffix.
|
||||
assert_eq!(
|
||||
RepositoryUrl::parse(
|
||||
"git+https://github.com/pypa/sample-namespace-packages.git@2.0.0"
|
||||
)?,
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages@2.0.0")?,
|
||||
);
|
||||
|
||||
// Two URLs should be _not_ considered equal if they point to different repositories.
|
||||
assert_ne!(
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git")?,
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-packages.git")?,
|
||||
);
|
||||
|
||||
// Two URLs should be considered equal if they map to the same repository, even if they
|
||||
// request different subdirectories.
|
||||
assert_eq!(
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git#subdirectory=pkg_resources/pkg_a")?,
|
||||
RepositoryUrl::parse("git+https://github.com/pypa/sample-namespace-packages.git#subdirectory=pkg_resources/pkg_b")?,
|
||||
);
|
||||
|
||||
// Two URLs should be considered equal if they map to the same repository, even if they
|
||||
// request different commit tags.
|
||||
assert_eq!(
|
||||
RepositoryUrl::parse(
|
||||
"git+https://github.com/pypa/sample-namespace-packages.git@v1.0.0"
|
||||
)?,
|
||||
RepositoryUrl::parse(
|
||||
"git+https://github.com/pypa/sample-namespace-packages.git@v2.0.0"
|
||||
)?,
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
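As a usage sketch (the helper below is hypothetical, but it only uses the `parse` constructors and the derived `PartialEq` shown above): two requirements that differ only in their Git tag or `#subdirectory` fragment compare equal once reduced to a `RepositoryUrl`.

fn shares_repository(a: &str, b: &str) -> Result<bool, url::ParseError> {
    // `RepositoryUrl::new` strips the trailing `@ref`, the fragment, and the query,
    // so only the underlying repository is compared.
    Ok(RepositoryUrl::parse(a)? == RepositoryUrl::parse(b)?)
}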
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
use std::hash::Hasher;
|
||||
|
||||
use crate::cache_key::{CacheKey, CacheKeyHasher};
|
||||
|
||||
/// Compute a hex string hash of a `CacheKey` object.
|
||||
///
|
||||
/// The value returned by [`digest`] should be stable across releases and platforms.
|
||||
pub fn digest<H: CacheKey>(hashable: &H) -> String {
|
||||
to_hex(cache_key_u64(hashable))
|
||||
}
|
||||
|
||||
/// Convert a u64 to a hex string.
|
||||
fn to_hex(num: u64) -> String {
|
||||
hex::encode(num.to_le_bytes())
|
||||
}
|
||||
|
||||
/// Compute a u64 hash of a [`CacheKey`] object.
|
||||
fn cache_key_u64<H: CacheKey>(hashable: &H) -> u64 {
|
||||
let mut hasher = CacheKeyHasher::new();
|
||||
hashable.cache_key(&mut hasher);
|
||||
hasher.finish()
|
||||
}
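A sketch of the intended call pattern (the helper name is illustrative), combining `digest` with the crate's `CanonicalUrl` re-export, which implements `CacheKey`:

fn url_digest(url: &str) -> Result<String, url::ParseError> {
    // Hash the canonicalized URL into a stable 16-character hex string,
    // e.g. for naming cache entries.
    Ok(digest(&crate::CanonicalUrl::parse(url)?))
}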
|
||||
|
|
@ -1,7 +1,8 @@
|
|||
pub use cache_key::{CacheKey, CacheKeyHasher};
|
||||
pub use canonical_url::{CanonicalUrl, RepositoryUrl};
|
||||
pub use digest::{cache_digest, hash_digest};
|
||||
pub use digest::digest;
|
||||
pub use stable_hash::{StableHash, StableHasher};
|
||||
|
||||
mod cache_key;
|
||||
mod canonical_url;
|
||||
mod digest;
|
||||
mod stable_hash;
|
||||
|
|
@ -0,0 +1,106 @@
|
|||
use std::hash::Hasher;
|
||||
|
||||
use seahash::SeaHasher;
|
||||
|
||||
/// A trait for types that can be hashed in a stable way across versions and platforms.
|
||||
pub trait StableHash {
|
||||
fn stable_hash(&self, state: &mut StableHasher);
|
||||
|
||||
fn stable_hash_slice(data: &[Self], state: &mut StableHasher)
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
for piece in data {
|
||||
piece.stable_hash(state);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct StableHasher {
|
||||
inner: SeaHasher,
|
||||
}
|
||||
|
||||
impl StableHasher {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
inner: SeaHasher::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn finish(self) -> u64 {
|
||||
self.inner.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl Hasher for StableHasher {
|
||||
#[inline]
|
||||
fn finish(&self) -> u64 {
|
||||
self.inner.finish()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn write(&mut self, bytes: &[u8]) {
|
||||
self.inner.write(bytes);
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn write_u8(&mut self, i: u8) {
|
||||
self.inner.write_u8(i);
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn write_u16(&mut self, i: u16) {
|
||||
self.inner.write_u16(i);
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn write_u32(&mut self, i: u32) {
|
||||
self.inner.write_u32(i);
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn write_u64(&mut self, i: u64) {
|
||||
self.inner.write_u64(i);
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn write_u128(&mut self, i: u128) {
|
||||
self.inner.write_u128(i);
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn write_usize(&mut self, i: usize) {
|
||||
self.inner.write_usize(i);
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn write_i8(&mut self, i: i8) {
|
||||
self.inner.write_i8(i);
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn write_i16(&mut self, i: i16) {
|
||||
self.inner.write_i16(i);
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn write_i32(&mut self, i: i32) {
|
||||
self.inner.write_i32(i);
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn write_i64(&mut self, i: i64) {
|
||||
self.inner.write_i64(i);
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn write_i128(&mut self, i: i128) {
|
||||
self.inner.write_i128(i);
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn write_isize(&mut self, i: isize) {
|
||||
self.inner.write_isize(i);
|
||||
}
|
||||
}
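A minimal sketch of how a caller is expected to use the pair of traits above; the `Example` type and the `stable_digest` helper are hypothetical.

struct Example {
    name: String,
    count: u64,
}

impl StableHash for Example {
    fn stable_hash(&self, state: &mut StableHasher) {
        // Write the fields through the forwarding `Hasher` methods above, which
        // delegate to `SeaHasher` for platform- and version-stable output.
        state.write(self.name.as_bytes());
        state.write_u64(self.count);
    }
}

fn stable_digest(value: &Example) -> u64 {
    let mut hasher = StableHasher::new();
    value.stable_hash(&mut hasher);
    hasher.finish()
}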
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
[package]
|
||||
name = "distribution-filename"
|
||||
version = "0.0.1"
|
||||
edition = { workspace = true }
|
||||
rust-version = { workspace = true }
|
||||
homepage = { workspace = true }
|
||||
documentation = { workspace = true }
|
||||
repository = { workspace = true }
|
||||
authors = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[features]
|
||||
rkyv = ["dep:rkyv", "pep440_rs/rkyv"]
|
||||
|
||||
[dependencies]
|
||||
pep440_rs = { path = "../pep440-rs" }
|
||||
platform-tags = { path = "../platform-tags" }
|
||||
uv-normalize = { path = "../uv-normalize" }
|
||||
|
||||
rkyv = { workspace = true, features = ["strict", "validation"], optional = true }
|
||||
serde = { workspace = true, optional = true }
|
||||
thiserror = { workspace = true }
|
||||
url = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
insta = { version = "1.34.0" }
|
||||
|
|
@ -0,0 +1,66 @@
|
|||
use pep440_rs::Version;
|
||||
use std::fmt::{Display, Formatter};
|
||||
use std::str::FromStr;
|
||||
use uv_normalize::PackageName;
|
||||
|
||||
pub use source_dist::{SourceDistExtension, SourceDistFilename, SourceDistFilenameError};
|
||||
pub use wheel::{WheelFilename, WheelFilenameError};
|
||||
|
||||
mod source_dist;
|
||||
mod wheel;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum DistFilename {
|
||||
SourceDistFilename(SourceDistFilename),
|
||||
WheelFilename(WheelFilename),
|
||||
}
|
||||
|
||||
impl DistFilename {
|
||||
/// Parse a filename as wheel or source dist name.
|
||||
pub fn try_from_filename(filename: &str, package_name: &PackageName) -> Option<Self> {
|
||||
if let Ok(filename) = WheelFilename::from_str(filename) {
|
||||
Some(Self::WheelFilename(filename))
|
||||
} else if let Ok(filename) = SourceDistFilename::parse(filename, package_name) {
|
||||
Some(Self::SourceDistFilename(filename))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Like [`DistFilename::try_from_filename`], but without knowing the package name.
|
||||
///
|
||||
/// Source dist filenames can be ambiguous, e.g. `a-1-1.tar.gz`. Without knowing the package name, we assume that
|
||||
/// the source dist filename's version doesn't contain a minus (the version is normalized).
|
||||
pub fn try_from_normalized_filename(filename: &str) -> Option<Self> {
|
||||
if let Ok(filename) = WheelFilename::from_str(filename) {
|
||||
Some(Self::WheelFilename(filename))
|
||||
} else if let Ok(filename) = SourceDistFilename::parsed_normalized_filename(filename) {
|
||||
Some(Self::SourceDistFilename(filename))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn name(&self) -> &PackageName {
|
||||
match self {
|
||||
DistFilename::SourceDistFilename(filename) => &filename.name,
|
||||
DistFilename::WheelFilename(filename) => &filename.name,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn version(&self) -> &Version {
|
||||
match self {
|
||||
DistFilename::SourceDistFilename(filename) => &filename.version,
|
||||
DistFilename::WheelFilename(filename) => &filename.version,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for DistFilename {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
DistFilename::SourceDistFilename(filename) => Display::fmt(filename, f),
|
||||
DistFilename::WheelFilename(filename) => Display::fmt(filename, f),
|
||||
}
|
||||
}
|
||||
}
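As an illustration of the dispatch above (the helper is hypothetical), wheels are tried first and source distributions second, mirroring `try_from_filename`:

fn describe(filename: &str, package_name: &PackageName) -> Option<&'static str> {
    match DistFilename::try_from_filename(filename, package_name)? {
        // `foo-1.2.3-py3-none-any.whl` takes this arm.
        DistFilename::WheelFilename(_) => Some("wheel"),
        // `foo-1.2.3.tar.gz` falls through to the source dist parser.
        DistFilename::SourceDistFilename(_) => Some("source distribution"),
    }
}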
|
||||
|
|
@ -0,0 +1,21 @@
|
|||
---
|
||||
source: crates/distribution-filename/src/wheel.rs
|
||||
expression: "WheelFilename::from_str(\"foo-1.2.3-build-python-abi-platform.whl\")"
|
||||
---
|
||||
Ok(
|
||||
WheelFilename {
|
||||
name: PackageName(
|
||||
"foo",
|
||||
),
|
||||
version: "1.2.3",
|
||||
python_tag: [
|
||||
"python",
|
||||
],
|
||||
abi_tag: [
|
||||
"abi",
|
||||
],
|
||||
platform_tag: [
|
||||
"platform",
|
||||
],
|
||||
},
|
||||
)
|
||||
|
|
@ -0,0 +1,28 @@
|
|||
---
|
||||
source: crates/distribution-filename/src/wheel.rs
|
||||
expression: "WheelFilename::from_str(\"foo-1.2.3-ab.cd.ef-gh-ij.kl.mn.op.qr.st.whl\")"
|
||||
---
|
||||
Ok(
|
||||
WheelFilename {
|
||||
name: PackageName(
|
||||
"foo",
|
||||
),
|
||||
version: "1.2.3",
|
||||
python_tag: [
|
||||
"ab",
|
||||
"cd",
|
||||
"ef",
|
||||
],
|
||||
abi_tag: [
|
||||
"gh",
|
||||
],
|
||||
platform_tag: [
|
||||
"ij",
|
||||
"kl",
|
||||
"mn",
|
||||
"op",
|
||||
"qr",
|
||||
"st",
|
||||
],
|
||||
},
|
||||
)
|
||||
|
|
@ -0,0 +1,21 @@
|
|||
---
|
||||
source: crates/distribution-filename/src/wheel.rs
|
||||
expression: "WheelFilename::from_str(\"foo-1.2.3-foo-bar-baz.whl\")"
|
||||
---
|
||||
Ok(
|
||||
WheelFilename {
|
||||
name: PackageName(
|
||||
"foo",
|
||||
),
|
||||
version: "1.2.3",
|
||||
python_tag: [
|
||||
"foo",
|
||||
],
|
||||
abi_tag: [
|
||||
"bar",
|
||||
],
|
||||
platform_tag: [
|
||||
"baz",
|
||||
],
|
||||
},
|
||||
)
|
||||
|
|
@ -1,28 +1,69 @@
|
|||
use std::fmt::{Display, Formatter};
|
||||
use std::str::FromStr;
|
||||
|
||||
use crate::SourceDistExtension;
|
||||
#[cfg(feature = "serde")]
|
||||
use serde::{Deserialize, Serialize};
|
||||
use thiserror::Error;
|
||||
|
||||
use pep440_rs::{Version, VersionParseError};
|
||||
use uv_normalize::{InvalidNameError, PackageName};
|
||||
use uv_pep440::{Version, VersionParseError};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(
|
||||
feature = "rkyv",
|
||||
derive(rkyv::Archive, rkyv::Deserialize, rkyv::Serialize)
|
||||
)]
|
||||
#[cfg_attr(feature = "rkyv", archive(check_bytes))]
|
||||
#[cfg_attr(feature = "rkyv", archive_attr(derive(Debug)))]
|
||||
pub enum SourceDistExtension {
|
||||
Zip,
|
||||
TarGz,
|
||||
}
|
||||
|
||||
impl FromStr for SourceDistExtension {
|
||||
type Err = String;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
Ok(match s {
|
||||
"zip" => Self::Zip,
|
||||
"tar.gz" => Self::TarGz,
|
||||
other => return Err(other.to_string()),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for SourceDistExtension {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
SourceDistExtension::Zip => f.write_str("zip"),
|
||||
SourceDistExtension::TarGz => f.write_str("tar.gz"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl SourceDistExtension {
|
||||
pub fn from_filename(filename: &str) -> Option<(&str, Self)> {
|
||||
if let Some(stem) = filename.strip_suffix(".zip") {
|
||||
return Some((stem, Self::Zip));
|
||||
}
|
||||
if let Some(stem) = filename.strip_suffix(".tar.gz") {
|
||||
return Some((stem, Self::TarGz));
|
||||
}
|
||||
None
|
||||
}
|
||||
}
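A brief sketch of what `from_filename` returns for supported and unsupported suffixes (the function below is illustrative only):

fn split_example() {
    // Supported suffixes split into `(stem, extension)`.
    assert_eq!(
        SourceDistExtension::from_filename("foo_lib-1.2.3.tar.gz"),
        Some(("foo_lib-1.2.3", SourceDistExtension::TarGz))
    );
    // Anything else (e.g. `.tar.zst`) is rejected with `None`.
    assert_eq!(
        SourceDistExtension::from_filename("foo_lib-1.2.3.tar.zst"),
        None
    );
}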
|
||||
|
||||
/// Note that this is a normalized and not an exact representation; keep the original string if you
|
||||
/// need the latter.
|
||||
#[derive(
|
||||
Clone,
|
||||
Debug,
|
||||
PartialEq,
|
||||
Eq,
|
||||
PartialOrd,
|
||||
Ord,
|
||||
Serialize,
|
||||
Deserialize,
|
||||
rkyv::Archive,
|
||||
rkyv::Deserialize,
|
||||
rkyv::Serialize,
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
|
||||
#[cfg_attr(
|
||||
feature = "rkyv",
|
||||
derive(rkyv::Archive, rkyv::Deserialize, rkyv::Serialize)
|
||||
)]
|
||||
#[rkyv(derive(Debug))]
|
||||
#[cfg_attr(feature = "rkyv", archive(check_bytes))]
|
||||
#[cfg_attr(feature = "rkyv", archive_attr(derive(Debug)))]
|
||||
pub struct SourceDistFilename {
|
||||
pub name: PackageName,
|
||||
pub version: Version,
|
||||
|
|
@ -34,18 +75,14 @@ impl SourceDistFilename {
|
|||
/// these (consider e.g. `a-1-1.zip`)
|
||||
pub fn parse(
|
||||
filename: &str,
|
||||
extension: SourceDistExtension,
|
||||
package_name: &PackageName,
|
||||
) -> Result<Self, SourceDistFilenameError> {
|
||||
// Drop the extension (e.g., given `tar.gz`, drop `.tar.gz`).
|
||||
if filename.len() <= extension.name().len() + 1 {
|
||||
let Some((stem, extension)) = SourceDistExtension::from_filename(filename) else {
|
||||
return Err(SourceDistFilenameError {
|
||||
filename: filename.to_string(),
|
||||
kind: SourceDistFilenameErrorKind::Extension,
|
||||
});
|
||||
}
|
||||
|
||||
let stem = &filename[..(filename.len() - (extension.name().len() + 1))];
|
||||
};
|
||||
|
||||
if stem.len() <= package_name.as_ref().len() + "-".len() {
|
||||
return Err(SourceDistFilenameError {
|
||||
|
|
@ -58,7 +95,7 @@ impl SourceDistFilename {
|
|||
filename: filename.to_string(),
|
||||
kind: SourceDistFilenameErrorKind::PackageName(err),
|
||||
})?;
|
||||
if actual_package_name != *package_name {
|
||||
if &actual_package_name != package_name {
|
||||
return Err(SourceDistFilenameError {
|
||||
filename: filename.to_string(),
|
||||
kind: SourceDistFilenameErrorKind::Filename(package_name.clone()),
|
||||
|
|
@ -86,23 +123,13 @@ impl SourceDistFilename {
|
|||
/// Source dist filenames can be ambiguous, e.g. `a-1-1.tar.gz`. Without knowing the package name, we assume that
|
||||
/// the source dist filename's version doesn't contain a minus (the version is normalized).
|
||||
pub fn parsed_normalized_filename(filename: &str) -> Result<Self, SourceDistFilenameError> {
|
||||
let Ok(extension) = SourceDistExtension::from_path(filename) else {
|
||||
let Some((stem, extension)) = SourceDistExtension::from_filename(filename) else {
|
||||
return Err(SourceDistFilenameError {
|
||||
filename: filename.to_string(),
|
||||
kind: SourceDistFilenameErrorKind::Extension,
|
||||
});
|
||||
};
|
||||
|
||||
// Drop the extension (e.g., given `tar.gz`, drop `.tar.gz`).
|
||||
if filename.len() <= extension.name().len() + 1 {
|
||||
return Err(SourceDistFilenameError {
|
||||
filename: filename.to_string(),
|
||||
kind: SourceDistFilenameErrorKind::Extension,
|
||||
});
|
||||
}
|
||||
|
||||
let stem = &filename[..(filename.len() - (extension.name().len() + 1))];
|
||||
|
||||
let Some((package_name, version)) = stem.rsplit_once('-') else {
|
||||
return Err(SourceDistFilenameError {
|
||||
filename: filename.to_string(),
|
||||
|
|
@ -131,13 +158,7 @@ impl SourceDistFilename {
|
|||
|
||||
impl Display for SourceDistFilename {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"{}-{}.{}",
|
||||
self.name.as_dist_info_name(),
|
||||
self.version,
|
||||
self.extension
|
||||
)
|
||||
write!(f, "{}-{}.{}", self.name, self.version, self.extension)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -161,7 +182,7 @@ impl Display for SourceDistFilenameError {
|
|||
enum SourceDistFilenameErrorKind {
|
||||
#[error("Name doesn't start with package name {0}")]
|
||||
Filename(PackageName),
|
||||
#[error("File extension is invalid")]
|
||||
#[error("Source distributions filenames must end with .zip or .tar.gz")]
|
||||
Extension,
|
||||
#[error("Version section is invalid")]
|
||||
Version(#[from] VersionParseError),
|
||||
|
|
@ -177,37 +198,20 @@ mod tests {
|
|||
|
||||
use uv_normalize::PackageName;
|
||||
|
||||
use crate::{SourceDistExtension, SourceDistFilename};
|
||||
use crate::SourceDistFilename;
|
||||
|
||||
/// Only test already normalized names since the parsing is lossy
|
||||
///
|
||||
/// <https://packaging.python.org/en/latest/specifications/source-distribution-format/#source-distribution-file-name>
|
||||
/// <https://packaging.python.org/en/latest/specifications/binary-distribution-format/#escaping-and-unicode>
|
||||
#[test]
|
||||
fn roundtrip() {
|
||||
for normalized in [
|
||||
"foo_lib-1.2.3.zip",
|
||||
"foo_lib-1.2.3a3.zip",
|
||||
"foo_lib-1.2.3.tar.gz",
|
||||
"foo_lib-1.2.3.tar.bz2",
|
||||
"foo_lib-1.2.3.tar.zst",
|
||||
"foo_lib-1.2.3.tar.xz",
|
||||
"foo_lib-1.2.3.tar.lz",
|
||||
"foo_lib-1.2.3.tar.lzma",
|
||||
"foo_lib-1.2.3.tgz",
|
||||
"foo_lib-1.2.3.tbz",
|
||||
"foo_lib-1.2.3.tlz",
|
||||
"foo_lib-1.2.3.txz",
|
||||
"foo-lib-1.2.3.zip",
|
||||
"foo-lib-1.2.3a3.zip",
|
||||
"foo-lib-1.2.3.tar.gz",
|
||||
] {
|
||||
let ext = SourceDistExtension::from_path(normalized).unwrap();
|
||||
assert_eq!(
|
||||
SourceDistFilename::parse(
|
||||
normalized,
|
||||
ext,
|
||||
&PackageName::from_str("foo_lib").unwrap()
|
||||
)
|
||||
.unwrap()
|
||||
.to_string(),
|
||||
SourceDistFilename::parse(normalized, &PackageName::from_str("foo_lib").unwrap())
|
||||
.unwrap()
|
||||
.to_string(),
|
||||
normalized
|
||||
);
|
||||
}
|
||||
|
|
@ -215,24 +219,18 @@ mod tests {
|
|||
|
||||
#[test]
|
||||
fn errors() {
|
||||
for invalid in ["b-1.2.3.zip", "a-1.2.3-gamma.3.zip"] {
|
||||
let ext = SourceDistExtension::from_path(invalid).unwrap();
|
||||
for invalid in ["b-1.2.3.zip", "a-1.2.3-gamma.3.zip", "a-1.2.3.tar.zstd"] {
|
||||
assert!(
|
||||
SourceDistFilename::parse(invalid, ext, &PackageName::from_str("a").unwrap())
|
||||
.is_err()
|
||||
SourceDistFilename::parse(invalid, &PackageName::from_str("a").unwrap()).is_err()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn name_too_long() {
|
||||
fn name_to_long() {
|
||||
assert!(
|
||||
SourceDistFilename::parse(
|
||||
"foo.zip",
|
||||
SourceDistExtension::Zip,
|
||||
&PackageName::from_str("foo-lib").unwrap()
|
||||
)
|
||||
.is_err()
|
||||
SourceDistFilename::parse("foo.zip", &PackageName::from_str("foo-lib").unwrap())
|
||||
.is_err()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,322 @@
|
|||
use std::fmt::{Display, Formatter};
|
||||
use std::str::FromStr;
|
||||
|
||||
#[cfg(feature = "serde")]
|
||||
use serde::{de, Deserialize, Deserializer, Serialize, Serializer};
|
||||
use thiserror::Error;
|
||||
use url::Url;
|
||||
|
||||
use pep440_rs::{Version, VersionParseError};
|
||||
use platform_tags::{TagCompatibility, Tags};
|
||||
use uv_normalize::{InvalidNameError, PackageName};
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
|
||||
#[cfg_attr(
|
||||
feature = "rkyv",
|
||||
derive(rkyv::Archive, rkyv::Deserialize, rkyv::Serialize)
|
||||
)]
|
||||
#[cfg_attr(feature = "rkyv", archive(check_bytes))]
|
||||
#[cfg_attr(feature = "rkyv", archive_attr(derive(Debug)))]
|
||||
pub struct WheelFilename {
|
||||
pub name: PackageName,
|
||||
pub version: Version,
|
||||
pub python_tag: Vec<String>,
|
||||
pub abi_tag: Vec<String>,
|
||||
pub platform_tag: Vec<String>,
|
||||
}
|
||||
|
||||
impl FromStr for WheelFilename {
|
||||
type Err = WheelFilenameError;
|
||||
|
||||
fn from_str(filename: &str) -> Result<Self, Self::Err> {
|
||||
let stem = filename.strip_suffix(".whl").ok_or_else(|| {
|
||||
WheelFilenameError::InvalidWheelFileName(
|
||||
filename.to_string(),
|
||||
"Must end with .whl".to_string(),
|
||||
)
|
||||
})?;
|
||||
Self::parse(stem, filename)
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for WheelFilename {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"{}-{}-{}.whl",
|
||||
self.name.as_dist_info_name(),
|
||||
self.version,
|
||||
self.get_tag()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl WheelFilename {
|
||||
/// Returns `true` if the wheel is compatible with the given tags.
|
||||
pub fn is_compatible(&self, compatible_tags: &Tags) -> bool {
|
||||
compatible_tags.is_compatible(&self.python_tag, &self.abi_tag, &self.platform_tag)
|
||||
}
|
||||
|
||||
/// Return the [`TagCompatibility`] of the wheel with the given tags
|
||||
pub fn compatibility(&self, compatible_tags: &Tags) -> TagCompatibility {
|
||||
compatible_tags.compatibility(&self.python_tag, &self.abi_tag, &self.platform_tag)
|
||||
}
|
||||
|
||||
/// Get the tag for this wheel.
|
||||
pub fn get_tag(&self) -> String {
|
||||
format!(
|
||||
"{}-{}-{}",
|
||||
self.python_tag.join("."),
|
||||
self.abi_tag.join("."),
|
||||
self.platform_tag.join(".")
|
||||
)
|
||||
}
|
||||
|
||||
/// The wheel filename without the extension.
|
||||
pub fn stem(&self) -> String {
|
||||
format!(
|
||||
"{}-{}-{}",
|
||||
self.name.as_dist_info_name(),
|
||||
self.version,
|
||||
self.get_tag()
|
||||
)
|
||||
}
|
||||
|
||||
/// Parse a wheel filename from the stem (e.g., `foo-1.2.3-py3-none-any`).
|
||||
pub fn from_stem(stem: &str) -> Result<Self, WheelFilenameError> {
|
||||
Self::parse(stem, stem)
|
||||
}
|
||||
|
||||
/// Parse a wheel filename from the stem (e.g., `foo-1.2.3-py3-none-any`).
|
||||
///
|
||||
/// The originating `filename` is used for high-fidelity error messages.
|
||||
fn parse(stem: &str, filename: &str) -> Result<Self, WheelFilenameError> {
|
||||
// The wheel filename should contain either five or six entries. If six, then the third
|
||||
// entry is the build tag. If five, then the third entry is the Python tag.
|
||||
// https://www.python.org/dev/peps/pep-0427/#file-name-convention
|
||||
//
|
||||
// 2023-11-08(burntsushi): It looks like the code below actually drops
|
||||
// the build tag if one is found. According to PEP 0427, the build tag
|
||||
// is used to break ties. This might mean that we generate identical
|
||||
// `WheelName` values for multiple distinct wheels, but it's not clear
|
||||
// if this is a problem in practice.
|
||||
let mut parts = stem.split('-');
|
||||
|
||||
let name = parts
|
||||
.next()
|
||||
.expect("split always yields 1 or more elements");
|
||||
|
||||
let Some(version) = parts.next() else {
|
||||
return Err(WheelFilenameError::InvalidWheelFileName(
|
||||
filename.to_string(),
|
||||
"Must have a version".to_string(),
|
||||
));
|
||||
};
|
||||
|
||||
let Some(build_tag_or_python_tag) = parts.next() else {
|
||||
return Err(WheelFilenameError::InvalidWheelFileName(
|
||||
filename.to_string(),
|
||||
"Must have a Python tag".to_string(),
|
||||
));
|
||||
};
|
||||
|
||||
let Some(python_tag_or_abi_tag) = parts.next() else {
|
||||
return Err(WheelFilenameError::InvalidWheelFileName(
|
||||
filename.to_string(),
|
||||
"Must have an ABI tag".to_string(),
|
||||
));
|
||||
};
|
||||
|
||||
let Some(abi_tag_or_platform_tag) = parts.next() else {
|
||||
return Err(WheelFilenameError::InvalidWheelFileName(
|
||||
filename.to_string(),
|
||||
"Must have a platform tag".to_string(),
|
||||
));
|
||||
};
|
||||
|
||||
let (name, version, python_tag, abi_tag, platform_tag) =
|
||||
if let Some(platform_tag) = parts.next() {
|
||||
if parts.next().is_some() {
|
||||
return Err(WheelFilenameError::InvalidWheelFileName(
|
||||
filename.to_string(),
|
||||
"Must have 5 or 6 components, but has more".to_string(),
|
||||
));
|
||||
}
|
||||
(
|
||||
name,
|
||||
version,
|
||||
python_tag_or_abi_tag,
|
||||
abi_tag_or_platform_tag,
|
||||
platform_tag,
|
||||
)
|
||||
} else {
|
||||
(
|
||||
name,
|
||||
version,
|
||||
build_tag_or_python_tag,
|
||||
python_tag_or_abi_tag,
|
||||
abi_tag_or_platform_tag,
|
||||
)
|
||||
};
|
||||
|
||||
let name = PackageName::from_str(name)
|
||||
.map_err(|err| WheelFilenameError::InvalidPackageName(filename.to_string(), err))?;
|
||||
let version = Version::from_str(version)
|
||||
.map_err(|err| WheelFilenameError::InvalidVersion(filename.to_string(), err))?;
|
||||
Ok(WheelFilename {
|
||||
name,
|
||||
version,
|
||||
python_tag: python_tag.split('.').map(String::from).collect(),
|
||||
abi_tag: abi_tag.split('.').map(String::from).collect(),
|
||||
platform_tag: platform_tag.split('.').map(String::from).collect(),
|
||||
})
|
||||
}
|
||||
}
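A short sketch of the five- versus six-component behaviour described in the comment above: the build tag in the six-component form is parsed but then dropped, so both filenames report the same tag triple. (The function is illustrative.)

fn parse_sketch() -> Result<(), WheelFilenameError> {
    // Five components: name, version, Python tag, ABI tag, platform tag.
    let five = WheelFilename::from_str("foo-1.2.3-py3-none-any.whl")?;
    // Six components: the build tag ("4") is discarded, as noted above.
    let six = WheelFilename::from_str("foo-1.2.3-4-py3-none-any.whl")?;
    assert_eq!(five.get_tag(), six.get_tag());
    Ok(())
}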
|
||||
|
||||
impl TryFrom<&Url> for WheelFilename {
|
||||
type Error = WheelFilenameError;
|
||||
|
||||
fn try_from(url: &Url) -> Result<Self, Self::Error> {
|
||||
let filename = url
|
||||
.path_segments()
|
||||
.ok_or_else(|| {
|
||||
WheelFilenameError::InvalidWheelFileName(
|
||||
url.to_string(),
|
||||
"URL must have a path".to_string(),
|
||||
)
|
||||
})?
|
||||
.last()
|
||||
.ok_or_else(|| {
|
||||
WheelFilenameError::InvalidWheelFileName(
|
||||
url.to_string(),
|
||||
"URL must contain a filename".to_string(),
|
||||
)
|
||||
})?;
|
||||
Self::from_str(filename)
|
||||
}
|
||||
}
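A sketch of the conversion above, using a hypothetical download URL; only the final path segment is parsed as a wheel filename:

fn wheel_from_url_sketch() -> Result<(), WheelFilenameError> {
    let url = Url::parse("https://example.com/packages/foo-1.2.3-py3-none-any.whl")
        .expect("static URL is valid");
    let filename = WheelFilename::try_from(&url)?;
    // The parsed filename round-trips through `Display`.
    assert_eq!(filename.to_string(), "foo-1.2.3-py3-none-any.whl");
    Ok(())
}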
|
||||
|
||||
#[cfg(feature = "serde")]
|
||||
impl<'de> Deserialize<'de> for WheelFilename {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let s = String::deserialize(deserializer)?;
|
||||
FromStr::from_str(&s).map_err(de::Error::custom)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "serde")]
|
||||
impl Serialize for WheelFilename {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
serializer.serialize_str(&self.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub enum WheelFilenameError {
|
||||
#[error("The wheel filename \"{0}\" is invalid: {1}")]
|
||||
InvalidWheelFileName(String, String),
|
||||
#[error("The wheel filename \"{0}\" has an invalid version part: {1}")]
|
||||
InvalidVersion(String, VersionParseError),
|
||||
#[error("The wheel filename \"{0}\" has an invalid package name")]
|
||||
InvalidPackageName(String, InvalidNameError),
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn err_not_whl_extension() {
|
||||
let err = WheelFilename::from_str("foo.rs").unwrap_err();
|
||||
insta::assert_display_snapshot!(err, @r###"The wheel filename "foo.rs" is invalid: Must end with .whl"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_1_part_empty() {
|
||||
let err = WheelFilename::from_str(".whl").unwrap_err();
|
||||
insta::assert_display_snapshot!(err, @r###"The wheel filename ".whl" is invalid: Must have a version"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_1_part_no_version() {
|
||||
let err = WheelFilename::from_str("foo.whl").unwrap_err();
|
||||
insta::assert_display_snapshot!(err, @r###"The wheel filename "foo.whl" is invalid: Must have a version"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_2_part_no_pythontag() {
|
||||
let err = WheelFilename::from_str("foo-version.whl").unwrap_err();
|
||||
insta::assert_display_snapshot!(err, @r###"The wheel filename "foo-version.whl" is invalid: Must have a Python tag"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_3_part_no_abitag() {
|
||||
let err = WheelFilename::from_str("foo-version-python.whl").unwrap_err();
|
||||
insta::assert_display_snapshot!(err, @r###"The wheel filename "foo-version-python.whl" is invalid: Must have an ABI tag"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_4_part_no_platformtag() {
|
||||
let err = WheelFilename::from_str("foo-version-python-abi.whl").unwrap_err();
|
||||
insta::assert_display_snapshot!(err, @r###"The wheel filename "foo-version-python-abi.whl" is invalid: Must have a platform tag"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_too_many_parts() {
|
||||
let err =
|
||||
WheelFilename::from_str("foo-1.2.3-build-python-abi-platform-oops.whl").unwrap_err();
|
||||
insta::assert_display_snapshot!(err, @r###"The wheel filename "foo-1.2.3-build-python-abi-platform-oops.whl" is invalid: Must have 5 or 6 components, but has more"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_invalid_package_name() {
|
||||
let err = WheelFilename::from_str("f!oo-1.2.3-python-abi-platform.whl").unwrap_err();
|
||||
insta::assert_display_snapshot!(err, @r###"The wheel filename "f!oo-1.2.3-python-abi-platform.whl" has an invalid package name"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn err_invalid_version() {
|
||||
let err = WheelFilename::from_str("foo-x.y.z-python-abi-platform.whl").unwrap_err();
|
||||
insta::assert_display_snapshot!(err, @r###"The wheel filename "foo-x.y.z-python-abi-platform.whl" has an invalid version part: expected version to start with a number, but no leading ASCII digits were found"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ok_single_tags() {
|
||||
insta::assert_debug_snapshot!(WheelFilename::from_str("foo-1.2.3-foo-bar-baz.whl"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ok_multiple_tags() {
|
||||
insta::assert_debug_snapshot!(WheelFilename::from_str(
|
||||
"foo-1.2.3-ab.cd.ef-gh-ij.kl.mn.op.qr.st.whl"
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ok_build_tag() {
|
||||
insta::assert_debug_snapshot!(WheelFilename::from_str(
|
||||
"foo-1.2.3-build-python-abi-platform.whl"
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn from_and_to_string() {
|
||||
let wheel_names = &[
|
||||
"django_allauth-0.51.0-py3-none-any.whl",
|
||||
"osm2geojson-0.2.4-py3-none-any.whl",
|
||||
"numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
|
||||
];
|
||||
for wheel_name in wheel_names {
|
||||
assert_eq!(
|
||||
WheelFilename::from_str(wheel_name).unwrap().to_string(),
|
||||
*wheel_name
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,37 @@
|
|||
[package]
|
||||
name = "distribution-types"
|
||||
version = "0.0.1"
|
||||
edition = { workspace = true }
|
||||
rust-version = { workspace = true }
|
||||
homepage = { workspace = true }
|
||||
documentation = { workspace = true }
|
||||
repository = { workspace = true }
|
||||
authors = { workspace = true }
|
||||
license = { workspace = true }
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
cache-key = { path = "../cache-key" }
|
||||
distribution-filename = { path = "../distribution-filename", features = ["serde"] }
|
||||
pep440_rs = { path = "../pep440-rs" }
|
||||
pep508_rs = { path = "../pep508-rs" }
|
||||
platform-tags = { path = "../platform-tags" }
|
||||
uv-fs = { path = "../uv-fs" }
|
||||
uv-git = { path = "../uv-git", features = ["vendored-openssl"] }
|
||||
uv-normalize = { path = "../uv-normalize" }
|
||||
pypi-types = { path = "../pypi-types" }
|
||||
|
||||
anyhow = { workspace = true }
|
||||
data-encoding = { workspace = true }
|
||||
fs-err = { workspace = true }
|
||||
once_cell = { workspace = true }
|
||||
rkyv = { workspace = true, features = ["strict", "validation"] }
|
||||
rustc-hash = { workspace = true }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
serde_json = { workspace = true }
|
||||
sha2 = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
url = { workspace = true }
|
||||
urlencoding = { workspace = true }
|
||||
|
|
@ -0,0 +1,42 @@
|
|||
use uv_normalize::PackageName;
|
||||
|
||||
use crate::cached::CachedDist;
|
||||
use crate::installed::InstalledDist;
|
||||
use crate::{InstalledMetadata, InstalledVersion, Name};
|
||||
|
||||
/// A distribution that is either a wheel in our cache or already installed.
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum LocalDist {
|
||||
Cached(CachedDist),
|
||||
Installed(InstalledDist),
|
||||
}
|
||||
|
||||
impl Name for LocalDist {
|
||||
fn name(&self) -> &PackageName {
|
||||
match self {
|
||||
Self::Cached(dist) => dist.name(),
|
||||
Self::Installed(dist) => dist.name(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl InstalledMetadata for LocalDist {
|
||||
fn installed_version(&self) -> InstalledVersion {
|
||||
match self {
|
||||
Self::Cached(dist) => dist.installed_version(),
|
||||
Self::Installed(dist) => dist.installed_version(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<CachedDist> for LocalDist {
|
||||
fn from(dist: CachedDist) -> Self {
|
||||
Self::Cached(dist)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<InstalledDist> for LocalDist {
|
||||
fn from(dist: InstalledDist) -> Self {
|
||||
Self::Installed(dist)
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,195 @@
|
|||
use std::path::{Path, PathBuf};
|
||||
|
||||
use anyhow::Result;
|
||||
|
||||
use distribution_filename::WheelFilename;
|
||||
use pep508_rs::VerbatimUrl;
|
||||
use uv_normalize::PackageName;
|
||||
|
||||
use crate::direct_url::{DirectUrl, LocalFileUrl};
|
||||
use crate::{
|
||||
BuiltDist, Dist, DistributionMetadata, InstalledMetadata, InstalledVersion, Name, SourceDist,
|
||||
VersionOrUrl,
|
||||
};
|
||||
|
||||
/// A built distribution (wheel) that exists in the local cache.
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum CachedDist {
|
||||
/// The distribution exists in a registry, like `PyPI`.
|
||||
Registry(CachedRegistryDist),
|
||||
/// The distribution exists at an arbitrary URL.
|
||||
Url(CachedDirectUrlDist),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct CachedRegistryDist {
|
||||
pub filename: WheelFilename,
|
||||
pub path: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct CachedDirectUrlDist {
|
||||
pub filename: WheelFilename,
|
||||
pub url: VerbatimUrl,
|
||||
pub path: PathBuf,
|
||||
pub editable: bool,
|
||||
}
|
||||
|
||||
impl CachedDist {
|
||||
/// Initialize a [`CachedDist`] from a [`Dist`].
|
||||
pub fn from_remote(remote: Dist, filename: WheelFilename, path: PathBuf) -> Self {
|
||||
match remote {
|
||||
Dist::Built(BuiltDist::Registry(_dist)) => {
|
||||
Self::Registry(CachedRegistryDist { filename, path })
|
||||
}
|
||||
Dist::Built(BuiltDist::DirectUrl(dist)) => Self::Url(CachedDirectUrlDist {
|
||||
filename,
|
||||
url: dist.url,
|
||||
path,
|
||||
editable: false,
|
||||
}),
|
||||
Dist::Built(BuiltDist::Path(dist)) => Self::Url(CachedDirectUrlDist {
|
||||
filename,
|
||||
url: dist.url,
|
||||
path,
|
||||
editable: false,
|
||||
}),
|
||||
Dist::Source(SourceDist::Registry(_dist)) => {
|
||||
Self::Registry(CachedRegistryDist { filename, path })
|
||||
}
|
||||
Dist::Source(SourceDist::DirectUrl(dist)) => Self::Url(CachedDirectUrlDist {
|
||||
filename,
|
||||
url: dist.url,
|
||||
path,
|
||||
editable: false,
|
||||
}),
|
||||
Dist::Source(SourceDist::Git(dist)) => Self::Url(CachedDirectUrlDist {
|
||||
filename,
|
||||
url: dist.url,
|
||||
path,
|
||||
editable: false,
|
||||
}),
|
||||
Dist::Source(SourceDist::Path(dist)) => Self::Url(CachedDirectUrlDist {
|
||||
filename,
|
||||
url: dist.url,
|
||||
path,
|
||||
editable: dist.editable,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the [`Path`] at which the distribution is stored on-disk.
|
||||
pub fn path(&self) -> &Path {
|
||||
match self {
|
||||
Self::Registry(dist) => &dist.path,
|
||||
Self::Url(dist) => &dist.path,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the [`DirectUrl`] of the distribution, if it exists.
|
||||
pub fn direct_url(&self) -> Result<Option<DirectUrl>> {
|
||||
match self {
|
||||
CachedDist::Registry(_) => Ok(None),
|
||||
CachedDist::Url(dist) => {
|
||||
if dist.editable {
|
||||
assert_eq!(dist.url.scheme(), "file", "{}", dist.url);
|
||||
Ok(Some(DirectUrl::LocalFile(LocalFileUrl {
|
||||
url: dist.url.raw().clone(),
|
||||
editable: dist.editable,
|
||||
})))
|
||||
} else {
|
||||
DirectUrl::try_from(dist.url.raw()).map(Some)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn editable(&self) -> bool {
|
||||
match self {
|
||||
CachedDist::Registry(_) => false,
|
||||
CachedDist::Url(dist) => dist.editable,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn filename(&self) -> &WheelFilename {
|
||||
match self {
|
||||
CachedDist::Registry(dist) => &dist.filename,
|
||||
CachedDist::Url(dist) => &dist.filename,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl CachedDirectUrlDist {
|
||||
/// Initialize a [`CachedDirectUrlDist`] from a [`WheelFilename`], [`url::Url`], and [`Path`].
|
||||
pub fn from_url(filename: WheelFilename, url: VerbatimUrl, path: PathBuf) -> Self {
|
||||
Self {
|
||||
filename,
|
||||
url,
|
||||
path,
|
||||
editable: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Name for CachedRegistryDist {
|
||||
fn name(&self) -> &PackageName {
|
||||
&self.filename.name
|
||||
}
|
||||
}
|
||||
|
||||
impl Name for CachedDirectUrlDist {
|
||||
fn name(&self) -> &PackageName {
|
||||
&self.filename.name
|
||||
}
|
||||
}
|
||||
|
||||
impl Name for CachedDist {
|
||||
fn name(&self) -> &PackageName {
|
||||
match self {
|
||||
Self::Registry(dist) => dist.name(),
|
||||
Self::Url(dist) => dist.name(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl DistributionMetadata for CachedRegistryDist {
|
||||
fn version_or_url(&self) -> VersionOrUrl {
|
||||
VersionOrUrl::Version(&self.filename.version)
|
||||
}
|
||||
}
|
||||
|
||||
impl DistributionMetadata for CachedDirectUrlDist {
|
||||
fn version_or_url(&self) -> VersionOrUrl {
|
||||
VersionOrUrl::Url(&self.url)
|
||||
}
|
||||
}
|
||||
|
||||
impl DistributionMetadata for CachedDist {
|
||||
fn version_or_url(&self) -> VersionOrUrl {
|
||||
match self {
|
||||
Self::Registry(dist) => dist.version_or_url(),
|
||||
Self::Url(dist) => dist.version_or_url(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl InstalledMetadata for CachedRegistryDist {
|
||||
fn installed_version(&self) -> InstalledVersion {
|
||||
InstalledVersion::Version(&self.filename.version)
|
||||
}
|
||||
}
|
||||
|
||||
impl InstalledMetadata for CachedDirectUrlDist {
|
||||
fn installed_version(&self) -> InstalledVersion {
|
||||
InstalledVersion::Url(&self.url, &self.filename.version)
|
||||
}
|
||||
}
|
||||
|
||||
impl InstalledMetadata for CachedDist {
|
||||
fn installed_version(&self) -> InstalledVersion {
|
||||
match self {
|
||||
Self::Registry(dist) => dist.installed_version(),
|
||||
Self::Url(dist) => dist.installed_version(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,249 @@
|
|||
use std::path::PathBuf;
|
||||
|
||||
use anyhow::{Context, Error, Result};
|
||||
use url::Url;
|
||||
|
||||
use uv_git::{GitSha, GitUrl};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum DirectUrl {
|
||||
/// The direct URL is a path to a local directory or file.
|
||||
LocalFile(LocalFileUrl),
|
||||
/// The direct URL is a path to a Git repository.
|
||||
Git(DirectGitUrl),
|
||||
/// The direct URL is a URL to an archive.
|
||||
Archive(DirectArchiveUrl),
|
||||
}
|
||||
|
||||
/// A local path url
|
||||
///
|
||||
/// Examples:
|
||||
/// * `file:///home/ferris/my_project`
|
||||
#[derive(Debug)]
|
||||
pub struct LocalFileUrl {
|
||||
pub url: Url,
|
||||
pub editable: bool,
|
||||
}
|
||||
|
||||
/// A git repository url
|
||||
///
|
||||
/// Examples:
|
||||
/// * `git+https://git.example.com/MyProject.git`
|
||||
/// * `git+https://git.example.com/MyProject.git@v1.0#egg=pkg&subdirectory=pkg_dir`
|
||||
#[derive(Debug)]
|
||||
pub struct DirectGitUrl {
|
||||
pub url: GitUrl,
|
||||
pub subdirectory: Option<PathBuf>,
|
||||
}
|
||||
|
||||
/// An archive url
|
||||
///
|
||||
/// Examples:
|
||||
/// * wheel: `https://download.pytorch.org/whl/torch-2.0.1-cp39-cp39-manylinux2014_aarch64.whl#sha256=423e0ae257b756bb45a4b49072046772d1ad0c592265c5080070e0767da4e490`
|
||||
/// * source dist, correctly named: `https://files.pythonhosted.org/packages/62/06/d5604a70d160f6a6ca5fd2ba25597c24abd5c5ca5f437263d177ac242308/tqdm-4.66.1.tar.gz`
|
||||
/// * source dist, only extension recognizable: `https://github.com/foo-labs/foo/archive/master.zip#egg=pkg&subdirectory=packages/bar`
|
||||
#[derive(Debug)]
|
||||
pub struct DirectArchiveUrl {
|
||||
pub url: Url,
|
||||
pub subdirectory: Option<PathBuf>,
|
||||
}
|
||||
|
||||
impl TryFrom<&Url> for DirectGitUrl {
|
||||
type Error = Error;
|
||||
|
||||
fn try_from(url: &Url) -> Result<Self, Self::Error> {
|
||||
let subdirectory = get_subdirectory(url);
|
||||
|
||||
let url = url
|
||||
.as_str()
|
||||
.strip_prefix("git+")
|
||||
.context("Missing git+ prefix for Git URL")?;
|
||||
let url = Url::parse(url)?;
|
||||
let url = GitUrl::try_from(url)?;
|
||||
Ok(Self { url, subdirectory })
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&Url> for DirectArchiveUrl {
|
||||
fn from(url: &Url) -> Self {
|
||||
Self {
|
||||
url: url.clone(),
|
||||
subdirectory: get_subdirectory(url),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// If the URL points to a subdirectory, extract it, as in (git):
|
||||
/// `git+https://git.example.com/MyProject.git@v1.0#subdirectory=pkg_dir`
|
||||
/// `git+https://git.example.com/MyProject.git@v1.0#egg=pkg&subdirectory=pkg_dir`
|
||||
/// or (direct archive url):
|
||||
/// `https://github.com/foo-labs/foo/archive/master.zip#subdirectory=packages/bar`
|
||||
/// `https://github.com/foo-labs/foo/archive/master.zip#egg=pkg&subdirectory=packages/bar`
|
||||
fn get_subdirectory(url: &Url) -> Option<PathBuf> {
|
||||
let fragment = url.fragment()?;
|
||||
let subdirectory = fragment
|
||||
.split('&')
|
||||
.find_map(|fragment| fragment.strip_prefix("subdirectory="))?;
|
||||
Some(PathBuf::from(subdirectory))
|
||||
}
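A quick sketch of the fragment handling (the test-style helper is illustrative), using the archive example from the doc comment:

fn subdirectory_sketch() -> Result<(), url::ParseError> {
    let url = Url::parse("https://github.com/foo-labs/foo/archive/master.zip#egg=pkg&subdirectory=packages/bar")?;
    // The `egg=pkg` component is skipped; only `subdirectory=` is extracted.
    assert_eq!(get_subdirectory(&url), Some(PathBuf::from("packages/bar")));
    Ok(())
}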
|
||||
|
||||
/// Return the Git reference of the given URL, if it exists.
|
||||
pub fn git_reference(url: &Url) -> Result<Option<GitSha>, Error> {
|
||||
let DirectGitUrl { url, .. } = DirectGitUrl::try_from(url)?;
|
||||
Ok(url.precise())
|
||||
}
|
||||
|
||||
impl TryFrom<&Url> for DirectUrl {
|
||||
type Error = Error;
|
||||
|
||||
fn try_from(url: &Url) -> Result<Self, Self::Error> {
|
||||
if let Some((prefix, ..)) = url.scheme().split_once('+') {
|
||||
match prefix {
|
||||
"git" => Ok(Self::Git(DirectGitUrl::try_from(url)?)),
|
||||
_ => Err(Error::msg(format!(
|
||||
"Unsupported URL prefix `{prefix}` in URL: {url}",
|
||||
))),
|
||||
}
|
||||
} else if url.scheme().eq_ignore_ascii_case("file") {
|
||||
Ok(Self::LocalFile(LocalFileUrl {
|
||||
url: url.clone(),
|
||||
editable: false,
|
||||
}))
|
||||
} else {
|
||||
Ok(Self::Archive(DirectArchiveUrl::from(url)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&DirectUrl> for pypi_types::DirectUrl {
|
||||
type Error = Error;
|
||||
|
||||
fn try_from(value: &DirectUrl) -> std::result::Result<Self, Self::Error> {
|
||||
match value {
|
||||
DirectUrl::LocalFile(value) => pypi_types::DirectUrl::try_from(value),
|
||||
DirectUrl::Git(value) => pypi_types::DirectUrl::try_from(value),
|
||||
DirectUrl::Archive(value) => pypi_types::DirectUrl::try_from(value),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&LocalFileUrl> for pypi_types::DirectUrl {
|
||||
type Error = Error;
|
||||
|
||||
fn try_from(value: &LocalFileUrl) -> Result<Self, Self::Error> {
|
||||
Ok(pypi_types::DirectUrl::LocalDirectory {
|
||||
url: value.url.clone(),
|
||||
dir_info: pypi_types::DirInfo {
|
||||
editable: value.editable.then_some(true),
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&DirectArchiveUrl> for pypi_types::DirectUrl {
|
||||
type Error = Error;
|
||||
|
||||
fn try_from(value: &DirectArchiveUrl) -> Result<Self, Self::Error> {
|
||||
Ok(pypi_types::DirectUrl::ArchiveUrl {
|
||||
url: value.url.clone(),
|
||||
archive_info: pypi_types::ArchiveInfo {
|
||||
hash: None,
|
||||
hashes: None,
|
||||
},
|
||||
subdirectory: value.subdirectory.clone(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&DirectGitUrl> for pypi_types::DirectUrl {
|
||||
type Error = Error;
|
||||
|
||||
fn try_from(value: &DirectGitUrl) -> Result<Self, Self::Error> {
|
||||
Ok(pypi_types::DirectUrl::VcsUrl {
|
||||
url: value.url.repository().clone(),
|
||||
vcs_info: pypi_types::VcsInfo {
|
||||
vcs: pypi_types::VcsKind::Git,
|
||||
commit_id: value.url.precise().as_ref().map(ToString::to_string),
|
||||
requested_revision: value.url.reference().map(ToString::to_string),
|
||||
},
|
||||
subdirectory: value.subdirectory.clone(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl From<DirectUrl> for Url {
|
||||
fn from(value: DirectUrl) -> Self {
|
||||
match value {
|
||||
DirectUrl::LocalFile(value) => value.into(),
|
||||
DirectUrl::Git(value) => value.into(),
|
||||
DirectUrl::Archive(value) => value.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<LocalFileUrl> for Url {
|
||||
fn from(value: LocalFileUrl) -> Self {
|
||||
value.url
|
||||
}
|
||||
}
|
||||
|
||||
impl From<DirectArchiveUrl> for Url {
|
||||
fn from(value: DirectArchiveUrl) -> Self {
|
||||
let mut url = value.url;
|
||||
if let Some(subdirectory) = value.subdirectory {
|
||||
url.set_fragment(Some(&format!("subdirectory={}", subdirectory.display())));
|
||||
}
|
||||
url
|
||||
}
|
||||
}
|
||||
|
||||
impl From<DirectGitUrl> for Url {
|
||||
fn from(value: DirectGitUrl) -> Self {
|
||||
let mut url = Url::parse(&format!("{}{}", "git+", Url::from(value.url).as_str()))
|
||||
.expect("Git URL is invalid");
|
||||
if let Some(subdirectory) = value.subdirectory {
|
||||
url.set_fragment(Some(&format!("subdirectory={}", subdirectory.display())));
|
||||
}
|
||||
url
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use anyhow::Result;
|
||||
use url::Url;
|
||||
|
||||
use crate::direct_url::DirectUrl;
|
||||
|
||||
#[test]
|
||||
fn direct_url_from_url() -> Result<()> {
|
||||
let expected = Url::parse("file:///path/to/directory")?;
|
||||
let actual = Url::from(DirectUrl::try_from(&expected)?);
|
||||
assert_eq!(expected, actual);
|
||||
|
||||
let expected = Url::parse("git+https://github.com/pallets/flask.git")?;
|
||||
let actual = Url::from(DirectUrl::try_from(&expected)?);
|
||||
assert_eq!(expected, actual);
|
||||
|
||||
let expected = Url::parse("git+https://github.com/pallets/flask.git#subdirectory=pkg_dir")?;
|
||||
let actual = Url::from(DirectUrl::try_from(&expected)?);
|
||||
assert_eq!(expected, actual);
|
||||
|
||||
let expected = Url::parse("git+https://github.com/pallets/flask.git@2.0.0")?;
|
||||
let actual = Url::from(DirectUrl::try_from(&expected)?);
|
||||
assert_eq!(expected, actual);
|
||||
|
||||
let expected =
|
||||
Url::parse("git+https://github.com/pallets/flask.git@2.0.0#subdirectory=pkg_dir")?;
|
||||
let actual = Url::from(DirectUrl::try_from(&expected)?);
|
||||
assert_eq!(expected, actual);
|
||||
|
||||
// TODO(charlie): Preserve other fragments.
|
||||
let expected =
|
||||
Url::parse("git+https://github.com/pallets/flask.git#egg=flask&subdirectory=pkg_dir")?;
|
||||
let actual = Url::from(DirectUrl::try_from(&expected)?);
|
||||
assert_ne!(expected, actual);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,43 @@
|
|||
use std::borrow::Cow;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use url::Url;
|
||||
|
||||
use pep508_rs::VerbatimUrl;
|
||||
use uv_normalize::ExtraName;
|
||||
|
||||
use crate::Verbatim;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct LocalEditable {
|
||||
/// The underlying [`EditableRequirement`] from the `requirements.txt` file.
|
||||
pub url: VerbatimUrl,
|
||||
/// Either the path to the editable or its checkout.
|
||||
pub path: PathBuf,
|
||||
/// The extras that should be installed.
|
||||
pub extras: Vec<ExtraName>,
|
||||
}
|
||||
|
||||
impl LocalEditable {
|
||||
/// Return the editable as a [`Url`].
|
||||
pub fn url(&self) -> &VerbatimUrl {
|
||||
&self.url
|
||||
}
|
||||
|
||||
/// Return the resolved path to the editable.
|
||||
pub fn raw(&self) -> &Url {
|
||||
self.url.raw()
|
||||
}
|
||||
}
|
||||
|
||||
impl Verbatim for LocalEditable {
|
||||
fn verbatim(&self) -> Cow<'_, str> {
|
||||
self.url.verbatim()
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for LocalEditable {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
std::fmt::Display::fmt(&self.url, f)
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
use url::Url;
|
||||
|
||||
#[derive(thiserror::Error, Debug)]
|
||||
pub enum Error {
|
||||
#[error(transparent)]
|
||||
Io(#[from] std::io::Error),
|
||||
|
||||
#[error(transparent)]
|
||||
Utf8(#[from] std::string::FromUtf8Error),
|
||||
|
||||
#[error(transparent)]
|
||||
WheelFilename(#[from] distribution_filename::WheelFilenameError),
|
||||
|
||||
#[error("Unable to extract filename from URL: {0}")]
|
||||
UrlFilename(Url),
|
||||
|
||||
#[error("Distribution not found at: {0}")]
|
||||
NotFound(Url),
|
||||
}
|
||||
|
|
@ -0,0 +1,86 @@
|
|||
use std::fmt::{Display, Formatter};
|
||||
use std::path::PathBuf;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use thiserror::Error;
|
||||
|
||||
use pep440_rs::{VersionSpecifiers, VersionSpecifiersParseError};
|
||||
use pypi_types::{DistInfoMetadata, Hashes, Yanked};
|
||||
|
||||
/// Error converting [`pypi_types::File`] to [`distribution_type::File`].
|
||||
#[derive(Debug, Error)]
|
||||
pub enum FileConversionError {
|
||||
#[error("Failed to parse 'requires-python': {0}")]
|
||||
RequiresPython(String, #[source] VersionSpecifiersParseError),
|
||||
#[error("Failed to parse URL: {0}")]
|
||||
Url(String, #[source] url::ParseError),
|
||||
}
|
||||
|
||||
/// Internal analog to [`pypi_types::File`].
|
||||
#[derive(
|
||||
Debug, Clone, Serialize, Deserialize, rkyv::Archive, rkyv::Deserialize, rkyv::Serialize,
|
||||
)]
|
||||
#[archive(check_bytes)]
|
||||
#[archive_attr(derive(Debug))]
|
||||
pub struct File {
|
||||
pub dist_info_metadata: Option<DistInfoMetadata>,
|
||||
pub filename: String,
|
||||
pub hashes: Hashes,
|
||||
pub requires_python: Option<VersionSpecifiers>,
|
||||
pub size: Option<u64>,
|
||||
// N.B. We don't use a chrono DateTime<Utc> here because it's a little
|
||||
// annoying to do so with rkyv. Since we only use this field for doing
|
||||
// comparisons in testing, we just store it as a UTC timestamp in
|
||||
// milliseconds.
|
||||
pub upload_time_utc_ms: Option<i64>,
|
||||
pub url: FileLocation,
|
||||
pub yanked: Option<Yanked>,
|
||||
}
|
||||
|
||||
impl File {
|
||||
/// `TryFrom` instead of `From` to filter out files with invalid `requires-python` version specifiers
|
||||
pub fn try_from(file: pypi_types::File, base: &str) -> Result<Self, FileConversionError> {
|
||||
Ok(Self {
|
||||
dist_info_metadata: file.dist_info_metadata,
|
||||
filename: file.filename,
|
||||
hashes: file.hashes,
|
||||
requires_python: file
|
||||
.requires_python
|
||||
.transpose()
|
||||
.map_err(|err| FileConversionError::RequiresPython(err.line().clone(), err))?,
|
||||
size: file.size,
|
||||
upload_time_utc_ms: file.upload_time.map(|dt| dt.timestamp_millis()),
|
||||
url: if file.url.contains("://") {
|
||||
FileLocation::AbsoluteUrl(file.url)
|
||||
} else {
|
||||
FileLocation::RelativeUrl(base.to_string(), file.url)
|
||||
},
|
||||
yanked: file.yanked,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// While a registry file is generally a remote URL, it can also be a local file if it comes from a flat index backed by a directory.
|
||||
#[derive(
|
||||
Debug, Clone, Serialize, Deserialize, rkyv::Archive, rkyv::Deserialize, rkyv::Serialize,
|
||||
)]
|
||||
#[archive(check_bytes)]
|
||||
#[archive_attr(derive(Debug))]
|
||||
pub enum FileLocation {
|
||||
/// URL relative to the base URL.
|
||||
RelativeUrl(String, String),
|
||||
/// Absolute URL.
|
||||
AbsoluteUrl(String),
|
||||
/// Absolute path to a file.
|
||||
Path(#[with(rkyv::with::AsString)] PathBuf),
|
||||
}
|
||||
|
||||
impl Display for FileLocation {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
FileLocation::RelativeUrl(_base, url) => Display::fmt(&url, f),
|
||||
FileLocation::AbsoluteUrl(url) => Display::fmt(&url, f),
|
||||
FileLocation::Path(path) => Display::fmt(&path.display(), f),
|
||||
}
|
||||
}
|
||||
}
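A small sketch of the `Display` behaviour above: a relative URL renders without its base, which is why the base is carried separately. (Values here are illustrative.)

fn file_location_sketch() {
    let relative = FileLocation::RelativeUrl(
        "https://pypi.org/simple/".to_string(),
        "../../packages/tqdm-4.66.1.tar.gz".to_string(),
    );
    // Only the second field (the relative URL itself) is displayed.
    assert_eq!(relative.to_string(), "../../packages/tqdm-4.66.1.tar.gz");
}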
|
||||
|
|
@ -0,0 +1,81 @@
|
|||
use std::fmt::{Display, Formatter};
|
||||
|
||||
use url::Url;
|
||||
|
||||
use pep440_rs::Version;
|
||||
use uv_normalize::PackageName;
|
||||
|
||||
/// A unique identifier for a package (e.g., `black==23.10.0`).
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
|
||||
pub enum PackageId {
|
||||
NameVersion(PackageName, Version),
|
||||
Url(String),
|
||||
}
|
||||
|
||||
impl PackageId {
|
||||
/// Create a new [`PackageId`] from a package name and version.
|
||||
pub fn from_registry(name: PackageName, version: Version) -> Self {
|
||||
Self::NameVersion(name, version)
|
||||
}
|
||||
|
||||
/// Create a new [`PackageId`] from a URL.
|
||||
pub fn from_url(url: &Url) -> Self {
|
||||
Self::Url(cache_key::digest(&cache_key::CanonicalUrl::new(url)))
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for PackageId {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
PackageId::NameVersion(name, version) => write!(f, "{name}-{version}"),
|
||||
PackageId::Url(url) => write!(f, "{url}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
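// A hedged usage sketch: construct a `PackageId` for a registry package and render it via
// `Display`. The name and version literals are illustrative; the URL form produces an opaque
// digest of the canonicalized URL, so only its construction is shown.
fn example() {
    use std::str::FromStr;

    let name = PackageName::from_str("black").unwrap();
    let version = Version::from_str("23.10.0").unwrap();
    let id = PackageId::from_registry(name, version);
    assert_eq!(id.to_string(), "black-23.10.0");

    let url = Url::parse("https://example.org/black-23.10.0-py3-none-any.whl").unwrap();
    let _url_id = PackageId::from_url(&url);
}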
|
||||
/// A unique identifier for a distribution (e.g., `black-23.10.0-py3-none-any.whl`).
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
|
||||
pub struct DistributionId(String);
|
||||
|
||||
impl DistributionId {
|
||||
pub fn new(id: impl Into<String>) -> Self {
|
||||
Self(id.into())
|
||||
}
|
||||
}
|
||||
|
||||
impl DistributionId {
|
||||
pub fn as_str(&self) -> &str {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
/// A unique identifier for a resource, like a URL or a Git repository.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
|
||||
pub struct ResourceId(String);
|
||||
|
||||
impl ResourceId {
|
||||
pub fn new(id: impl Into<String>) -> Self {
|
||||
Self(id.into())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&PackageId> for PackageId {
|
||||
/// Required for `WaitMap::wait`.
|
||||
fn from(value: &PackageId) -> Self {
|
||||
value.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&DistributionId> for DistributionId {
|
||||
/// Required for `WaitMap::wait`.
|
||||
fn from(value: &DistributionId) -> Self {
|
||||
value.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ResourceId> for ResourceId {
|
||||
/// Required for `WaitMap::wait`.
|
||||
fn from(value: &ResourceId) -> Self {
|
||||
value.clone()
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,298 @@
use std::fmt::{Display, Formatter};
|
||||
use std::ops::Deref;
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
|
||||
use once_cell::sync::Lazy;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use url::Url;
|
||||
|
||||
use pep508_rs::split_scheme;
|
||||
use uv_fs::normalize_url_path;
|
||||
|
||||
static PYPI_URL: Lazy<Url> = Lazy::new(|| Url::parse("https://pypi.org/simple").unwrap());
|
||||
|
||||
/// The URL of an index, newtyped to avoid mixing it with file URLs.
|
||||
#[derive(Debug, Clone, Hash, Eq, PartialEq, Serialize, Deserialize)]
|
||||
pub enum IndexUrl {
|
||||
Pypi,
|
||||
Url(Url),
|
||||
}
|
||||
|
||||
impl Display for IndexUrl {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
IndexUrl::Pypi => Display::fmt(&*PYPI_URL, f),
|
||||
IndexUrl::Url(url) => Display::fmt(url, f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for IndexUrl {
|
||||
type Err = url::ParseError;
|
||||
|
||||
fn from_str(url: &str) -> Result<Self, Self::Err> {
|
||||
Ok(Self::from(Url::parse(url)?))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Url> for IndexUrl {
|
||||
fn from(url: Url) -> Self {
|
||||
if url == *PYPI_URL {
|
||||
Self::Pypi
|
||||
} else {
|
||||
Self::Url(url)
|
||||
}
|
||||
}
|
||||
}
|
||||
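// A small sketch of the parsing behavior above: the canonical PyPI URL collapses to the `Pypi`
// variant, while any other index parses to `Url`. The PyTorch URL is just an illustrative value.
fn example() {
    let pypi = IndexUrl::from_str("https://pypi.org/simple").unwrap();
    assert!(matches!(pypi, IndexUrl::Pypi));

    let custom = IndexUrl::from_str("https://download.pytorch.org/whl").unwrap();
    assert!(matches!(custom, IndexUrl::Url(_)));
}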
|
||||
impl From<IndexUrl> for Url {
|
||||
fn from(index: IndexUrl) -> Self {
|
||||
match index {
|
||||
IndexUrl::Pypi => PYPI_URL.clone(),
|
||||
IndexUrl::Url(url) => url,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for IndexUrl {
|
||||
type Target = Url;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
match &self {
|
||||
IndexUrl::Pypi => &PYPI_URL,
|
||||
IndexUrl::Url(url) => url,
|
||||
}
|
||||
}
|
||||
}
|
||||
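// A brief sketch of the `Deref` impl: an `IndexUrl` can be used anywhere a `&Url` is expected,
// with the `Pypi` variant transparently dereferencing to the static PyPI URL.
fn example() {
    assert_eq!(IndexUrl::Pypi.host_str(), Some("pypi.org"));
    assert_eq!(IndexUrl::Pypi.path(), "/simple");
}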
|
||||
/// A directory with distributions or a URL to an HTML file with a flat listing of distributions.
|
||||
///
|
||||
/// Also known as `--find-links`.
|
||||
#[derive(Debug, Clone, Hash, Eq, PartialEq, Serialize, Deserialize)]
|
||||
pub enum FlatIndexLocation {
|
||||
Path(PathBuf),
|
||||
Url(Url),
|
||||
}
|
||||
|
||||
impl FromStr for FlatIndexLocation {
|
||||
type Err = url::ParseError;
|
||||
|
||||
/// Parse a raw string for a `--find-links` entry, which could be a URL or a local path.
|
||||
///
|
||||
/// For example:
|
||||
/// - `file:///home/ferris/project/scripts/...`
|
||||
/// - `file:../ferris/`
|
||||
/// - `../ferris/`
|
||||
/// - `https://download.pytorch.org/whl/torch_stable.html`
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
if let Some((scheme, path)) = split_scheme(s) {
|
||||
if scheme == "file" {
|
||||
// Ex) `file:///home/ferris/project/scripts/...` or `file:../ferris/`
|
||||
let path = path.strip_prefix("//").unwrap_or(path);
|
||||
|
||||
// Transform, e.g., `/C:/Users/ferris/wheel-0.42.0.tar.gz` to `C:\Users\ferris\wheel-0.42.0.tar.gz`.
|
||||
let path = normalize_url_path(path);
|
||||
|
||||
let path = PathBuf::from(path.as_ref());
|
||||
Ok(Self::Path(path))
|
||||
} else {
|
||||
// Ex) `https://download.pytorch.org/whl/torch_stable.html`
|
||||
let url = Url::parse(s)?;
|
||||
Ok(Self::Url(url))
|
||||
}
|
||||
} else {
|
||||
// Ex) `../ferris/`
|
||||
let path = PathBuf::from(s);
|
||||
Ok(Self::Path(path))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for FlatIndexLocation {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
FlatIndexLocation::Path(path) => Display::fmt(&path.display(), f),
|
||||
FlatIndexLocation::Url(url) => Display::fmt(url, f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The index locations to use for fetching packages.
|
||||
///
|
||||
/// "pip treats all package sources equally" (<https://github.com/pypa/pip/issues/8606#issuecomment-788754817>),
|
||||
/// and so do we, i.e., you can't rely that on any particular order of querying indices.
|
||||
///
|
||||
/// If the fields are none and empty, ignore the package index, instead rely on local archives and
|
||||
/// caches.
|
||||
///
|
||||
/// From a pip perspective, this type merges `--index-url`, `--extra-index-url`, and `--find-links`.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct IndexLocations {
|
||||
index: Option<IndexUrl>,
|
||||
extra_index: Vec<IndexUrl>,
|
||||
flat_index: Vec<FlatIndexLocation>,
|
||||
}
|
||||
|
||||
impl Default for IndexLocations {
|
||||
/// By default, use the `PyPI` index.
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
index: Some(IndexUrl::Pypi),
|
||||
extra_index: Vec::new(),
|
||||
flat_index: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl IndexLocations {
|
||||
/// Determine the index URLs to use for fetching packages.
|
||||
pub fn from_args(
|
||||
index: IndexUrl,
|
||||
extra_index: Vec<IndexUrl>,
|
||||
flat_index: Vec<FlatIndexLocation>,
|
||||
no_index: bool,
|
||||
) -> Self {
|
||||
if no_index {
|
||||
Self {
|
||||
index: None,
|
||||
extra_index: Vec::new(),
|
||||
flat_index,
|
||||
}
|
||||
} else {
|
||||
Self {
|
||||
index: Some(index),
|
||||
extra_index,
|
||||
flat_index,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Combine a set of index locations.
|
||||
///
|
||||
/// If either the current or the other index locations have `no_index` set, the result will
|
||||
/// have `no_index` set.
|
||||
///
|
||||
/// If the current index location has an `index` set, it will be preserved.
|
||||
#[must_use]
|
||||
pub fn combine(
|
||||
self,
|
||||
index: Option<IndexUrl>,
|
||||
extra_index: Vec<IndexUrl>,
|
||||
flat_index: Vec<FlatIndexLocation>,
|
||||
no_index: bool,
|
||||
) -> Self {
|
||||
if no_index {
|
||||
Self {
|
||||
index: None,
|
||||
extra_index: Vec::new(),
|
||||
flat_index,
|
||||
}
|
||||
} else {
|
||||
Self {
|
||||
index: self.index.or(index),
|
||||
extra_index: self.extra_index.into_iter().chain(extra_index).collect(),
|
||||
flat_index: self.flat_index.into_iter().chain(flat_index).collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
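// A hedged sketch of `combine` semantics: the receiver's `--index-url` wins over the incoming one,
// while `--extra-index-url` and `--find-links` entries are concatenated. The example URLs below
// are illustrative values only.
fn example() {
    let base = IndexLocations::default(); // index = PyPI, no extras, no flat indexes
    let combined = base.combine(
        Some(IndexUrl::from_str("https://example.org/simple").unwrap()),
        vec![IndexUrl::from_str("https://download.pytorch.org/whl").unwrap()],
        Vec::new(),
        false,
    );
    // The existing index (PyPI) is preserved; the incoming `--index-url` is ignored.
    assert_eq!(combined.index(), Some(&IndexUrl::Pypi));
    // The extra indexes are chained onto the (empty) defaults.
    assert_eq!(combined.extra_index().count(), 1);
}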
|
||||
impl<'a> IndexLocations {
|
||||
/// Return an iterator over all [`IndexUrl`] entries.
|
||||
pub fn indexes(&'a self) -> impl Iterator<Item = &'a IndexUrl> + 'a {
|
||||
self.index.iter().chain(self.extra_index.iter())
|
||||
}
|
||||
|
||||
/// Return the primary [`IndexUrl`] entry.
|
||||
pub fn index(&'a self) -> Option<&'a IndexUrl> {
|
||||
self.index.as_ref()
|
||||
}
|
||||
|
||||
/// Return an iterator over the extra [`IndexUrl`] entries.
|
||||
pub fn extra_index(&'a self) -> impl Iterator<Item = &'a IndexUrl> + 'a {
|
||||
self.extra_index.iter()
|
||||
}
|
||||
|
||||
/// Return an iterator over the [`FlatIndexLocation`] entries.
|
||||
pub fn flat_index(&'a self) -> impl Iterator<Item = &'a FlatIndexLocation> + 'a {
|
||||
self.flat_index.iter()
|
||||
}
|
||||
|
||||
/// Clone the index locations into a [`IndexUrls`] instance.
|
||||
pub fn index_urls(&'a self) -> IndexUrls {
|
||||
IndexUrls {
|
||||
index: self.index.clone(),
|
||||
extra_index: self.extra_index.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The index URLs to use for fetching packages.
|
||||
///
|
||||
/// From a pip perspective, this type merges `--index-url` and `--extra-index-url`.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct IndexUrls {
|
||||
index: Option<IndexUrl>,
|
||||
extra_index: Vec<IndexUrl>,
|
||||
}
|
||||
|
||||
impl Default for IndexUrls {
|
||||
/// By default, use the `PyPI` index.
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
index: Some(IndexUrl::Pypi),
|
||||
extra_index: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IndexUrls {
|
||||
/// Return an iterator over the [`IndexUrl`] entries.
|
||||
pub fn indexes(&'a self) -> impl Iterator<Item = &'a IndexUrl> + 'a {
|
||||
self.index.iter().chain(self.extra_index.iter())
|
||||
}
|
||||
|
||||
/// Return `true` if no index is configured.
|
||||
pub fn no_index(&self) -> bool {
|
||||
self.index.is_none() && self.extra_index.is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<IndexLocations> for IndexUrls {
|
||||
fn from(locations: IndexLocations) -> Self {
|
||||
Self {
|
||||
index: locations.index,
|
||||
extra_index: locations.extra_index,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[cfg(unix)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn parse_find_links() {
|
||||
assert_eq!(
|
||||
FlatIndexLocation::from_str("file:///home/ferris/project/scripts/...").unwrap(),
|
||||
FlatIndexLocation::Path(PathBuf::from("/home/ferris/project/scripts/..."))
|
||||
);
|
||||
assert_eq!(
|
||||
FlatIndexLocation::from_str("file:../ferris/").unwrap(),
|
||||
FlatIndexLocation::Path(PathBuf::from("../ferris/"))
|
||||
);
|
||||
assert_eq!(
|
||||
FlatIndexLocation::from_str("../ferris/").unwrap(),
|
||||
FlatIndexLocation::Path(PathBuf::from("../ferris/"))
|
||||
);
|
||||
assert_eq!(
|
||||
FlatIndexLocation::from_str("https://download.pytorch.org/whl/torch_stable.html")
|
||||
.unwrap(),
|
||||
FlatIndexLocation::Url(
|
||||
Url::parse("https://download.pytorch.org/whl/torch_stable.html").unwrap()
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,163 @@
use std::path::{Path, PathBuf};
|
||||
use std::str::FromStr;
|
||||
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use fs_err as fs;
|
||||
use url::Url;
|
||||
|
||||
use pep440_rs::Version;
|
||||
use uv_fs::Normalized;
|
||||
use uv_normalize::PackageName;
|
||||
|
||||
use crate::{InstalledMetadata, InstalledVersion, Name};
|
||||
|
||||
/// A built distribution (wheel) that is installed in a virtual environment.
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum InstalledDist {
|
||||
/// The distribution was derived from a registry, like `PyPI`.
|
||||
Registry(InstalledRegistryDist),
|
||||
/// The distribution was derived from an arbitrary URL.
|
||||
Url(InstalledDirectUrlDist),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct InstalledRegistryDist {
|
||||
pub name: PackageName,
|
||||
pub version: Version,
|
||||
pub path: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct InstalledDirectUrlDist {
|
||||
pub name: PackageName,
|
||||
pub version: Version,
|
||||
pub url: Url,
|
||||
pub editable: bool,
|
||||
pub path: PathBuf,
|
||||
}
|
||||
|
||||
impl InstalledDist {
|
||||
/// Try to parse a distribution from a `.dist-info` directory name (like `django-5.0a1.dist-info`).
|
||||
///
|
||||
/// See: <https://packaging.python.org/en/latest/specifications/recording-installed-packages/#recording-installed-packages>
|
||||
pub fn try_from_path(path: &Path) -> Result<Option<Self>> {
|
||||
if path.extension().is_some_and(|ext| ext == "dist-info") {
|
||||
let Some(file_stem) = path.file_stem() else {
|
||||
return Ok(None);
|
||||
};
|
||||
let Some(file_stem) = file_stem.to_str() else {
|
||||
return Ok(None);
|
||||
};
|
||||
let Some((name, version)) = file_stem.split_once('-') else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
let name = PackageName::from_str(name)?;
|
||||
let version = Version::from_str(version).map_err(|err| anyhow!(err))?;
|
||||
return if let Some(direct_url) = Self::direct_url(path)? {
|
||||
Ok(Some(Self::Url(InstalledDirectUrlDist {
|
||||
name,
|
||||
version,
|
||||
editable: matches!(&direct_url, pypi_types::DirectUrl::LocalDirectory { dir_info, .. } if dir_info.editable == Some(true)),
|
||||
url: Url::from(direct_url),
|
||||
path: path.to_path_buf(),
|
||||
})))
|
||||
} else {
|
||||
Ok(Some(Self::Registry(InstalledRegistryDist {
|
||||
name,
|
||||
version,
|
||||
path: path.to_path_buf(),
|
||||
})))
|
||||
};
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
/// Return the [`Path`] at which the distribution is stored on-disk.
|
||||
pub fn path(&self) -> &Path {
|
||||
match self {
|
||||
Self::Registry(dist) => &dist.path,
|
||||
Self::Url(dist) => &dist.path,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the [`Version`] of the distribution.
|
||||
pub fn version(&self) -> &Version {
|
||||
match self {
|
||||
Self::Registry(dist) => &dist.version,
|
||||
Self::Url(dist) => &dist.version,
|
||||
}
|
||||
}
|
||||
|
||||
/// Read the `direct_url.json` file from a `.dist-info` directory.
|
||||
fn direct_url(path: &Path) -> Result<Option<pypi_types::DirectUrl>> {
|
||||
let path = path.join("direct_url.json");
|
||||
let Ok(file) = fs_err::File::open(path) else {
|
||||
return Ok(None);
|
||||
};
|
||||
let direct_url = serde_json::from_reader::<fs_err::File, pypi_types::DirectUrl>(file)?;
|
||||
Ok(Some(direct_url))
|
||||
}
|
||||
|
||||
/// Read the `METADATA` file from a `.dist-info` directory.
|
||||
pub fn metadata(&self) -> Result<pypi_types::Metadata21> {
|
||||
let path = self.path().join("METADATA");
|
||||
let contents = fs::read(&path)?;
|
||||
pypi_types::Metadata21::parse(&contents).with_context(|| {
|
||||
format!(
|
||||
"Failed to parse METADATA file at: {}",
|
||||
path.normalized_display()
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
/// Return the [`Url`] of the distribution, if it is editable.
|
||||
pub fn as_editable(&self) -> Option<&Url> {
|
||||
match self {
|
||||
Self::Registry(_) => None,
|
||||
Self::Url(dist) => dist.editable.then_some(&dist.url),
|
||||
}
|
||||
}
|
||||
}
|
||||
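// A hedged sketch of `try_from_path`: a `.dist-info` directory name is split into a package name
// and version. The directory here is hypothetical; in a real environment the path would point into
// `site-packages`, and a `direct_url.json` file (if present) switches the result to the `Url`
// variant instead of `Registry`.
fn example() -> Result<()> {
    if let Some(dist) = InstalledDist::try_from_path(Path::new("django-5.0a1.dist-info"))? {
        assert_eq!(dist.name().to_string(), "django");
        assert_eq!(dist.version().to_string(), "5.0a1");
    }
    Ok(())
}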
|
||||
impl Name for InstalledRegistryDist {
|
||||
fn name(&self) -> &PackageName {
|
||||
&self.name
|
||||
}
|
||||
}
|
||||
|
||||
impl Name for InstalledDirectUrlDist {
|
||||
fn name(&self) -> &PackageName {
|
||||
&self.name
|
||||
}
|
||||
}
|
||||
|
||||
impl Name for InstalledDist {
|
||||
fn name(&self) -> &PackageName {
|
||||
match self {
|
||||
Self::Registry(dist) => dist.name(),
|
||||
Self::Url(dist) => dist.name(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl InstalledMetadata for InstalledRegistryDist {
|
||||
fn installed_version(&self) -> InstalledVersion {
|
||||
InstalledVersion::Version(&self.version)
|
||||
}
|
||||
}
|
||||
|
||||
impl InstalledMetadata for InstalledDirectUrlDist {
|
||||
fn installed_version(&self) -> InstalledVersion {
|
||||
InstalledVersion::Url(&self.url, &self.version)
|
||||
}
|
||||
}
|
||||
|
||||
impl InstalledMetadata for InstalledDist {
|
||||
fn installed_version(&self) -> InstalledVersion {
|
||||
match self {
|
||||
Self::Registry(dist) => dist.installed_version(),
|
||||
Self::Url(dist) => dist.installed_version(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,914 @@
//! ## Type hierarchy
|
||||
//!
|
||||
//! When we receive the requirements from `pip sync`, we check which requirements are already fulfilled
//! in the user's environment ([`InstalledDist`]), whether the matching package is in our wheel cache
//! ([`CachedDist`]), or whether we need to download, (potentially) build, and install it ([`Dist`]).
//! These three variants make up [`BuiltDist`].
|
||||
//!
|
||||
//! ## `Dist`
|
||||
//! A [`Dist`] is either a built distribution (a wheel), or a source distribution that exists at
|
||||
//! some location. We translate every PEP 508 requirement, e.g., from `requirements.txt` or from
|
||||
//! `pyproject.toml`'s `[project] dependencies` into a [`Dist`] by checking each index.
|
||||
//! * [`BuiltDist`]: A wheel, with its three possible origins:
|
||||
//! * [`RegistryBuiltDist`]
|
||||
//! * [`DirectUrlBuiltDist`]
|
||||
//! * [`PathBuiltDist`]
|
||||
//! * [`SourceDist`]: A source distribution, with its four possible origins:
|
||||
//! * [`RegistrySourceDist`]
|
||||
//! * [`DirectUrlSourceDist`]
|
||||
//! * [`GitSourceDist`]
|
||||
//! * [`PathSourceDist`]
|
||||
//!
|
||||
//! ## `CachedDist`
|
||||
//! A [`CachedDist`] is a built distribution (wheel) that exists in the local cache, with the two
|
||||
//! possible origins we currently track:
|
||||
//! * [`CachedRegistryDist`]
|
||||
//! * [`CachedDirectUrlDist`]
|
||||
//!
|
||||
//! ## `InstalledDist`
|
||||
//! An [`InstalledDist`] is a built distribution (wheel) that is installed in a virtual environment,
|
||||
//! with the two possible origins we currently track:
|
||||
//! * [`InstalledRegistryDist`]
|
||||
//! * [`InstalledDirectUrlDist`]
|
||||
//!
|
||||
//! Since we read this information from [`direct_url.json`](https://packaging.python.org/en/latest/specifications/direct-url-data-structure/), it doesn't match the information [`Dist`] exactly.
|
||||
use std::borrow::Cow;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::str::FromStr;
|
||||
|
||||
use anyhow::Result;
|
||||
use url::Url;
|
||||
|
||||
use distribution_filename::{DistFilename, SourceDistFilename, WheelFilename};
|
||||
use pep440_rs::Version;
|
||||
use pep508_rs::VerbatimUrl;
|
||||
use uv_normalize::PackageName;
|
||||
|
||||
pub use crate::any::*;
|
||||
pub use crate::cached::*;
|
||||
pub use crate::direct_url::*;
|
||||
pub use crate::editable::*;
|
||||
pub use crate::error::*;
|
||||
pub use crate::file::*;
|
||||
pub use crate::id::*;
|
||||
pub use crate::index_url::*;
|
||||
pub use crate::installed::*;
|
||||
pub use crate::prioritized_distribution::*;
|
||||
pub use crate::resolution::*;
|
||||
pub use crate::traits::*;
|
||||
|
||||
mod any;
|
||||
mod cached;
|
||||
mod direct_url;
|
||||
mod editable;
|
||||
mod error;
|
||||
mod file;
|
||||
mod id;
|
||||
mod index_url;
|
||||
mod installed;
|
||||
mod prioritized_distribution;
|
||||
mod resolution;
|
||||
mod traits;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum VersionOrUrl<'a> {
|
||||
/// A PEP 440 version specifier, used to identify a distribution in a registry.
|
||||
Version(&'a Version),
|
||||
/// A URL, used to identify a distribution at an arbitrary location.
|
||||
Url(&'a VerbatimUrl),
|
||||
}
|
||||
|
||||
impl Verbatim for VersionOrUrl<'_> {
|
||||
fn verbatim(&self) -> Cow<'_, str> {
|
||||
match self {
|
||||
VersionOrUrl::Version(version) => Cow::Owned(format!("=={version}")),
|
||||
VersionOrUrl::Url(url) => Cow::Owned(format!(" @ {}", url.verbatim())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for VersionOrUrl<'_> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
VersionOrUrl::Version(version) => write!(f, "=={version}"),
|
||||
VersionOrUrl::Url(url) => write!(f, " @ {url}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
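// A brief sketch of how `VersionOrUrl` renders: registry pins display as `==<version>`, while URL
// requirements display as ` @ <url>`, mirroring PEP 508 syntax. The version literal is illustrative.
fn example() {
    let version = Version::from_str("23.10.0").unwrap();
    assert_eq!(VersionOrUrl::Version(&version).to_string(), "==23.10.0");
}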
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum InstalledVersion<'a> {
|
||||
/// A PEP 440 version specifier, used to identify a distribution in a registry.
|
||||
Version(&'a Version),
|
||||
/// A URL, used to identify a distribution at an arbitrary location, along with the version
|
||||
/// specifier to which it resolved.
|
||||
Url(&'a Url, &'a Version),
|
||||
}
|
||||
|
||||
impl std::fmt::Display for InstalledVersion<'_> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
InstalledVersion::Version(version) => write!(f, "=={version}"),
|
||||
InstalledVersion::Url(url, version) => write!(f, "=={version} (from {url})"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Either a built distribution (a wheel) or a source distribution that exists at some location.
///
/// The location can be an index, a URL, or a path (for a wheel), or an index, a URL, a path, or a
/// Git repository (for a source distribution).
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum Dist {
|
||||
Built(BuiltDist),
|
||||
Source(SourceDist),
|
||||
}
|
||||
|
||||
/// A wheel, with its three possible origins (index, url, path)
|
||||
#[derive(Debug, Clone)]
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
pub enum BuiltDist {
|
||||
Registry(RegistryBuiltDist),
|
||||
DirectUrl(DirectUrlBuiltDist),
|
||||
Path(PathBuiltDist),
|
||||
}
|
||||
|
||||
/// A source distribution, with its possible origins (index, url, path, git)
|
||||
#[derive(Debug, Clone)]
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
pub enum SourceDist {
|
||||
Registry(RegistrySourceDist),
|
||||
DirectUrl(DirectUrlSourceDist),
|
||||
Git(GitSourceDist),
|
||||
Path(PathSourceDist),
|
||||
}
|
||||
|
||||
/// A built distribution (wheel) that exists in a registry, like `PyPI`.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct RegistryBuiltDist {
|
||||
pub filename: WheelFilename,
|
||||
pub file: Box<File>,
|
||||
pub index: IndexUrl,
|
||||
}
|
||||
|
||||
/// A built distribution (wheel) that exists at an arbitrary URL.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DirectUrlBuiltDist {
|
||||
/// We require that wheel URLs end in the full wheel filename, e.g.,
|
||||
/// `https://example.org/packages/flask-3.0.0-py3-none-any.whl`
|
||||
pub filename: WheelFilename,
|
||||
pub url: VerbatimUrl,
|
||||
}
|
||||
|
||||
/// A built distribution (wheel) that exists in a local directory.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct PathBuiltDist {
|
||||
pub filename: WheelFilename,
|
||||
pub url: VerbatimUrl,
|
||||
pub path: PathBuf,
|
||||
}
|
||||
|
||||
/// A source distribution that exists in a registry, like `PyPI`.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct RegistrySourceDist {
|
||||
pub filename: SourceDistFilename,
|
||||
pub file: Box<File>,
|
||||
pub index: IndexUrl,
|
||||
}
|
||||
|
||||
/// A source distribution that exists at an arbitrary URL.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DirectUrlSourceDist {
|
||||
    /// Unlike [`DirectUrlBuiltDist`], we can't require a full filename with a version here, since
    /// people like to use, e.g., `foo @ https://github.com/org/repo/archive/master.zip`.
|
||||
pub name: PackageName,
|
||||
pub url: VerbatimUrl,
|
||||
}
|
||||
|
||||
/// A source distribution that exists in a Git repository.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct GitSourceDist {
|
||||
pub name: PackageName,
|
||||
pub url: VerbatimUrl,
|
||||
}
|
||||
|
||||
/// A source distribution that exists in a local directory.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct PathSourceDist {
|
||||
pub name: PackageName,
|
||||
pub url: VerbatimUrl,
|
||||
pub path: PathBuf,
|
||||
pub editable: bool,
|
||||
}
|
||||
|
||||
impl Dist {
|
||||
/// Create a [`Dist`] for a registry-based distribution.
|
||||
pub fn from_registry(filename: DistFilename, file: File, index: IndexUrl) -> Self {
|
||||
match filename {
|
||||
DistFilename::WheelFilename(filename) => {
|
||||
Self::Built(BuiltDist::Registry(RegistryBuiltDist {
|
||||
filename,
|
||||
file: Box::new(file),
|
||||
index,
|
||||
}))
|
||||
}
|
||||
DistFilename::SourceDistFilename(filename) => {
|
||||
Self::Source(SourceDist::Registry(RegistrySourceDist {
|
||||
filename,
|
||||
file: Box::new(file),
|
||||
index,
|
||||
}))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a [`Dist`] for a URL-based distribution.
|
||||
pub fn from_url(name: PackageName, url: VerbatimUrl) -> Result<Self, Error> {
|
||||
if url.scheme().starts_with("git+") {
|
||||
return Ok(Self::Source(SourceDist::Git(GitSourceDist { name, url })));
|
||||
}
|
||||
|
||||
if url.scheme().eq_ignore_ascii_case("file") {
|
||||
// Store the canonicalized path, which also serves to validate that it exists.
|
||||
let path = match url
|
||||
.to_file_path()
|
||||
.map_err(|()| Error::UrlFilename(url.to_url()))?
|
||||
.canonicalize()
|
||||
{
|
||||
Ok(path) => path,
|
||||
Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
|
||||
return Err(Error::NotFound(url.to_url()));
|
||||
}
|
||||
Err(err) => return Err(err.into()),
|
||||
};
|
||||
|
||||
return if path
|
||||
.extension()
|
||||
.is_some_and(|ext| ext.eq_ignore_ascii_case("whl"))
|
||||
{
|
||||
Ok(Self::Built(BuiltDist::Path(PathBuiltDist {
|
||||
filename: WheelFilename::from_str(&url.filename()?)?,
|
||||
url,
|
||||
path,
|
||||
})))
|
||||
} else {
|
||||
Ok(Self::Source(SourceDist::Path(PathSourceDist {
|
||||
name,
|
||||
url,
|
||||
path,
|
||||
editable: false,
|
||||
})))
|
||||
};
|
||||
}
|
||||
|
||||
if Path::new(url.path())
|
||||
.extension()
|
||||
.is_some_and(|ext| ext.eq_ignore_ascii_case("whl"))
|
||||
{
|
||||
Ok(Self::Built(BuiltDist::DirectUrl(DirectUrlBuiltDist {
|
||||
filename: WheelFilename::from_str(&url.filename()?)?,
|
||||
url,
|
||||
})))
|
||||
} else {
|
||||
Ok(Self::Source(SourceDist::DirectUrl(DirectUrlSourceDist {
|
||||
name,
|
||||
url,
|
||||
})))
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a [`Dist`] for a local editable distribution.
|
||||
pub fn from_editable(name: PackageName, editable: LocalEditable) -> Result<Self, Error> {
|
||||
let LocalEditable { url, path, .. } = editable;
|
||||
Ok(Self::Source(SourceDist::Path(PathSourceDist {
|
||||
name,
|
||||
url,
|
||||
path,
|
||||
editable: true,
|
||||
})))
|
||||
}
|
||||
|
||||
    /// Returns the [`File`] instance, if this dist comes from a registry with Simple JSON API support.
|
||||
pub fn file(&self) -> Option<&File> {
|
||||
match self {
|
||||
Dist::Built(built) => built.file(),
|
||||
Dist::Source(source) => source.file(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn version(&self) -> Option<&Version> {
|
||||
match self {
|
||||
Dist::Built(wheel) => Some(wheel.version()),
|
||||
Dist::Source(source_dist) => source_dist.version(),
|
||||
}
|
||||
}
|
||||
}
|
||||
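// A hedged sketch of the URL dispatch in `Dist::from_url`: a `git+` scheme becomes a Git source
// distribution, and an `https` URL ending in `.whl` becomes a direct-URL built distribution. The
// package name and URLs are illustrative only.
fn example() -> Result<()> {
    let name = PackageName::from_str("flask")?;

    let wheel_url = Url::parse("https://example.org/flask-3.0.0-py3-none-any.whl")?;
    let dist = Dist::from_url(name.clone(), VerbatimUrl::unknown(wheel_url))?;
    assert!(matches!(dist, Dist::Built(BuiltDist::DirectUrl(_))));

    let git_url = Url::parse("git+https://github.com/pallets/flask")?;
    let dist = Dist::from_url(name, VerbatimUrl::unknown(git_url))?;
    assert!(matches!(dist, Dist::Source(SourceDist::Git(_))));
    Ok(())
}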
|
||||
impl BuiltDist {
|
||||
    /// Returns the [`File`] instance, if this dist comes from a registry with Simple JSON API support.
|
||||
pub fn file(&self) -> Option<&File> {
|
||||
match self {
|
||||
BuiltDist::Registry(registry) => Some(®istry.file),
|
||||
BuiltDist::DirectUrl(_) | BuiltDist::Path(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn version(&self) -> &Version {
|
||||
match self {
|
||||
BuiltDist::Registry(wheel) => &wheel.filename.version,
|
||||
BuiltDist::DirectUrl(wheel) => &wheel.filename.version,
|
||||
BuiltDist::Path(wheel) => &wheel.filename.version,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl SourceDist {
|
||||
    /// Returns the [`File`] instance, if this dist comes from a registry with Simple JSON API support.
|
||||
pub fn file(&self) -> Option<&File> {
|
||||
match self {
|
||||
SourceDist::Registry(registry) => Some(®istry.file),
|
||||
SourceDist::DirectUrl(_) | SourceDist::Git(_) | SourceDist::Path(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn version(&self) -> Option<&Version> {
|
||||
match self {
|
||||
SourceDist::Registry(source_dist) => Some(&source_dist.filename.version),
|
||||
SourceDist::DirectUrl(_) | SourceDist::Git(_) | SourceDist::Path(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn with_url(self, url: Url) -> Self {
|
||||
match self {
|
||||
SourceDist::DirectUrl(dist) => SourceDist::DirectUrl(DirectUrlSourceDist {
|
||||
url: VerbatimUrl::unknown(url),
|
||||
..dist
|
||||
}),
|
||||
SourceDist::Git(dist) => SourceDist::Git(GitSourceDist {
|
||||
url: VerbatimUrl::unknown(url),
|
||||
..dist
|
||||
}),
|
||||
dist => dist,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Name for RegistryBuiltDist {
|
||||
fn name(&self) -> &PackageName {
|
||||
&self.filename.name
|
||||
}
|
||||
}
|
||||
|
||||
impl Name for DirectUrlBuiltDist {
|
||||
fn name(&self) -> &PackageName {
|
||||
&self.filename.name
|
||||
}
|
||||
}
|
||||
|
||||
impl Name for PathBuiltDist {
|
||||
fn name(&self) -> &PackageName {
|
||||
&self.filename.name
|
||||
}
|
||||
}
|
||||
|
||||
impl Name for RegistrySourceDist {
|
||||
fn name(&self) -> &PackageName {
|
||||
&self.filename.name
|
||||
}
|
||||
}
|
||||
|
||||
impl Name for DirectUrlSourceDist {
|
||||
fn name(&self) -> &PackageName {
|
||||
&self.name
|
||||
}
|
||||
}
|
||||
|
||||
impl Name for GitSourceDist {
|
||||
fn name(&self) -> &PackageName {
|
||||
&self.name
|
||||
}
|
||||
}
|
||||
|
||||
impl Name for PathSourceDist {
|
||||
fn name(&self) -> &PackageName {
|
||||
&self.name
|
||||
}
|
||||
}
|
||||
|
||||
impl Name for SourceDist {
|
||||
fn name(&self) -> &PackageName {
|
||||
match self {
|
||||
Self::Registry(dist) => dist.name(),
|
||||
Self::DirectUrl(dist) => dist.name(),
|
||||
Self::Git(dist) => dist.name(),
|
||||
Self::Path(dist) => dist.name(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Name for BuiltDist {
|
||||
fn name(&self) -> &PackageName {
|
||||
match self {
|
||||
Self::Registry(dist) => dist.name(),
|
||||
Self::DirectUrl(dist) => dist.name(),
|
||||
Self::Path(dist) => dist.name(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Name for Dist {
|
||||
fn name(&self) -> &PackageName {
|
||||
match self {
|
||||
Self::Built(dist) => dist.name(),
|
||||
Self::Source(dist) => dist.name(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl DistributionMetadata for RegistryBuiltDist {
|
||||
fn version_or_url(&self) -> VersionOrUrl {
|
||||
VersionOrUrl::Version(&self.filename.version)
|
||||
}
|
||||
}
|
||||
|
||||
impl DistributionMetadata for DirectUrlBuiltDist {
|
||||
fn version_or_url(&self) -> VersionOrUrl {
|
||||
VersionOrUrl::Url(&self.url)
|
||||
}
|
||||
}
|
||||
|
||||
impl DistributionMetadata for PathBuiltDist {
|
||||
fn version_or_url(&self) -> VersionOrUrl {
|
||||
VersionOrUrl::Url(&self.url)
|
||||
}
|
||||
}
|
||||
|
||||
impl DistributionMetadata for RegistrySourceDist {
|
||||
fn version_or_url(&self) -> VersionOrUrl {
|
||||
VersionOrUrl::Version(&self.filename.version)
|
||||
}
|
||||
}
|
||||
|
||||
impl DistributionMetadata for DirectUrlSourceDist {
|
||||
fn version_or_url(&self) -> VersionOrUrl {
|
||||
VersionOrUrl::Url(&self.url)
|
||||
}
|
||||
}
|
||||
|
||||
impl DistributionMetadata for GitSourceDist {
|
||||
fn version_or_url(&self) -> VersionOrUrl {
|
||||
VersionOrUrl::Url(&self.url)
|
||||
}
|
||||
}
|
||||
|
||||
impl DistributionMetadata for PathSourceDist {
|
||||
fn version_or_url(&self) -> VersionOrUrl {
|
||||
VersionOrUrl::Url(&self.url)
|
||||
}
|
||||
}
|
||||
|
||||
impl DistributionMetadata for SourceDist {
|
||||
fn version_or_url(&self) -> VersionOrUrl {
|
||||
match self {
|
||||
Self::Registry(dist) => dist.version_or_url(),
|
||||
Self::DirectUrl(dist) => dist.version_or_url(),
|
||||
Self::Git(dist) => dist.version_or_url(),
|
||||
Self::Path(dist) => dist.version_or_url(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl DistributionMetadata for BuiltDist {
|
||||
fn version_or_url(&self) -> VersionOrUrl {
|
||||
match self {
|
||||
Self::Registry(dist) => dist.version_or_url(),
|
||||
Self::DirectUrl(dist) => dist.version_or_url(),
|
||||
Self::Path(dist) => dist.version_or_url(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl DistributionMetadata for Dist {
|
||||
fn version_or_url(&self) -> VersionOrUrl {
|
||||
match self {
|
||||
Self::Built(dist) => dist.version_or_url(),
|
||||
Self::Source(dist) => dist.version_or_url(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl RemoteSource for File {
|
||||
fn filename(&self) -> Result<Cow<'_, str>, Error> {
|
||||
Ok(Cow::Borrowed(&self.filename))
|
||||
}
|
||||
|
||||
fn size(&self) -> Option<u64> {
|
||||
self.size
|
||||
}
|
||||
}
|
||||
|
||||
impl RemoteSource for Url {
|
||||
fn filename(&self) -> Result<Cow<'_, str>, Error> {
|
||||
// Identify the last segment of the URL as the filename.
|
||||
let filename = self
|
||||
.path_segments()
|
||||
.and_then(Iterator::last)
|
||||
.ok_or_else(|| Error::UrlFilename(self.clone()))?;
|
||||
|
||||
// Decode the filename, which may be percent-encoded.
|
||||
let filename = urlencoding::decode(filename)?;
|
||||
|
||||
Ok(filename)
|
||||
}
|
||||
|
||||
fn size(&self) -> Option<u64> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl RemoteSource for RegistryBuiltDist {
|
||||
fn filename(&self) -> Result<Cow<'_, str>, Error> {
|
||||
self.file.filename()
|
||||
}
|
||||
|
||||
fn size(&self) -> Option<u64> {
|
||||
self.file.size()
|
||||
}
|
||||
}
|
||||
|
||||
impl RemoteSource for RegistrySourceDist {
|
||||
fn filename(&self) -> Result<Cow<'_, str>, Error> {
|
||||
self.file.filename()
|
||||
}
|
||||
|
||||
fn size(&self) -> Option<u64> {
|
||||
self.file.size()
|
||||
}
|
||||
}
|
||||
|
||||
impl RemoteSource for DirectUrlBuiltDist {
|
||||
fn filename(&self) -> Result<Cow<'_, str>, Error> {
|
||||
self.url.filename()
|
||||
}
|
||||
|
||||
fn size(&self) -> Option<u64> {
|
||||
self.url.size()
|
||||
}
|
||||
}
|
||||
|
||||
impl RemoteSource for DirectUrlSourceDist {
|
||||
fn filename(&self) -> Result<Cow<'_, str>, Error> {
|
||||
self.url.filename()
|
||||
}
|
||||
|
||||
fn size(&self) -> Option<u64> {
|
||||
self.url.size()
|
||||
}
|
||||
}
|
||||
|
||||
impl RemoteSource for GitSourceDist {
|
||||
fn filename(&self) -> Result<Cow<'_, str>, Error> {
|
||||
// The filename is the last segment of the URL, before any `@`.
|
||||
match self.url.filename()? {
|
||||
Cow::Borrowed(filename) => {
|
||||
if let Some((_, filename)) = filename.rsplit_once('@') {
|
||||
Ok(Cow::Borrowed(filename))
|
||||
} else {
|
||||
Ok(Cow::Borrowed(filename))
|
||||
}
|
||||
}
|
||||
Cow::Owned(filename) => {
|
||||
if let Some((_, filename)) = filename.rsplit_once('@') {
|
||||
Ok(Cow::Owned(filename.to_owned()))
|
||||
} else {
|
||||
Ok(Cow::Owned(filename))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn size(&self) -> Option<u64> {
|
||||
self.url.size()
|
||||
}
|
||||
}
|
||||
|
||||
impl RemoteSource for PathBuiltDist {
|
||||
fn filename(&self) -> Result<Cow<'_, str>, Error> {
|
||||
self.url.filename()
|
||||
}
|
||||
|
||||
fn size(&self) -> Option<u64> {
|
||||
self.url.size()
|
||||
}
|
||||
}
|
||||
|
||||
impl RemoteSource for PathSourceDist {
|
||||
fn filename(&self) -> Result<Cow<'_, str>, Error> {
|
||||
self.url.filename()
|
||||
}
|
||||
|
||||
fn size(&self) -> Option<u64> {
|
||||
self.url.size()
|
||||
}
|
||||
}
|
||||
|
||||
impl RemoteSource for SourceDist {
|
||||
fn filename(&self) -> Result<Cow<'_, str>, Error> {
|
||||
match self {
|
||||
Self::Registry(dist) => dist.filename(),
|
||||
Self::DirectUrl(dist) => dist.filename(),
|
||||
Self::Git(dist) => dist.filename(),
|
||||
Self::Path(dist) => dist.filename(),
|
||||
}
|
||||
}
|
||||
|
||||
fn size(&self) -> Option<u64> {
|
||||
match self {
|
||||
Self::Registry(dist) => dist.size(),
|
||||
Self::DirectUrl(dist) => dist.size(),
|
||||
Self::Git(dist) => dist.size(),
|
||||
Self::Path(dist) => dist.size(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl RemoteSource for BuiltDist {
|
||||
fn filename(&self) -> Result<Cow<'_, str>, Error> {
|
||||
match self {
|
||||
Self::Registry(dist) => dist.filename(),
|
||||
Self::DirectUrl(dist) => dist.filename(),
|
||||
Self::Path(dist) => dist.filename(),
|
||||
}
|
||||
}
|
||||
|
||||
fn size(&self) -> Option<u64> {
|
||||
match self {
|
||||
Self::Registry(dist) => dist.size(),
|
||||
Self::DirectUrl(dist) => dist.size(),
|
||||
Self::Path(dist) => dist.size(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl RemoteSource for Dist {
|
||||
fn filename(&self) -> Result<Cow<'_, str>, Error> {
|
||||
match self {
|
||||
Self::Built(dist) => dist.filename(),
|
||||
Self::Source(dist) => dist.filename(),
|
||||
}
|
||||
}
|
||||
|
||||
fn size(&self) -> Option<u64> {
|
||||
match self {
|
||||
Self::Built(dist) => dist.size(),
|
||||
Self::Source(dist) => dist.size(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Identifier for Url {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
DistributionId::new(cache_key::digest(&cache_key::CanonicalUrl::new(self)))
|
||||
}
|
||||
|
||||
fn resource_id(&self) -> ResourceId {
|
||||
ResourceId::new(cache_key::digest(&cache_key::RepositoryUrl::new(self)))
|
||||
}
|
||||
}
|
||||
|
||||
impl Identifier for File {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
if let Some(hash) = self.hashes.as_str() {
|
||||
DistributionId::new(hash)
|
||||
} else {
|
||||
self.url.distribution_id()
|
||||
}
|
||||
}
|
||||
|
||||
fn resource_id(&self) -> ResourceId {
|
||||
if let Some(hash) = self.hashes.as_str() {
|
||||
ResourceId::new(hash)
|
||||
} else {
|
||||
self.url.resource_id()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Identifier for Path {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
DistributionId::new(cache_key::digest(&self))
|
||||
}
|
||||
|
||||
fn resource_id(&self) -> ResourceId {
|
||||
ResourceId::new(cache_key::digest(&self))
|
||||
}
|
||||
}
|
||||
|
||||
impl Identifier for String {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
DistributionId::new(cache_key::digest(&self))
|
||||
}
|
||||
|
||||
fn resource_id(&self) -> ResourceId {
|
||||
ResourceId::new(cache_key::digest(&self))
|
||||
}
|
||||
}
|
||||
|
||||
impl Identifier for &str {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
DistributionId::new(cache_key::digest(&self))
|
||||
}
|
||||
|
||||
fn resource_id(&self) -> ResourceId {
|
||||
ResourceId::new(cache_key::digest(&self))
|
||||
}
|
||||
}
|
||||
|
||||
impl Identifier for (&str, &str) {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
DistributionId::new(cache_key::digest(&self))
|
||||
}
|
||||
|
||||
fn resource_id(&self) -> ResourceId {
|
||||
ResourceId::new(cache_key::digest(&self))
|
||||
}
|
||||
}
|
||||
|
||||
impl Identifier for (&Url, &str) {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
DistributionId::new(cache_key::digest(&self))
|
||||
}
|
||||
|
||||
fn resource_id(&self) -> ResourceId {
|
||||
ResourceId::new(cache_key::digest(&self))
|
||||
}
|
||||
}
|
||||
|
||||
impl Identifier for FileLocation {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
match self {
|
||||
FileLocation::RelativeUrl(base, url) => (base.as_str(), url.as_str()).distribution_id(),
|
||||
FileLocation::AbsoluteUrl(url) => url.distribution_id(),
|
||||
FileLocation::Path(path) => path.distribution_id(),
|
||||
}
|
||||
}
|
||||
|
||||
fn resource_id(&self) -> ResourceId {
|
||||
match self {
|
||||
FileLocation::RelativeUrl(base, url) => (base.as_str(), url.as_str()).resource_id(),
|
||||
FileLocation::AbsoluteUrl(url) => url.resource_id(),
|
||||
FileLocation::Path(path) => path.resource_id(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Identifier for RegistryBuiltDist {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
self.file.distribution_id()
|
||||
}
|
||||
|
||||
fn resource_id(&self) -> ResourceId {
|
||||
self.file.resource_id()
|
||||
}
|
||||
}
|
||||
|
||||
impl Identifier for RegistrySourceDist {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
self.file.distribution_id()
|
||||
}
|
||||
|
||||
fn resource_id(&self) -> ResourceId {
|
||||
self.file.resource_id()
|
||||
}
|
||||
}
|
||||
|
||||
impl Identifier for DirectUrlBuiltDist {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
self.url.distribution_id()
|
||||
}
|
||||
|
||||
fn resource_id(&self) -> ResourceId {
|
||||
self.url.resource_id()
|
||||
}
|
||||
}
|
||||
|
||||
impl Identifier for DirectUrlSourceDist {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
self.url.distribution_id()
|
||||
}
|
||||
|
||||
fn resource_id(&self) -> ResourceId {
|
||||
self.url.resource_id()
|
||||
}
|
||||
}
|
||||
|
||||
impl Identifier for PathBuiltDist {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
self.url.distribution_id()
|
||||
}
|
||||
|
||||
fn resource_id(&self) -> ResourceId {
|
||||
self.url.resource_id()
|
||||
}
|
||||
}
|
||||
|
||||
impl Identifier for PathSourceDist {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
self.url.distribution_id()
|
||||
}
|
||||
|
||||
fn resource_id(&self) -> ResourceId {
|
||||
self.url.resource_id()
|
||||
}
|
||||
}
|
||||
|
||||
impl Identifier for GitSourceDist {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
self.url.distribution_id()
|
||||
}
|
||||
|
||||
fn resource_id(&self) -> ResourceId {
|
||||
self.url.resource_id()
|
||||
}
|
||||
}
|
||||
|
||||
impl Identifier for SourceDist {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
match self {
|
||||
Self::Registry(dist) => dist.distribution_id(),
|
||||
Self::DirectUrl(dist) => dist.distribution_id(),
|
||||
Self::Git(dist) => dist.distribution_id(),
|
||||
Self::Path(dist) => dist.distribution_id(),
|
||||
}
|
||||
}
|
||||
|
||||
fn resource_id(&self) -> ResourceId {
|
||||
match self {
|
||||
Self::Registry(dist) => dist.resource_id(),
|
||||
Self::DirectUrl(dist) => dist.resource_id(),
|
||||
Self::Git(dist) => dist.resource_id(),
|
||||
Self::Path(dist) => dist.resource_id(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Identifier for BuiltDist {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
match self {
|
||||
Self::Registry(dist) => dist.distribution_id(),
|
||||
Self::DirectUrl(dist) => dist.distribution_id(),
|
||||
Self::Path(dist) => dist.distribution_id(),
|
||||
}
|
||||
}
|
||||
|
||||
fn resource_id(&self) -> ResourceId {
|
||||
match self {
|
||||
Self::Registry(dist) => dist.resource_id(),
|
||||
Self::DirectUrl(dist) => dist.resource_id(),
|
||||
Self::Path(dist) => dist.resource_id(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Identifier for Dist {
|
||||
fn distribution_id(&self) -> DistributionId {
|
||||
match self {
|
||||
Self::Built(dist) => dist.distribution_id(),
|
||||
Self::Source(dist) => dist.distribution_id(),
|
||||
}
|
||||
}
|
||||
|
||||
fn resource_id(&self) -> ResourceId {
|
||||
match self {
|
||||
Self::Built(dist) => dist.resource_id(),
|
||||
Self::Source(dist) => dist.resource_id(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use crate::{BuiltDist, Dist, SourceDist};
|
||||
|
||||
/// Ensure that we don't accidentally grow the `Dist` sizes.
|
||||
#[test]
|
||||
fn dist_size() {
|
||||
// At time of writing, Unix is at 240, Windows is at 248.
|
||||
assert!(
|
||||
std::mem::size_of::<Dist>() <= 248,
|
||||
"{}",
|
||||
std::mem::size_of::<Dist>()
|
||||
);
|
||||
assert!(
|
||||
std::mem::size_of::<BuiltDist>() <= 248,
|
||||
"{}",
|
||||
std::mem::size_of::<BuiltDist>()
|
||||
);
|
||||
        // At time of writing, Unix is at 168, Windows is at 176.
|
||||
assert!(
|
||||
std::mem::size_of::<SourceDist>() <= 176,
|
||||
"{}",
|
||||
std::mem::size_of::<SourceDist>()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,348 @@
use pep440_rs::VersionSpecifiers;
|
||||
use platform_tags::{IncompatibleTag, TagCompatibility, TagPriority};
|
||||
use pypi_types::{Hashes, Yanked};
|
||||
|
||||
use crate::Dist;
|
||||
|
||||
/// A collection of distributions that have been filtered by relevance.
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct PrioritizedDist(Box<PrioritizedDistInner>);
|
||||
|
||||
/// [`PrioritizedDist`] is boxed because [`Dist`] is large.
|
||||
#[derive(Debug, Default, Clone)]
|
||||
struct PrioritizedDistInner {
|
||||
/// An arbitrary source distribution for the package version.
|
||||
source: Option<DistMetadata>,
|
||||
/// The highest-priority, installable wheel for the package version.
|
||||
compatible_wheel: Option<(DistMetadata, TagPriority)>,
|
||||
/// The most-relevant, incompatible wheel for the package version.
|
||||
incompatible_wheel: Option<(DistMetadata, IncompatibleWheel)>,
|
||||
/// The hashes for each distribution.
|
||||
hashes: Vec<Hashes>,
|
||||
    /// Whether any files for this distribution were filtered out by the `exclude_newer` option.
|
||||
exclude_newer: bool,
|
||||
}
|
||||
|
||||
/// A distribution that can be used for both resolution and installation.
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum CompatibleDist<'a> {
|
||||
/// The distribution should be resolved and installed using a source distribution.
|
||||
SourceDist(&'a DistMetadata),
|
||||
/// The distribution should be resolved and installed using a wheel distribution.
|
||||
CompatibleWheel(&'a DistMetadata, TagPriority),
|
||||
/// The distribution should be resolved using an incompatible wheel distribution, but
|
||||
/// installed using a source distribution.
|
||||
IncompatibleWheel {
|
||||
source_dist: &'a DistMetadata,
|
||||
wheel: &'a DistMetadata,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum WheelCompatibility {
|
||||
Incompatible(IncompatibleWheel),
|
||||
Compatible(TagPriority),
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Ord, PartialOrd, Clone)]
|
||||
pub enum IncompatibleWheel {
|
||||
Tag(IncompatibleTag),
|
||||
RequiresPython,
|
||||
NoBinary,
|
||||
}
|
||||
|
||||
/// A [`Dist`] and metadata about it required for downstream filtering.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct DistMetadata {
|
||||
/// The distribution.
|
||||
pub dist: Dist,
|
||||
/// The version of Python required by the distribution.
|
||||
pub requires_python: Option<VersionSpecifiers>,
|
||||
/// If the distribution file is yanked.
|
||||
pub yanked: Yanked,
|
||||
}
|
||||
|
||||
impl PrioritizedDist {
|
||||
/// Create a new [`PrioritizedDist`] from the given wheel distribution.
|
||||
pub fn from_built(
|
||||
dist: Dist,
|
||||
requires_python: Option<VersionSpecifiers>,
|
||||
yanked: Yanked,
|
||||
hash: Option<Hashes>,
|
||||
compatibility: WheelCompatibility,
|
||||
) -> Self {
|
||||
match compatibility {
|
||||
WheelCompatibility::Compatible(priority) => Self(Box::new(PrioritizedDistInner {
|
||||
source: None,
|
||||
compatible_wheel: Some((
|
||||
DistMetadata {
|
||||
dist,
|
||||
requires_python,
|
||||
yanked,
|
||||
},
|
||||
priority,
|
||||
)),
|
||||
incompatible_wheel: None,
|
||||
hashes: hash.map(|hash| vec![hash]).unwrap_or_default(),
|
||||
exclude_newer: false,
|
||||
})),
|
||||
WheelCompatibility::Incompatible(incompatibility) => {
|
||||
Self(Box::new(PrioritizedDistInner {
|
||||
source: None,
|
||||
compatible_wheel: None,
|
||||
incompatible_wheel: Some((
|
||||
DistMetadata {
|
||||
dist,
|
||||
requires_python,
|
||||
yanked,
|
||||
},
|
||||
incompatibility,
|
||||
)),
|
||||
hashes: hash.map(|hash| vec![hash]).unwrap_or_default(),
|
||||
exclude_newer: false,
|
||||
}))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a new [`PrioritizedDist`] from the given source distribution.
|
||||
pub fn from_source(
|
||||
dist: Dist,
|
||||
requires_python: Option<VersionSpecifiers>,
|
||||
yanked: Yanked,
|
||||
hash: Option<Hashes>,
|
||||
) -> Self {
|
||||
Self(Box::new(PrioritizedDistInner {
|
||||
source: Some(DistMetadata {
|
||||
dist,
|
||||
requires_python,
|
||||
yanked,
|
||||
}),
|
||||
compatible_wheel: None,
|
||||
incompatible_wheel: None,
|
||||
hashes: hash.map(|hash| vec![hash]).unwrap_or_default(),
|
||||
exclude_newer: false,
|
||||
}))
|
||||
}
|
||||
|
||||
/// Insert the given built distribution into the [`PrioritizedDist`].
|
||||
pub fn insert_built(
|
||||
&mut self,
|
||||
dist: Dist,
|
||||
requires_python: Option<VersionSpecifiers>,
|
||||
yanked: Yanked,
|
||||
hash: Option<Hashes>,
|
||||
compatibility: WheelCompatibility,
|
||||
) {
|
||||
match compatibility {
|
||||
// Prefer the highest-priority, compatible wheel.
|
||||
WheelCompatibility::Compatible(priority) => {
|
||||
if let Some((.., existing_priority)) = &self.0.compatible_wheel {
|
||||
if priority > *existing_priority {
|
||||
self.0.compatible_wheel = Some((
|
||||
DistMetadata {
|
||||
dist,
|
||||
requires_python,
|
||||
yanked,
|
||||
},
|
||||
priority,
|
||||
));
|
||||
}
|
||||
} else {
|
||||
self.0.compatible_wheel = Some((
|
||||
DistMetadata {
|
||||
dist,
|
||||
requires_python,
|
||||
yanked,
|
||||
},
|
||||
priority,
|
||||
));
|
||||
}
|
||||
}
|
||||
// Track the most relevant incompatible wheel
|
||||
WheelCompatibility::Incompatible(incompatibility) => {
|
||||
if let Some((.., existing_incompatibility)) = &self.0.incompatible_wheel {
|
||||
if incompatibility > *existing_incompatibility {
|
||||
self.0.incompatible_wheel = Some((
|
||||
DistMetadata {
|
||||
dist,
|
||||
requires_python,
|
||||
yanked,
|
||||
},
|
||||
incompatibility,
|
||||
));
|
||||
}
|
||||
} else {
|
||||
self.0.incompatible_wheel = Some((
|
||||
DistMetadata {
|
||||
dist,
|
||||
requires_python,
|
||||
yanked,
|
||||
},
|
||||
incompatibility,
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(hash) = hash {
|
||||
self.0.hashes.push(hash);
|
||||
}
|
||||
}
|
||||
|
||||
/// Insert the given source distribution into the [`PrioritizedDist`].
|
||||
pub fn insert_source(
|
||||
&mut self,
|
||||
dist: Dist,
|
||||
requires_python: Option<VersionSpecifiers>,
|
||||
yanked: Yanked,
|
||||
hash: Option<Hashes>,
|
||||
) {
|
||||
if self.0.source.is_none() {
|
||||
self.0.source = Some(DistMetadata {
|
||||
dist,
|
||||
requires_python,
|
||||
yanked,
|
||||
});
|
||||
}
|
||||
|
||||
if let Some(hash) = hash {
|
||||
self.0.hashes.push(hash);
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the highest-priority distribution for the package version, if any.
|
||||
pub fn get(&self) -> Option<CompatibleDist> {
|
||||
match (
|
||||
&self.0.compatible_wheel,
|
||||
&self.0.source,
|
||||
&self.0.incompatible_wheel,
|
||||
) {
|
||||
// Prefer the highest-priority, platform-compatible wheel.
|
||||
(Some((wheel, tag_priority)), _, _) => {
|
||||
Some(CompatibleDist::CompatibleWheel(wheel, *tag_priority))
|
||||
}
|
||||
// If we have a compatible source distribution and an incompatible wheel, return the
|
||||
// wheel. We assume that all distributions have the same metadata for a given package
|
||||
// version. If a compatible source distribution exists, we assume we can build it, but
|
||||
// using the wheel is faster.
|
||||
(_, Some(source_dist), Some((wheel, _))) => {
|
||||
Some(CompatibleDist::IncompatibleWheel { source_dist, wheel })
|
||||
}
|
||||
// Otherwise, if we have a source distribution, return it.
|
||||
(_, Some(source_dist), _) => Some(CompatibleDist::SourceDist(source_dist)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the source distribution, if any.
|
||||
pub fn source(&self) -> Option<&DistMetadata> {
|
||||
self.0.source.as_ref()
|
||||
}
|
||||
|
||||
/// Return the compatible built distribution, if any.
|
||||
pub fn compatible_wheel(&self) -> Option<&(DistMetadata, TagPriority)> {
|
||||
self.0.compatible_wheel.as_ref()
|
||||
}
|
||||
|
||||
/// Return the incompatible built distribution, if any.
|
||||
pub fn incompatible_wheel(&self) -> Option<&(DistMetadata, IncompatibleWheel)> {
|
||||
self.0.incompatible_wheel.as_ref()
|
||||
}
|
||||
|
||||
/// Set the `exclude_newer` flag
|
||||
pub fn set_exclude_newer(&mut self) {
|
||||
self.0.exclude_newer = true;
|
||||
}
|
||||
|
||||
/// Check if any distributions were excluded by the `exclude_newer` option
|
||||
pub fn exclude_newer(&self) -> bool {
|
||||
self.0.exclude_newer
|
||||
}
|
||||
|
||||
/// Return the hashes for each distribution.
|
||||
pub fn hashes(&self) -> &[Hashes] {
|
||||
&self.0.hashes
|
||||
}
|
||||
|
||||
/// Returns true if and only if this distribution does not contain any
|
||||
/// source distributions or wheels.
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.0.source.is_none()
|
||||
&& self.0.compatible_wheel.is_none()
|
||||
&& self.0.incompatible_wheel.is_none()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> CompatibleDist<'a> {
|
||||
/// Return the [`DistMetadata`] to use during resolution.
|
||||
pub fn for_resolution(&self) -> &DistMetadata {
|
||||
match *self {
|
||||
CompatibleDist::SourceDist(sdist) => sdist,
|
||||
CompatibleDist::CompatibleWheel(wheel, _) => wheel,
|
||||
CompatibleDist::IncompatibleWheel {
|
||||
source_dist: _,
|
||||
wheel,
|
||||
} => wheel,
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the [`DistMetadata`] to use during installation.
|
||||
pub fn for_installation(&self) -> &DistMetadata {
|
||||
match *self {
|
||||
CompatibleDist::SourceDist(sdist) => sdist,
|
||||
CompatibleDist::CompatibleWheel(wheel, _) => wheel,
|
||||
CompatibleDist::IncompatibleWheel {
|
||||
source_dist,
|
||||
wheel: _,
|
||||
} => source_dist,
|
||||
}
|
||||
}
|
||||
|
||||
    /// Return the [`Yanked`] status of the distribution.
    ///
    /// Per PEP 592, it is possible for files to have different yank statuses, but in the official
    /// PyPI warehouse this cannot happen.
    ///
    /// Here, we treat the distribution as yanked if the file we would install is yanked.
    ///
    /// PEP 592: <https://peps.python.org/pep-0592/#warehouse-pypi-implementation-notes>
|
||||
pub fn yanked(&self) -> &Yanked {
|
||||
&self.for_installation().yanked
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for WheelCompatibility {
|
||||
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
|
||||
match (self, other) {
|
||||
(Self::Compatible(p_self), Self::Compatible(p_other)) => p_self.cmp(p_other),
|
||||
(Self::Incompatible(_), Self::Compatible(_)) => std::cmp::Ordering::Less,
|
||||
(Self::Compatible(_), Self::Incompatible(_)) => std::cmp::Ordering::Greater,
|
||||
(Self::Incompatible(t_self), Self::Incompatible(t_other)) => t_self.cmp(t_other),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for WheelCompatibility {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||
Some(WheelCompatibility::cmp(self, other))
|
||||
}
|
||||
}
|
||||
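// A small sketch of the derived ordering on `IncompatibleWheel`, which `insert_built` uses to keep
// the most relevant incompatible wheel; `WheelCompatibility`'s `Ord` above additionally ranks any
// compatible wheel over any incompatible one. The derived `Ord` follows declaration order
// (`Tag` < `RequiresPython` < `NoBinary`).
fn example() {
    assert!(IncompatibleWheel::NoBinary > IncompatibleWheel::RequiresPython);
}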
|
||||
impl WheelCompatibility {
|
||||
pub fn is_compatible(&self) -> bool {
|
||||
matches!(self, Self::Compatible(_))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<TagCompatibility> for WheelCompatibility {
|
||||
fn from(value: TagCompatibility) -> Self {
|
||||
match value {
|
||||
TagCompatibility::Compatible(priority) => WheelCompatibility::Compatible(priority),
|
||||
TagCompatibility::Incompatible(tag) => {
|
||||
WheelCompatibility::Incompatible(IncompatibleWheel::Tag(tag))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,119 @@
use rustc_hash::FxHashMap;
|
||||
|
||||
use pep508_rs::Requirement;
|
||||
use uv_normalize::PackageName;
|
||||
|
||||
use crate::{BuiltDist, Dist, PathSourceDist, SourceDist};
|
||||
|
||||
/// A set of packages pinned at specific versions.
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct Resolution(FxHashMap<PackageName, Dist>);
|
||||
|
||||
impl Resolution {
|
||||
/// Create a new resolution from the given pinned packages.
|
||||
pub fn new(packages: FxHashMap<PackageName, Dist>) -> Self {
|
||||
Self(packages)
|
||||
}
|
||||
|
||||
/// Return the distribution for the given package name, if it exists.
|
||||
pub fn get(&self, package_name: &PackageName) -> Option<&Dist> {
|
||||
self.0.get(package_name)
|
||||
}
|
||||
|
||||
/// Iterate over the [`PackageName`] entities in this resolution.
|
||||
pub fn packages(&self) -> impl Iterator<Item = &PackageName> {
|
||||
self.0.keys()
|
||||
}
|
||||
|
||||
/// Iterate over the [`Dist`] entities in this resolution.
|
||||
pub fn distributions(&self) -> impl Iterator<Item = &Dist> {
|
||||
self.0.values()
|
||||
}
|
||||
|
||||
/// Iterate over the [`Dist`] entities in this resolution.
|
||||
pub fn into_distributions(self) -> impl Iterator<Item = Dist> {
|
||||
self.0.into_values()
|
||||
}
|
||||
|
||||
/// Return the number of distributions in this resolution.
|
||||
pub fn len(&self) -> usize {
|
||||
self.0.len()
|
||||
}
|
||||
|
||||
/// Return `true` if there are no pinned packages in this resolution.
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.0.is_empty()
|
||||
}
|
||||
|
||||
/// Return the set of [`Requirement`]s that this resolution represents, exclusive of any
|
||||
/// editable requirements.
|
||||
pub fn requirements(&self) -> Vec<Requirement> {
|
||||
let mut requirements = self
|
||||
.0
|
||||
.values()
|
||||
.filter_map(|dist| match dist {
|
||||
Dist::Source(SourceDist::Path(PathSourceDist { editable: true, .. })) => None,
|
||||
dist => Some(Requirement::from(dist.clone())),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
requirements.sort_unstable_by(|a, b| a.name.cmp(&b.name));
|
||||
requirements
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Dist> for Requirement {
|
||||
fn from(dist: Dist) -> Self {
|
||||
match dist {
|
||||
Dist::Built(BuiltDist::Registry(wheel)) => Requirement {
|
||||
name: wheel.filename.name,
|
||||
extras: vec![],
|
||||
version_or_url: Some(pep508_rs::VersionOrUrl::VersionSpecifier(
|
||||
pep440_rs::VersionSpecifiers::from(
|
||||
pep440_rs::VersionSpecifier::equals_version(wheel.filename.version),
|
||||
),
|
||||
)),
|
||||
marker: None,
|
||||
},
|
||||
Dist::Built(BuiltDist::DirectUrl(wheel)) => Requirement {
|
||||
name: wheel.filename.name,
|
||||
extras: vec![],
|
||||
version_or_url: Some(pep508_rs::VersionOrUrl::Url(wheel.url)),
|
||||
marker: None,
|
||||
},
|
||||
Dist::Built(BuiltDist::Path(wheel)) => Requirement {
|
||||
name: wheel.filename.name,
|
||||
extras: vec![],
|
||||
version_or_url: Some(pep508_rs::VersionOrUrl::Url(wheel.url)),
|
||||
marker: None,
|
||||
},
|
||||
Dist::Source(SourceDist::Registry(sdist)) => Requirement {
|
||||
name: sdist.filename.name,
|
||||
extras: vec![],
|
||||
version_or_url: Some(pep508_rs::VersionOrUrl::VersionSpecifier(
|
||||
pep440_rs::VersionSpecifiers::from(
|
||||
pep440_rs::VersionSpecifier::equals_version(sdist.filename.version),
|
||||
),
|
||||
)),
|
||||
marker: None,
|
||||
},
|
||||
Dist::Source(SourceDist::DirectUrl(sdist)) => Requirement {
|
||||
name: sdist.name,
|
||||
extras: vec![],
|
||||
version_or_url: Some(pep508_rs::VersionOrUrl::Url(sdist.url)),
|
||||
marker: None,
|
||||
},
|
||||
Dist::Source(SourceDist::Git(sdist)) => Requirement {
|
||||
name: sdist.name,
|
||||
extras: vec![],
|
||||
version_or_url: Some(pep508_rs::VersionOrUrl::Url(sdist.url)),
|
||||
marker: None,
|
||||
},
|
||||
Dist::Source(SourceDist::Path(sdist)) => Requirement {
|
||||
name: sdist.name,
|
||||
extras: vec![],
|
||||
version_or_url: Some(pep508_rs::VersionOrUrl::Url(sdist.url)),
|
||||
marker: None,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
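Editor's note: a brief usage sketch (not part of the diff) for the `Resolution` type added above, using only the methods shown in this hunk; `Requirement`'s `Display` impl is assumed to render a PEP 508 string:

```rust
// Summarize a resolution: count the pins and print the non-editable requirements.
fn summarize(resolution: &Resolution) {
    if resolution.is_empty() {
        println!("no packages pinned");
        return;
    }
    println!("{} pinned package(s)", resolution.len());
    // `requirements()` returns name-sorted requirements, excluding editable path sources.
    for requirement in resolution.requirements() {
        println!("{requirement}");
    }
}
```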
@@ -1,16 +1,16 @@
use std::borrow::Cow;

use anyhow::Result;

use pep508_rs::VerbatimUrl;
use uv_normalize::PackageName;
use uv_pep508::VerbatimUrl;

use crate::error::Error;
use crate::{
    BuiltDist, CachedDirectUrlDist, CachedDist, CachedRegistryDist, DirectUrlBuiltDist,
    DirectUrlSourceDist, DirectorySourceDist, Dist, DistributionId, GitSourceDist,
    InstalledDirectUrlDist, InstalledDist, InstalledEggInfoDirectory, InstalledEggInfoFile,
    InstalledLegacyEditable, InstalledRegistryDist, InstalledVersion, LocalDist, PackageId,
    PathBuiltDist, PathSourceDist, RegistryBuiltWheel, RegistrySourceDist, ResourceId, SourceDist,
    VersionId, VersionOrUrlRef,
    DirectUrlSourceDist, Dist, DistributionId, GitSourceDist, InstalledDirectUrlDist,
    InstalledDist, InstalledRegistryDist, InstalledVersion, LocalDist, PackageId, PathBuiltDist,
    PathSourceDist, RegistryBuiltDist, RegistrySourceDist, ResourceId, SourceDist, VersionOrUrl,
};

pub trait Name {
@@ -21,34 +21,21 @@ pub trait Name {
/// Metadata that can be resolved from a requirements specification alone (i.e., prior to building
/// or installing the distribution).
pub trait DistributionMetadata: Name {
    /// Return a [`uv_pep440::Version`], for registry-based distributions, or a [`url::Url`],
    /// Return a [`pep440_rs::Version`], for registry-based distributions, or a [`url::Url`],
    /// for URL-based distributions.
    fn version_or_url(&self) -> VersionOrUrlRef<'_>;
    fn version_or_url(&self) -> VersionOrUrl;

    /// Returns a unique identifier for the package at the given version (e.g., `black==23.10.0`).
    /// Returns a unique identifier for the package.
    ///
    /// Note that this is not equivalent to a unique identifier for the _distribution_, as multiple
    /// registry-based distributions (e.g., different wheels for the same package and version)
    /// will return the same version ID, but different distribution IDs.
    fn version_id(&self) -> VersionId {
        match self.version_or_url() {
            VersionOrUrlRef::Version(version) => {
                VersionId::from_registry(self.name().clone(), version.clone())
            }
            VersionOrUrlRef::Url(url) => VersionId::from_url(url),
        }
    }

    /// Returns a unique identifier for a package. A package can either be identified by a name
    /// (e.g., `black`) or a URL (e.g., `git+https://github.com/psf/black`).
    ///
    /// Note that this is not equivalent to a unique identifier for the _distribution_, as multiple
    /// registry-based distributions (e.g., different wheels for the same package and version)
    /// will return the same version ID, but different distribution IDs.
    /// will return the same package ID, but different distribution IDs.
    fn package_id(&self) -> PackageId {
        match self.version_or_url() {
            VersionOrUrlRef::Version(_) => PackageId::from_registry(self.name().clone()),
            VersionOrUrlRef::Url(url) => PackageId::from_url(url),
            VersionOrUrl::Version(version) => {
                PackageId::from_registry(self.name().clone(), version.clone())
            }
            VersionOrUrl::Url(url) => PackageId::from_url(url),
        }
    }
}
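Editor's note: a small sketch (not part of the diff) of how `DistributionMetadata` is consumed; it mirrors the `Display` impls further down, which format a distribution as its name followed by its version or URL:

```rust
// Format a distribution as its name followed by its version or URL.
// Assumes the `DistributionMetadata` trait (and its `Name` supertrait) from this hunk;
// the value returned by `version_or_url()` implements `Display`, as the later impls rely on.
fn display_spec<T: DistributionMetadata>(dist: &T) -> String {
    format!("{}{}", dist.name(), dist.version_or_url())
}
```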
@@ -56,7 +43,7 @@ pub trait DistributionMetadata: Name {
/// Metadata that can be resolved from a built distribution.
pub trait InstalledMetadata: Name {
    /// Return the resolved version of the installed distribution.
    fn installed_version(&self) -> InstalledVersion<'_>;
    fn installed_version(&self) -> InstalledVersion;
}

pub trait RemoteSource {
@@ -70,17 +57,6 @@ pub trait RemoteSource {
pub trait Identifier {
    /// Return a unique resource identifier for the distribution, like a SHA-256 hash of the
    /// distribution's contents.
    ///
    /// A distribution is a specific archive of a package at a specific version. For a given package
    /// version, there may be multiple distributions, e.g., a source distribution, along with
    /// multiple binary distributions (wheels) for different platforms. As a concrete example,
    /// `black-23.10.0-py3-none-any.whl` would represent a (binary) distribution of the `black` package
    /// at version `23.10.0`.
    ///
    /// The distribution ID is used to uniquely identify a distribution. Ideally, the distribution
    /// ID should be a hash of the distribution's contents, though in practice, it's only required
    /// that the ID is unique within a single invocation of the resolver (and so, e.g., a hash of
    /// the URL would also be sufficient).
    fn distribution_id(&self) -> DistributionId;

    /// Return a unique resource identifier for the underlying resource backing the distribution.
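Editor's note: a minimal sketch (not part of the diff) of consuming the `Identifier` trait above; only `distribution_id()` from this hunk is assumed:

```rust
// Collect the distribution IDs for a slice of distributions, e.g., to inspect them
// within a single resolver invocation, as the doc comment above describes.
fn distribution_ids<T: Identifier>(dists: &[T]) -> Vec<DistributionId> {
    dists.iter().map(Identifier::distribution_id).collect()
}
```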
@@ -190,24 +166,6 @@ impl std::fmt::Display for InstalledRegistryDist {
    }
}

impl std::fmt::Display for InstalledEggInfoFile {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}{}", self.name(), self.installed_version())
    }
}

impl std::fmt::Display for InstalledEggInfoDirectory {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}{}", self.name(), self.installed_version())
    }
}

impl std::fmt::Display for InstalledLegacyEditable {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}{}", self.name(), self.installed_version())
    }
}

impl std::fmt::Display for PathBuiltDist {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}{}", self.name(), self.version_or_url())
@@ -220,13 +178,7 @@ impl std::fmt::Display for PathSourceDist {
    }
}

impl std::fmt::Display for DirectorySourceDist {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}{}", self.name(), self.version_or_url())
    }
}

impl std::fmt::Display for RegistryBuiltWheel {
impl std::fmt::Display for RegistryBuiltDist {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}{}", self.name(), self.version_or_url())
    }
@@ -0,0 +1,42 @@
[package]
name = "gourgeist"
version = "0.0.4"
publish = false
description = "virtualenv creation implemented in rust"
keywords = ["virtualenv", "venv", "python"]

edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }

[[bin]]
name = "gourgeist"
required-features = ["cli"]

[lints]
workspace = true

[dependencies]
platform-host = { path = "../platform-host" }
uv-cache = { path = "../uv-cache" }
uv-interpreter = { path = "../uv-interpreter" }

anstream = { workspace = true }
camino = { workspace = true }
clap = { workspace = true, features = ["derive"], optional = true }
directories = { workspace = true }
fs-err = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tempfile = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true, optional = true }
which = { workspace = true }

[features]
cli = ["clap", "tracing-subscriber"]
@@ -0,0 +1,33 @@
# Gourgeist

Gourgeist is a Rust library for creating Python virtual environments. It also has a CLI.

It currently supports only Unix (Linux/macOS); Windows support is missing.

## Rust

```rust
use camino::Utf8PathBuf;
use gourgeist::{create_venv, get_interpreter_info, parse_python_cli};

// `cli` refers to the parsed command-line arguments (path, python, bare).
let location = cli.path.unwrap_or(Utf8PathBuf::from(".venv"));
let python = parse_python_cli(cli.python)?;
let data = get_interpreter_info(&python)?;
create_venv(&location, &python, &data, cli.bare)?;
```

## CLI

Use `python` as the base for a virtualenv at `.venv`:

```bash
gourgeist
```

Or use custom defaults:

```bash
gourgeist -p 3.11 my_env
```

## Jessie's gourgeist

![](https://img.pokemondb.net/artwork/large/gourgeist.jpg)
@@ -0,0 +1,14 @@
#!/usr/bin/env bash

set -e

cd "$(git rev-parse --show-toplevel)"

virtualenv --version

cargo build --profile profiling --bin gourgeist --features cli

hyperfine --warmup 1 --shell none --prepare "rm -rf target/venv-benchmark" \
    "target/profiling/gourgeist -p 3.11 target/venv-benchmark" \
    "virtualenv -p 3.11 --no-seed target/venv-benchmark"
@@ -0,0 +1,27 @@
#!/usr/bin/env bash

set -e

virtualenv_command() {
    virtualenv -p 3.11 compare_venv # --no-pip --no-setuptools --no-wheel
}
rust_command() {
    cargo run -- -p 3.11 compare_venv # --bare
}

rm -rf compare_venv
virtualenv_command
rm compare_venv/.gitignore
git -C compare_venv init
git -C compare_venv add -A
git -C compare_venv commit -q -m "Initial commit"
rm -r compare_venv/* # This skips the hidden .git
mkdir -p target
mv compare_venv target/compare_venv2
rust_command
rm compare_venv/.gitignore
cp -r compare_venv/* target/compare_venv2
rm -r compare_venv
mv target/compare_venv2 compare_venv
git -C compare_venv/ status
@@ -0,0 +1,12 @@
import sys


def main():
    print(sys.executable)
    print(sys.version)
    print(sys.base_prefix)
    print(sys.prefix)


if __name__ == "__main__":
    main()
@@ -0,0 +1,8 @@
{
  "styles": {
    "theme": "axo_light"
  },
  "build": {
    "path_prefix": "gourgeist"
  }
}
@@ -1,5 +1,7 @@
"""Patches that are applied at runtime to the virtual environment."""

from __future__ import annotations

import os
import sys
@@ -9,7 +11,7 @@ VIRTUAL_ENV_PATCH_FILE = os.path.join(__file__)
def patch_dist(dist):
    """
    Distutils allows users to configure some arguments via a configuration file:
    https://docs.python.org/3.11/install/index.html#distutils-configuration-files.
    https://docs.python.org/3/install/index.html#distutils-configuration-files.

    Some of these arguments, though, don't make sense in the context of the virtual environment files, so let's fix them up.
    """  # noqa: D205
@@ -0,0 +1,87 @@
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly


if [ "${BASH_SOURCE-}" = "$0" ]; then
    echo "You must source this script: \$ source $0" >&2
    exit 33
fi

deactivate () {
    unset -f pydoc >/dev/null 2>&1 || true

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH:+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # The hash command must be called to get it to forget past
    # commands. Without forgetting past commands the $PATH changes
    # we made may not be respected
    hash -r 2>/dev/null

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    unset VIRTUAL_ENV_PROMPT
    if [ ! "${1-}" = "nondestructive" ] ; then
        # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV='{{ VIRTUAL_ENV_TEMPLATE_STRING }}'
if ([ "$OSTYPE" = "cygwin" ] || [ "$OSTYPE" = "msys" ]) && $(command -v cygpath &> /dev/null) ; then
    VIRTUAL_ENV=$(cygpath -u "$VIRTUAL_ENV")
fi
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

if [ "x" != x ] ; then
    VIRTUAL_ENV_PROMPT=""
else
    VIRTUAL_ENV_PROMPT=$(basename "$VIRTUAL_ENV")
fi
export VIRTUAL_ENV_PROMPT

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    PS1="(${VIRTUAL_ENV_PROMPT}) ${PS1-}"
    export PS1
fi

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# The hash command must be called to get it to forget past
# commands. Without forgetting past commands the $PATH changes
# we made may not be respected
hash -r 2>/dev/null
@@ -22,22 +22,10 @@
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

if ! [ -z "${SCRIPT_PATH+_}" ] ; then
    _OLD_SCRIPT_PATH="$SCRIPT_PATH"
fi

# Get script path (only used if environment is relocatable).
if [ -n "${BASH_VERSION:+x}" ] ; then
    SCRIPT_PATH="${BASH_SOURCE[0]}"
    if [ "$SCRIPT_PATH" = "$0" ]; then
        # Only bash has a reasonably robust check for source'dness.
        echo "You must source this script: \$ source $0" >&2
        exit 33
    fi
elif [ -n "${ZSH_VERSION:+x}" ] ; then
    SCRIPT_PATH="${(%):-%x}"
elif [ -n "${KSH_VERSION:+x}" ] ; then
    SCRIPT_PATH="${.sh.file}"
if [ "${BASH_SOURCE-}" = "$0" ]; then
    echo "You must source this script: \$ source $0" >&2
    exit 33
fi

deactivate () {
@@ -84,22 +72,12 @@ if ([ "$OSTYPE" = "cygwin" ] || [ "$OSTYPE" = "msys" ]) && $(command -v cygpath
fi
export VIRTUAL_ENV

# Unset the `SCRIPT_PATH` variable, now that the `VIRTUAL_ENV` variable
# has been set. This is important for relocatable environments.
if ! [ -z "${_OLD_SCRIPT_PATH+_}" ] ; then
    SCRIPT_PATH="$_OLD_SCRIPT_PATH"
    export SCRIPT_PATH
    unset _OLD_SCRIPT_PATH
else
    unset SCRIPT_PATH
fi

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/{{ BIN_NAME }}:$PATH"
export PATH

if [ "x{{ VIRTUAL_PROMPT }}" != x ] ; then
    VIRTUAL_ENV_PROMPT="{{ VIRTUAL_PROMPT }}"
if [ "x" != x ] ; then
    VIRTUAL_ENV_PROMPT=""
else
    VIRTUAL_ENV_PROMPT=$(basename "$VIRTUAL_ENV")
fi
@@ -127,4 +105,4 @@ pydoc () {
# The hash command must be called to get it to forget past
# commands. Without forgetting past commands the $PATH changes
# we made may not be respected
hash -r 2>/dev/null || true
hash -r 2>/dev/null
@@ -19,16 +19,9 @@
@REM OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
@REM WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

@REM This file is UTF-8 encoded, so we need to update the current code page while executing it
@for /f "tokens=2 delims=:." %%a in ('"%SystemRoot%\System32\chcp.com"') do @set _OLD_CODEPAGE=%%a
@set "VIRTUAL_ENV={{ VIRTUAL_ENV_DIR }}"

@if defined _OLD_CODEPAGE (
    "%SystemRoot%\System32\chcp.com" 65001 > nul
)

@for %%i in ("{{ VIRTUAL_ENV_DIR }}") do @set "VIRTUAL_ENV=%%~fi"

@set "VIRTUAL_ENV_PROMPT={{ VIRTUAL_PROMPT }}"
@set "VIRTUAL_ENV_PROMPT=venv"
@if NOT DEFINED VIRTUAL_ENV_PROMPT (
    @for %%d in ("%VIRTUAL_ENV%") do @set "VIRTUAL_ENV_PROMPT=%%~nxd"
)
@@ -63,9 +56,4 @@
@set "_OLD_VIRTUAL_PATH=%PATH%"
:ENDIFVPATH2

@set "PATH=%VIRTUAL_ENV%\{{ BIN_NAME }};%PATH%"

@if defined _OLD_CODEPAGE (
    "%SystemRoot%\System32\chcp.com" %_OLD_CODEPAGE% > nul
    @set _OLD_CODEPAGE=
)
@set "PATH=%VIRTUAL_ENV%\{{ BIN_NAME }};%PATH%"
@@ -38,8 +38,8 @@ setenv PATH "$VIRTUAL_ENV:q/{{ BIN_NAME }}:$PATH:q"



if ('{{ VIRTUAL_PROMPT }}' != "") then
    setenv VIRTUAL_ENV_PROMPT '{{ VIRTUAL_PROMPT }}'
if ('' != "") then
    setenv VIRTUAL_ENV_PROMPT ''
else
    setenv VIRTUAL_ENV_PROMPT "$VIRTUAL_ENV:t:q"
endif
@@ -39,7 +39,7 @@ function deactivate -d 'Exit virtualenv mode and return to the normal environmen
    # reset old environment variables
    if test -n "$_OLD_VIRTUAL_PATH"
        # https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling
        if test (string sub -s 1 -l 1 $FISH_VERSION) -lt 3
        if test (echo $FISH_VERSION | head -c 1) -lt 3
            set -gx PATH (_fishify_path "$_OLD_VIRTUAL_PATH")
        else
            set -gx PATH $_OLD_VIRTUAL_PATH
@@ -82,8 +82,8 @@ deactivate nondestructive
set -gx VIRTUAL_ENV '{{ VIRTUAL_ENV_DIR }}'

# https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling
if test (string sub -s 1 -l 1 $FISH_VERSION) -lt 3
    set -gx _OLD_VIRTUAL_PATH (_bashify_path $PATH)
if test (echo $FISH_VERSION | head -c 1) -lt 3
    set -gx _OLD_VIRTUAL_PATH (_bashify_path $PATH)
else
    set -gx _OLD_VIRTUAL_PATH $PATH
end
@@ -91,8 +91,8 @@ set -gx PATH "$VIRTUAL_ENV"'/{{ BIN_NAME }}' $PATH

# Prompt override provided?
# If not, just use the environment name.
if test -n '{{ VIRTUAL_PROMPT }}'
    set -gx VIRTUAL_ENV_PROMPT '{{ VIRTUAL_PROMPT }}'
if test -n ''
    set -gx VIRTUAL_ENV_PROMPT ''
else
    set -gx VIRTUAL_ENV_PROMPT (basename "$VIRTUAL_ENV")
end
@@ -0,0 +1,117 @@
# Copyright (c) 2020-202x The virtualenv developers
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

# virtualenv activation module
# Activate with `overlay use activate.nu`
# Deactivate with `deactivate`, as usual
#
# To customize the overlay name, you can call `overlay use activate.nu as foo`,
# but then simply `deactivate` won't work because it is just an alias to hide
# the "activate" overlay. You'd need to call `overlay hide foo` manually.

export-env {
    def is-string [x] {
        ($x | describe) == 'string'
    }

    def has-env [...names] {
        $names | each {|n|
            $n in $env
        } | all {|i| $i == true}
    }

    # Emulates a `test -z`, but better as it handles e.g. 'false'
    def is-env-true [name: string] {
        if (has-env $name) {
            # Try to parse 'true', '0', '1', and fail if not convertible
            let parsed = (do -i { $env | get $name | into bool })
            if ($parsed | describe) == 'bool' {
                $parsed
            } else {
                not ($env | get -i $name | is-empty)
            }
        } else {
            false
        }
    }

    let virtual_env = '{{ VIRTUAL_ENV_DIR }}'
    let bin = '{{ BIN_NAME }}'

    let is_windows = ($nu.os-info.family) == 'windows'
    let path_name = (if (has-env 'Path') {
            'Path'
        } else {
            'PATH'
        }
    )

    let venv_path = ([$virtual_env $bin] | path join)
    let new_path = ($env | get $path_name | prepend $venv_path)

    # If there is no default prompt, then use the env name instead
    let virtual_env_prompt = (if ('' | is-empty) {
        ($virtual_env | path basename)
    } else {
        ''
    })

    let new_env = {
        $path_name : $new_path
        VIRTUAL_ENV : $virtual_env
        VIRTUAL_ENV_PROMPT : $virtual_env_prompt
    }

    let new_env = (if (is-env-true 'VIRTUAL_ENV_DISABLE_PROMPT') {
        $new_env
    } else {
        # Creating the new prompt for the session
        let virtual_prefix = $'(char lparen)($virtual_env_prompt)(char rparen) '

        # Back up the old prompt builder
        let old_prompt_command = (if (has-env 'PROMPT_COMMAND') {
            $env.PROMPT_COMMAND
        } else {
            ''
        })

        let new_prompt = (if (has-env 'PROMPT_COMMAND') {
            if 'closure' in ($old_prompt_command | describe) {
                {|| $'($virtual_prefix)(do $old_prompt_command)' }
            } else {
                {|| $'($virtual_prefix)($old_prompt_command)' }
            }
        } else {
            {|| $'($virtual_prefix)' }
        })

        $new_env | merge {
            PROMPT_COMMAND : $new_prompt
            VIRTUAL_PREFIX : $virtual_prefix
        }
    })

    # Environment variables that will be loaded as the virtual env
    load-env $new_env
}

export alias pydoc = python -m pydoc
export alias deactivate = overlay hide activate
@@ -58,8 +58,8 @@ deactivate -nondestructive
$VIRTUAL_ENV = $BASE_DIR
$env:VIRTUAL_ENV = $VIRTUAL_ENV

if ("{{ VIRTUAL_PROMPT }}" -ne "") {
    $env:VIRTUAL_ENV_PROMPT = "{{ VIRTUAL_PROMPT }}"
if ("" -ne "") {
    $env:VIRTUAL_ENV_PROMPT = ""
}
else {
    $env:VIRTUAL_ENV_PROMPT = $( Split-Path $env:VIRTUAL_ENV -Leaf )
@@ -67,7 +67,7 @@ else {

New-Variable -Scope global -Name _OLD_VIRTUAL_PATH -Value $env:PATH

$env:PATH = "$env:VIRTUAL_ENV/{{ BIN_NAME }}{{ PATH_SEP }}" + $env:PATH
$env:PATH = "$env:VIRTUAL_ENV/{{ BIN_NAME }};" + $env:PATH
if (!$env:VIRTUAL_ENV_DISABLE_PROMPT) {
    function global:_old_virtual_prompt {
        ""
Some files were not shown because too many files have changed in this diff.