Merge branch 'main' into zb/no-sources-package

This commit is contained in:
Zanie Blue 2025-07-31 07:34:18 -05:00
commit 25c00e104b
158 changed files with 5544 additions and 1731 deletions

View File

@ -82,7 +82,7 @@ jobs:
run: rustup component add rustfmt
- name: "Install uv"
uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: "rustfmt"
run: cargo fmt --all --check
@ -188,7 +188,7 @@ jobs:
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: "Install cargo shear"
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
uses: taiki-e/install-action@a416ddeedbd372e614cc1386e8b642692f66865e # v2.57.1
with:
tool: cargo-shear
- run: cargo shear
@ -213,12 +213,12 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: "Install required Python versions"
run: uv python install
- name: "Install cargo nextest"
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
uses: taiki-e/install-action@a416ddeedbd372e614cc1386e8b642692f66865e # v2.57.1
with:
tool: cargo-nextest
@ -249,12 +249,12 @@ jobs:
- name: "Install Rust toolchain"
run: rustup show
- uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: "Install required Python versions"
run: uv python install
- name: "Install cargo nextest"
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
uses: taiki-e/install-action@a416ddeedbd372e614cc1386e8b642692f66865e # v2.57.1
with:
tool: cargo-nextest
@ -286,7 +286,7 @@ jobs:
run: |
Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.UV_WORKSPACE }}" -Recurse
- uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- name: "Install required Python versions"
run: uv python install
@ -299,7 +299,7 @@ jobs:
run: rustup show
- name: "Install cargo nextest"
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
uses: taiki-e/install-action@a416ddeedbd372e614cc1386e8b642692f66865e # v2.57.1
with:
tool: cargo-nextest
@ -352,7 +352,7 @@ jobs:
rustup component add rust-src --target ${{ matrix.target-arch }}-pc-windows-msvc
- name: "Install cargo-bloat"
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
uses: taiki-e/install-action@a416ddeedbd372e614cc1386e8b642692f66865e # v2.57.1
with:
tool: cargo-bloat
@ -439,7 +439,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
- uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
@ -2523,7 +2523,7 @@ jobs:
run: rustup show
- name: "Install codspeed"
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
uses: taiki-e/install-action@a416ddeedbd372e614cc1386e8b642692f66865e # v2.57.1
with:
tool: cargo-codspeed
@ -2539,7 +2539,7 @@ jobs:
run: cargo codspeed build --profile profiling --features codspeed -p uv-bench
- name: "Run benchmarks"
uses: CodSpeedHQ/action@c28fe9fbe7d57a3da1b7834ae3761c1d8217612d # v3.7.0
uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
with:
run: cargo codspeed run
token: ${{ secrets.CODSPEED_TOKEN }}
@ -2560,7 +2560,7 @@ jobs:
run: rustup show
- name: "Install codspeed"
uses: taiki-e/install-action@c99cc51b309eee71a866715cfa08c922f11cf898 # v2.56.19
uses: taiki-e/install-action@a416ddeedbd372e614cc1386e8b642692f66865e # v2.57.1
with:
tool: cargo-codspeed
@ -2576,7 +2576,7 @@ jobs:
run: cargo codspeed build --profile profiling --features codspeed -p uv-bench
- name: "Run benchmarks"
uses: CodSpeedHQ/action@c28fe9fbe7d57a3da1b7834ae3761c1d8217612d # v3.7.0
uses: CodSpeedHQ/action@0b6e7a3d96c9d2a6057e7bcea6b45aaf2f7ce60b # v3.8.0
with:
run: cargo codspeed run
token: ${{ secrets.CODSPEED_TOKEN }}

View File

@ -22,7 +22,7 @@ jobs:
id-token: write
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
pattern: wheels_uv-*
@ -41,7 +41,7 @@ jobs:
id-token: write
steps:
- name: "Install uv"
uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
- uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
pattern: wheels_uv_build-*

View File

@ -69,7 +69,7 @@ jobs:
# we specify bash to get pipefail; it guards against the `curl` command
# failing. otherwise `sh` won't catch that `curl` returned non-0
shell: bash
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.7-prerelease.1/cargo-dist-installer.sh | sh"
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/astral-sh/cargo-dist/releases/download/v0.28.7-prerelease.2/cargo-dist-installer.sh | sh"
- name: Cache dist
uses: actions/upload-artifact@6027e3dd177782cd8ab9af838c04fd81a07f1d47
with:

View File

@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f # v6.4.1
- uses: astral-sh/setup-uv@e92bafb6253dcd438e0484186d7669ea7a8ca1cc # v6.4.3
with:
version: "latest"
enable-cache: true

View File

@ -42,7 +42,7 @@ repos:
types_or: [yaml, json5]
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.12.4
rev: v0.12.5
hooks:
- id: ruff-format
- id: ruff

View File

@ -3,6 +3,38 @@
<!-- prettier-ignore-start -->
## 0.8.4
### Enhancements
- Improve styling of warning cause chains ([#14934](https://github.com/astral-sh/uv/pull/14934))
- Extend wheel filtering to Android tags ([#14977](https://github.com/astral-sh/uv/pull/14977))
- Perform wheel lockfile filtering based on platform and OS intersection ([#14976](https://github.com/astral-sh/uv/pull/14976))
- Clarify messaging when a new resolution needs to be performed ([#14938](https://github.com/astral-sh/uv/pull/14938))
### Preview features
- Add support for extending package's build dependencies with `extra-build-dependencies` ([#14735](https://github.com/astral-sh/uv/pull/14735))
- Split preview mode into separate feature flags ([#14823](https://github.com/astral-sh/uv/pull/14823))
### Configuration
- Add support for package specific `exclude-newer` dates via `exclude-newer-package` ([#14489](https://github.com/astral-sh/uv/pull/14489))
### Bug fixes
- Avoid invalidating lockfile when path or workspace dependencies define explicit indexes ([#14876](https://github.com/astral-sh/uv/pull/14876))
- Copy entrypoints that have a shebang that differs in `python` vs `python3` ([#14970](https://github.com/astral-sh/uv/pull/14970))
- Fix incorrect file permissions in wheel packages ([#14930](https://github.com/astral-sh/uv/pull/14930))
- Update validation for `environments` and `required-environments` in `uv.toml` ([#14905](https://github.com/astral-sh/uv/pull/14905))
### Documentation
- Show `uv_build` in projects documentation ([#14968](https://github.com/astral-sh/uv/pull/14968))
- Add `UV_` prefix to installer environment variables ([#14964](https://github.com/astral-sh/uv/pull/14964))
- Un-hide `uv` from `--build-backend` options ([#14939](https://github.com/astral-sh/uv/pull/14939))
- Update documentation for preview flags ([#14902](https://github.com/astral-sh/uv/pull/14902))
## 0.8.3
### Python

62
Cargo.lock generated
View File

@ -716,7 +716,7 @@ dependencies = [
"ciborium",
"clap",
"codspeed",
"criterion-plot",
"criterion-plot 0.5.0",
"is-terminal",
"itertools 0.10.5",
"num-traits",
@ -749,7 +749,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c"
dependencies = [
"lazy_static",
"windows-sys 0.59.0",
"windows-sys 0.48.0",
]
[[package]]
@ -843,15 +843,15 @@ dependencies = [
[[package]]
name = "criterion"
version = "0.6.0"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3bf7af66b0989381bd0be551bd7cc91912a655a58c6918420c9527b1fd8b4679"
checksum = "e1c047a62b0cc3e145fa84415a3191f628e980b194c2755aa12300a4e6cbd928"
dependencies = [
"anes",
"cast",
"ciborium",
"clap",
"criterion-plot",
"criterion-plot 0.6.0",
"itertools 0.13.0",
"num-traits",
"oorandom",
@ -873,6 +873,16 @@ dependencies = [
"itertools 0.10.5",
]
[[package]]
name = "criterion-plot"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b1bcc0dc7dfae599d84ad0b1a55f80cde8af3725da8313b528da95ef783e338"
dependencies = [
"cast",
"itertools 0.13.0",
]
[[package]]
name = "crossbeam-deque"
version = "0.8.6"
@ -4153,9 +4163,9 @@ source = "git+https://github.com/astral-sh/tl.git?rev=6e25b2ee2513d75385101a8ff9
[[package]]
name = "tokio"
version = "1.46.1"
version = "1.47.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0cc3a2344dafbe23a245241fe8b09735b521110d30fcefbbd5feb1797ca35d17"
checksum = "43864ed400b6043a4757a25c7a64a8efde741aed79a056a2fb348a406701bb35"
dependencies = [
"backtrace",
"bytes",
@ -4166,9 +4176,9 @@ dependencies = [
"pin-project-lite",
"signal-hook-registry",
"slab",
"socket2 0.5.10",
"socket2 0.6.0",
"tokio-macros",
"windows-sys 0.52.0",
"windows-sys 0.59.0",
]
[[package]]
@ -4645,7 +4655,7 @@ dependencies = [
[[package]]
name = "uv"
version = "0.8.3"
version = "0.8.4"
dependencies = [
"anstream",
"anyhow",
@ -4722,6 +4732,7 @@ dependencies = [
"uv-pep440",
"uv-pep508",
"uv-performance-memory-allocator",
"uv-platform",
"uv-platform-tags",
"uv-publish",
"uv-pypi-types",
@ -4811,7 +4822,7 @@ dependencies = [
[[package]]
name = "uv-build"
version = "0.8.3"
version = "0.8.4"
dependencies = [
"anyhow",
"uv-build-backend",
@ -5505,6 +5516,7 @@ dependencies = [
"tracing",
"unicode-width 0.2.1",
"unscanny",
"uv-cache-key",
"version-ranges",
]
@ -5528,6 +5540,7 @@ dependencies = [
"tracing-test",
"unicode-width 0.2.1",
"url",
"uv-cache-key",
"uv-fs",
"uv-normalize",
"uv-pep440",
@ -5543,6 +5556,23 @@ dependencies = [
"tikv-jemallocator",
]
[[package]]
name = "uv-platform"
version = "0.0.1"
dependencies = [
"fs-err",
"goblin",
"indoc",
"procfs",
"regex",
"target-lexicon",
"thiserror 2.0.12",
"tracing",
"uv-fs",
"uv-platform-tags",
"uv-static",
]
[[package]]
name = "uv-platform-tags"
version = "0.0.1"
@ -5616,6 +5646,7 @@ dependencies = [
"toml_edit",
"tracing",
"url",
"uv-cache-key",
"uv-distribution-filename",
"uv-git-types",
"uv-normalize",
@ -5636,14 +5667,12 @@ dependencies = [
"dunce",
"fs-err",
"futures",
"goblin",
"indexmap",
"indoc",
"insta",
"itertools 0.14.0",
"once_cell",
"owo-colors",
"procfs",
"ref-cast",
"regex",
"reqwest",
@ -5677,6 +5706,7 @@ dependencies = [
"uv-install-wheel",
"uv-pep440",
"uv-pep508",
"uv-platform",
"uv-platform-tags",
"uv-pypi-types",
"uv-redacted",
@ -5837,6 +5867,8 @@ dependencies = [
"thiserror 2.0.12",
"toml",
"url",
"uv-configuration",
"uv-distribution-types",
"uv-pep440",
"uv-pep508",
"uv-pypi-types",
@ -6007,7 +6039,7 @@ dependencies = [
[[package]]
name = "uv-version"
version = "0.8.3"
version = "0.8.4"
[[package]]
name = "uv-virtualenv"
@ -6332,7 +6364,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
"windows-sys 0.59.0",
"windows-sys 0.48.0",
]
[[package]]

View File

@ -49,6 +49,7 @@ uv-once-map = { path = "crates/uv-once-map" }
uv-options-metadata = { path = "crates/uv-options-metadata" }
uv-pep440 = { path = "crates/uv-pep440", features = ["tracing", "rkyv", "version-ranges"] }
uv-pep508 = { path = "crates/uv-pep508", features = ["non-pep508-extensions"] }
uv-platform = { path = "crates/uv-platform" }
uv-platform-tags = { path = "crates/uv-platform-tags" }
uv-publish = { path = "crates/uv-publish" }
uv-pypi-types = { path = "crates/uv-pypi-types" }

View File

@ -43,7 +43,7 @@ uv-workspace = { workspace = true }
anyhow = { workspace = true }
codspeed-criterion-compat = { version = "3.0.2", default-features = false, optional = true }
criterion = { version = "0.6.0", default-features = false, features = [
criterion = { version = "0.7.0", default-features = false, features = [
"async_tokio",
] }
jiff = { workspace = true }

View File

@ -99,8 +99,8 @@ mod resolver {
use uv_pypi_types::{Conflicts, ResolverMarkerEnvironment};
use uv_python::Interpreter;
use uv_resolver::{
FlatIndex, InMemoryIndex, Manifest, OptionsBuilder, PythonRequirement, Resolver,
ResolverEnvironment, ResolverOutput,
ExcludeNewer, FlatIndex, InMemoryIndex, Manifest, OptionsBuilder, PythonRequirement,
Resolver, ResolverEnvironment, ResolverOutput,
};
use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy};
use uv_workspace::WorkspaceCache;
@ -141,11 +141,12 @@ mod resolver {
universal: bool,
) -> Result<ResolverOutput> {
let build_isolation = BuildIsolation::default();
let extra_build_requires = uv_distribution::ExtraBuildRequires::default();
let build_options = BuildOptions::default();
let concurrency = Concurrency::default();
let config_settings = ConfigSettings::default();
let config_settings_package = PackageConfigSettings::default();
let exclude_newer = Some(
let exclude_newer = ExcludeNewer::global(
jiff::civil::date(2024, 9, 1)
.to_zoned(jiff::tz::TimeZone::UTC)
.unwrap()
@ -187,6 +188,7 @@ mod resolver {
&config_settings,
&config_settings_package,
build_isolation,
&extra_build_requires,
LinkMode::default(),
&build_options,
&hashes,

View File

@ -622,7 +622,7 @@ mod tests {
// Check that the wheel is reproducible across platforms.
assert_snapshot!(
format!("{:x}", sha2::Sha256::digest(fs_err::read(&wheel_path).unwrap())),
@"ac3f68ac448023bca26de689d80401bff57f764396ae802bf4666234740ffbe3"
@"342bf60c8406144f459358cde92408686c1631fe22389d042ce80379e589d6ec"
);
assert_snapshot!(build.wheel_contents.join("\n"), @r"
built_by_uv-0.1.0.data/data/

View File

@ -621,8 +621,8 @@ impl ZipDirectoryWriter {
path: &str,
executable_bit: bool,
) -> Result<Box<dyn Write + 'slf>, Error> {
// 644 is the default of the zip crate.
let permissions = if executable_bit { 775 } else { 664 };
// Set file permissions: 644 (rw-r--r--) for regular files, 755 (rwxr-xr-x) for executables
let permissions = if executable_bit { 0o755 } else { 0o644 };
let options = zip::write::SimpleFileOptions::default()
.unix_permissions(permissions)
.compression_method(self.compression);
@ -634,7 +634,10 @@ impl ZipDirectoryWriter {
impl DirectoryWriter for ZipDirectoryWriter {
fn write_bytes(&mut self, path: &str, bytes: &[u8]) -> Result<(), Error> {
trace!("Adding {}", path);
let options = zip::write::SimpleFileOptions::default().compression_method(self.compression);
// Set appropriate permissions for metadata files (644 = rw-r--r--)
let options = zip::write::SimpleFileOptions::default()
.unix_permissions(0o644)
.compression_method(self.compression);
self.writer.start_file(path, options)?;
self.writer.write_all(bytes)?;

View File

@ -4,6 +4,7 @@
mod error;
use std::borrow::Cow;
use std::ffi::OsString;
use std::fmt::Formatter;
use std::fmt::Write;
@ -42,6 +43,7 @@ use uv_static::EnvVars;
use uv_types::{AnyErrorBuild, BuildContext, BuildIsolation, BuildStack, SourceBuildTrait};
use uv_warnings::warn_user_once;
use uv_workspace::WorkspaceCache;
use uv_workspace::pyproject::ExtraBuildDependencies;
pub use crate::error::{Error, MissingHeaderCause};
@ -281,6 +283,7 @@ impl SourceBuild {
workspace_cache: &WorkspaceCache,
config_settings: ConfigSettings,
build_isolation: BuildIsolation<'_>,
extra_build_dependencies: &ExtraBuildDependencies,
build_stack: &BuildStack,
build_kind: BuildKind,
mut environment_variables: FxHashMap<OsString, OsString>,
@ -297,7 +300,6 @@ impl SourceBuild {
};
let default_backend: Pep517Backend = DEFAULT_BACKEND.clone();
// Check if we have a PEP 517 build backend.
let (pep517_backend, project) = Self::extract_pep517_backend(
&source_tree,
@ -322,6 +324,14 @@ impl SourceBuild {
.or(fallback_package_version)
.cloned();
let extra_build_dependencies: Vec<Requirement> = package_name
.as_ref()
.and_then(|name| extra_build_dependencies.get(name).cloned())
.unwrap_or_default()
.into_iter()
.map(Requirement::from)
.collect();
// Create a virtual environment, or install into the shared environment if requested.
let venv = if let Some(venv) = build_isolation.shared_environment(package_name.as_ref()) {
venv.clone()
@ -344,11 +354,18 @@ impl SourceBuild {
if build_isolation.is_isolated(package_name.as_ref()) {
debug!("Resolving build requirements");
let dependency_sources = if extra_build_dependencies.is_empty() {
"`build-system.requires`"
} else {
"`build-system.requires` and `extra-build-dependencies`"
};
let resolved_requirements = Self::get_resolved_requirements(
build_context,
source_build_context,
&default_backend,
&pep517_backend,
extra_build_dependencies,
build_stack,
)
.await?;
@ -356,7 +373,7 @@ impl SourceBuild {
build_context
.install(&resolved_requirements, &venv, build_stack)
.await
.map_err(|err| Error::RequirementsInstall("`build-system.requires`", err.into()))?;
.map_err(|err| Error::RequirementsInstall(dependency_sources, err.into()))?;
} else {
debug!("Proceeding without build isolation");
}
@ -471,10 +488,13 @@ impl SourceBuild {
source_build_context: SourceBuildContext,
default_backend: &Pep517Backend,
pep517_backend: &Pep517Backend,
extra_build_dependencies: Vec<Requirement>,
build_stack: &BuildStack,
) -> Result<Resolution, Error> {
Ok(
if pep517_backend.requirements == default_backend.requirements {
if pep517_backend.requirements == default_backend.requirements
&& extra_build_dependencies.is_empty()
{
let mut resolution = source_build_context.default_resolution.lock().await;
if let Some(resolved_requirements) = &*resolution {
resolved_requirements.clone()
@ -489,12 +509,25 @@ impl SourceBuild {
resolved_requirements
}
} else {
let (requirements, dependency_sources) = if extra_build_dependencies.is_empty() {
(
Cow::Borrowed(&pep517_backend.requirements),
"`build-system.requires`",
)
} else {
// If there are extra build dependencies, we need to resolve them together with
// the backend requirements.
let mut requirements = pep517_backend.requirements.clone();
requirements.extend(extra_build_dependencies);
(
Cow::Owned(requirements),
"`build-system.requires` and `extra-build-dependencies`",
)
};
build_context
.resolve(&pep517_backend.requirements, build_stack)
.resolve(&requirements, build_stack)
.await
.map_err(|err| {
Error::RequirementsResolve("`build-system.requires`", err.into())
})?
.map_err(|err| Error::RequirementsResolve(dependency_sources, err.into()))?
},
)
}
@ -597,6 +630,7 @@ impl SourceBuild {
);
}
}
default_backend.clone()
};
Ok((backend, pyproject_toml.project))

View File

@ -1,6 +1,6 @@
[package]
name = "uv-build"
version = "0.8.3"
version = "0.8.4"
edition.workspace = true
rust-version.workspace = true
homepage.workspace = true

View File

@ -1,6 +1,6 @@
[project]
name = "uv-build"
version = "0.8.3"
version = "0.8.4"
description = "The uv build backend"
authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
requires-python = ">=3.8"

View File

@ -985,6 +985,8 @@ pub enum CacheBucket {
Builds,
/// Reusable virtual environments used to invoke Python tools.
Environments,
/// Cached Python downloads
Python,
}
impl CacheBucket {
@ -1007,6 +1009,7 @@ impl CacheBucket {
Self::Archive => "archive-v0",
Self::Builds => "builds-v0",
Self::Environments => "environments-v2",
Self::Python => "python-v0",
}
}
@ -1108,7 +1111,12 @@ impl CacheBucket {
let root = cache.bucket(self);
summary += rm_rf(root)?;
}
Self::Git | Self::Interpreter | Self::Archive | Self::Builds | Self::Environments => {
Self::Git
| Self::Interpreter
| Self::Archive
| Self::Builds
| Self::Environments
| Self::Python => {
// Nothing to do.
}
}

View File

@ -20,7 +20,10 @@ use uv_pep508::{MarkerTree, Requirement};
use uv_pypi_types::VerbatimParsedUrl;
use uv_python::{PythonDownloads, PythonPreference, PythonVersion};
use uv_redacted::DisplaySafeUrl;
use uv_resolver::{AnnotationStyle, ExcludeNewer, ForkStrategy, PrereleaseMode, ResolutionMode};
use uv_resolver::{
AnnotationStyle, ExcludeNewerPackageEntry, ExcludeNewerTimestamp, ForkStrategy, PrereleaseMode,
ResolutionMode,
};
use uv_static::EnvVars;
use uv_torch::TorchMode;
use uv_workspace::pyproject_mut::AddBoundsKind;
@ -2749,7 +2752,16 @@ pub struct VenvArgs {
/// Accepts both RFC 3339 timestamps (e.g., `2006-12-02T02:07:43Z`) and local dates in the same
/// format (e.g., `2006-12-02`) in your system's configured time zone.
#[arg(long, env = EnvVars::UV_EXCLUDE_NEWER)]
pub exclude_newer: Option<ExcludeNewer>,
pub exclude_newer: Option<ExcludeNewerTimestamp>,
/// Limit candidate packages for a specific package to those that were uploaded prior to the given date.
///
/// Accepts package-date pairs in the format `PACKAGE=DATE`, where `DATE` is an RFC 3339 timestamp
/// (e.g., `2006-12-02T02:07:43Z`) or local date (e.g., `2006-12-02`) in your system's configured time zone.
///
/// Can be provided multiple times for different packages.
#[arg(long)]
pub exclude_newer_package: Option<Vec<ExcludeNewerPackageEntry>>,
/// The method to use when installing packages from the global cache.
///
@ -4478,6 +4490,10 @@ pub struct ToolInstallArgs {
#[arg(long)]
pub with_editable: Vec<comma::CommaSeparatedRequirements>,
/// Install executables from the following packages.
#[arg(long)]
pub with_executables_from: Vec<comma::CommaSeparatedRequirements>,
/// Constrain versions using the given requirements files.
///
/// Constraints files are `requirements.txt`-like files that only control the _version_ of a
@ -4777,7 +4793,16 @@ pub struct ToolUpgradeArgs {
/// Accepts both RFC 3339 timestamps (e.g., `2006-12-02T02:07:43Z`) and local dates in the same
/// format (e.g., `2006-12-02`) in your system's configured time zone.
#[arg(long, env = EnvVars::UV_EXCLUDE_NEWER, help_heading = "Resolver options")]
pub exclude_newer: Option<ExcludeNewer>,
pub exclude_newer: Option<ExcludeNewerTimestamp>,
/// Limit candidate packages for specific packages to those that were uploaded prior to the given date.
///
/// Accepts package-date pairs in the format `PACKAGE=DATE`, where `DATE` is an RFC 3339 timestamp
/// (e.g., `2006-12-02T02:07:43Z`) or local date (e.g., `2006-12-02`) in your system's configured time zone.
///
/// Can be provided multiple times for different packages.
#[arg(long, help_heading = "Resolver options")]
pub exclude_newer_package: Option<Vec<ExcludeNewerPackageEntry>>,
/// The method to use when installing packages from the global cache.
///
@ -5576,7 +5601,16 @@ pub struct InstallerArgs {
/// Accepts both RFC 3339 timestamps (e.g., `2006-12-02T02:07:43Z`) and local dates in the same
/// format (e.g., `2006-12-02`) in your system's configured time zone.
#[arg(long, env = EnvVars::UV_EXCLUDE_NEWER, help_heading = "Resolver options")]
pub exclude_newer: Option<ExcludeNewer>,
pub exclude_newer: Option<ExcludeNewerTimestamp>,
/// Limit candidate packages for specific packages to those that were uploaded prior to the given date.
///
/// Accepts package-date pairs in the format `PACKAGE=DATE`, where `DATE` is an RFC 3339 timestamp
/// (e.g., `2006-12-02T02:07:43Z`) or local date (e.g., `2006-12-02`) in your system's configured time zone.
///
/// Can be provided multiple times for different packages.
#[arg(long, help_heading = "Resolver options")]
pub exclude_newer_package: Option<Vec<ExcludeNewerPackageEntry>>,
/// The method to use when installing packages from the global cache.
///
@ -5781,7 +5815,16 @@ pub struct ResolverArgs {
/// Accepts both RFC 3339 timestamps (e.g., `2006-12-02T02:07:43Z`) and local dates in the same
/// format (e.g., `2006-12-02`) in your system's configured time zone.
#[arg(long, env = EnvVars::UV_EXCLUDE_NEWER, help_heading = "Resolver options")]
pub exclude_newer: Option<ExcludeNewer>,
pub exclude_newer: Option<ExcludeNewerTimestamp>,
/// Limit candidate packages for a specific package to those that were uploaded prior to the given date.
///
/// Accepts package-date pairs in the format `PACKAGE=DATE`, where `DATE` is an RFC 3339 timestamp
/// (e.g., `2006-12-02T02:07:43Z`) or local date (e.g., `2006-12-02`) in your system's configured time zone.
///
/// Can be provided multiple times for different packages.
#[arg(long, help_heading = "Resolver options")]
pub exclude_newer_package: Option<Vec<ExcludeNewerPackageEntry>>,
/// The method to use when installing packages from the global cache.
///
@ -5982,7 +6025,16 @@ pub struct ResolverInstallerArgs {
/// Accepts both RFC 3339 timestamps (e.g., `2006-12-02T02:07:43Z`) and local dates in the same
/// format (e.g., `2006-12-02`) in your system's configured time zone.
#[arg(long, env = EnvVars::UV_EXCLUDE_NEWER, help_heading = "Resolver options")]
pub exclude_newer: Option<ExcludeNewer>,
pub exclude_newer: Option<ExcludeNewerTimestamp>,
/// Limit candidate packages for specific packages to those that were uploaded prior to the given date.
///
/// Accepts package-date pairs in the format `PACKAGE=DATE`, where `DATE` is an RFC 3339 timestamp
/// (e.g., `2006-12-02T02:07:43Z`) or local date (e.g., `2006-12-02`) in your system's configured time zone.
///
/// Can be provided multiple times for different packages.
#[arg(long, help_heading = "Resolver options")]
pub exclude_newer_package: Option<Vec<ExcludeNewerPackageEntry>>,
/// The method to use when installing packages from the global cache.
///
@ -6075,7 +6127,7 @@ pub struct FetchArgs {
/// Accepts both RFC 3339 timestamps (e.g., `2006-12-02T02:07:43Z`) and local dates in the same
/// format (e.g., `2006-12-02`) in your system's configured time zone.
#[arg(long, env = EnvVars::UV_EXCLUDE_NEWER, help_heading = "Resolver options")]
pub exclude_newer: Option<ExcludeNewer>,
pub exclude_newer: Option<ExcludeNewerTimestamp>,
}
#[derive(Args)]

View File

@ -2,7 +2,7 @@ use anstream::eprintln;
use uv_cache::Refresh;
use uv_configuration::{ConfigSettings, PackageConfigSettings};
use uv_resolver::PrereleaseMode;
use uv_resolver::{ExcludeNewer, ExcludeNewerPackage, PrereleaseMode};
use uv_settings::{Combine, PipOptions, ResolverInstallerOptions, ResolverOptions};
use uv_warnings::owo_colors::OwoColorize;
@ -70,6 +70,7 @@ impl From<ResolverArgs> for PipOptions {
link_mode,
no_sources,
no_sources_package,
exclude_newer_package,
} = args;
Self {
@ -94,6 +95,7 @@ impl From<ResolverArgs> for PipOptions {
no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
no_build_isolation_package: Some(no_build_isolation_package),
exclude_newer,
exclude_newer_package: exclude_newer_package.map(ExcludeNewerPackage::from_iter),
link_mode,
no_sources: if no_sources { Some(true) } else { None },
no_sources_package: Some(no_sources_package),
@ -121,6 +123,7 @@ impl From<InstallerArgs> for PipOptions {
no_compile_bytecode,
no_sources,
no_sources_package,
exclude_newer_package,
} = args;
Self {
@ -137,6 +140,7 @@ impl From<InstallerArgs> for PipOptions {
}),
no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
exclude_newer,
exclude_newer_package: exclude_newer_package.map(ExcludeNewerPackage::from_iter),
link_mode,
compile_bytecode: flag(compile_bytecode, no_compile_bytecode, "compile-bytecode"),
no_sources: if no_sources { Some(true) } else { None },
@ -173,6 +177,7 @@ impl From<ResolverInstallerArgs> for PipOptions {
no_compile_bytecode,
no_sources,
no_sources_package,
exclude_newer_package,
} = args;
Self {
@ -199,6 +204,7 @@ impl From<ResolverInstallerArgs> for PipOptions {
no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
no_build_isolation_package: Some(no_build_isolation_package),
exclude_newer,
exclude_newer_package: exclude_newer_package.map(ExcludeNewerPackage::from_iter),
link_mode,
compile_bytecode: flag(compile_bytecode, no_compile_bytecode, "compile-bytecode"),
no_sources: if no_sources { Some(true) } else { None },
@ -292,6 +298,7 @@ pub fn resolver_options(
link_mode,
no_sources,
no_sources_package,
exclude_newer_package,
} = resolver_args;
let BuildOptionsArgs {
@ -354,7 +361,11 @@ pub fn resolver_options(
}),
no_build_isolation: flag(no_build_isolation, build_isolation, "build-isolation"),
no_build_isolation_package: Some(no_build_isolation_package),
exclude_newer,
extra_build_dependencies: None,
exclude_newer: ExcludeNewer::from_args(
exclude_newer,
exclude_newer_package.unwrap_or_default(),
),
link_mode,
no_build: flag(no_build, build, "build"),
no_build_package: Some(no_build_package),
@ -390,6 +401,7 @@ pub fn resolver_installer_options(
no_build_isolation_package,
build_isolation,
exclude_newer,
exclude_newer_package,
link_mode,
compile_bytecode,
no_compile_bytecode,
@ -473,7 +485,9 @@ pub fn resolver_installer_options(
} else {
Some(no_build_isolation_package)
},
extra_build_dependencies: None,
exclude_newer,
exclude_newer_package: exclude_newer_package.map(ExcludeNewerPackage::from_iter),
link_mode,
compile_bytecode: flag(compile_bytecode, no_compile_bytecode, "compile-bytecode"),
no_build: flag(no_build, build, "build"),

View File

@ -14,6 +14,7 @@ bitflags::bitflags! {
const JSON_OUTPUT = 1 << 2;
const PYLOCK = 1 << 3;
const ADD_BOUNDS = 1 << 4;
const EXTRA_BUILD_DEPENDENCIES = 1 << 5;
}
}
@ -28,6 +29,7 @@ impl PreviewFeatures {
Self::JSON_OUTPUT => "json-output",
Self::PYLOCK => "pylock",
Self::ADD_BOUNDS => "add-bounds",
Self::EXTRA_BUILD_DEPENDENCIES => "extra-build-dependencies",
_ => panic!("`flag_as_str` can only be used for exactly one feature flag"),
}
}
@ -70,6 +72,7 @@ impl FromStr for PreviewFeatures {
"json-output" => Self::JSON_OUTPUT,
"pylock" => Self::PYLOCK,
"add-bounds" => Self::ADD_BOUNDS,
"extra-build-dependencies" => Self::EXTRA_BUILD_DEPENDENCIES,
_ => {
warn_user_once!("Unknown preview feature: `{part}`");
continue;
@ -232,6 +235,10 @@ mod tests {
assert_eq!(PreviewFeatures::JSON_OUTPUT.flag_as_str(), "json-output");
assert_eq!(PreviewFeatures::PYLOCK.flag_as_str(), "pylock");
assert_eq!(PreviewFeatures::ADD_BOUNDS.flag_as_str(), "add-bounds");
assert_eq!(
PreviewFeatures::EXTRA_BUILD_DEPENDENCIES.flag_as_str(),
"extra-build-dependencies"
);
}
#[test]

View File

@ -4,11 +4,7 @@
#[cfg_attr(feature = "clap", derive(clap::ValueEnum))]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub enum ProjectBuildBackend {
#[cfg_attr(
feature = "clap",
value(alias = "uv-build", alias = "uv_build", hide = true)
)]
#[cfg_attr(feature = "schemars", schemars(skip))]
#[cfg_attr(feature = "clap", value(alias = "uv-build", alias = "uv_build"))]
/// Use uv as the project build backend.
Uv,
#[serde(alias = "hatchling")]

View File

@ -22,6 +22,7 @@ use uv_configuration::{
};
use uv_configuration::{BuildOutput, Concurrency};
use uv_distribution::DistributionDatabase;
use uv_distribution::ExtraBuildRequires;
use uv_distribution_filename::DistFilename;
use uv_distribution_types::{
CachedDist, DependencyMetadata, Identifier, IndexCapabilities, IndexLocations,
@ -88,12 +89,13 @@ pub struct BuildDispatch<'a> {
shared_state: SharedState,
dependency_metadata: &'a DependencyMetadata,
build_isolation: BuildIsolation<'a>,
extra_build_requires: &'a ExtraBuildRequires,
link_mode: uv_install_wheel::LinkMode,
build_options: &'a BuildOptions,
config_settings: &'a ConfigSettings,
config_settings_package: &'a PackageConfigSettings,
hasher: &'a HashStrategy,
exclude_newer: Option<ExcludeNewer>,
exclude_newer: ExcludeNewer,
source_build_context: SourceBuildContext,
build_extra_env_vars: FxHashMap<OsString, OsString>,
sources: NoSources,
@ -116,10 +118,11 @@ impl<'a> BuildDispatch<'a> {
config_settings: &'a ConfigSettings,
config_settings_package: &'a PackageConfigSettings,
build_isolation: BuildIsolation<'a>,
extra_build_requires: &'a ExtraBuildRequires,
link_mode: uv_install_wheel::LinkMode,
build_options: &'a BuildOptions,
hasher: &'a HashStrategy,
exclude_newer: Option<ExcludeNewer>,
exclude_newer: ExcludeNewer,
sources: NoSources,
workspace_cache: WorkspaceCache,
concurrency: Concurrency,
@ -138,6 +141,7 @@ impl<'a> BuildDispatch<'a> {
config_settings,
config_settings_package,
build_isolation,
extra_build_requires,
link_mode,
build_options,
hasher,
@ -219,6 +223,10 @@ impl BuildContext for BuildDispatch<'_> {
&self.workspace_cache
}
fn extra_build_dependencies(&self) -> &uv_workspace::pyproject::ExtraBuildDependencies {
&self.extra_build_requires.extra_build_dependencies
}
async fn resolve<'data>(
&'data self,
requirements: &'data [Requirement],
@ -231,7 +239,7 @@ impl BuildContext for BuildDispatch<'_> {
let resolver = Resolver::new(
Manifest::simple(requirements.to_vec()).with_constraints(self.constraints.clone()),
OptionsBuilder::new()
.exclude_newer(self.exclude_newer)
.exclude_newer(self.exclude_newer.clone())
.index_strategy(self.index_strategy)
.build_options(self.build_options.clone())
.flexibility(Flexibility::Fixed)
@ -452,6 +460,7 @@ impl BuildContext for BuildDispatch<'_> {
self.workspace_cache(),
config_settings,
self.build_isolation,
&self.extra_build_requires.extra_build_dependencies,
&build_stack,
build_kind,
self.build_extra_env_vars.clone(),

View File

@ -408,9 +408,6 @@ impl<'a> IndexLocations {
} else {
let mut indexes = vec![];
// TODO(charlie): By only yielding the first default URL, we'll drop credentials if,
// e.g., an authenticated default URL is provided in a configuration file, but an
// unauthenticated default URL is present in the receipt.
let mut seen = FxHashSet::default();
let mut default = false;
for index in {

View File

@ -3,8 +3,8 @@ pub use download::LocalWheel;
pub use error::Error;
pub use index::{BuiltWheelIndex, RegistryWheelIndex};
pub use metadata::{
ArchiveMetadata, BuildRequires, FlatRequiresDist, LoweredRequirement, LoweringError, Metadata,
MetadataError, RequiresDist, SourcedDependencyGroups,
ArchiveMetadata, BuildRequires, ExtraBuildRequires, FlatRequiresDist, LoweredRequirement,
LoweringError, Metadata, MetadataError, RequiresDist, SourcedDependencyGroups,
};
pub use reporter::Reporter;
pub use source::prune;

View File

@ -4,7 +4,8 @@ use std::path::Path;
use uv_configuration::NoSources;
use uv_distribution_types::{IndexLocations, Requirement};
use uv_normalize::PackageName;
use uv_workspace::pyproject::ToolUvSources;
use uv_pypi_types::VerbatimParsedUrl;
use uv_workspace::pyproject::{ExtraBuildDependencies, ToolUvSources};
use uv_workspace::{
DiscoveryOptions, MemberDiscovery, ProjectWorkspace, Workspace, WorkspaceCache,
};
@ -213,3 +214,93 @@ impl BuildRequires {
})
}
}
/// Lowered extra build dependencies with source resolution applied.
///
/// Wraps the per-package map of extra build-time requirements; when produced via
/// [`ExtraBuildRequires::from_workspace`] the requirements have already been
/// lowered against the workspace's `[tool.uv.sources]`/`[tool.uv.index]` tables.
#[derive(Debug, Clone, Default)]
pub struct ExtraBuildRequires {
    // Map from package name to its (lowered) extra build dependencies.
    pub extra_build_dependencies: ExtraBuildDependencies,
}
impl ExtraBuildRequires {
    /// Lower extra build dependencies from a workspace, applying source resolution.
    ///
    /// When sources are enabled (`NoSources::None`), each requirement is lowered
    /// against the workspace's `[tool.uv.sources]` and `[tool.uv.index]` tables;
    /// when sources are disabled, the requirements are returned unchanged.
    ///
    /// # Errors
    ///
    /// Returns [`MetadataError::LoweringError`] if any requirement fails to lower.
    pub fn from_workspace(
        extra_build_dependencies: ExtraBuildDependencies,
        workspace: &Workspace,
        index_locations: &IndexLocations,
        source_strategy: &NoSources,
    ) -> Result<Self, MetadataError> {
        match source_strategy {
            NoSources::None => {
                // Collect project sources and indexes
                let project_indexes = workspace
                    .pyproject_toml()
                    .tool
                    .as_ref()
                    .and_then(|tool| tool.uv.as_ref())
                    .and_then(|uv| uv.index.as_deref())
                    .unwrap_or(&[]);

                let empty_sources = BTreeMap::default();
                let project_sources = workspace
                    .pyproject_toml()
                    .tool
                    .as_ref()
                    .and_then(|tool| tool.uv.as_ref())
                    .and_then(|uv| uv.sources.as_ref())
                    .map(ToolUvSources::inner)
                    .unwrap_or(&empty_sources);

                // Lower each package's extra build dependencies
                let mut result = ExtraBuildDependencies::default();
                for (package_name, requirements) in extra_build_dependencies {
                    let lowered: Vec<uv_pep508::Requirement<VerbatimParsedUrl>> = requirements
                        .into_iter()
                        .flat_map(|requirement| {
                            // Keep the name around for error reporting; lowering
                            // consumes the requirement itself.
                            let requirement_name = requirement.name.clone();
                            let extra = requirement.marker.top_level_extra_name();
                            let group = None;
                            LoweredRequirement::from_requirement(
                                requirement,
                                None,
                                workspace.install_path(),
                                project_sources,
                                project_indexes,
                                extra.as_deref(),
                                group,
                                index_locations,
                                workspace,
                                None,
                            )
                            .map(
                                // One requirement may lower to several; attach the
                                // originating requirement's name to any failure.
                                move |requirement| match requirement {
                                    Ok(requirement) => Ok(requirement.into_inner().into()),
                                    Err(err) => Err(MetadataError::LoweringError(
                                        requirement_name.clone(),
                                        Box::new(err),
                                    )),
                                },
                            )
                        })
                        .collect::<Result<Vec<_>, _>>()?;
                    result.insert(package_name, lowered);
                }
                Ok(Self {
                    extra_build_dependencies: result,
                })
            }
            NoSources::All | NoSources::Packages(_) => {
                // Without source resolution, just return the dependencies as-is
                // NOTE(review): `Packages(_)` disables lowering for every package,
                // not only the listed ones — confirm that is intended.
                Ok(Self {
                    extra_build_dependencies,
                })
            }
        }
    }

    /// Create from pre-lowered dependencies (for non-workspace contexts).
    pub fn from_lowered(extra_build_dependencies: ExtraBuildDependencies) -> Self {
        Self {
            extra_build_dependencies,
        }
    }
}

View File

@ -11,7 +11,7 @@ use uv_pypi_types::{HashDigests, ResolutionMetadata};
use uv_workspace::dependency_groups::DependencyGroupError;
use uv_workspace::{WorkspaceCache, WorkspaceError};
pub use crate::metadata::build_requires::BuildRequires;
pub use crate::metadata::build_requires::{BuildRequires, ExtraBuildRequires};
pub use crate::metadata::dependency_groups::SourcedDependencyGroups;
pub use crate::metadata::lowering::LoweredRequirement;
pub use crate::metadata::lowering::LoweringError;

View File

@ -404,6 +404,20 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
}
}
/// Look up the extra build dependencies configured for the given package.
///
/// Returns an empty slice when the package name is unknown or has no entry.
fn extra_build_dependencies_for(
    &self,
    name: Option<&PackageName>,
) -> &[uv_pep508::Requirement<uv_pypi_types::VerbatimParsedUrl>] {
    match name {
        Some(name) => self
            .build_context
            .extra_build_dependencies()
            .get(name)
            .map_or(&[], |requirements| requirements.as_slice()),
        None => &[],
    }
}
/// Build a source distribution from a remote URL.
async fn url<'data>(
&self,
@ -438,12 +452,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
let cache_shard = cache_shard.shard(revision.id());
let source_dist_entry = cache_shard.entry(SOURCE);
// If there are build settings, we need to scope to a cache shard.
// If there are build settings or extra build dependencies, we need to scope to a cache shard.
let config_settings = self.config_settings_for(source.name());
let cache_shard = if config_settings.is_empty() {
let extra_build_deps = self.extra_build_dependencies_for(source.name());
let cache_shard = if config_settings.is_empty() && extra_build_deps.is_empty() {
cache_shard
} else {
cache_shard.shard(cache_digest(&&config_settings))
cache_shard.shard(cache_digest(&(&config_settings, extra_build_deps)))
};
// If the cache contains a compatible wheel, return it.
@ -614,12 +629,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
}
}
// If there are build settings, we need to scope to a cache shard.
// If there are build settings or extra build dependencies, we need to scope to a cache shard.
let config_settings = self.config_settings_for(source.name());
let cache_shard = if config_settings.is_empty() {
let extra_build_deps = self.extra_build_dependencies_for(source.name());
let cache_shard = if config_settings.is_empty() && extra_build_deps.is_empty() {
cache_shard
} else {
cache_shard.shard(cache_digest(&config_settings))
cache_shard.shard(cache_digest(&(&config_settings, extra_build_deps)))
};
// Otherwise, we either need to build the metadata.
@ -827,12 +843,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
let cache_shard = cache_shard.shard(revision.id());
let source_entry = cache_shard.entry(SOURCE);
// If there are build settings, we need to scope to a cache shard.
// If there are build settings or extra build dependencies, we need to scope to a cache shard.
let config_settings = self.config_settings_for(source.name());
let cache_shard = if config_settings.is_empty() {
let extra_build_deps = self.extra_build_dependencies_for(source.name());
let cache_shard = if config_settings.is_empty() && extra_build_deps.is_empty() {
cache_shard
} else {
cache_shard.shard(cache_digest(&config_settings))
cache_shard.shard(cache_digest(&(&config_settings, extra_build_deps)))
};
// If the cache contains a compatible wheel, return it.
@ -989,12 +1006,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
});
}
// If there are build settings, we need to scope to a cache shard.
// If there are build settings or extra build dependencies, we need to scope to a cache shard.
let config_settings = self.config_settings_for(source.name());
let cache_shard = if config_settings.is_empty() {
let extra_build_deps = self.extra_build_dependencies_for(source.name());
let cache_shard = if config_settings.is_empty() && extra_build_deps.is_empty() {
cache_shard
} else {
cache_shard.shard(cache_digest(&config_settings))
cache_shard.shard(cache_digest(&(&config_settings, extra_build_deps)))
};
// Otherwise, we need to build a wheel.
@ -1131,12 +1149,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
// freshness, since entries have to be fresher than the revision itself.
let cache_shard = cache_shard.shard(revision.id());
// If there are build settings, we need to scope to a cache shard.
// If there are build settings or extra build dependencies, we need to scope to a cache shard.
let config_settings = self.config_settings_for(source.name());
let cache_shard = if config_settings.is_empty() {
let extra_build_deps = self.extra_build_dependencies_for(source.name());
let cache_shard = if config_settings.is_empty() && extra_build_deps.is_empty() {
cache_shard
} else {
cache_shard.shard(cache_digest(&config_settings))
cache_shard.shard(cache_digest(&(&config_settings, extra_build_deps)))
};
// If the cache contains a compatible wheel, return it.
@ -1319,12 +1338,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
));
}
// If there are build settings, we need to scope to a cache shard.
// If there are build settings or extra build dependencies, we need to scope to a cache shard.
let config_settings = self.config_settings_for(source.name());
let cache_shard = if config_settings.is_empty() {
let extra_build_deps = self.extra_build_dependencies_for(source.name());
let cache_shard = if config_settings.is_empty() && extra_build_deps.is_empty() {
cache_shard
} else {
cache_shard.shard(cache_digest(&config_settings))
cache_shard.shard(cache_digest(&(&config_settings, extra_build_deps)))
};
// Otherwise, we need to build a wheel.
@ -1524,12 +1544,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
// Acquire the advisory lock.
let _lock = cache_shard.lock().await.map_err(Error::CacheWrite)?;
// If there are build settings, we need to scope to a cache shard.
// If there are build settings or extra build dependencies, we need to scope to a cache shard.
let config_settings = self.config_settings_for(source.name());
let cache_shard = if config_settings.is_empty() {
let extra_build_deps = self.extra_build_dependencies_for(source.name());
let cache_shard = if config_settings.is_empty() && extra_build_deps.is_empty() {
cache_shard
} else {
cache_shard.shard(cache_digest(&config_settings))
cache_shard.shard(cache_digest(&(&config_settings, extra_build_deps)))
};
// If the cache contains a compatible wheel, return it.
@ -1827,12 +1848,13 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> {
));
}
// If there are build settings, we need to scope to a cache shard.
// If there are build settings or extra build dependencies, we need to scope to a cache shard.
let config_settings = self.config_settings_for(source.name());
let cache_shard = if config_settings.is_empty() {
let extra_build_deps = self.extra_build_dependencies_for(source.name());
let cache_shard = if config_settings.is_empty() && extra_build_deps.is_empty() {
cache_shard
} else {
cache_shard.shard(cache_digest(&config_settings))
cache_shard.shard(cache_digest(&(&config_settings, extra_build_deps)))
};
// Otherwise, we need to build a wheel.

View File

@ -20,6 +20,7 @@ serde = { workspace = true, features = ["derive"] }
tracing = { workspace = true, optional = true }
unicode-width = { workspace = true }
unscanny = { workspace = true }
uv-cache-key = { workspace = true }
# Adds conversions from [`VersionSpecifiers`] to [`version_ranges::Ranges`]
version-ranges = { workspace = true, optional = true }

View File

@ -27,7 +27,7 @@ PEP 440 has a lot of unintuitive features, including:
- An epoch that you can prefix the version with, e.g., `1!1.2.3`. Lower epoch always means lower
version (`1.0 <=2!0.1`)
- Post versions, which can be attached to both stable releases and pre-releases
- Dev versions, which can be attached to sbpth table releases and pre-releases. When attached to a
- Dev versions, which can be attached to both stable releases and pre-releases. When attached to a
pre-release the dev version is ordered just below the normal pre-release, however when attached to
a stable version, the dev version is sorted before any pre-release
- Pre-release handling is a mess: "Pre-releases of any kind, including developmental releases, are

View File

@ -10,6 +10,7 @@ use std::{
str::FromStr,
sync::Arc,
};
use uv_cache_key::{CacheKey, CacheKeyHasher};
/// One of `~=` `==` `!=` `<=` `>=` `<` `>` `===`
#[derive(Eq, Ord, PartialEq, PartialOrd, Debug, Hash, Clone, Copy)]
@ -114,6 +115,24 @@ impl Operator {
pub fn is_star(self) -> bool {
matches!(self, Self::EqualStar | Self::NotEqualStar)
}
/// Returns the string representation of this operator.
///
/// Note: as with `Display`, the star variants render without the trailing
/// star — `EqualStar` is `==` and `NotEqualStar` is `!=`.
pub fn as_str(self) -> &'static str {
    match self {
        // The star variants intentionally collapse to their non-star form.
        Self::Equal | Self::EqualStar => "==",
        #[allow(deprecated)]
        Self::ExactEqual => "===",
        Self::NotEqual | Self::NotEqualStar => "!=",
        Self::TildeEqual => "~=",
        Self::LessThan => "<",
        Self::LessThanEqual => "<=",
        Self::GreaterThan => ">",
        Self::GreaterThanEqual => ">=",
    }
}
}
impl FromStr for Operator {
@ -150,21 +169,7 @@ impl FromStr for Operator {
impl std::fmt::Display for Operator {
    /// Note the `EqualStar` is also `==`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Delegate to `as_str`, which already collapses the star variants.
        f.write_str(self.as_str())
    }
}
@ -930,6 +935,46 @@ impl Hash for Version {
}
}
impl CacheKey for Version {
    /// Feed every component of the version into the hasher in a fixed,
    /// self-delimiting order: epoch, length-prefixed release segments, then
    /// pre/post/dev each tagged with a 1/0 presence byte, and finally the
    /// local version segments.
    fn cache_key(&self, state: &mut CacheKeyHasher) {
        self.epoch().cache_key(state);

        let release = self.release();
        // Length-prefix the release so adjacent fields can't alias each other.
        release.len().cache_key(state);
        for segment in release.iter() {
            segment.cache_key(state);
        }

        if let Some(pre) = self.pre() {
            1u8.cache_key(state);
            // Encode the pre-release kind as a stable discriminant byte.
            match pre.kind {
                PrereleaseKind::Alpha => 0u8.cache_key(state),
                PrereleaseKind::Beta => 1u8.cache_key(state),
                PrereleaseKind::Rc => 2u8.cache_key(state),
            }
            pre.number.cache_key(state);
        } else {
            0u8.cache_key(state);
        }

        if let Some(post) = self.post() {
            1u8.cache_key(state);
            post.cache_key(state);
        } else {
            0u8.cache_key(state);
        }

        if let Some(dev) = self.dev() {
            1u8.cache_key(state);
            dev.cache_key(state);
        } else {
            0u8.cache_key(state);
        }

        self.local().cache_key(state);
    }
}
impl PartialOrd<Self> for Version {
#[inline]
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
@ -1711,6 +1756,23 @@ impl std::fmt::Display for LocalVersionSlice<'_> {
}
}
impl CacheKey for LocalVersionSlice<'_> {
    fn cache_key(&self, state: &mut CacheKeyHasher) {
        // A leading discriminant byte keeps `Segments` and `Max` from colliding.
        if let LocalVersionSlice::Segments(segments) = self {
            0u8.cache_key(state);
            // Length-prefix the list so concatenated keys stay unambiguous.
            segments.len().cache_key(state);
            for segment in segments.iter() {
                segment.cache_key(state);
            }
        } else {
            // `LocalVersionSlice::Max`
            1u8.cache_key(state);
        }
    }
}
impl PartialOrd for LocalVersionSlice<'_> {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
@ -1777,6 +1839,21 @@ impl std::fmt::Display for LocalSegment {
}
}
impl CacheKey for LocalSegment {
    fn cache_key(&self, state: &mut CacheKeyHasher) {
        // Tag each variant with a discriminant byte before its payload, so a
        // string segment can never collide with a numeric one.
        match self {
            Self::Number(number) => {
                1u8.cache_key(state);
                number.cache_key(state);
            }
            Self::String(string) => {
                0u8.cache_key(state);
                string.cache_key(state);
            }
        }
    }
}
impl PartialOrd for LocalSegment {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))

View File

@ -48,6 +48,11 @@ impl VersionSpecifiers {
Self(Box::new([]))
}
/// The number of specifiers.
pub fn len(&self) -> usize {
self.0.len()
}
/// Whether all specifiers match the given version.
pub fn contains(&self, version: &Version) -> bool {
self.iter().all(|specifier| specifier.contains(version))

View File

@ -19,6 +19,7 @@ doctest = false
workspace = true
[dependencies]
uv-cache-key = { workspace = true }
uv-fs = { workspace = true }
uv-normalize = { workspace = true }
uv-pep440 = { workspace = true }

View File

@ -26,6 +26,7 @@ use std::str::FromStr;
use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
use thiserror::Error;
use url::Url;
use uv_cache_key::{CacheKey, CacheKeyHasher};
use cursor::Cursor;
pub use marker::{
@ -251,6 +252,49 @@ impl<T: Pep508Url> Serialize for Requirement<T> {
}
}
impl<T: Pep508Url> CacheKey for Requirement<T> {
    /// Hash the requirement's identity in a fixed, self-delimiting order:
    /// name, length-prefixed extras, the version-or-URL (tagged with presence
    /// and variant discriminant bytes), then the marker contents.
    fn cache_key(&self, state: &mut CacheKeyHasher) {
        self.name.as_str().cache_key(state);
        self.extras.len().cache_key(state);
        for extra in &self.extras {
            extra.as_str().cache_key(state);
        }

        // TODO(zanieb): We inline cache key handling for the child types here, but we could
        // move the implementations to the children. The intent here was to limit the scope of
        // types exposing the `CacheKey` trait for now.
        if let Some(version_or_url) = &self.version_or_url {
            1u8.cache_key(state);
            match version_or_url {
                VersionOrUrl::VersionSpecifier(spec) => {
                    0u8.cache_key(state);
                    spec.len().cache_key(state);
                    for specifier in spec.iter() {
                        specifier.operator().as_str().cache_key(state);
                        // `Operator::as_str` collapses the star variants to plain
                        // `==`/`!=`, so hash the star-ness separately to keep,
                        // e.g., `==1.2` and `==1.2.*` from sharing a cache key.
                        u8::from(specifier.operator().is_star()).cache_key(state);
                        specifier.version().cache_key(state);
                    }
                }
                VersionOrUrl::Url(url) => {
                    1u8.cache_key(state);
                    url.cache_key(state);
                }
            }
        } else {
            0u8.cache_key(state);
        }

        if let Some(marker) = self.marker.contents() {
            1u8.cache_key(state);
            marker.to_string().cache_key(state);
        } else {
            0u8.cache_key(state);
        }
        // `origin` is intentionally omitted
    }
}
impl<T: Pep508Url> Requirement<T> {
/// Returns whether the markers apply for the given environment
pub fn evaluate_markers(&self, env: &MarkerEnvironment, extras: &[ExtraName]) -> bool {
@ -283,7 +327,7 @@ impl<T: Pep508Url> Requirement<T> {
}
/// Type to parse URLs from `name @ <url>` into. Defaults to [`Url`].
pub trait Pep508Url: Display + Debug + Sized {
pub trait Pep508Url: Display + Debug + Sized + CacheKey {
/// String to URL parsing error
type Err: Error + Debug;

View File

@ -10,6 +10,7 @@ use arcstr::ArcStr;
use regex::Regex;
use thiserror::Error;
use url::{ParseError, Url};
use uv_cache_key::{CacheKey, CacheKeyHasher};
#[cfg_attr(not(feature = "non-pep508-extensions"), allow(unused_imports))]
use uv_fs::{normalize_absolute_path, normalize_url_path};
@ -37,6 +38,12 @@ impl Hash for VerbatimUrl {
}
}
impl CacheKey for VerbatimUrl {
    fn cache_key(&self, state: &mut CacheKeyHasher) {
        // Key on the parsed URL string only, matching `PartialEq`, which also
        // compares just `url` and ignores the verbatim user-supplied text.
        let serialized = self.url.as_str();
        serialized.cache_key(state);
    }
}
impl PartialEq for VerbatimUrl {
fn eq(&self, other: &Self) -> bool {
self.url == other.url

View File

@ -105,6 +105,11 @@ impl PlatformTag {
}
impl PlatformTag {
/// Returns `true` if the platform is "any" (i.e., not specific to a platform).
pub fn is_any(&self) -> bool {
    match self {
        Self::Any => true,
        _ => false,
    }
}
/// Returns `true` if the platform is manylinux-only.
pub fn is_manylinux(&self) -> bool {
matches!(
@ -134,6 +139,11 @@ impl PlatformTag {
matches!(self, Self::Macos { .. })
}
/// Returns `true` if the platform is Android-only.
pub fn is_android(&self) -> bool {
    match self {
        Self::Android { .. } => true,
        _ => false,
    }
}
/// Returns `true` if the platform is Windows-only.
pub fn is_windows(&self) -> bool {
matches!(

View File

@ -0,0 +1,34 @@
[package]
name = "uv-platform"
version = "0.0.1"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
documentation = { workspace = true }
repository = { workspace = true }
authors = { workspace = true }
license = { workspace = true }
[lib]
doctest = false
[lints]
workspace = true
[dependencies]
uv-static = { workspace = true }
uv-fs = { workspace = true }
uv-platform-tags = { workspace = true }
fs-err = { workspace = true }
goblin = { workspace = true }
regex = { workspace = true }
target-lexicon = { workspace = true }
thiserror = { workspace = true }
tracing = { workspace = true }
[target.'cfg(target_os = "linux")'.dependencies]
procfs = { workspace = true }
[dev-dependencies]
indoc = { workspace = true }

View File

@ -0,0 +1,249 @@
use crate::Error;
use std::fmt::Display;
use std::str::FromStr;
use std::{cmp, fmt};
/// Architecture variants, e.g., with support for different instruction sets
///
/// Declaration order matters: the derived `Ord` ranks `V2 < V3 < V4` (with
/// `None < Some(_)` at the `Option` level), which `Arch`'s ordering relies on.
#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash, Ord, PartialOrd)]
pub enum ArchVariant {
    /// Targets 64-bit Intel/AMD CPUs newer than Nehalem (2008).
    /// Includes SSE3, SSE4 and other post-2003 CPU instructions.
    V2,
    /// Targets 64-bit Intel/AMD CPUs newer than Haswell (2013) and Excavator (2015).
    /// Includes AVX, AVX2, MOVBE and other newer CPU instructions.
    V3,
    /// Targets 64-bit Intel/AMD CPUs with AVX-512 instructions (post-2017 Intel CPUs).
    /// Many post-2017 Intel CPUs do not support AVX-512.
    V4,
}
/// A CPU architecture: a `target_lexicon` family plus an optional
/// microarchitecture variant (only accepted for `x86_64` when parsing).
#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)]
pub struct Arch {
    // The architecture family, e.g., `x86_64` or `aarch64`.
    pub(crate) family: target_lexicon::Architecture,
    // Optional microarchitecture level, e.g., `x86_64_v3`.
    pub(crate) variant: Option<ArchVariant>,
}
impl Ord for Arch {
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        // Identical families are ordered purely by variant (`None` sorts first).
        if self.family == other.family {
            return self.variant.cmp(&other.variant);
        }

        // For the time being, manually make aarch64 windows disfavored
        // on its own host platform, because most packages don't have wheels for
        // aarch64 windows, making emulation more useful than native execution!
        //
        // The reason we do this in "sorting" and not "supports" is so that we don't
        // *refuse* to use an aarch64 windows pythons if they happen to be installed
        // and nothing else is available.
        //
        // Similarly if someone manually requests an aarch64 windows install, we
        // should respect that request (this is the way users should "override"
        // this behaviour).
        let preferred = if cfg!(all(windows, target_arch = "aarch64")) {
            Arch {
                family: target_lexicon::Architecture::X86_64,
                variant: None,
            }
        } else {
            // Prefer native architectures
            Arch::from_env()
        };

        match (
            self.family == preferred.family,
            other.family == preferred.family,
        ) {
            // Both matching the preferred family would imply equal families,
            // which was handled by the early return above.
            (true, true) => unreachable!(),
            (true, false) => cmp::Ordering::Less,
            (false, true) => cmp::Ordering::Greater,
            (false, false) => {
                // Both non-preferred, fallback to lexicographic order
                self.family.to_string().cmp(&other.family.to_string())
            }
        }
    }
}
impl PartialOrd for Arch {
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        // Delegate to the total order defined by `Ord`.
        Some(Ord::cmp(self, other))
    }
}
impl Arch {
    /// Create an architecture from a family and an optional microarchitecture variant.
    pub fn new(family: target_lexicon::Architecture, variant: Option<ArchVariant>) -> Self {
        Self { family, variant }
    }

    /// The architecture of the running host, with no variant.
    pub fn from_env() -> Self {
        Self {
            family: target_lexicon::HOST.architecture,
            variant: None,
        }
    }

    /// Does the current architecture support running the other?
    ///
    /// When the architecture is equal, this is always true. Otherwise, this is true if the
    /// architecture is transparently emulated or is a microarchitecture with worse performance
    /// characteristics.
    pub fn supports(self, other: Self) -> bool {
        if self == other {
            return true;
        }

        // TODO: Implement `variant` support checks

        // Windows ARM64 runs emulated x86_64 binaries transparently
        // Similarly, macOS aarch64 runs emulated x86_64 binaries transparently if you have Rosetta
        // installed. We don't try to be clever and check if that's the case here, we just assume
        // that if x86_64 distributions are available, they're usable.
        if (cfg!(windows) || cfg!(target_os = "macos"))
            && matches!(self.family, target_lexicon::Architecture::Aarch64(_))
        {
            return other.family == target_lexicon::Architecture::X86_64;
        }

        false
    }

    /// The underlying `target_lexicon` architecture family.
    pub fn family(&self) -> target_lexicon::Architecture {
        self.family
    }

    /// Returns `true` if this is a 32-bit ARM architecture.
    pub fn is_arm(&self) -> bool {
        matches!(self.family, target_lexicon::Architecture::Arm(_))
    }
}
impl Display for Arch {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Present i686 under its user-facing alias `x86`; everything else uses
        // `target_lexicon`'s own rendering.
        if let target_lexicon::Architecture::X86_32(target_lexicon::X86_32Architecture::I686) =
            self.family
        {
            write!(f, "x86")?;
        } else {
            write!(f, "{}", self.family)?;
        }
        if let Some(variant) = self.variant {
            write!(f, "_{variant}")?;
        }
        Ok(())
    }
}
impl FromStr for Arch {
    type Err = Error;

    /// Parse an architecture, optionally suffixed with a variant (e.g., `x86_64_v3`).
    ///
    /// # Errors
    ///
    /// Returns [`Error::UnknownArch`] for unrecognized families, and
    /// [`Error::UnsupportedVariant`] when a variant is given for a family other
    /// than `x86_64`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Parse just the family portion, normalizing the `x86` alias.
        fn parse_family(s: &str) -> Result<target_lexicon::Architecture, Error> {
            let inner = match s {
                // Allow users to specify "x86" as a shorthand for the "i686" variant, they should not need
                // to specify the exact architecture and this variant is what we have downloads for.
                "x86" => {
                    target_lexicon::Architecture::X86_32(target_lexicon::X86_32Architecture::I686)
                }
                _ => target_lexicon::Architecture::from_str(s)
                    .map_err(|()| Error::UnknownArch(s.to_string()))?,
            };
            // Reject the `Unknown` placeholder rather than carrying it forward.
            if matches!(inner, target_lexicon::Architecture::Unknown) {
                return Err(Error::UnknownArch(s.to_string()));
            }
            Ok(inner)
        }

        // First check for a variant
        if let Some((Ok(family), Ok(variant))) = s
            .rsplit_once('_')
            .map(|(family, variant)| (parse_family(family), ArchVariant::from_str(variant)))
        {
            // We only support variants for `x86_64` right now
            if !matches!(family, target_lexicon::Architecture::X86_64) {
                return Err(Error::UnsupportedVariant(
                    variant.to_string(),
                    family.to_string(),
                ));
            }
            return Ok(Self {
                family,
                variant: Some(variant),
            });
        }

        // No variant suffix: the whole string is the family.
        let family = parse_family(s)?;
        Ok(Self {
            family,
            variant: None,
        })
    }
}
impl FromStr for ArchVariant {
    type Err = ();

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Only the `vN` microarchitecture labels are recognized.
        let variant = match s {
            "v2" => Self::V2,
            "v3" => Self::V3,
            "v4" => Self::V4,
            _ => return Err(()),
        };
        Ok(variant)
    }
}
impl Display for ArchVariant {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Render the same lowercase labels accepted by `FromStr`.
        let label = match self {
            Self::V2 => "v2",
            Self::V3 => "v3",
            Self::V4 => "v4",
        };
        write!(f, "{label}")
    }
}
impl From<&uv_platform_tags::Arch> for Arch {
    /// Map a wheel-tag architecture onto a `target_lexicon` family.
    ///
    /// Wheel-tag architectures never carry a microarchitecture level, so the
    /// resulting `variant` is always `None`.
    fn from(value: &uv_platform_tags::Arch) -> Self {
        match value {
            uv_platform_tags::Arch::Aarch64 => Arch::new(
                target_lexicon::Architecture::Aarch64(target_lexicon::Aarch64Architecture::Aarch64),
                None,
            ),
            uv_platform_tags::Arch::Armv5TEL => Arch::new(
                target_lexicon::Architecture::Arm(target_lexicon::ArmArchitecture::Armv5te),
                None,
            ),
            uv_platform_tags::Arch::Armv6L => Arch::new(
                target_lexicon::Architecture::Arm(target_lexicon::ArmArchitecture::Armv6),
                None,
            ),
            uv_platform_tags::Arch::Armv7L => Arch::new(
                target_lexicon::Architecture::Arm(target_lexicon::ArmArchitecture::Armv7),
                None,
            ),
            uv_platform_tags::Arch::S390X => Arch::new(target_lexicon::Architecture::S390x, None),
            uv_platform_tags::Arch::Powerpc => {
                Arch::new(target_lexicon::Architecture::Powerpc, None)
            }
            uv_platform_tags::Arch::Powerpc64 => {
                Arch::new(target_lexicon::Architecture::Powerpc64, None)
            }
            uv_platform_tags::Arch::Powerpc64Le => {
                Arch::new(target_lexicon::Architecture::Powerpc64le, None)
            }
            // `X86` maps to the i686 flavor, matching the `x86` parsing alias.
            uv_platform_tags::Arch::X86 => Arch::new(
                target_lexicon::Architecture::X86_32(target_lexicon::X86_32Architecture::I686),
                None,
            ),
            uv_platform_tags::Arch::X86_64 => Arch::new(target_lexicon::Architecture::X86_64, None),
            uv_platform_tags::Arch::LoongArch64 => {
                Arch::new(target_lexicon::Architecture::LoongArch64, None)
            }
            uv_platform_tags::Arch::Riscv64 => Arch::new(
                target_lexicon::Architecture::Riscv64(target_lexicon::Riscv64Architecture::Riscv64),
                None,
            ),
            uv_platform_tags::Arch::Wasm32 => Arch::new(target_lexicon::Architecture::Wasm32, None),
        }
    }
}

View File

@ -1,6 +1,6 @@
//! Fetches CPU information.
use anyhow::Error;
use std::io::Error;
#[cfg(target_os = "linux")]
use procfs::{CpuInfo, Current};
@ -14,7 +14,7 @@ use procfs::{CpuInfo, Current};
/// More information on this can be found in the [Debian ARM Hard Float Port documentation](https://wiki.debian.org/ArmHardFloatPort#VFP).
#[cfg(target_os = "linux")]
pub(crate) fn detect_hardware_floating_point_support() -> Result<bool, Error> {
let cpu_info = CpuInfo::current()?;
let cpu_info = CpuInfo::current().map_err(Error::other)?;
if let Some(features) = cpu_info.fields.get("Features") {
if features.contains("vfp") {
return Ok(true); // "vfp" found: hard-float (gnueabihf) detected

View File

@ -0,0 +1,26 @@
//! Platform detection for operating system, architecture, and libc.
use thiserror::Error;
pub use crate::arch::{Arch, ArchVariant};
pub use crate::libc::{Libc, LibcDetectionError, LibcVersion};
pub use crate::os::Os;
mod arch;
mod cpuinfo;
mod libc;
mod os;
/// Errors raised while detecting or parsing a platform (OS, architecture, libc).
#[derive(Error, Debug)]
pub enum Error {
    /// The operating system string was not recognized.
    #[error("Unknown operating system: {0}")]
    UnknownOs(String),
    /// The architecture string was not recognized.
    #[error("Unknown architecture: {0}")]
    UnknownArch(String),
    /// The libc string was not recognized.
    #[error("Unknown libc environment: {0}")]
    UnknownLibc(String),
    /// A microarchitecture variant was given for a family that does not take one.
    #[error("Unsupported variant `{0}` for architecture `{1}`")]
    UnsupportedVariant(String, String),
    /// Libc detection on Linux failed.
    #[error(transparent)]
    LibcDetectionError(#[from] crate::libc::LibcDetectionError),
}

View File

@ -3,18 +3,22 @@
//! Taken from `glibc_version` (<https://github.com/delta-incubator/glibc-version-rs>),
//! which used the Apache 2.0 license (but not the MIT license)
use crate::cpuinfo::detect_hardware_floating_point_support;
use fs_err as fs;
use goblin::elf::Elf;
use regex::Regex;
use std::fmt::Display;
use std::io;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use std::str::FromStr;
use std::sync::LazyLock;
use thiserror::Error;
use std::{env, fmt};
use tracing::trace;
use uv_fs::Simplified;
use uv_static::EnvVars;
#[derive(Debug, Error)]
#[derive(Debug, thiserror::Error)]
pub enum LibcDetectionError {
#[error(
"Could not detect either glibc version nor musl libc version, at least one of which is required"
@ -45,11 +49,89 @@ pub enum LibcDetectionError {
/// We support glibc (manylinux) and musl (musllinux) on linux.
#[derive(Debug, PartialEq, Eq)]
pub(crate) enum LibcVersion {
pub enum LibcVersion {
Manylinux { major: u32, minor: u32 },
Musllinux { major: u32, minor: u32 },
}
/// The C standard library environment of a platform.
#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)]
pub enum Libc {
    /// A concrete libc environment (e.g., `gnu`, `gnueabihf`, or `musl`).
    Some(target_lexicon::Environment),
    /// No libc distinction (e.g., Windows and macOS).
    None,
}
impl Libc {
    /// Detect the libc of the running host.
    ///
    /// On Linux, a non-empty `UV_LIBC` environment variable overrides detection;
    /// otherwise the glibc/musl probe is consulted. Non-Linux platforms always
    /// report [`Libc::None`].
    ///
    /// # Errors
    ///
    /// Returns an error if the override string is unrecognized or detection fails.
    pub fn from_env() -> Result<Self, crate::Error> {
        match env::consts::OS {
            "linux" => {
                // An explicit, non-empty override takes precedence over detection.
                if let Ok(libc) = env::var(EnvVars::UV_LIBC) {
                    if !libc.is_empty() {
                        return Self::from_str(&libc);
                    }
                }

                Ok(Self::Some(match detect_linux_libc()? {
                    LibcVersion::Manylinux { .. } => match env::consts::ARCH {
                        // Checks if the CPU supports hardware floating-point operations.
                        // Depending on the result, it selects either the `gnueabihf` (hard-float) or `gnueabi` (soft-float) environment.
                        // download-metadata.json only includes armv7.
                        "arm" | "armv5te" | "armv7" => {
                            match detect_hardware_floating_point_support() {
                                Ok(true) => target_lexicon::Environment::Gnueabihf,
                                Ok(false) => target_lexicon::Environment::Gnueabi,
                                // On probe failure, fall back to plain glibc.
                                Err(_) => target_lexicon::Environment::Gnu,
                            }
                        }
                        _ => target_lexicon::Environment::Gnu,
                    },
                    LibcVersion::Musllinux { .. } => target_lexicon::Environment::Musl,
                }))
            }
            "windows" | "macos" => Ok(Self::None),
            // Use `None` on platforms without explicit support.
            _ => Ok(Self::None),
        }
    }

    /// Returns `true` if this is a musl libc environment.
    pub fn is_musl(&self) -> bool {
        matches!(self, Self::Some(target_lexicon::Environment::Musl))
    }
}
impl FromStr for Libc {
type Err = crate::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"gnu" => Ok(Self::Some(target_lexicon::Environment::Gnu)),
"gnueabi" => Ok(Self::Some(target_lexicon::Environment::Gnueabi)),
"gnueabihf" => Ok(Self::Some(target_lexicon::Environment::Gnueabihf)),
"musl" => Ok(Self::Some(target_lexicon::Environment::Musl)),
"none" => Ok(Self::None),
_ => Err(crate::Error::UnknownLibc(s.to_string())),
}
}
}
impl Display for Libc {
    /// Render the environment name; `none` mirrors the [`FromStr`] spelling.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if let Self::Some(env) = self {
            write!(f, "{env}")
        } else {
            write!(f, "none")
        }
    }
}
impl From<&uv_platform_tags::Os> for Libc {
    /// Derive the libc from a platform-tag OS: manylinux implies glibc,
    /// musllinux implies musl, and every other OS carries no libc here.
    fn from(value: &uv_platform_tags::Os) -> Self {
        match value {
            uv_platform_tags::Os::Manylinux { .. } => {
                Self::Some(target_lexicon::Environment::Gnu)
            }
            uv_platform_tags::Os::Musllinux { .. } => {
                Self::Some(target_lexicon::Environment::Musl)
            }
            _ => Self::None,
        }
    }
}
/// Determine whether we're running glibc or musl and in which version, given we are on linux.
///
/// Normally, we determine this from the python interpreter, which is more accurate, but when

View File

@ -0,0 +1,88 @@
use crate::Error;
use std::fmt;
use std::fmt::Display;
use std::ops::Deref;
use std::str::FromStr;
/// The operating system of a platform, a thin wrapper around
/// [`target_lexicon::OperatingSystem`].
#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)]
pub struct Os(pub(crate) target_lexicon::OperatingSystem);
impl Os {
    /// Wrap a `target_lexicon` operating system value.
    pub fn new(os: target_lexicon::OperatingSystem) -> Self {
        Self(os)
    }

    /// The operating system of the current host, as reported by `target_lexicon::HOST`.
    pub fn from_env() -> Self {
        Self::new(target_lexicon::HOST.operating_system)
    }

    /// Returns `true` if this operating system is Windows.
    pub fn is_windows(&self) -> bool {
        matches!(self.0, target_lexicon::OperatingSystem::Windows)
    }
}
impl Display for Os {
    /// Render the OS name, normalizing any Darwin variant to the user-facing
    /// `macos`; all other systems use `target_lexicon`'s own rendering.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.0 {
            target_lexicon::OperatingSystem::Darwin(_) => write!(f, "macos"),
            inner => write!(f, "{inner}"),
        }
    }
}
impl FromStr for Os {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let inner = match s {
"macos" => target_lexicon::OperatingSystem::Darwin(None),
_ => target_lexicon::OperatingSystem::from_str(s)
.map_err(|()| Error::UnknownOs(s.to_string()))?,
};
if matches!(inner, target_lexicon::OperatingSystem::Unknown) {
return Err(Error::UnknownOs(s.to_string()));
}
Ok(Self(inner))
}
}
impl Deref for Os {
    type Target = target_lexicon::OperatingSystem;

    // Expose the wrapped `target_lexicon::OperatingSystem` so callers can use
    // its methods and match on it directly.
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl From<&uv_platform_tags::Os> for Os {
fn from(value: &uv_platform_tags::Os) -> Self {
match value {
uv_platform_tags::Os::Dragonfly { .. } => {
Os::new(target_lexicon::OperatingSystem::Dragonfly)
}
uv_platform_tags::Os::FreeBsd { .. } => {
Os::new(target_lexicon::OperatingSystem::Freebsd)
}
uv_platform_tags::Os::Haiku { .. } => Os::new(target_lexicon::OperatingSystem::Haiku),
uv_platform_tags::Os::Illumos { .. } => {
Os::new(target_lexicon::OperatingSystem::Illumos)
}
uv_platform_tags::Os::Macos { .. } => {
Os::new(target_lexicon::OperatingSystem::Darwin(None))
}
uv_platform_tags::Os::Manylinux { .. }
| uv_platform_tags::Os::Musllinux { .. }
| uv_platform_tags::Os::Android { .. } => {
Os::new(target_lexicon::OperatingSystem::Linux)
}
uv_platform_tags::Os::NetBsd { .. } => Os::new(target_lexicon::OperatingSystem::Netbsd),
uv_platform_tags::Os::OpenBsd { .. } => {
Os::new(target_lexicon::OperatingSystem::Openbsd)
}
uv_platform_tags::Os::Windows => Os::new(target_lexicon::OperatingSystem::Windows),
uv_platform_tags::Os::Pyodide { .. } => {
Os::new(target_lexicon::OperatingSystem::Emscripten)
}
}
}
}

View File

@ -16,6 +16,7 @@ doctest = false
workspace = true
[dependencies]
uv-cache-key = { workspace = true }
uv-distribution-filename = { workspace = true }
uv-git-types = { workspace = true }
uv-normalize = { workspace = true }

View File

@ -3,6 +3,7 @@ use std::path::{Path, PathBuf};
use thiserror::Error;
use url::{ParseError, Url};
use uv_cache_key::{CacheKey, CacheKeyHasher};
use uv_distribution_filename::{DistExtension, ExtensionError};
use uv_git_types::{GitUrl, GitUrlParseError};
@ -45,6 +46,12 @@ pub struct VerbatimParsedUrl {
pub verbatim: VerbatimUrl,
}
impl CacheKey for VerbatimParsedUrl {
    // Hash only the verbatim URL, not the parsed form.
    // NOTE(review): assumes the parsed representation is fully determined by the
    // verbatim URL, so it adds no discriminating information — confirm.
    fn cache_key(&self, state: &mut CacheKeyHasher) {
        self.verbatim.cache_key(state);
    }
}
impl VerbatimParsedUrl {
/// Returns `true` if the URL is editable.
pub fn is_editable(&self) -> bool {

View File

@ -28,6 +28,7 @@ uv-fs = { workspace = true }
uv-install-wheel = { workspace = true }
uv-pep440 = { workspace = true }
uv-pep508 = { workspace = true }
uv-platform = { workspace = true }
uv-platform-tags = { workspace = true }
uv-pypi-types = { workspace = true }
uv-redacted = { workspace = true }
@ -42,7 +43,6 @@ configparser = { workspace = true }
dunce = { workspace = true }
fs-err = { workspace = true, features = ["tokio"] }
futures = { workspace = true }
goblin = { workspace = true, default-features = false }
indexmap = { workspace = true }
itertools = { workspace = true }
owo-colors = { workspace = true }
@ -68,9 +68,6 @@ url = { workspace = true }
which = { workspace = true }
once_cell = { workspace = true }
[target.'cfg(target_os = "linux")'.dependencies]
procfs = { workspace = true }
[target.'cfg(target_os = "windows")'.dependencies]
windows-registry = { workspace = true }
windows-result = { workspace = true }

View File

@ -3066,8 +3066,8 @@ mod tests {
discovery::{PythonRequest, VersionRequest},
downloads::{ArchRequest, PythonDownloadRequest},
implementation::ImplementationName,
platform::{Arch, Libc, Os},
};
use uv_platform::{Arch, Libc, Os};
use super::{Error, PythonVariant};
@ -3154,11 +3154,11 @@ mod tests {
PythonVariant::Default
)),
implementation: Some(ImplementationName::CPython),
arch: Some(ArchRequest::Explicit(Arch {
family: Architecture::Aarch64(Aarch64Architecture::Aarch64),
variant: None
})),
os: Some(Os(target_lexicon::OperatingSystem::Darwin(None))),
arch: Some(ArchRequest::Explicit(Arch::new(
Architecture::Aarch64(Aarch64Architecture::Aarch64),
None
))),
os: Some(Os::new(target_lexicon::OperatingSystem::Darwin(None))),
libc: Some(Libc::None),
prereleases: None
})
@ -3189,10 +3189,10 @@ mod tests {
PythonVariant::Default
)),
implementation: None,
arch: Some(ArchRequest::Explicit(Arch {
family: Architecture::Aarch64(Aarch64Architecture::Aarch64),
variant: None
})),
arch: Some(ArchRequest::Explicit(Arch::new(
Architecture::Aarch64(Aarch64Architecture::Aarch64),
None
))),
os: None,
libc: None,
prereleases: None

View File

@ -25,6 +25,7 @@ use uv_client::{BaseClient, WrappedReqwestError, is_extended_transient_error};
use uv_distribution_filename::{ExtensionError, SourceDistExtension};
use uv_extract::hash::Hasher;
use uv_fs::{Simplified, rename_with_retry};
use uv_platform::{self as platform, Arch, Libc, Os};
use uv_pypi_types::{HashAlgorithm, HashDigest};
use uv_redacted::DisplaySafeUrl;
use uv_static::EnvVars;
@ -34,9 +35,7 @@ use crate::implementation::{
Error as ImplementationError, ImplementationName, LenientImplementationName,
};
use crate::installation::PythonInstallationKey;
use crate::libc::LibcDetectionError;
use crate::managed::ManagedPythonInstallation;
use crate::platform::{self, Arch, Libc, Os};
use crate::{Interpreter, PythonRequest, PythonVersion, VersionRequest};
#[derive(Error, Debug)]
@ -98,7 +97,7 @@ pub enum Error {
#[error("A mirror was provided via `{0}`, but the URL does not match the expected format: {0}")]
Mirror(&'static str, &'static str),
#[error("Failed to determine the libc used on the current platform")]
LibcDetection(#[from] LibcDetectionError),
LibcDetection(#[from] platform::LibcDetectionError),
#[error("Remote Python downloads JSON is not yet supported, please use a local path")]
RemoteJSONNotSupported,
#[error("The JSON of the python downloads is invalid: {0}")]

View File

@ -10,6 +10,7 @@ use uv_cache::Cache;
use uv_client::BaseClientBuilder;
use uv_configuration::Preview;
use uv_pep440::{Prerelease, Version};
use uv_platform::{Arch, Libc, Os};
use crate::discovery::{
EnvironmentPreference, PythonRequest, find_best_python_installation, find_python_installation,
@ -17,7 +18,6 @@ use crate::discovery::{
use crate::downloads::{DownloadResult, ManagedPythonDownload, PythonDownloadRequest, Reporter};
use crate::implementation::LenientImplementationName;
use crate::managed::{ManagedPythonInstallation, ManagedPythonInstallations};
use crate::platform::{Arch, Libc, Os};
use crate::{
Error, ImplementationName, Interpreter, PythonDownloads, PythonPreference, PythonSource,
PythonVariant, PythonVersion, downloads,

View File

@ -21,13 +21,13 @@ use uv_fs::{LockedFile, PythonExt, Simplified, write_atomic_sync};
use uv_install_wheel::Layout;
use uv_pep440::Version;
use uv_pep508::{MarkerEnvironment, StringVersion};
use uv_platform::{Arch, Libc, Os};
use uv_platform_tags::Platform;
use uv_platform_tags::{Tags, TagsError};
use uv_pypi_types::{ResolverMarkerEnvironment, Scheme};
use crate::implementation::LenientImplementationName;
use crate::managed::ManagedPythonInstallations;
use crate::platform::{Arch, Libc, Os};
use crate::pointer_size::PointerSize;
use crate::{
Prefix, PythonInstallationKey, PythonVariant, PythonVersion, Target, VersionRequest,

View File

@ -29,19 +29,16 @@ pub use crate::version_files::{
};
pub use crate::virtualenv::{Error as VirtualEnvError, PyVenvConfiguration, VirtualEnvironment};
mod cpuinfo;
mod discovery;
pub mod downloads;
mod environment;
mod implementation;
mod installation;
mod interpreter;
mod libc;
pub mod macos_dylib;
pub mod managed;
#[cfg(windows)]
mod microsoft_store;
pub mod platform;
mod pointer_size;
mod prefix;
mod python_version;

View File

@ -17,6 +17,8 @@ use uv_configuration::{Preview, PreviewFeatures};
use windows_sys::Win32::Storage::FileSystem::FILE_ATTRIBUTE_REPARSE_POINT;
use uv_fs::{LockedFile, Simplified, replace_symlink, symlink_or_copy_file};
use uv_platform::Error as PlatformError;
use uv_platform::{Arch, Libc, LibcDetectionError, Os};
use uv_state::{StateBucket, StateStore};
use uv_static::EnvVars;
use uv_trampoline_builder::{Launcher, windows_python_launcher};
@ -26,9 +28,6 @@ use crate::implementation::{
Error as ImplementationError, ImplementationName, LenientImplementationName,
};
use crate::installation::{self, PythonInstallationKey};
use crate::libc::LibcDetectionError;
use crate::platform::Error as PlatformError;
use crate::platform::{Arch, Libc, Os};
use crate::python_version::PythonVersion;
use crate::{
PythonInstallationMinorVersionKey, PythonRequest, PythonVariant, macos_dylib, sysconfig,
@ -271,7 +270,7 @@ impl ManagedPythonInstallations {
&& (arch.supports(installation.key.arch)
// TODO(zanieb): Allow inequal variants, as `Arch::supports` does not
// implement this yet. See https://github.com/astral-sh/uv/pull/9788
|| arch.family == installation.key.arch.family)
|| arch.family() == installation.key.arch.family())
&& installation.key.libc == libc
});
@ -545,7 +544,7 @@ impl ManagedPythonInstallation {
/// standard `EXTERNALLY-MANAGED` file.
pub fn ensure_externally_managed(&self) -> Result<(), Error> {
// Construct the path to the `stdlib` directory.
let stdlib = if matches!(self.key.os, Os(target_lexicon::OperatingSystem::Windows)) {
let stdlib = if self.key.os.is_windows() {
self.python_dir().join("Lib")
} else {
let lib_suffix = self.key.variant.suffix();

View File

@ -1,427 +0,0 @@
use crate::cpuinfo::detect_hardware_floating_point_support;
use crate::libc::{LibcDetectionError, LibcVersion, detect_linux_libc};
use std::fmt::Display;
use std::ops::Deref;
use std::{fmt, str::FromStr};
use thiserror::Error;
use uv_static::EnvVars;
#[derive(Error, Debug)]
pub enum Error {
#[error("Unknown operating system: {0}")]
UnknownOs(String),
#[error("Unknown architecture: {0}")]
UnknownArch(String),
#[error("Unknown libc environment: {0}")]
UnknownLibc(String),
#[error("Unsupported variant `{0}` for architecture `{1}`")]
UnsupportedVariant(String, String),
#[error(transparent)]
LibcDetectionError(#[from] LibcDetectionError),
}
/// Architecture variants, e.g., with support for different instruction sets
#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash, Ord, PartialOrd)]
pub enum ArchVariant {
/// Targets 64-bit Intel/AMD CPUs newer than Nehalem (2008).
/// Includes SSE3, SSE4 and other post-2003 CPU instructions.
V2,
/// Targets 64-bit Intel/AMD CPUs newer than Haswell (2013) and Excavator (2015).
/// Includes AVX, AVX2, MOVBE and other newer CPU instructions.
V3,
/// Targets 64-bit Intel/AMD CPUs with AVX-512 instructions (post-2017 Intel CPUs).
/// Many post-2017 Intel CPUs do not support AVX-512.
V4,
}
#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)]
pub struct Arch {
pub(crate) family: target_lexicon::Architecture,
pub(crate) variant: Option<ArchVariant>,
}
impl Ord for Arch {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
if self.family == other.family {
return self.variant.cmp(&other.variant);
}
// For the time being, manually make aarch64 windows disfavored
// on its own host platform, because most packages don't have wheels for
// aarch64 windows, making emulation more useful than native execution!
//
// The reason we do this in "sorting" and not "supports" is so that we don't
// *refuse* to use an aarch64 windows pythons if they happen to be installed
// and nothing else is available.
//
// Similarly if someone manually requests an aarch64 windows install, we
// should respect that request (this is the way users should "override"
// this behaviour).
let preferred = if cfg!(all(windows, target_arch = "aarch64")) {
Arch {
family: target_lexicon::Architecture::X86_64,
variant: None,
}
} else {
// Prefer native architectures
Arch::from_env()
};
match (
self.family == preferred.family,
other.family == preferred.family,
) {
(true, true) => unreachable!(),
(true, false) => std::cmp::Ordering::Less,
(false, true) => std::cmp::Ordering::Greater,
(false, false) => {
// Both non-preferred, fallback to lexicographic order
self.family.to_string().cmp(&other.family.to_string())
}
}
}
}
impl PartialOrd for Arch {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.cmp(other))
}
}
#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)]
pub struct Os(pub(crate) target_lexicon::OperatingSystem);
#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)]
pub enum Libc {
Some(target_lexicon::Environment),
None,
}
impl Libc {
pub(crate) fn from_env() -> Result<Self, Error> {
match std::env::consts::OS {
"linux" => {
if let Ok(libc) = std::env::var(EnvVars::UV_LIBC) {
if !libc.is_empty() {
return Self::from_str(&libc);
}
}
Ok(Self::Some(match detect_linux_libc()? {
LibcVersion::Manylinux { .. } => match std::env::consts::ARCH {
// Checks if the CPU supports hardware floating-point operations.
// Depending on the result, it selects either the `gnueabihf` (hard-float) or `gnueabi` (soft-float) environment.
// download-metadata.json only includes armv7.
"arm" | "armv5te" | "armv7" => {
match detect_hardware_floating_point_support() {
Ok(true) => target_lexicon::Environment::Gnueabihf,
Ok(false) => target_lexicon::Environment::Gnueabi,
Err(_) => target_lexicon::Environment::Gnu,
}
}
_ => target_lexicon::Environment::Gnu,
},
LibcVersion::Musllinux { .. } => target_lexicon::Environment::Musl,
}))
}
"windows" | "macos" => Ok(Self::None),
// Use `None` on platforms without explicit support.
_ => Ok(Self::None),
}
}
pub fn is_musl(&self) -> bool {
matches!(self, Self::Some(target_lexicon::Environment::Musl))
}
}
impl FromStr for Libc {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"gnu" => Ok(Self::Some(target_lexicon::Environment::Gnu)),
"gnueabi" => Ok(Self::Some(target_lexicon::Environment::Gnueabi)),
"gnueabihf" => Ok(Self::Some(target_lexicon::Environment::Gnueabihf)),
"musl" => Ok(Self::Some(target_lexicon::Environment::Musl)),
"none" => Ok(Self::None),
_ => Err(Error::UnknownLibc(s.to_string())),
}
}
}
impl Os {
pub fn from_env() -> Self {
Self(target_lexicon::HOST.operating_system)
}
}
impl Arch {
pub fn from_env() -> Self {
Self {
family: target_lexicon::HOST.architecture,
variant: None,
}
}
/// Does the current architecture support running the other?
///
/// When the architecture is equal, this is always true. Otherwise, this is true if the
/// architecture is transparently emulated or is a microarchitecture with worse performance
/// characteristics.
pub(crate) fn supports(self, other: Self) -> bool {
if self == other {
return true;
}
// TODO: Implement `variant` support checks
// Windows ARM64 runs emulated x86_64 binaries transparently
// Similarly, macOS aarch64 runs emulated x86_64 binaries transparently if you have Rosetta
// installed. We don't try to be clever and check if that's the case here, we just assume
// that if x86_64 distributions are available, they're usable.
if (cfg!(windows) || cfg!(target_os = "macos"))
&& matches!(self.family, target_lexicon::Architecture::Aarch64(_))
{
return other.family == target_lexicon::Architecture::X86_64;
}
false
}
pub fn family(&self) -> target_lexicon::Architecture {
self.family
}
pub fn is_arm(&self) -> bool {
matches!(self.family, target_lexicon::Architecture::Arm(_))
}
}
impl Display for Libc {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Some(env) => write!(f, "{env}"),
Self::None => write!(f, "none"),
}
}
}
impl Display for Os {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match &**self {
target_lexicon::OperatingSystem::Darwin(_) => write!(f, "macos"),
inner => write!(f, "{inner}"),
}
}
}
impl Display for Arch {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.family {
target_lexicon::Architecture::X86_32(target_lexicon::X86_32Architecture::I686) => {
write!(f, "x86")?;
}
inner => write!(f, "{inner}")?,
}
if let Some(variant) = self.variant {
write!(f, "_{variant}")?;
}
Ok(())
}
}
impl FromStr for Os {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let inner = match s {
"macos" => target_lexicon::OperatingSystem::Darwin(None),
_ => target_lexicon::OperatingSystem::from_str(s)
.map_err(|()| Error::UnknownOs(s.to_string()))?,
};
if matches!(inner, target_lexicon::OperatingSystem::Unknown) {
return Err(Error::UnknownOs(s.to_string()));
}
Ok(Self(inner))
}
}
impl FromStr for Arch {
type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
fn parse_family(s: &str) -> Result<target_lexicon::Architecture, Error> {
let inner = match s {
// Allow users to specify "x86" as a shorthand for the "i686" variant, they should not need
// to specify the exact architecture and this variant is what we have downloads for.
"x86" => {
target_lexicon::Architecture::X86_32(target_lexicon::X86_32Architecture::I686)
}
_ => target_lexicon::Architecture::from_str(s)
.map_err(|()| Error::UnknownArch(s.to_string()))?,
};
if matches!(inner, target_lexicon::Architecture::Unknown) {
return Err(Error::UnknownArch(s.to_string()));
}
Ok(inner)
}
// First check for a variant
if let Some((Ok(family), Ok(variant))) = s
.rsplit_once('_')
.map(|(family, variant)| (parse_family(family), ArchVariant::from_str(variant)))
{
// We only support variants for `x86_64` right now
if !matches!(family, target_lexicon::Architecture::X86_64) {
return Err(Error::UnsupportedVariant(
variant.to_string(),
family.to_string(),
));
}
return Ok(Self {
family,
variant: Some(variant),
});
}
let family = parse_family(s)?;
Ok(Self {
family,
variant: None,
})
}
}
impl FromStr for ArchVariant {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"v2" => Ok(Self::V2),
"v3" => Ok(Self::V3),
"v4" => Ok(Self::V4),
_ => Err(()),
}
}
}
impl Display for ArchVariant {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::V2 => write!(f, "v2"),
Self::V3 => write!(f, "v3"),
Self::V4 => write!(f, "v4"),
}
}
}
impl Deref for Os {
type Target = target_lexicon::OperatingSystem;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<&uv_platform_tags::Arch> for Arch {
fn from(value: &uv_platform_tags::Arch) -> Self {
match value {
uv_platform_tags::Arch::Aarch64 => Self {
family: target_lexicon::Architecture::Aarch64(
target_lexicon::Aarch64Architecture::Aarch64,
),
variant: None,
},
uv_platform_tags::Arch::Armv5TEL => Self {
family: target_lexicon::Architecture::Arm(target_lexicon::ArmArchitecture::Armv5te),
variant: None,
},
uv_platform_tags::Arch::Armv6L => Self {
family: target_lexicon::Architecture::Arm(target_lexicon::ArmArchitecture::Armv6),
variant: None,
},
uv_platform_tags::Arch::Armv7L => Self {
family: target_lexicon::Architecture::Arm(target_lexicon::ArmArchitecture::Armv7),
variant: None,
},
uv_platform_tags::Arch::S390X => Self {
family: target_lexicon::Architecture::S390x,
variant: None,
},
uv_platform_tags::Arch::Powerpc => Self {
family: target_lexicon::Architecture::Powerpc,
variant: None,
},
uv_platform_tags::Arch::Powerpc64 => Self {
family: target_lexicon::Architecture::Powerpc64,
variant: None,
},
uv_platform_tags::Arch::Powerpc64Le => Self {
family: target_lexicon::Architecture::Powerpc64le,
variant: None,
},
uv_platform_tags::Arch::X86 => Self {
family: target_lexicon::Architecture::X86_32(
target_lexicon::X86_32Architecture::I686,
),
variant: None,
},
uv_platform_tags::Arch::X86_64 => Self {
family: target_lexicon::Architecture::X86_64,
variant: None,
},
uv_platform_tags::Arch::LoongArch64 => Self {
family: target_lexicon::Architecture::LoongArch64,
variant: None,
},
uv_platform_tags::Arch::Riscv64 => Self {
family: target_lexicon::Architecture::Riscv64(
target_lexicon::Riscv64Architecture::Riscv64,
),
variant: None,
},
uv_platform_tags::Arch::Wasm32 => Self {
family: target_lexicon::Architecture::Wasm32,
variant: None,
},
}
}
}
impl From<&uv_platform_tags::Os> for Libc {
fn from(value: &uv_platform_tags::Os) -> Self {
match value {
uv_platform_tags::Os::Manylinux { .. } => Self::Some(target_lexicon::Environment::Gnu),
uv_platform_tags::Os::Musllinux { .. } => Self::Some(target_lexicon::Environment::Musl),
_ => Self::None,
}
}
}
impl From<&uv_platform_tags::Os> for Os {
fn from(value: &uv_platform_tags::Os) -> Self {
match value {
uv_platform_tags::Os::Dragonfly { .. } => {
Self(target_lexicon::OperatingSystem::Dragonfly)
}
uv_platform_tags::Os::FreeBsd { .. } => Self(target_lexicon::OperatingSystem::Freebsd),
uv_platform_tags::Os::Haiku { .. } => Self(target_lexicon::OperatingSystem::Haiku),
uv_platform_tags::Os::Illumos { .. } => Self(target_lexicon::OperatingSystem::Illumos),
uv_platform_tags::Os::Macos { .. } => {
Self(target_lexicon::OperatingSystem::Darwin(None))
}
uv_platform_tags::Os::Manylinux { .. }
| uv_platform_tags::Os::Musllinux { .. }
| uv_platform_tags::Os::Android { .. } => Self(target_lexicon::OperatingSystem::Linux),
uv_platform_tags::Os::NetBsd { .. } => Self(target_lexicon::OperatingSystem::Netbsd),
uv_platform_tags::Os::OpenBsd { .. } => Self(target_lexicon::OperatingSystem::Openbsd),
uv_platform_tags::Os::Windows => Self(target_lexicon::OperatingSystem::Windows),
uv_platform_tags::Os::Pyodide { .. } => {
Self(target_lexicon::OperatingSystem::Emscripten)
}
}
}
}

View File

@ -1,7 +1,6 @@
//! PEP 514 interactions with the Windows registry.
use crate::managed::ManagedPythonInstallation;
use crate::platform::Arch;
use crate::{COMPANY_DISPLAY_NAME, COMPANY_KEY, PythonInstallationKey, PythonVersion};
use anyhow::anyhow;
use std::cmp::Ordering;
@ -11,6 +10,7 @@ use std::str::FromStr;
use target_lexicon::PointerWidth;
use thiserror::Error;
use tracing::debug;
use uv_platform::Arch;
use uv_warnings::{warn_user, warn_user_once};
use windows_registry::{CURRENT_USER, HSTRING, Key, LOCAL_MACHINE, Value};
use windows_result::HRESULT;

View File

@ -1,30 +1,35 @@
#[cfg(feature = "schemars")]
use std::borrow::Cow;
use std::str::FromStr;
use std::{
ops::{Deref, DerefMut},
str::FromStr,
};
use jiff::{Timestamp, ToSpan, tz::TimeZone};
use rustc_hash::FxHashMap;
use uv_normalize::PackageName;
/// A timestamp that excludes files newer than it.
#[derive(Debug, Copy, Clone, PartialEq, Eq, serde::Deserialize, serde::Serialize)]
pub struct ExcludeNewer(Timestamp);
pub struct ExcludeNewerTimestamp(Timestamp);
impl ExcludeNewer {
impl ExcludeNewerTimestamp {
/// Returns the timestamp in milliseconds.
pub fn timestamp_millis(&self) -> i64 {
self.0.as_millisecond()
}
}
impl From<Timestamp> for ExcludeNewer {
impl From<Timestamp> for ExcludeNewerTimestamp {
fn from(timestamp: Timestamp) -> Self {
Self(timestamp)
}
}
impl FromStr for ExcludeNewer {
impl FromStr for ExcludeNewerTimestamp {
type Err = String;
/// Parse an [`ExcludeNewer`] from a string.
/// Parse an [`ExcludeNewerTimestamp`] from a string.
///
/// Accepts both RFC 3339 timestamps (e.g., `2006-12-02T02:07:43Z`) and local dates in the same
/// format (e.g., `2006-12-02`).
@ -61,16 +66,174 @@ impl FromStr for ExcludeNewer {
}
}
impl std::fmt::Display for ExcludeNewer {
impl std::fmt::Display for ExcludeNewerTimestamp {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
}
}
/// A package-specific exclude-newer entry.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct ExcludeNewerPackageEntry {
    /// The package this cutoff applies to.
    pub package: PackageName,
    /// Files for `package` newer than this timestamp are excluded.
    pub timestamp: ExcludeNewerTimestamp,
}
impl FromStr for ExcludeNewerPackageEntry {
    type Err = String;

    /// Parses a [`ExcludeNewerPackageEntry`] from a string in the format `PACKAGE=DATE`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s.split_once('=') {
            None => Err(format!(
                "Invalid `exclude-newer-package` value `{s}`: expected format `PACKAGE=DATE`"
            )),
            Some((package, date)) => {
                let package = PackageName::from_str(package).map_err(|err| {
                    format!("Invalid `exclude-newer-package` package name `{package}`: {err}")
                })?;
                let timestamp = ExcludeNewerTimestamp::from_str(date).map_err(|err| {
                    format!("Invalid `exclude-newer-package` timestamp `{date}`: {err}")
                })?;
                Ok(Self { package, timestamp })
            }
        }
    }
}
impl From<(PackageName, ExcludeNewerTimestamp)> for ExcludeNewerPackageEntry {
    /// Build an entry from a `(name, timestamp)` pair.
    fn from(value: (PackageName, ExcludeNewerTimestamp)) -> Self {
        let (package, timestamp) = value;
        Self { package, timestamp }
    }
}
/// Per-package `exclude-newer` cutoffs, keyed by package name.
#[derive(Debug, Clone, PartialEq, Eq, Default, serde::Serialize, serde::Deserialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct ExcludeNewerPackage(FxHashMap<PackageName, ExcludeNewerTimestamp>);
impl Deref for ExcludeNewerPackage {
    type Target = FxHashMap<PackageName, ExcludeNewerTimestamp>;

    // Expose the underlying map (e.g., `get`, `is_empty`) without re-wrapping.
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for ExcludeNewerPackage {
    // Mutable access to the underlying map for in-place updates.
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

impl FromIterator<ExcludeNewerPackageEntry> for ExcludeNewerPackage {
    /// Collect parsed `PACKAGE=DATE` entries into the map; on duplicate package
    /// names, later entries overwrite earlier ones (standard map-collect behavior).
    fn from_iter<T: IntoIterator<Item = ExcludeNewerPackageEntry>>(iter: T) -> Self {
        Self(
            iter.into_iter()
                .map(|entry| (entry.package, entry.timestamp))
                .collect(),
        )
    }
}

impl IntoIterator for ExcludeNewerPackage {
    type Item = (PackageName, ExcludeNewerTimestamp);
    type IntoIter = std::collections::hash_map::IntoIter<PackageName, ExcludeNewerTimestamp>;

    // Consuming iteration over owned `(name, timestamp)` pairs.
    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}

impl<'a> IntoIterator for &'a ExcludeNewerPackage {
    type Item = (&'a PackageName, &'a ExcludeNewerTimestamp);
    type IntoIter = std::collections::hash_map::Iter<'a, PackageName, ExcludeNewerTimestamp>;

    // Borrowing iteration; order follows the hash map and is not deterministic.
    fn into_iter(self) -> Self::IntoIter {
        self.0.iter()
    }
}

impl ExcludeNewerPackage {
    /// Convert to the inner `HashMap`.
    pub fn into_inner(self) -> FxHashMap<PackageName, ExcludeNewerTimestamp> {
        self.0
    }
}
/// A setting that excludes files newer than a timestamp, at a global level or per-package.
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize, Default)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct ExcludeNewer {
    /// Global timestamp that applies to all packages if no package-specific timestamp is set.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub global: Option<ExcludeNewerTimestamp>,
    /// Per-package timestamps that override the global timestamp.
    // `FxHashMap::is_empty` is reached through `ExcludeNewerPackage`'s `Deref`
    // impl, so an empty map is omitted from serialized output.
    #[serde(default, skip_serializing_if = "FxHashMap::is_empty")]
    pub package: ExcludeNewerPackage,
}
impl ExcludeNewer {
    /// Create a new exclude newer configuration with just a global timestamp.
    pub fn global(global: ExcludeNewerTimestamp) -> Self {
        Self::new(Some(global), ExcludeNewerPackage::default())
    }

    /// Create a new exclude newer configuration.
    pub fn new(global: Option<ExcludeNewerTimestamp>, package: ExcludeNewerPackage) -> Self {
        Self { global, package }
    }

    /// Create from CLI arguments.
    pub fn from_args(
        global: Option<ExcludeNewerTimestamp>,
        package: Vec<ExcludeNewerPackageEntry>,
    ) -> Self {
        Self::new(global, package.into_iter().collect())
    }

    /// Returns the timestamp for a specific package, falling back to the global timestamp if set.
    pub fn exclude_newer_package(
        &self,
        package_name: &PackageName,
    ) -> Option<ExcludeNewerTimestamp> {
        match self.package.get(package_name) {
            Some(timestamp) => Some(*timestamp),
            None => self.global,
        }
    }

    /// Returns true if this has any configuration (global or per-package).
    pub fn is_empty(&self) -> bool {
        self.global.is_none() && self.package.is_empty()
    }
}
impl std::fmt::Display for ExcludeNewer {
    /// Render as `global: <ts>, <pkg>: <ts>, ...`.
    ///
    /// Package entries are sorted by name so the output is deterministic: the
    /// backing `FxHashMap` has no stable iteration order, and an unsorted
    /// rendering would differ between runs (bad for logs and snapshots).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if let Some(global) = self.global {
            write!(f, "global: {global}")?;
            // Separate the global entry from the package list, if any.
            if !self.package.is_empty() {
                write!(f, ", ")?;
            }
        }
        // Sort by the rendered package name for a stable, human-friendly order.
        let mut entries: Vec<_> = self.package.iter().collect();
        entries.sort_by_key(|(name, _)| name.to_string());
        let mut first = true;
        for (name, timestamp) in entries {
            if !first {
                write!(f, ", ")?;
            }
            write!(f, "{name}: {timestamp}")?;
            first = false;
        }
        Ok(())
    }
}
#[cfg(feature = "schemars")]
impl schemars::JsonSchema for ExcludeNewer {
impl schemars::JsonSchema for ExcludeNewerTimestamp {
fn schema_name() -> Cow<'static, str> {
Cow::Borrowed("ExcludeNewer")
Cow::Borrowed("ExcludeNewerTimestamp")
}
fn json_schema(_generator: &mut schemars::generate::SchemaGenerator) -> schemars::Schema {

View File

@ -1,6 +1,8 @@
pub use dependency_mode::DependencyMode;
pub use error::{ErrorTree, NoSolutionError, NoSolutionHeader, ResolveError, SentinelRange};
pub use exclude_newer::ExcludeNewer;
pub use exclude_newer::{
ExcludeNewer, ExcludeNewerPackage, ExcludeNewerPackageEntry, ExcludeNewerTimestamp,
};
pub use exclusions::Exclusions;
pub use flat_index::{FlatDistributions, FlatIndex};
pub use fork_strategy::ForkStrategy;

View File

@ -60,7 +60,8 @@ pub use crate::lock::tree::TreeDisplay;
use crate::resolution::{AnnotatedDist, ResolutionGraphNode};
use crate::universal_marker::{ConflictMarker, UniversalMarker};
use crate::{
ExcludeNewer, InMemoryIndex, MetadataResponse, PrereleaseMode, ResolutionMode, ResolverOutput,
ExcludeNewer, ExcludeNewerTimestamp, InMemoryIndex, MetadataResponse, PrereleaseMode,
ResolutionMode, ResolverOutput,
};
mod export;
@ -72,7 +73,7 @@ mod tree;
pub const VERSION: u32 = 1;
/// The current revision of the lockfile format.
const REVISION: u32 = 2;
const REVISION: u32 = 3;
static LINUX_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
let pep508 = MarkerTree::from_str("os_name == 'posix' and sys_platform == 'linux'").unwrap();
@ -86,6 +87,10 @@ static MAC_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
let pep508 = MarkerTree::from_str("os_name == 'posix' and sys_platform == 'darwin'").unwrap();
UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
// Markers that are true exactly on Android platforms (`sys_platform == 'android'`),
// with no conflict restriction.
static ANDROID_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let pep508 = MarkerTree::from_str("sys_platform == 'android'").unwrap();
    UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
static ARM_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
let pep508 =
MarkerTree::from_str("platform_machine == 'aarch64' or platform_machine == 'arm64' or platform_machine == 'ARM64'")
@ -105,6 +110,66 @@ static X86_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
.unwrap();
UniversalMarker::new(pep508, ConflictMarker::TRUE)
});
// Precomputed OS × architecture marker conjunctions: each is the corresponding
// OS marker AND'ed with an architecture marker. Computing these once in a
// `LazyLock` avoids rebuilding the conjunction at every use.

// Linux ∧ ARM.
static LINUX_ARM_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*ARM_MARKERS);
    marker
});

// Linux ∧ x86_64.
static LINUX_X86_64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*X86_64_MARKERS);
    marker
});

// Linux ∧ x86.
static LINUX_X86_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *LINUX_MARKERS;
    marker.and(*X86_MARKERS);
    marker
});

// Windows ∧ ARM.
static WINDOWS_ARM_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *WINDOWS_MARKERS;
    marker.and(*ARM_MARKERS);
    marker
});

// Windows ∧ x86_64.
static WINDOWS_X86_64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *WINDOWS_MARKERS;
    marker.and(*X86_64_MARKERS);
    marker
});

// Windows ∧ x86.
static WINDOWS_X86_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *WINDOWS_MARKERS;
    marker.and(*X86_MARKERS);
    marker
});

// macOS ∧ ARM.
static MAC_ARM_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *MAC_MARKERS;
    marker.and(*ARM_MARKERS);
    marker
});

// macOS ∧ x86_64.
static MAC_X86_64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *MAC_MARKERS;
    marker.and(*X86_64_MARKERS);
    marker
});

// macOS ∧ x86.
static MAC_X86_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *MAC_MARKERS;
    marker.and(*X86_MARKERS);
    marker
});

// Android ∧ ARM.
static ANDROID_ARM_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *ANDROID_MARKERS;
    marker.and(*ARM_MARKERS);
    marker
});

// Android ∧ x86_64.
static ANDROID_X86_64_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *ANDROID_MARKERS;
    marker.and(*X86_64_MARKERS);
    marker
});

// Android ∧ x86.
static ANDROID_X86_MARKERS: LazyLock<UniversalMarker> = LazyLock::new(|| {
    let mut marker = *ANDROID_MARKERS;
    marker.and(*X86_MARKERS);
    marker
});
#[derive(Clone, Debug, serde::Deserialize)]
#[serde(try_from = "LockWire")]
@ -278,11 +343,23 @@ impl Lock {
}
let packages = packages.into_values().collect();
let (exclude_newer, exclude_newer_package) = {
let exclude_newer = &resolution.options.exclude_newer;
let global_exclude_newer = exclude_newer.global;
let package_exclude_newer = if exclude_newer.package.is_empty() {
None
} else {
Some(exclude_newer.package.clone().into_inner())
};
(global_exclude_newer, package_exclude_newer)
};
let options = ResolverOptions {
resolution_mode: resolution.options.resolution_mode,
prerelease_mode: resolution.options.prerelease_mode,
fork_strategy: resolution.options.fork_strategy,
exclude_newer: resolution.options.exclude_newer,
exclude_newer,
exclude_newer_package,
};
let lock = Self::new(
VERSION,
@ -323,14 +400,61 @@ impl Lock {
// a single disjointness check with the intersection is sufficient, so we have one
// constant per platform.
let platform_tags = wheel.filename.platform_tags();
if platform_tags.iter().all(PlatformTag::is_any) {
return true;
}
if platform_tags.iter().all(PlatformTag::is_linux) {
if graph.graph[node_index].marker().is_disjoint(*LINUX_MARKERS) {
if platform_tags.iter().all(PlatformTag::is_arm) {
if graph.graph[node_index]
.marker()
.is_disjoint(*LINUX_ARM_MARKERS)
{
return false;
}
} else if platform_tags.iter().all(PlatformTag::is_x86_64) {
if graph.graph[node_index]
.marker()
.is_disjoint(*LINUX_X86_64_MARKERS)
{
return false;
}
} else if platform_tags.iter().all(PlatformTag::is_x86) {
if graph.graph[node_index]
.marker()
.is_disjoint(*LINUX_X86_MARKERS)
{
return false;
}
} else if graph.graph[node_index].marker().is_disjoint(*LINUX_MARKERS) {
return false;
}
}
if platform_tags.iter().all(PlatformTag::is_windows) {
if graph.graph[node_index]
if platform_tags.iter().all(PlatformTag::is_arm) {
if graph.graph[node_index]
.marker()
.is_disjoint(*WINDOWS_ARM_MARKERS)
{
return false;
}
} else if platform_tags.iter().all(PlatformTag::is_x86_64) {
if graph.graph[node_index]
.marker()
.is_disjoint(*WINDOWS_X86_64_MARKERS)
{
return false;
}
} else if platform_tags.iter().all(PlatformTag::is_x86) {
if graph.graph[node_index]
.marker()
.is_disjoint(*WINDOWS_X86_MARKERS)
{
return false;
}
} else if graph.graph[node_index]
.marker()
.is_disjoint(*WINDOWS_MARKERS)
{
@ -339,7 +463,58 @@ impl Lock {
}
if platform_tags.iter().all(PlatformTag::is_macos) {
if graph.graph[node_index].marker().is_disjoint(*MAC_MARKERS) {
if platform_tags.iter().all(PlatformTag::is_arm) {
if graph.graph[node_index]
.marker()
.is_disjoint(*MAC_ARM_MARKERS)
{
return false;
}
} else if platform_tags.iter().all(PlatformTag::is_x86_64) {
if graph.graph[node_index]
.marker()
.is_disjoint(*MAC_X86_64_MARKERS)
{
return false;
}
} else if platform_tags.iter().all(PlatformTag::is_x86) {
if graph.graph[node_index]
.marker()
.is_disjoint(*MAC_X86_MARKERS)
{
return false;
}
} else if graph.graph[node_index].marker().is_disjoint(*MAC_MARKERS) {
return false;
}
}
if platform_tags.iter().all(PlatformTag::is_android) {
if platform_tags.iter().all(PlatformTag::is_arm) {
if graph.graph[node_index]
.marker()
.is_disjoint(*ANDROID_ARM_MARKERS)
{
return false;
}
} else if platform_tags.iter().all(PlatformTag::is_x86_64) {
if graph.graph[node_index]
.marker()
.is_disjoint(*ANDROID_X86_64_MARKERS)
{
return false;
}
} else if platform_tags.iter().all(PlatformTag::is_x86) {
if graph.graph[node_index]
.marker()
.is_disjoint(*ANDROID_X86_MARKERS)
{
return false;
}
} else if graph.graph[node_index]
.marker()
.is_disjoint(*ANDROID_MARKERS)
{
return false;
}
}
@ -643,8 +818,8 @@ impl Lock {
}
/// Returns the exclude newer setting used to generate this lock.
pub fn exclude_newer(&self) -> Option<ExcludeNewer> {
self.options.exclude_newer
pub fn exclude_newer(&self) -> ExcludeNewer {
self.options.exclude_newer()
}
/// Returns the conflicting groups that were used to generate this lock.
@ -890,8 +1065,21 @@ impl Lock {
value(self.options.fork_strategy.to_string()),
);
}
if let Some(exclude_newer) = self.options.exclude_newer {
options_table.insert("exclude-newer", value(exclude_newer.to_string()));
let exclude_newer = &self.options.exclude_newer();
if !exclude_newer.is_empty() {
// Always serialize global exclude-newer as a string
if let Some(global) = exclude_newer.global {
options_table.insert("exclude-newer", value(global.to_string()));
}
// Serialize package-specific exclusions as a separate field
if !exclude_newer.package.is_empty() {
let mut package_table = toml_edit::Table::new();
for (name, timestamp) in &exclude_newer.package {
package_table.insert(name.as_ref(), value(timestamp.to_string()));
}
options_table.insert("exclude-newer-package", Item::Table(package_table));
}
}
if !options_table.is_empty() {
@ -1870,8 +2058,25 @@ struct ResolverOptions {
/// The [`ForkStrategy`] used to generate this lock.
#[serde(default)]
fork_strategy: ForkStrategy,
/// The [`ExcludeNewer`] used to generate this lock.
exclude_newer: Option<ExcludeNewer>,
/// The global [`ExcludeNewer`] timestamp.
exclude_newer: Option<ExcludeNewerTimestamp>,
/// Package-specific [`ExcludeNewer`] timestamps.
exclude_newer_package: Option<FxHashMap<PackageName, ExcludeNewerTimestamp>>,
}
impl ResolverOptions {
    /// Reconstruct the combined [`ExcludeNewer`] configuration from the global
    /// timestamp and any per-package timestamps stored in the lockfile options.
    fn exclude_newer(&self) -> ExcludeNewer {
        // Per-package entries are optional in the lockfile; an absent map is
        // treated the same as an empty one.
        let package = self
            .exclude_newer_package
            .clone()
            .unwrap_or_default()
            .into_iter()
            .map(Into::into)
            .collect();
        ExcludeNewer::from_args(self.exclude_newer, package)
    }
}
#[derive(Clone, Debug, Default, serde::Deserialize, PartialEq, Eq)]

View File

@ -37,6 +37,7 @@ Ok(
prerelease_mode: IfNecessaryOrExplicit,
fork_strategy: RequiresPython,
exclude_newer: None,
exclude_newer_package: None,
},
packages: [
Package {

View File

@ -37,6 +37,7 @@ Ok(
prerelease_mode: IfNecessaryOrExplicit,
fork_strategy: RequiresPython,
exclude_newer: None,
exclude_newer_package: None,
},
packages: [
Package {

View File

@ -37,6 +37,7 @@ Ok(
prerelease_mode: IfNecessaryOrExplicit,
fork_strategy: RequiresPython,
exclude_newer: None,
exclude_newer_package: None,
},
packages: [
Package {

View File

@ -37,6 +37,7 @@ Ok(
prerelease_mode: IfNecessaryOrExplicit,
fork_strategy: RequiresPython,
exclude_newer: None,
exclude_newer_package: None,
},
packages: [
Package {

View File

@ -37,6 +37,7 @@ Ok(
prerelease_mode: IfNecessaryOrExplicit,
fork_strategy: RequiresPython,
exclude_newer: None,
exclude_newer_package: None,
},
packages: [
Package {

View File

@ -37,6 +37,7 @@ Ok(
prerelease_mode: IfNecessaryOrExplicit,
fork_strategy: RequiresPython,
exclude_newer: None,
exclude_newer_package: None,
},
packages: [
Package {

View File

@ -37,6 +37,7 @@ Ok(
prerelease_mode: IfNecessaryOrExplicit,
fork_strategy: RequiresPython,
exclude_newer: None,
exclude_newer_package: None,
},
packages: [
Package {

View File

@ -37,6 +37,7 @@ Ok(
prerelease_mode: IfNecessaryOrExplicit,
fork_strategy: RequiresPython,
exclude_newer: None,
exclude_newer_package: None,
},
packages: [
Package {

View File

@ -37,6 +37,7 @@ Ok(
prerelease_mode: IfNecessaryOrExplicit,
fork_strategy: RequiresPython,
exclude_newer: None,
exclude_newer_package: None,
},
packages: [
Package {

View File

@ -37,6 +37,7 @@ Ok(
prerelease_mode: IfNecessaryOrExplicit,
fork_strategy: RequiresPython,
exclude_newer: None,
exclude_newer_package: None,
},
packages: [
Package {

View File

@ -37,6 +37,7 @@ Ok(
prerelease_mode: IfNecessaryOrExplicit,
fork_strategy: RequiresPython,
exclude_newer: None,
exclude_newer_package: None,
},
packages: [
Package {

View File

@ -12,7 +12,7 @@ pub struct Options {
pub prerelease_mode: PrereleaseMode,
pub dependency_mode: DependencyMode,
pub fork_strategy: ForkStrategy,
pub exclude_newer: Option<ExcludeNewer>,
pub exclude_newer: ExcludeNewer,
pub index_strategy: IndexStrategy,
pub required_environments: SupportedEnvironments,
pub flexibility: Flexibility,
@ -27,7 +27,7 @@ pub struct OptionsBuilder {
prerelease_mode: PrereleaseMode,
dependency_mode: DependencyMode,
fork_strategy: ForkStrategy,
exclude_newer: Option<ExcludeNewer>,
exclude_newer: ExcludeNewer,
index_strategy: IndexStrategy,
required_environments: SupportedEnvironments,
flexibility: Flexibility,
@ -71,7 +71,7 @@ impl OptionsBuilder {
/// Sets the exclusion date.
#[must_use]
pub fn exclude_newer(mut self, exclude_newer: Option<ExcludeNewer>) -> Self {
pub fn exclude_newer(mut self, exclude_newer: ExcludeNewer) -> Self {
self.exclude_newer = exclude_newer;
self
}

View File

@ -182,7 +182,7 @@ impl<'a, Context: BuildContext, InstalledPackages: InstalledPackagesProvider>
python_requirement.target(),
AllowedYanks::from_manifest(&manifest, &env, options.dependency_mode),
hasher,
options.exclude_newer,
options.exclude_newer.clone(),
build_context.build_options(),
build_context.capabilities(),
);
@ -366,7 +366,7 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
state.fork_indexes,
state.env,
self.current_environment.clone(),
self.options.exclude_newer,
Some(&self.options.exclude_newer),
&visited,
));
}
@ -2537,7 +2537,7 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
fork_indexes: ForkIndexes,
env: ResolverEnvironment,
current_environment: MarkerEnvironment,
exclude_newer: Option<ExcludeNewer>,
exclude_newer: Option<&ExcludeNewer>,
visited: &FxHashSet<PackageName>,
) -> ResolveError {
err = NoSolutionError::collapse_local_version_segments(NoSolutionError::collapse_proxies(
@ -2596,7 +2596,9 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
for (version, dists) in version_map.iter(&Ranges::full()) {
// Don't show versions removed by excluded-newer in hints.
if let Some(exclude_newer) = exclude_newer {
if let Some(exclude_newer) =
exclude_newer.and_then(|en| en.exclude_newer_package(name))
{
let Some(prioritized_dist) = dists.prioritized_dist() else {
continue;
};

View File

@ -116,7 +116,7 @@ pub struct DefaultResolverProvider<'a, Context: BuildContext> {
requires_python: RequiresPython,
allowed_yanks: AllowedYanks,
hasher: HashStrategy,
exclude_newer: Option<ExcludeNewer>,
exclude_newer: ExcludeNewer,
build_options: &'a BuildOptions,
capabilities: &'a IndexCapabilities,
}
@ -130,7 +130,7 @@ impl<'a, Context: BuildContext> DefaultResolverProvider<'a, Context> {
requires_python: &'a RequiresPython,
allowed_yanks: AllowedYanks,
hasher: &'a HashStrategy,
exclude_newer: Option<ExcludeNewer>,
exclude_newer: ExcludeNewer,
build_options: &'a BuildOptions,
capabilities: &'a IndexCapabilities,
) -> Self {
@ -184,7 +184,7 @@ impl<Context: BuildContext> ResolverProvider for DefaultResolverProvider<'_, Con
&self.requires_python,
&self.allowed_yanks,
&self.hasher,
self.exclude_newer.as_ref(),
Some(&self.exclude_newer),
flat_index
.and_then(|flat_index| flat_index.get(package_name))
.cloned(),

View File

@ -22,7 +22,7 @@ use uv_types::HashStrategy;
use uv_warnings::warn_user_once;
use crate::flat_index::FlatDistributions;
use crate::{ExcludeNewer, yanks::AllowedYanks};
use crate::{ExcludeNewer, ExcludeNewerTimestamp, yanks::AllowedYanks};
/// A map from versions to distributions.
#[derive(Debug)]
@ -112,7 +112,7 @@ impl VersionMap {
allowed_yanks: allowed_yanks.clone(),
hasher: hasher.clone(),
requires_python: requires_python.clone(),
exclude_newer: exclude_newer.copied(),
exclude_newer: exclude_newer.and_then(|en| en.exclude_newer_package(package_name)),
}),
}
}
@ -365,7 +365,7 @@ struct VersionMapLazy {
/// in the current environment.
tags: Option<Tags>,
/// Whether files newer than this timestamp should be excluded or not.
exclude_newer: Option<ExcludeNewer>,
exclude_newer: Option<ExcludeNewerTimestamp>,
/// Which yanked versions are allowed
allowed_yanks: AllowedYanks,
/// The hashes of allowed distributions.
@ -420,7 +420,7 @@ impl VersionMapLazy {
for (filename, file) in files.all() {
// Support resolving as if it were an earlier timestamp, at least as long files have
// upload time information.
let (excluded, upload_time) = if let Some(exclude_newer) = self.exclude_newer {
let (excluded, upload_time) = if let Some(exclude_newer) = &self.exclude_newer {
match file.upload_time_utc_ms.as_ref() {
Some(&upload_time) if upload_time >= exclude_newer.timestamp_millis() => {
(true, Some(upload_time))

View File

@ -11,6 +11,8 @@ doctest = false
workspace = true
[dependencies]
uv-configuration = { workspace = true }
uv-distribution-types = { workspace = true }
uv-pep440 = { workspace = true }
uv-pep508 = { workspace = true }
uv-pypi-types = { workspace = true }

View File

@ -9,6 +9,7 @@ use serde::Deserialize;
use thiserror::Error;
use url::Url;
use uv_configuration::NoSources;
use uv_pep440::VersionSpecifiers;
use uv_pep508::PackageName;
use uv_pypi_types::VerbatimParsedUrl;
@ -96,6 +97,46 @@ impl Pep723ItemRef<'_> {
Self::Remote(..) => None,
}
}
/// Determine the working directory for the script.
///
/// For an on-disk script this is the parent directory of its absolutized path;
/// for stdin or remote scripts, the current working directory is used.
pub fn directory(&self) -> Result<PathBuf, io::Error> {
    match self {
        Self::Stdin(..) | Self::Remote(..) => std::env::current_dir(),
        Self::Script(script) => {
            let absolute = std::path::absolute(&script.path)?;
            // An absolutized file path always has a parent directory.
            let parent = absolute.parent().expect("script path has no parent");
            Ok(parent.to_owned())
        }
    }
}
/// Collect any `tool.uv.index` from the script.
///
/// Returns an empty slice when sources are disabled (globally or for specific
/// packages).
pub fn indexes(&self, source_strategy: &NoSources) -> &[uv_distribution_types::Index] {
    match source_strategy {
        NoSources::All | NoSources::Packages(_) => &[],
        NoSources::None => {
            let indexes = self
                .metadata()
                .tool
                .as_ref()
                .and_then(|tool| tool.uv.as_ref())
                .and_then(|uv| uv.top_level.index.as_deref());
            indexes.unwrap_or(&[])
        }
    }
}
/// Collect any `tool.uv.sources` from the script.
///
/// Returns an empty map when sources are disabled (globally or for specific
/// packages).
pub fn sources(&self, source_strategy: &NoSources) -> &BTreeMap<PackageName, Sources> {
    // Shared empty map so the disabled case can return a borrow with a
    // `'static`-compatible lifetime.
    static EMPTY: BTreeMap<PackageName, Sources> = BTreeMap::new();
    match source_strategy {
        NoSources::All | NoSources::Packages(_) => &EMPTY,
        NoSources::None => self
            .metadata()
            .tool
            .as_ref()
            .and_then(|tool| tool.uv.as_ref())
            .and_then(|uv| uv.sources.as_ref())
            .unwrap_or(&EMPTY),
    }
}
}
impl<'item> From<&'item Pep723Item> for Pep723ItemRef<'item> {
@ -108,6 +149,12 @@ impl<'item> From<&'item Pep723Item> for Pep723ItemRef<'item> {
}
}
/// Allow borrowing a [`Pep723Script`] directly as a [`Pep723ItemRef`].
impl<'item> From<&'item Pep723Script> for Pep723ItemRef<'item> {
    fn from(script: &'item Pep723Script) -> Self {
        Self::Script(script)
    }
}
/// A PEP 723 script, including its [`Pep723Metadata`].
#[derive(Debug, Clone)]
pub struct Pep723Script {
@ -381,6 +428,8 @@ pub struct ToolUv {
pub override_dependencies: Option<Vec<uv_pep508::Requirement<VerbatimParsedUrl>>>,
pub constraint_dependencies: Option<Vec<uv_pep508::Requirement<VerbatimParsedUrl>>>,
pub build_constraint_dependencies: Option<Vec<uv_pep508::Requirement<VerbatimParsedUrl>>>,
pub extra_build_dependencies:
Option<BTreeMap<PackageName, Vec<uv_pep508::Requirement<VerbatimParsedUrl>>>>,
pub sources: Option<BTreeMap<PackageName, Sources>>,
}

View File

@ -1,5 +1,5 @@
use std::num::NonZeroUsize;
use std::path::PathBuf;
use std::{collections::BTreeMap, num::NonZeroUsize};
use url::Url;
@ -12,8 +12,12 @@ use uv_install_wheel::LinkMode;
use uv_pypi_types::{SchemaConflicts, SupportedEnvironments};
use uv_python::{PythonDownloads, PythonPreference, PythonVersion};
use uv_redacted::DisplaySafeUrl;
use uv_resolver::{AnnotationStyle, ExcludeNewer, ForkStrategy, PrereleaseMode, ResolutionMode};
use uv_resolver::{
AnnotationStyle, ExcludeNewer, ExcludeNewerPackage, ExcludeNewerTimestamp, ForkStrategy,
PrereleaseMode, ResolutionMode,
};
use uv_torch::TorchMode;
use uv_workspace::pyproject::ExtraBuildDependencies;
use uv_workspace::pyproject_mut::AddBoundsKind;
use crate::{FilesystemOptions, Options, PipOptions};
@ -78,6 +82,7 @@ macro_rules! impl_combine_or {
impl_combine_or!(AddBoundsKind);
impl_combine_or!(AnnotationStyle);
impl_combine_or!(ExcludeNewer);
impl_combine_or!(ExcludeNewerTimestamp);
impl_combine_or!(ExportFormat);
impl_combine_or!(ForkStrategy);
impl_combine_or!(Index);
@ -120,6 +125,37 @@ impl<T> Combine for Option<Vec<T>> {
}
}
impl<K: Ord, T> Combine for Option<BTreeMap<K, Vec<T>>> {
    /// Combine two maps of vecs by combining their vecs
    fn combine(self, other: Option<BTreeMap<K, Vec<T>>>) -> Option<BTreeMap<K, Vec<T>>> {
        // If one side is absent, the other wins unchanged.
        let Some(mut merged) = self else { return other };
        if let Some(incoming) = other {
            for (key, values) in incoming {
                // Append the incoming values after any existing ones for this key.
                merged.entry(key).or_default().extend(values);
            }
        }
        Some(merged)
    }
}
impl Combine for Option<ExcludeNewerPackage> {
    /// Combine two [`ExcludeNewerPackage`] instances by merging them, with the values in `self` taking precedence.
    fn combine(self, other: Option<ExcludeNewerPackage>) -> Option<ExcludeNewerPackage> {
        // If one side is absent, the other wins unchanged.
        let Some(mut merged) = self else { return other };
        if let Some(incoming) = other {
            for (package, timestamp) in incoming {
                // `or_insert` keeps any timestamp already present in `self`.
                merged.entry(package).or_insert(timestamp);
            }
        }
        Some(merged)
    }
}
impl Combine for Option<ConfigSettings> {
/// Combine two maps by merging the map in `self` with the map in `other`, if they're both
/// `Some`.
@ -163,3 +199,49 @@ impl Combine for Option<serde::de::IgnoredAny> {
self
}
}
impl Combine for ExcludeNewer {
    /// Combine two [`ExcludeNewer`] configurations, with `self` taking precedence.
    ///
    /// Global timestamps are combined via [`Combine`]; package-specific
    /// timestamps are merged without overwriting entries already in `self`.
    fn combine(mut self, other: Self) -> Self {
        self.global = self.global.combine(other.global);
        if self.package.is_empty() {
            // Fast path: adopt `other`'s map wholesale (also covers both-empty).
            self.package = other.package;
        } else {
            // Merge package-specific timestamps, with `self` taking precedence.
            // Consuming `other.package` by value avoids cloning each key, which
            // the previous by-reference iteration did on every entry.
            for (package, timestamp) in other.package {
                self.package.entry(package).or_insert(timestamp);
            }
        }
        self
    }
}
impl Combine for ExtraBuildDependencies {
    /// Combine two sets of extra build dependencies by concatenating the
    /// requirement lists per package, with `self`'s entries listed first.
    fn combine(mut self, other: Self) -> Self {
        for (key, value) in other {
            // `or_default` inserts an empty list for new keys, so a single
            // `extend` covers both the occupied and vacant cases that the
            // previous manual `Entry` match handled separately.
            self.entry(key).or_default().extend(value);
        }
        self
    }
}
impl Combine for Option<ExtraBuildDependencies> {
    /// Combine two optional sets of extra build dependencies, merging when both
    /// are present; otherwise the present side (if any) wins unchanged.
    fn combine(self, other: Option<ExtraBuildDependencies>) -> Option<ExtraBuildDependencies> {
        let Some(ours) = self else { return other };
        let Some(theirs) = other else { return Some(ours) };
        Some(ours.combine(theirs))
    }
}

View File

@ -271,7 +271,6 @@ fn validate_uv_toml(path: &Path, options: &Options) -> Result<(), Error> {
"environments",
));
}
if required_environments.is_some() {
return Err(Error::PyprojectOnlyField(
path.to_path_buf(),
@ -318,7 +317,9 @@ fn warn_uv_toml_masked_fields(options: &Options) {
config_settings_package,
no_build_isolation,
no_build_isolation_package,
extra_build_dependencies,
exclude_newer,
exclude_newer_package,
link_mode,
compile_bytecode,
no_sources,
@ -446,9 +447,15 @@ fn warn_uv_toml_masked_fields(options: &Options) {
if no_build_isolation_package.is_some() {
masked_fields.push("no-build-isolation-package");
}
if extra_build_dependencies.is_some() {
masked_fields.push("extra-build-dependencies");
}
if exclude_newer.is_some() {
masked_fields.push("exclude-newer");
}
if exclude_newer_package.is_some() {
masked_fields.push("exclude-newer-package");
}
if link_mode.is_some() {
masked_fields.push("link-mode");
}

View File

@ -12,15 +12,19 @@ use uv_distribution_types::{
};
use uv_install_wheel::LinkMode;
use uv_macros::{CombineOptions, OptionsMetadata};
use uv_normalize::{ExtraName, PackageName, PipGroupName};
use uv_pep508::Requirement;
use uv_pypi_types::{SupportedEnvironments, VerbatimParsedUrl};
use uv_python::{PythonDownloads, PythonPreference, PythonVersion};
use uv_redacted::DisplaySafeUrl;
use uv_resolver::{AnnotationStyle, ExcludeNewer, ForkStrategy, PrereleaseMode, ResolutionMode};
use uv_resolver::{
AnnotationStyle, ExcludeNewer, ExcludeNewerPackage, ExcludeNewerTimestamp, ForkStrategy,
PrereleaseMode, ResolutionMode,
};
use uv_static::EnvVars;
use uv_torch::TorchMode;
use uv_workspace::pyproject_mut::AddBoundsKind;
use uv_workspace::{pyproject::ExtraBuildDependencies, pyproject_mut::AddBoundsKind};
/// A `pyproject.toml` with an (optional) `[tool.uv]` section.
#[allow(dead_code)]
@ -333,7 +337,7 @@ pub struct InstallerOptions {
pub index_strategy: Option<IndexStrategy>,
pub keyring_provider: Option<KeyringProviderType>,
pub config_settings: Option<ConfigSettings>,
pub exclude_newer: Option<ExcludeNewer>,
pub exclude_newer: Option<ExcludeNewerTimestamp>,
pub link_mode: Option<LinkMode>,
pub compile_bytecode: Option<bool>,
pub reinstall: Option<bool>,
@ -363,7 +367,7 @@ pub struct ResolverOptions {
pub dependency_metadata: Option<Vec<StaticMetadata>>,
pub config_settings: Option<ConfigSettings>,
pub config_settings_package: Option<PackageConfigSettings>,
pub exclude_newer: Option<ExcludeNewer>,
pub exclude_newer: ExcludeNewer,
pub link_mode: Option<LinkMode>,
pub upgrade: Option<bool>,
pub upgrade_package: Option<Vec<Requirement<VerbatimParsedUrl>>>,
@ -373,6 +377,7 @@ pub struct ResolverOptions {
pub no_binary_package: Option<Vec<PackageName>>,
pub no_build_isolation: Option<bool>,
pub no_build_isolation_package: Option<Vec<PackageName>>,
pub extra_build_dependencies: Option<ExtraBuildDependencies>,
pub no_sources: Option<bool>,
pub no_sources_package: Option<Vec<PackageName>>,
}
@ -626,6 +631,20 @@ pub struct ResolverInstallerOptions {
"#
)]
pub no_build_isolation_package: Option<Vec<PackageName>>,
/// Additional build dependencies for packages.
///
/// This allows extending the PEP 517 build environment for the project's dependencies with
/// additional packages. This is useful for packages that assume the presence of packages like
/// `pip`, and do not declare them as build dependencies.
#[option(
default = "[]",
value_type = "dict",
example = r#"
[extra-build-dependencies]
pytest = ["setuptools"]
"#
)]
pub extra_build_dependencies: Option<ExtraBuildDependencies>,
/// Limit candidate packages to those that were uploaded prior to a given point in time.
///
/// Accepts a superset of [RFC 3339](https://www.rfc-editor.org/rfc/rfc3339.html) (e.g.,
@ -638,7 +657,18 @@ pub struct ResolverInstallerOptions {
exclude-newer = "2006-12-02T02:07:43Z"
"#
)]
pub exclude_newer: Option<ExcludeNewer>,
pub exclude_newer: Option<ExcludeNewerTimestamp>,
/// Limit candidate packages for specific packages to those that were uploaded prior to the given date.
///
/// Accepts package-date pairs in a dictionary format.
#[option(
default = "None",
value_type = "dict",
example = r#"
exclude-newer-package = { tqdm = "2022-04-04T00:00:00Z" }
"#
)]
pub exclude_newer_package: Option<ExcludeNewerPackage>,
/// The method to use when installing packages from the global cache.
///
/// Defaults to `clone` (also known as Copy-on-Write) on macOS, and `hardlink` on Linux and
@ -1131,6 +1161,20 @@ pub struct PipOptions {
"#
)]
pub no_build_isolation_package: Option<Vec<PackageName>>,
/// Additional build dependencies for packages.
///
/// This allows extending the PEP 517 build environment for the project's dependencies with
/// additional packages. This is useful for packages that assume the presence of packages like
/// `pip`, and do not declare them as build dependencies.
#[option(
default = "[]",
value_type = "dict",
example = r#"
[extra-build-dependencies]
pytest = ["setuptools"]
"#
)]
pub extra_build_dependencies: Option<ExtraBuildDependencies>,
/// Validate the Python environment, to detect packages with missing dependencies and other
/// issues.
#[option(
@ -1420,7 +1464,18 @@ pub struct PipOptions {
exclude-newer = "2006-12-02T02:07:43Z"
"#
)]
pub exclude_newer: Option<ExcludeNewer>,
pub exclude_newer: Option<ExcludeNewerTimestamp>,
/// Limit candidate packages for specific packages to those that were uploaded prior to the given date.
///
/// Accepts package-date pairs in a dictionary format.
#[option(
default = "None",
value_type = "dict",
example = r#"
exclude-newer-package = { tqdm = "2022-04-04T00:00:00Z" }
"#
)]
pub exclude_newer_package: Option<ExcludeNewerPackage>,
/// Specify a package to omit from the output resolution. Its dependencies will still be
/// included in the resolution. Equivalent to pip-compile's `--unsafe-package` option.
#[option(
@ -1695,7 +1750,15 @@ impl From<ResolverInstallerOptions> for ResolverOptions {
dependency_metadata: value.dependency_metadata,
config_settings: value.config_settings,
config_settings_package: value.config_settings_package,
exclude_newer: value.exclude_newer,
exclude_newer: ExcludeNewer::from_args(
value.exclude_newer,
value
.exclude_newer_package
.unwrap_or_default()
.into_iter()
.map(Into::into)
.collect(),
),
link_mode: value.link_mode,
upgrade: value.upgrade,
upgrade_package: value.upgrade_package,
@ -1705,6 +1768,7 @@ impl From<ResolverInstallerOptions> for ResolverOptions {
no_binary_package: value.no_binary_package,
no_build_isolation: value.no_build_isolation,
no_build_isolation_package: value.no_build_isolation_package,
extra_build_dependencies: value.extra_build_dependencies,
no_sources: value.no_sources,
no_sources_package: value.no_sources_package,
}
@ -1722,7 +1786,16 @@ impl From<ResolverInstallerOptions> for InstallerOptions {
index_strategy: value.index_strategy,
keyring_provider: value.keyring_provider,
config_settings: value.config_settings,
exclude_newer: value.exclude_newer,
exclude_newer: ExcludeNewer::from_args(
value.exclude_newer,
value
.exclude_newer_package
.unwrap_or_default()
.into_iter()
.map(Into::into)
.collect(),
)
.global,
link_mode: value.link_mode,
compile_bytecode: value.compile_bytecode,
reinstall: value.reinstall,
@ -1763,7 +1836,9 @@ pub struct ToolOptions {
pub config_settings_package: Option<PackageConfigSettings>,
pub no_build_isolation: Option<bool>,
pub no_build_isolation_package: Option<Vec<PackageName>>,
pub exclude_newer: Option<ExcludeNewer>,
pub extra_build_dependencies: Option<ExtraBuildDependencies>,
pub exclude_newer: Option<ExcludeNewerTimestamp>,
pub exclude_newer_package: Option<ExcludeNewerPackage>,
pub link_mode: Option<LinkMode>,
pub compile_bytecode: Option<bool>,
pub no_sources: Option<bool>,
@ -1792,7 +1867,9 @@ impl From<ResolverInstallerOptions> for ToolOptions {
config_settings_package: value.config_settings_package,
no_build_isolation: value.no_build_isolation,
no_build_isolation_package: value.no_build_isolation_package,
extra_build_dependencies: value.extra_build_dependencies,
exclude_newer: value.exclude_newer,
exclude_newer_package: value.exclude_newer_package,
link_mode: value.link_mode,
compile_bytecode: value.compile_bytecode,
no_sources: value.no_sources,
@ -1823,7 +1900,9 @@ impl From<ToolOptions> for ResolverInstallerOptions {
config_settings_package: value.config_settings_package,
no_build_isolation: value.no_build_isolation,
no_build_isolation_package: value.no_build_isolation_package,
extra_build_dependencies: value.extra_build_dependencies,
exclude_newer: value.exclude_newer,
exclude_newer_package: value.exclude_newer_package,
link_mode: value.link_mode,
compile_bytecode: value.compile_bytecode,
no_sources: value.no_sources,
@ -1877,7 +1956,9 @@ pub struct OptionsWire {
config_settings_package: Option<PackageConfigSettings>,
no_build_isolation: Option<bool>,
no_build_isolation_package: Option<Vec<PackageName>>,
exclude_newer: Option<ExcludeNewer>,
extra_build_dependencies: Option<ExtraBuildDependencies>,
exclude_newer: Option<ExcludeNewerTimestamp>,
exclude_newer_package: Option<ExcludeNewerPackage>,
link_mode: Option<LinkMode>,
compile_bytecode: Option<bool>,
no_sources: Option<bool>,
@ -1969,6 +2050,7 @@ impl From<OptionsWire> for Options {
no_build_isolation,
no_build_isolation_package,
exclude_newer,
exclude_newer_package,
link_mode,
compile_bytecode,
no_sources,
@ -1996,6 +2078,7 @@ impl From<OptionsWire> for Options {
sources,
default_groups,
dependency_groups,
extra_build_dependencies,
dev_dependencies,
managed,
package,
@ -2036,7 +2119,9 @@ impl From<OptionsWire> for Options {
config_settings_package,
no_build_isolation,
no_build_isolation_package,
extra_build_dependencies,
exclude_newer,
exclude_newer_package,
link_mode,
compile_bytecode,
no_sources,

View File

@ -735,9 +735,15 @@ impl EnvVars {
/// the installer from modifying shell profiles or environment variables.
pub const UV_UNMANAGED_INSTALL: &'static str = "UV_UNMANAGED_INSTALL";
/// The URL from which to download uv using the standalone installer. By default, installs from
/// uv's GitHub Releases. `INSTALLER_DOWNLOAD_URL` is also supported as an alias, for backwards
/// compatibility.
pub const UV_DOWNLOAD_URL: &'static str = "UV_DOWNLOAD_URL";
/// Avoid modifying the `PATH` environment variable when installing uv using the standalone
/// installer and `self update` feature.
pub const INSTALLER_NO_MODIFY_PATH: &'static str = "INSTALLER_NO_MODIFY_PATH";
/// installer and `self update` feature. `INSTALLER_NO_MODIFY_PATH` is also supported as an
/// alias, for backwards compatibility.
pub const UV_NO_MODIFY_PATH: &'static str = "UV_NO_MODIFY_PATH";
/// Skip writing `uv` installer metadata files (e.g., `INSTALLER`, `REQUESTED`, and `direct_url.json`) to site-packages `.dist-info` directories.
pub const UV_NO_INSTALLER_METADATA: &'static str = "UV_NO_INSTALLER_METADATA";

View File

@ -103,6 +103,7 @@ impl TryFrom<ToolWire> for Tool {
pub struct ToolEntrypoint {
pub name: String,
pub install_path: PathBuf,
pub from: Option<String>,
}
impl Display for ToolEntrypoint {
@ -166,10 +167,10 @@ impl Tool {
overrides: Vec<Requirement>,
build_constraints: Vec<Requirement>,
python: Option<PythonRequest>,
entrypoints: impl Iterator<Item = ToolEntrypoint>,
entrypoints: impl IntoIterator<Item = ToolEntrypoint>,
options: ToolOptions,
) -> Self {
let mut entrypoints: Vec<_> = entrypoints.collect();
let mut entrypoints: Vec<_> = entrypoints.into_iter().collect();
entrypoints.sort();
Self {
requirements,
@ -345,8 +346,15 @@ impl Tool {
impl ToolEntrypoint {
/// Create a new [`ToolEntrypoint`].
pub fn new(name: String, install_path: PathBuf) -> Self {
Self { name, install_path }
pub fn new(name: &str, install_path: PathBuf, from: String) -> Self {
let name = name
.trim_end_matches(std::env::consts::EXE_SUFFIX)
.to_string();
Self {
name,
install_path,
from: Some(from),
}
}
/// Returns the TOML table for this entrypoint.
@ -358,6 +366,9 @@ impl ToolEntrypoint {
// Use cross-platform slashes so the toml string type does not change
value(PortablePath::from(&self.install_path).to_string()),
);
if let Some(from) = &self.from {
table.insert("from", value(from));
}
table
}
}

View File

@ -101,6 +101,9 @@ pub trait BuildContext {
/// Workspace discovery caching.
fn workspace_cache(&self) -> &WorkspaceCache;
/// Get the extra build dependencies.
fn extra_build_dependencies(&self) -> &uv_workspace::pyproject::ExtraBuildDependencies;
/// Resolve the given requirements into a ready-to-install set of package versions.
fn resolve<'a>(
&'a self,

View File

@ -1,6 +1,6 @@
[package]
name = "uv-version"
version = "0.8.3"
version = "0.8.4"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }

View File

@ -1,3 +1,5 @@
use std::error::Error;
use std::iter;
use std::sync::atomic::AtomicBool;
use std::sync::{LazyLock, Mutex};
@ -6,6 +8,7 @@ use std::sync::{LazyLock, Mutex};
pub use anstream;
#[doc(hidden)]
pub use owo_colors;
use owo_colors::{DynColor, OwoColorize};
use rustc_hash::FxHashSet;
/// Whether user-facing warnings are enabled.
@ -56,3 +59,41 @@ macro_rules! warn_user_once {
}
}};
}
/// Write a formatted error (or warning) chain to `stream`.
///
/// The top-level message is prefixed with `level` (e.g., `error` or `warning`),
/// rendered bold in `color`, and each transitive [`Error::source`] is written on
/// its own `Caused by:` line beneath it.
///
/// # Example
///
/// ```text
/// error: Failed to install app
///  Caused by: Failed to install dependency
///  Caused by: Error writing failed `/home/ferris/deps/foo`: Permission denied
/// ```
///
/// ```text
/// warning: Failed to create registry entry for Python 3.12
///  Caused by: Security policy forbids chaining registry entries
/// ```
pub fn write_error_chain(
    err: &dyn Error,
    mut stream: impl std::fmt::Write,
    level: impl AsRef<str>,
    color: impl DynColor + Copy,
) -> std::fmt::Result {
    // `trim` drops any stray leading/trailing whitespace (e.g. a trailing
    // newline) in the error message so the chain renders as a tight block.
    writeln!(
        &mut stream,
        "{}{} {}",
        level.as_ref().color(color).bold(),
        ":".bold(),
        err.to_string().trim()
    )?;
    // Walk the full `source()` chain, emitting one `Caused by` line per level.
    for source in iter::successors(err.source(), |&err| err.source()) {
        writeln!(
            &mut stream,
            " {}: {}",
            "Caused by".color(color).bold(),
            source.to_string().trim()
        )?;
    }
    Ok(())
}

View File

@ -50,6 +50,55 @@ pub enum PyprojectTomlError {
MissingVersion,
}
/// Helper function to deserialize a map while ensuring all keys are unique.
fn deserialize_unique_map<'de, D, K, V, F>(
deserializer: D,
error_msg: F,
) -> Result<BTreeMap<K, V>, D::Error>
where
D: Deserializer<'de>,
K: Deserialize<'de> + Ord + std::fmt::Display,
V: Deserialize<'de>,
F: FnOnce(&K) -> String,
{
struct Visitor<K, V, F>(F, std::marker::PhantomData<(K, V)>);
impl<'de, K, V, F> serde::de::Visitor<'de> for Visitor<K, V, F>
where
K: Deserialize<'de> + Ord + std::fmt::Display,
V: Deserialize<'de>,
F: FnOnce(&K) -> String,
{
type Value = BTreeMap<K, V>;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("a map with unique keys")
}
fn visit_map<M>(self, mut access: M) -> Result<Self::Value, M::Error>
where
M: serde::de::MapAccess<'de>,
{
use std::collections::btree_map::Entry;
let mut map = BTreeMap::new();
while let Some((key, value)) = access.next_entry::<K, V>()? {
match map.entry(key) {
Entry::Occupied(entry) => {
return Err(serde::de::Error::custom((self.0)(entry.key())));
}
Entry::Vacant(entry) => {
entry.insert(value);
}
}
}
Ok(map)
}
}
deserializer.deserialize_map(Visitor(error_msg, std::marker::PhantomData))
}
/// A `pyproject.toml` as specified in PEP 517.
#[derive(Deserialize, Debug, Clone)]
#[cfg_attr(test, derive(Serialize))]
@ -378,6 +427,21 @@ pub struct ToolUv {
)]
pub dependency_groups: Option<ToolUvDependencyGroups>,
/// Additional build dependencies for packages.
///
/// This allows extending the PEP 517 build environment for the project's dependencies with
/// additional packages. This is useful for packages that assume the presence of certain
/// packages, like `pip`, but do not declare them as build dependencies.
#[option(
default = "[]",
value_type = "dict",
example = r#"
[tool.uv.extra-build-dependencies]
pytest = ["pip"]
"#
)]
pub extra_build_dependencies: Option<ExtraBuildDependencies>,
/// The project's development dependencies.
///
/// Development dependencies will be installed by default in `uv run` and `uv sync`, but will
@ -643,38 +707,10 @@ impl<'de> serde::de::Deserialize<'de> for ToolUvSources {
where
D: Deserializer<'de>,
{
struct SourcesVisitor;
impl<'de> serde::de::Visitor<'de> for SourcesVisitor {
type Value = ToolUvSources;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("a map with unique keys")
}
fn visit_map<M>(self, mut access: M) -> Result<Self::Value, M::Error>
where
M: serde::de::MapAccess<'de>,
{
let mut sources = BTreeMap::new();
while let Some((key, value)) = access.next_entry::<PackageName, Sources>()? {
match sources.entry(key) {
std::collections::btree_map::Entry::Occupied(entry) => {
return Err(serde::de::Error::custom(format!(
"duplicate sources for package `{}`",
entry.key()
)));
}
std::collections::btree_map::Entry::Vacant(entry) => {
entry.insert(value);
}
}
}
Ok(ToolUvSources(sources))
}
}
deserializer.deserialize_map(SourcesVisitor)
deserialize_unique_map(deserializer, |key: &PackageName| {
format!("duplicate sources for package `{key}`")
})
.map(ToolUvSources)
}
}
@ -702,40 +738,10 @@ impl<'de> serde::de::Deserialize<'de> for ToolUvDependencyGroups {
where
D: Deserializer<'de>,
{
struct SourcesVisitor;
impl<'de> serde::de::Visitor<'de> for SourcesVisitor {
type Value = ToolUvDependencyGroups;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("a map with unique keys")
}
fn visit_map<M>(self, mut access: M) -> Result<Self::Value, M::Error>
where
M: serde::de::MapAccess<'de>,
{
let mut groups = BTreeMap::new();
while let Some((key, value)) =
access.next_entry::<GroupName, DependencyGroupSettings>()?
{
match groups.entry(key) {
std::collections::btree_map::Entry::Occupied(entry) => {
return Err(serde::de::Error::custom(format!(
"duplicate settings for dependency group `{}`",
entry.key()
)));
}
std::collections::btree_map::Entry::Vacant(entry) => {
entry.insert(value);
}
}
}
Ok(ToolUvDependencyGroups(groups))
}
}
deserializer.deserialize_map(SourcesVisitor)
deserialize_unique_map(deserializer, |key: &GroupName| {
format!("duplicate settings for dependency group `{key}`")
})
.map(ToolUvDependencyGroups)
}
}
@ -749,6 +755,51 @@ pub struct DependencyGroupSettings {
pub requires_python: Option<VersionSpecifiers>,
}
/// Additional build-time dependencies, keyed by the package they apply to.
///
/// Wraps a map from [`PackageName`] to the extra PEP 508 requirements that
/// should be added to that package's build environment.
#[derive(Default, Debug, Clone, PartialEq, Eq, Serialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
pub struct ExtraBuildDependencies(
    BTreeMap<PackageName, Vec<uv_pep508::Requirement<VerbatimParsedUrl>>>,
);

// Expose the inner map's read API (lookups, iteration, `is_empty`, etc.).
impl std::ops::Deref for ExtraBuildDependencies {
    type Target = BTreeMap<PackageName, Vec<uv_pep508::Requirement<VerbatimParsedUrl>>>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

// Expose the inner map's mutating API.
impl std::ops::DerefMut for ExtraBuildDependencies {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

// Consume the wrapper, yielding `(package, requirements)` pairs in key order.
impl IntoIterator for ExtraBuildDependencies {
    type Item = (PackageName, Vec<uv_pep508::Requirement<VerbatimParsedUrl>>);
    type IntoIter = std::collections::btree_map::IntoIter<
        PackageName,
        Vec<uv_pep508::Requirement<VerbatimParsedUrl>>,
    >;

    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}

/// Ensure that all keys in the TOML table are unique.
impl<'de> serde::de::Deserialize<'de> for ExtraBuildDependencies {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserialize_unique_map(deserializer, |key: &PackageName| {
            format!("duplicate extra-build-dependencies for `{key}`")
        })
        .map(ExtraBuildDependencies)
    }
}
#[derive(Deserialize, OptionsMetadata, Default, Debug, Clone, PartialEq, Eq)]
#[cfg_attr(test, derive(Serialize))]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]

View File

@ -1970,6 +1970,7 @@ mod tests {
"package": null,
"default-groups": null,
"dependency-groups": null,
"extra-build-dependencies": null,
"dev-dependencies": null,
"override-dependencies": null,
"constraint-dependencies": null,
@ -2070,6 +2071,7 @@ mod tests {
"package": null,
"default-groups": null,
"dependency-groups": null,
"extra-build-dependencies": null,
"dev-dependencies": null,
"override-dependencies": null,
"constraint-dependencies": null,
@ -2283,6 +2285,7 @@ mod tests {
"package": null,
"default-groups": null,
"dependency-groups": null,
"extra-build-dependencies": null,
"dev-dependencies": null,
"override-dependencies": null,
"constraint-dependencies": null,
@ -2392,6 +2395,7 @@ mod tests {
"package": null,
"default-groups": null,
"dependency-groups": null,
"extra-build-dependencies": null,
"dev-dependencies": null,
"override-dependencies": null,
"constraint-dependencies": null,
@ -2514,6 +2518,7 @@ mod tests {
"package": null,
"default-groups": null,
"dependency-groups": null,
"extra-build-dependencies": null,
"dev-dependencies": null,
"override-dependencies": null,
"constraint-dependencies": null,
@ -2610,6 +2615,7 @@ mod tests {
"package": null,
"default-groups": null,
"dependency-groups": null,
"extra-build-dependencies": null,
"dev-dependencies": null,
"override-dependencies": null,
"constraint-dependencies": null,

View File

@ -1,6 +1,6 @@
[package]
name = "uv"
version = "0.8.3"
version = "0.8.4"
edition = { workspace = true }
rust-version = { workspace = true }
homepage = { workspace = true }
@ -38,6 +38,7 @@ uv-normalize = { workspace = true }
uv-pep440 = { workspace = true }
uv-pep508 = { workspace = true }
uv-performance-memory-allocator = { path = "../uv-performance-memory-allocator", optional = true }
uv-platform = { workspace = true }
uv-platform-tags = { workspace = true }
uv-publish = { workspace = true }
uv-pypi-types = { workspace = true }

View File

@ -38,6 +38,7 @@ use uv_requirements::RequirementsSource;
use uv_resolver::{ExcludeNewer, FlatIndex};
use uv_settings::PythonInstallMirrors;
use uv_types::{AnyErrorBuild, BuildContext, BuildIsolation, BuildStack, HashStrategy};
use uv_workspace::pyproject::ExtraBuildDependencies;
use uv_workspace::{DiscoveryOptions, Workspace, WorkspaceCache, WorkspaceError};
use crate::commands::ExitStatus;
@ -200,6 +201,7 @@ async fn build_impl(
config_settings_package,
no_build_isolation,
no_build_isolation_package,
extra_build_dependencies,
exclude_newer,
link_mode,
upgrade: _,
@ -346,9 +348,10 @@ async fn build_impl(
build_constraints,
*no_build_isolation,
no_build_isolation_package,
extra_build_dependencies,
*index_strategy,
*keyring_provider,
*exclude_newer,
exclude_newer.clone(),
sources.clone(),
concurrency,
build_options,
@ -424,9 +427,10 @@ async fn build_package(
build_constraints: &[RequirementsSource],
no_build_isolation: bool,
no_build_isolation_package: &[PackageName],
extra_build_dependencies: &ExtraBuildDependencies,
index_strategy: IndexStrategy,
keyring_provider: KeyringProviderType,
exclude_newer: Option<ExcludeNewer>,
exclude_newer: ExcludeNewer,
sources: NoSources,
concurrency: Concurrency,
build_options: &BuildOptions,
@ -560,6 +564,8 @@ async fn build_package(
let workspace_cache = WorkspaceCache::default();
// Create a build dispatch.
let extra_build_requires =
uv_distribution::ExtraBuildRequires::from_lowered(extra_build_dependencies.clone());
let build_dispatch = BuildDispatch::new(
&client,
cache,
@ -573,6 +579,7 @@ async fn build_package(
config_setting,
config_settings_package,
build_isolation,
&extra_build_requires,
link_mode,
build_options,
&hasher,

View File

@ -14,7 +14,7 @@ use uv_cache::Cache;
use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder};
use uv_configuration::{
BuildOptions, Concurrency, ConfigSettings, Constraints, ExportFormat, ExtrasSpecification,
IndexStrategy, NoBinary, NoBuild, NoSources, PackageConfigSettings, Preview, Reinstall,
IndexStrategy, NoBinary, NoBuild, NoSources, PackageConfigSettings, Preview, PreviewFeatures, Reinstall,
Upgrade,
};
use uv_configuration::{KeyringProviderType, TargetTriple};
@ -44,8 +44,9 @@ use uv_resolver::{
};
use uv_torch::{TorchMode, TorchStrategy};
use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy};
use uv_warnings::warn_user;
use uv_warnings::{warn_user, warn_user_once};
use uv_workspace::WorkspaceCache;
use uv_workspace::pyproject::ExtraBuildDependencies;
use crate::commands::pip::loggers::DefaultResolveLogger;
use crate::commands::pip::{operations, resolution_environment};
@ -95,11 +96,12 @@ pub(crate) async fn pip_compile(
config_settings_package: PackageConfigSettings,
no_build_isolation: bool,
no_build_isolation_package: Vec<PackageName>,
extra_build_dependencies: &ExtraBuildDependencies,
build_options: BuildOptions,
mut python_version: Option<PythonVersion>,
python_platform: Option<TargetTriple>,
universal: bool,
exclude_newer: Option<ExcludeNewer>,
exclude_newer: ExcludeNewer,
sources: NoSources,
annotation_style: AnnotationStyle,
link_mode: LinkMode,
@ -112,6 +114,15 @@ pub(crate) async fn pip_compile(
printer: Printer,
preview: Preview,
) -> Result<ExitStatus> {
if !preview.is_enabled(PreviewFeatures::EXTRA_BUILD_DEPENDENCIES)
&& !extra_build_dependencies.is_empty()
{
warn_user_once!(
"The `extra-build-dependencies` option is experimental and may change without warning. Pass `--preview-features {}` to disable this warning.",
PreviewFeatures::EXTRA_BUILD_DEPENDENCIES
);
}
// If the user provides a `pyproject.toml` or other TOML file as the output file, raise an
// error.
if output_file
@ -469,6 +480,8 @@ pub(crate) async fn pip_compile(
.map(|constraint| constraint.requirement.clone()),
);
let extra_build_requires =
uv_distribution::ExtraBuildRequires::from_lowered(extra_build_dependencies.clone());
let build_dispatch = BuildDispatch::new(
&client,
&cache,
@ -482,10 +495,11 @@ pub(crate) async fn pip_compile(
&config_settings,
&config_settings_package,
build_isolation,
&extra_build_requires,
link_mode,
&build_options,
&build_hashes,
exclude_newer,
exclude_newer.clone(),
sources,
WorkspaceCache::default(),
concurrency,
@ -497,7 +511,7 @@ pub(crate) async fn pip_compile(
.prerelease_mode(prerelease_mode)
.fork_strategy(fork_strategy)
.dependency_mode(dependency_mode)
.exclude_newer(exclude_newer)
.exclude_newer(exclude_newer.clone())
.index_strategy(index_strategy)
.torch_backend(torch_backend)
.build_options(build_options.clone())

View File

@ -36,8 +36,9 @@ use uv_resolver::{
};
use uv_torch::{TorchMode, TorchStrategy};
use uv_types::{BuildIsolation, HashStrategy};
use uv_warnings::warn_user;
use uv_warnings::{warn_user, warn_user_once};
use uv_workspace::WorkspaceCache;
use uv_workspace::pyproject::ExtraBuildDependencies;
use crate::commands::pip::loggers::{DefaultInstallLogger, DefaultResolveLogger, InstallLogger};
use crate::commands::pip::operations::Modifications;
@ -78,12 +79,13 @@ pub(crate) async fn pip_install(
config_settings_package: &PackageConfigSettings,
no_build_isolation: bool,
no_build_isolation_package: Vec<PackageName>,
extra_build_dependencies: &ExtraBuildDependencies,
build_options: BuildOptions,
modifications: Modifications,
python_version: Option<PythonVersion>,
python_platform: Option<TargetTriple>,
strict: bool,
exclude_newer: Option<ExcludeNewer>,
exclude_newer: ExcludeNewer,
sources: NoSources,
python: Option<String>,
system: bool,
@ -99,6 +101,15 @@ pub(crate) async fn pip_install(
) -> anyhow::Result<ExitStatus> {
let start = std::time::Instant::now();
if !preview.is_enabled(PreviewFeatures::EXTRA_BUILD_DEPENDENCIES)
&& !extra_build_dependencies.is_empty()
{
warn_user_once!(
"The `extra-build-dependencies` option is experimental and may change without warning. Pass `--preview-features {}` to disable this warning.",
PreviewFeatures::EXTRA_BUILD_DEPENDENCIES
);
}
let client_builder = BaseClientBuilder::new()
.retries_from_env()?
.connectivity(network_settings.connectivity)
@ -413,6 +424,8 @@ pub(crate) async fn pip_install(
let state = SharedState::default();
// Create a build dispatch.
let extra_build_requires =
uv_distribution::ExtraBuildRequires::from_lowered(extra_build_dependencies.clone());
let build_dispatch = BuildDispatch::new(
&client,
&cache,
@ -426,10 +439,11 @@ pub(crate) async fn pip_install(
config_settings,
config_settings_package,
build_isolation,
&extra_build_requires,
link_mode,
&build_options,
&build_hasher,
exclude_newer,
exclude_newer.clone(),
sources,
WorkspaceCache::default(),
concurrency,

View File

@ -13,12 +13,12 @@ use uv_warnings::warn_user_once;
///
/// The returned distribution is guaranteed to be compatible with the provided tags and Python
/// requirement.
#[derive(Debug, Copy, Clone)]
#[derive(Debug, Clone)]
pub(crate) struct LatestClient<'env> {
pub(crate) client: &'env RegistryClient,
pub(crate) capabilities: &'env IndexCapabilities,
pub(crate) prerelease: PrereleaseMode,
pub(crate) exclude_newer: Option<ExcludeNewer>,
pub(crate) exclude_newer: ExcludeNewer,
pub(crate) tags: Option<&'env Tags>,
pub(crate) requires_python: &'env RequiresPython,
}
@ -70,7 +70,7 @@ impl LatestClient<'_> {
for (filename, file) in files.all() {
// Skip distributions uploaded after the cutoff.
if let Some(exclude_newer) = self.exclude_newer {
if let Some(exclude_newer) = self.exclude_newer.exclude_newer_package(package) {
match file.upload_time_utc_ms.as_ref() {
Some(&upload_time)
if upload_time >= exclude_newer.timestamp_millis() =>
@ -79,8 +79,9 @@ impl LatestClient<'_> {
}
None => {
warn_user_once!(
"{} is missing an upload date, but user provided: {exclude_newer}",
"{} is missing an upload date, but user provided: {}",
file.filename,
self.exclude_newer
);
}
_ => {}

View File

@ -49,7 +49,7 @@ pub(crate) async fn pip_list(
network_settings: &NetworkSettings,
concurrency: Concurrency,
strict: bool,
exclude_newer: Option<ExcludeNewer>,
exclude_newer: ExcludeNewer,
python: Option<&str>,
system: bool,
cache: &Cache,

View File

@ -32,8 +32,9 @@ use uv_resolver::{
};
use uv_torch::{TorchMode, TorchStrategy};
use uv_types::{BuildIsolation, HashStrategy};
use uv_warnings::warn_user;
use uv_warnings::{warn_user, warn_user_once};
use uv_workspace::WorkspaceCache;
use uv_workspace::pyproject::ExtraBuildDependencies;
use crate::commands::pip::loggers::{DefaultInstallLogger, DefaultResolveLogger};
use crate::commands::pip::operations::Modifications;
@ -67,11 +68,12 @@ pub(crate) async fn pip_sync(
config_settings_package: &PackageConfigSettings,
no_build_isolation: bool,
no_build_isolation_package: Vec<PackageName>,
extra_build_dependencies: &ExtraBuildDependencies,
build_options: BuildOptions,
python_version: Option<PythonVersion>,
python_platform: Option<TargetTriple>,
strict: bool,
exclude_newer: Option<ExcludeNewer>,
exclude_newer: ExcludeNewer,
python: Option<String>,
system: bool,
break_system_packages: bool,
@ -85,6 +87,15 @@ pub(crate) async fn pip_sync(
printer: Printer,
preview: Preview,
) -> Result<ExitStatus> {
if !preview.is_enabled(PreviewFeatures::EXTRA_BUILD_DEPENDENCIES)
&& !extra_build_dependencies.is_empty()
{
warn_user_once!(
"The `extra-build-dependencies` option is experimental and may change without warning. Pass `--preview-features {}` to disable this warning.",
PreviewFeatures::EXTRA_BUILD_DEPENDENCIES
);
}
let client_builder = BaseClientBuilder::new()
.retries_from_env()?
.connectivity(network_settings.connectivity)
@ -348,6 +359,8 @@ pub(crate) async fn pip_sync(
let state = SharedState::default();
// Create a build dispatch.
let extra_build_requires =
uv_distribution::ExtraBuildRequires::from_lowered(extra_build_dependencies.clone());
let build_dispatch = BuildDispatch::new(
&client,
&cache,
@ -361,10 +374,11 @@ pub(crate) async fn pip_sync(
config_settings,
config_settings_package,
build_isolation,
&extra_build_requires,
link_mode,
&build_options,
&build_hasher,
exclude_newer,
exclude_newer.clone(),
sources,
WorkspaceCache::default(),
concurrency,

View File

@ -47,7 +47,7 @@ pub(crate) async fn pip_tree(
network_settings: NetworkSettings,
concurrency: Concurrency,
strict: bool,
exclude_newer: Option<ExcludeNewer>,
exclude_newer: ExcludeNewer,
python: Option<&str>,
system: bool,
cache: &Cache,

View File

@ -37,7 +37,7 @@ use uv_python::{Interpreter, PythonDownloads, PythonEnvironment, PythonPreferenc
use uv_redacted::DisplaySafeUrl;
use uv_requirements::{NamedRequirementsResolver, RequirementsSource, RequirementsSpecification};
use uv_resolver::FlatIndex;
use uv_scripts::{Pep723ItemRef, Pep723Metadata, Pep723Script};
use uv_scripts::{Pep723Metadata, Pep723Script};
use uv_settings::PythonInstallMirrors;
use uv_types::{BuildIsolation, HashStrategy};
use uv_warnings::warn_user_once;
@ -104,6 +104,15 @@ pub(crate) async fn add(
);
}
if !preview.is_enabled(PreviewFeatures::EXTRA_BUILD_DEPENDENCIES)
&& !settings.resolver.extra_build_dependencies.is_empty()
{
warn_user_once!(
"The `extra-build-dependencies` option is experimental and may change without warning. Pass `--preview-features {}` to disable this warning.",
PreviewFeatures::EXTRA_BUILD_DEPENDENCIES
);
}
for source in &requirements {
match source {
RequirementsSource::PyprojectToml(_) => {
@ -212,7 +221,7 @@ pub(crate) async fn add(
// Discover the interpreter.
let interpreter = ScriptInterpreter::discover(
Pep723ItemRef::Script(&script),
(&script).into(),
python.as_deref().map(PythonRequest::parse),
&network_settings,
python_preference,
@ -428,6 +437,18 @@ pub(crate) async fn add(
};
// Create a build dispatch.
let extra_build_requires = if let AddTarget::Project(project, _) = &target {
uv_distribution::ExtraBuildRequires::from_workspace(
settings.resolver.extra_build_dependencies.clone(),
project.workspace(),
&settings.resolver.index_locations,
&settings.resolver.sources,
)?
} else {
uv_distribution::ExtraBuildRequires::from_lowered(
settings.resolver.extra_build_dependencies.clone(),
)
};
let build_dispatch = BuildDispatch::new(
&client,
cache,
@ -441,10 +462,11 @@ pub(crate) async fn add(
&settings.resolver.config_setting,
&settings.resolver.config_settings_package,
build_isolation,
&extra_build_requires,
settings.resolver.link_mode,
&settings.resolver.build_options,
&build_hasher,
settings.resolver.exclude_newer,
settings.resolver.exclude_newer.clone(),
sources,
// No workspace caching since `uv add` changes the workspace definition.
WorkspaceCache::default(),

View File

@ -15,7 +15,7 @@ use uv_normalize::{DefaultExtras, DefaultGroups, PackageName};
use uv_python::{PythonDownloads, PythonPreference, PythonRequest};
use uv_requirements::is_pylock_toml;
use uv_resolver::{PylockToml, RequirementsTxtExport};
use uv_scripts::{Pep723ItemRef, Pep723Script};
use uv_scripts::Pep723Script;
use uv_settings::PythonInstallMirrors;
use uv_workspace::{DiscoveryOptions, MemberDiscovery, VirtualProject, Workspace, WorkspaceCache};
@ -132,7 +132,7 @@ pub(crate) async fn export(
} else {
Some(match &target {
ExportTarget::Script(script) => ScriptInterpreter::discover(
Pep723ItemRef::Script(script),
script.into(),
python.as_deref().map(PythonRequest::parse),
&network_settings,
python_preference,

View File

@ -13,7 +13,7 @@ use uv_cache::Cache;
use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder};
use uv_configuration::{
Concurrency, Constraints, DependencyGroupsWithDefaults, DryRun, ExtrasSpecification, Preview,
Reinstall, Upgrade,
PreviewFeatures, Reinstall, Upgrade,
};
use uv_dispatch::BuildDispatch;
use uv_distribution::DistributionDatabase;
@ -32,7 +32,7 @@ use uv_resolver::{
FlatIndex, InMemoryIndex, Lock, Options, OptionsBuilder, PythonRequirement,
ResolverEnvironment, ResolverManifest, SatisfiesResult, UniversalMarker,
};
use uv_scripts::{Pep723ItemRef, Pep723Script};
use uv_scripts::Pep723Script;
use uv_settings::PythonInstallMirrors;
use uv_types::{BuildContext, BuildIsolation, EmptyInstalledPackages, HashStrategy};
use uv_warnings::{warn_user, warn_user_once};
@ -42,7 +42,7 @@ use crate::commands::pip::loggers::{DefaultResolveLogger, ResolveLogger, Summary
use crate::commands::project::lock_target::LockTarget;
use crate::commands::project::{
ProjectError, ProjectInterpreter, ScriptInterpreter, UniversalState,
init_script_python_requirement,
init_script_python_requirement, script_extra_build_requires,
};
use crate::commands::reporters::{PythonDownloadReporter, ResolverReporter};
use crate::commands::{ExitStatus, ScriptPath, diagnostics, pip};
@ -162,7 +162,7 @@ pub(crate) async fn lock(
.await?
.into_interpreter(),
LockTarget::Script(script) => ScriptInterpreter::discover(
Pep723ItemRef::Script(script),
script.into(),
python.as_deref().map(PythonRequest::parse),
&network_settings,
python_preference,
@ -435,6 +435,7 @@ async fn do_lock(
config_settings_package,
no_build_isolation,
no_build_isolation_package,
extra_build_dependencies,
exclude_newer,
link_mode,
upgrade,
@ -442,6 +443,15 @@ async fn do_lock(
sources,
} = settings;
if !preview.is_enabled(PreviewFeatures::EXTRA_BUILD_DEPENDENCIES)
&& !extra_build_dependencies.is_empty()
{
warn_user_once!(
"The `extra-build-dependencies` option is experimental and may change without warning. Pass `--preview-features {}` to disable this warning.",
PreviewFeatures::EXTRA_BUILD_DEPENDENCIES
);
}
// Collect the requirements, etc.
let members = target.members();
let packages = target.packages();
@ -641,7 +651,7 @@ async fn do_lock(
.resolution_mode(*resolution)
.prerelease_mode(*prerelease)
.fork_strategy(*fork_strategy)
.exclude_newer(*exclude_newer)
.exclude_newer(exclude_newer.clone())
.index_strategy(*index_strategy)
.build_options(build_options.clone())
.required_environments(required_environments.cloned().unwrap_or_default())
@ -664,6 +674,18 @@ async fn do_lock(
};
// Create a build dispatch.
let extra_build_requires = match &target {
LockTarget::Workspace(workspace) => uv_distribution::ExtraBuildRequires::from_workspace(
extra_build_dependencies.clone(),
workspace,
index_locations,
sources,
)?,
LockTarget::Script(script) => {
// Try to get extra build dependencies from the script metadata
script_extra_build_requires((*script).into(), settings)?
}
};
let build_dispatch = BuildDispatch::new(
&client,
cache,
@ -677,10 +699,11 @@ async fn do_lock(
config_setting,
config_settings_package,
build_isolation,
&extra_build_requires,
*link_mode,
build_options,
&build_hasher,
*exclude_newer,
exclude_newer.clone(),
sources.clone(),
workspace_cache.clone(),
concurrency,
@ -943,7 +966,7 @@ impl ValidatedLock {
if lock.prerelease_mode() != options.prerelease_mode {
let _ = writeln!(
printer.stderr(),
"Ignoring existing lockfile due to change in pre-release mode: `{}` vs. `{}`",
"Resolving despite existing lockfile due to change in pre-release mode: `{}` vs. `{}`",
lock.prerelease_mode().cyan(),
options.prerelease_mode.cyan()
);
@ -958,31 +981,37 @@ impl ValidatedLock {
);
return Ok(Self::Unusable(lock));
}
match (lock.exclude_newer(), options.exclude_newer) {
(None, None) => (),
(Some(existing), Some(provided)) if existing == provided => (),
(Some(existing), Some(provided)) => {
let lock_exclude_newer = lock.exclude_newer();
let options_exclude_newer = &options.exclude_newer;
match (
lock_exclude_newer.is_empty(),
options_exclude_newer.is_empty(),
) {
(true, true) => (),
(false, false) if lock_exclude_newer == *options_exclude_newer => (),
(false, false) => {
let _ = writeln!(
printer.stderr(),
"Ignoring existing lockfile due to change in timestamp cutoff: `{}` vs. `{}`",
existing.cyan(),
provided.cyan()
lock_exclude_newer.cyan(),
options_exclude_newer.cyan()
);
return Ok(Self::Unusable(lock));
}
(Some(existing), None) => {
(false, true) => {
let _ = writeln!(
printer.stderr(),
"Ignoring existing lockfile due to removal of timestamp cutoff: `{}`",
existing.cyan(),
lock_exclude_newer.cyan(),
);
return Ok(Self::Unusable(lock));
}
(None, Some(provided)) => {
(true, false) => {
let _ = writeln!(
printer.stderr(),
"Ignoring existing lockfile due to addition of timestamp cutoff: `{}`",
provided.cyan()
options_exclude_newer.cyan()
);
return Ok(Self::Unusable(lock));
}
@ -1009,7 +1038,7 @@ impl ValidatedLock {
// to re-use the existing fork markers.
if let Err((fork_markers_union, environments_union)) = lock.check_marker_coverage() {
warn_user!(
"Ignoring existing lockfile due to fork markers not covering the supported environments: `{}` vs `{}`",
"Resolving despite existing lockfile due to fork markers not covering the supported environments: `{}` vs `{}`",
fork_markers_union
.try_to_string()
.unwrap_or("true".to_string()),
@ -1026,7 +1055,7 @@ impl ValidatedLock {
lock.requires_python_coverage(requires_python)
{
warn_user!(
"Ignoring existing lockfile due to fork markers being disjoint with `requires-python`: `{}` vs `{}`",
"Resolving despite existing lockfile due to fork markers being disjoint with `requires-python`: `{}` vs `{}`",
fork_markers_union
.try_to_string()
.unwrap_or("true".to_string()),
@ -1040,7 +1069,7 @@ impl ValidatedLock {
if let Upgrade::Packages(_) = upgrade {
// If the user specified `--upgrade-package`, then at best we can prefer some of
// the existing versions.
debug!("Ignoring existing lockfile due to `--upgrade-package`");
debug!("Resolving despite existing lockfile due to `--upgrade-package`");
return Ok(Self::Preferable(lock));
}
@ -1048,7 +1077,7 @@ impl ValidatedLock {
// the set of `resolution-markers` may no longer cover the entire supported Python range.
if lock.requires_python().range() != requires_python.range() {
debug!(
"Ignoring existing lockfile due to change in Python requirement: `{}` vs. `{}`",
"Resolving despite existing lockfile due to change in Python requirement: `{}` vs. `{}`",
lock.requires_python(),
requires_python,
);
@ -1070,7 +1099,7 @@ impl ValidatedLock {
.collect::<Vec<_>>();
if expected != actual {
debug!(
"Ignoring existing lockfile due to change in supported environments: `{:?}` vs. `{:?}`",
"Resolving despite existing lockfile due to change in supported environments: `{:?}` vs. `{:?}`",
expected, actual
);
return Ok(Self::Versions(lock));
@ -1087,7 +1116,7 @@ impl ValidatedLock {
.collect::<Vec<_>>();
if expected != actual {
debug!(
"Ignoring existing lockfile due to change in supported environments: `{:?}` vs. `{:?}`",
"Resolving despite existing lockfile due to change in supported environments: `{:?}` vs. `{:?}`",
expected, actual
);
return Ok(Self::Versions(lock));
@ -1096,7 +1125,7 @@ impl ValidatedLock {
// If the conflicting group config has changed, we have to perform a clean resolution.
if conflicts != lock.conflicts() {
debug!(
"Ignoring existing lockfile due to change in conflicting groups: `{:?}` vs. `{:?}`",
"Resolving despite existing lockfile due to change in conflicting groups: `{:?}` vs. `{:?}`",
conflicts,
lock.conflicts(),
);
@ -1143,7 +1172,7 @@ impl ValidatedLock {
}
SatisfiesResult::MismatchedMembers(expected, actual) => {
debug!(
"Ignoring existing lockfile due to mismatched members:\n Requested: {:?}\n Existing: {:?}",
"Resolving despite existing lockfile due to mismatched members:\n Requested: {:?}\n Existing: {:?}",
expected, actual
);
Ok(Self::Preferable(lock))
@ -1151,11 +1180,11 @@ impl ValidatedLock {
SatisfiesResult::MismatchedVirtual(name, expected) => {
if expected {
debug!(
"Ignoring existing lockfile due to mismatched source: `{name}` (expected: `virtual`)"
"Resolving despite existing lockfile due to mismatched source: `{name}` (expected: `virtual`)"
);
} else {
debug!(
"Ignoring existing lockfile due to mismatched source: `{name}` (expected: `editable`)"
"Resolving despite existing lockfile due to mismatched source: `{name}` (expected: `editable`)"
);
}
Ok(Self::Preferable(lock))
@ -1163,11 +1192,11 @@ impl ValidatedLock {
SatisfiesResult::MismatchedDynamic(name, expected) => {
if expected {
debug!(
"Ignoring existing lockfile due to static version: `{name}` (expected a dynamic version)"
"Resolving despite existing lockfile due to static version: `{name}` (expected a dynamic version)"
);
} else {
debug!(
"Ignoring existing lockfile due to dynamic version: `{name}` (expected a static version)"
"Resolving despite existing lockfile due to dynamic version: `{name}` (expected a static version)"
);
}
Ok(Self::Preferable(lock))
@ -1175,70 +1204,70 @@ impl ValidatedLock {
SatisfiesResult::MismatchedVersion(name, expected, actual) => {
if let Some(actual) = actual {
debug!(
"Ignoring existing lockfile due to mismatched version: `{name}` (expected: `{expected}`, found: `{actual}`)"
"Resolving despite existing lockfile due to mismatched version: `{name}` (expected: `{expected}`, found: `{actual}`)"
);
} else {
debug!(
"Ignoring existing lockfile due to mismatched version: `{name}` (expected: `{expected}`)"
"Resolving despite existing lockfile due to mismatched version: `{name}` (expected: `{expected}`)"
);
}
Ok(Self::Preferable(lock))
}
SatisfiesResult::MismatchedRequirements(expected, actual) => {
debug!(
"Ignoring existing lockfile due to mismatched requirements:\n Requested: {:?}\n Existing: {:?}",
"Resolving despite existing lockfile due to mismatched requirements:\n Requested: {:?}\n Existing: {:?}",
expected, actual
);
Ok(Self::Preferable(lock))
}
SatisfiesResult::MismatchedConstraints(expected, actual) => {
debug!(
"Ignoring existing lockfile due to mismatched constraints:\n Requested: {:?}\n Existing: {:?}",
"Resolving despite existing lockfile due to mismatched constraints:\n Requested: {:?}\n Existing: {:?}",
expected, actual
);
Ok(Self::Preferable(lock))
}
SatisfiesResult::MismatchedOverrides(expected, actual) => {
debug!(
"Ignoring existing lockfile due to mismatched overrides:\n Requested: {:?}\n Existing: {:?}",
"Resolving despite existing lockfile due to mismatched overrides:\n Requested: {:?}\n Existing: {:?}",
expected, actual
);
Ok(Self::Preferable(lock))
}
SatisfiesResult::MismatchedBuildConstraints(expected, actual) => {
debug!(
"Ignoring existing lockfile due to mismatched build constraints:\n Requested: {:?}\n Existing: {:?}",
"Resolving despite existing lockfile due to mismatched build constraints:\n Requested: {:?}\n Existing: {:?}",
expected, actual
);
Ok(Self::Preferable(lock))
}
SatisfiesResult::MismatchedDependencyGroups(expected, actual) => {
debug!(
"Ignoring existing lockfile due to mismatched dependency groups:\n Requested: {:?}\n Existing: {:?}",
"Resolving despite existing lockfile due to mismatched dependency groups:\n Requested: {:?}\n Existing: {:?}",
expected, actual
);
Ok(Self::Preferable(lock))
}
SatisfiesResult::MismatchedStaticMetadata(expected, actual) => {
debug!(
"Ignoring existing lockfile due to mismatched static metadata:\n Requested: {:?}\n Existing: {:?}",
"Resolving despite existing lockfile due to mismatched static metadata:\n Requested: {:?}\n Existing: {:?}",
expected, actual
);
Ok(Self::Preferable(lock))
}
SatisfiesResult::MissingRoot(name) => {
debug!("Ignoring existing lockfile due to missing root package: `{name}`");
debug!("Resolving despite existing lockfile due to missing root package: `{name}`");
Ok(Self::Preferable(lock))
}
SatisfiesResult::MissingRemoteIndex(name, version, index) => {
debug!(
"Ignoring existing lockfile due to missing remote index: `{name}` `{version}` from `{index}`"
"Resolving despite existing lockfile due to missing remote index: `{name}` `{version}` from `{index}`"
);
Ok(Self::Preferable(lock))
}
SatisfiesResult::MissingLocalIndex(name, version, index) => {
debug!(
"Ignoring existing lockfile due to missing local index: `{name}` `{version}` from `{}`",
"Resolving despite existing lockfile due to missing local index: `{name}` `{version}` from `{}`",
index.display()
);
Ok(Self::Preferable(lock))
@ -1246,12 +1275,12 @@ impl ValidatedLock {
SatisfiesResult::MismatchedPackageRequirements(name, version, expected, actual) => {
if let Some(version) = version {
debug!(
"Ignoring existing lockfile due to mismatched requirements for: `{name}=={version}`\n Requested: {:?}\n Existing: {:?}",
"Resolving despite existing lockfile due to mismatched requirements for: `{name}=={version}`\n Requested: {:?}\n Existing: {:?}",
expected, actual
);
} else {
debug!(
"Ignoring existing lockfile due to mismatched requirements for: `{name}`\n Requested: {:?}\n Existing: {:?}",
"Resolving despite existing lockfile due to mismatched requirements for: `{name}`\n Requested: {:?}\n Existing: {:?}",
expected, actual
);
}
@ -1260,12 +1289,12 @@ impl ValidatedLock {
SatisfiesResult::MismatchedPackageDependencyGroups(name, version, expected, actual) => {
if let Some(version) = version {
debug!(
"Ignoring existing lockfile due to mismatched dependency groups for: `{name}=={version}`\n Requested: {:?}\n Existing: {:?}",
"Resolving despite existing lockfile due to mismatched dependency groups for: `{name}=={version}`\n Requested: {:?}\n Existing: {:?}",
expected, actual
);
} else {
debug!(
"Ignoring existing lockfile due to mismatched dependency groups for: `{name}`\n Requested: {:?}\n Existing: {:?}",
"Resolving despite existing lockfile due to mismatched dependency groups for: `{name}`\n Requested: {:?}\n Existing: {:?}",
expected, actual
);
}
@ -1274,19 +1303,19 @@ impl ValidatedLock {
SatisfiesResult::MismatchedPackageProvidesExtra(name, version, expected, actual) => {
if let Some(version) = version {
debug!(
"Ignoring existing lockfile due to mismatched extras for: `{name}=={version}`\n Requested: {:?}\n Existing: {:?}",
"Resolving despite existing lockfile due to mismatched extras for: `{name}=={version}`\n Requested: {:?}\n Existing: {:?}",
expected, actual
);
} else {
debug!(
"Ignoring existing lockfile due to mismatched extras for: `{name}`\n Requested: {:?}\n Existing: {:?}",
"Resolving despite existing lockfile due to mismatched extras for: `{name}`\n Requested: {:?}\n Existing: {:?}",
expected, actual
);
}
Ok(Self::Preferable(lock))
}
SatisfiesResult::MissingVersion(name) => {
debug!("Ignoring existing lockfile due to missing version: `{name}`");
debug!("Resolving despite existing lockfile due to missing version: `{name}`");
Ok(Self::Preferable(lock))
}
}

View File

@ -46,6 +46,7 @@ use uv_types::{BuildIsolation, EmptyInstalledPackages, HashStrategy};
use uv_virtualenv::remove_virtualenv;
use uv_warnings::{warn_user, warn_user_once};
use uv_workspace::dependency_groups::DependencyGroupError;
use uv_workspace::pyproject::ExtraBuildDependencies;
use uv_workspace::pyproject::PyProjectToml;
use uv_workspace::{RequiresPythonSources, Workspace, WorkspaceCache};
@ -1692,6 +1693,7 @@ pub(crate) async fn resolve_names(
link_mode,
no_build_isolation,
no_build_isolation_package,
extra_build_dependencies,
prerelease: _,
resolution: _,
sources,
@ -1740,6 +1742,8 @@ pub(crate) async fn resolve_names(
let build_hasher = HashStrategy::default();
// Create a build dispatch.
let extra_build_requires =
uv_distribution::ExtraBuildRequires::from_lowered(extra_build_dependencies.clone());
let build_dispatch = BuildDispatch::new(
&client,
cache,
@ -1753,10 +1757,11 @@ pub(crate) async fn resolve_names(
config_setting,
config_settings_package,
build_isolation,
&extra_build_requires,
*link_mode,
build_options,
&build_hasher,
*exclude_newer,
exclude_newer.clone(),
sources.clone(),
workspace_cache.clone(),
concurrency,
@ -1845,6 +1850,7 @@ pub(crate) async fn resolve_environment(
config_settings_package,
no_build_isolation,
no_build_isolation_package,
extra_build_dependencies,
exclude_newer,
link_mode,
upgrade: _,
@ -1901,7 +1907,7 @@ pub(crate) async fn resolve_environment(
.resolution_mode(*resolution)
.prerelease_mode(*prerelease)
.fork_strategy(*fork_strategy)
.exclude_newer(*exclude_newer)
.exclude_newer(exclude_newer.clone())
.index_strategy(*index_strategy)
.build_options(build_options.clone())
.build();
@ -1948,6 +1954,8 @@ pub(crate) async fn resolve_environment(
let workspace_cache = WorkspaceCache::default();
// Create a build dispatch.
let extra_build_requires =
uv_distribution::ExtraBuildRequires::from_lowered(extra_build_dependencies.clone());
let resolve_dispatch = BuildDispatch::new(
&client,
cache,
@ -1961,10 +1969,11 @@ pub(crate) async fn resolve_environment(
config_setting,
config_settings_package,
build_isolation,
&extra_build_requires,
*link_mode,
build_options,
&build_hasher,
*exclude_newer,
exclude_newer.clone(),
sources.clone(),
workspace_cache,
concurrency,
@ -2028,6 +2037,7 @@ pub(crate) async fn sync_environment(
config_settings_package,
no_build_isolation,
no_build_isolation_package,
extra_build_dependencies,
exclude_newer,
link_mode,
compile_bytecode,
@ -2086,6 +2096,8 @@ pub(crate) async fn sync_environment(
};
// Create a build dispatch.
let extra_build_requires =
uv_distribution::ExtraBuildRequires::from_lowered(extra_build_dependencies.clone());
let build_dispatch = BuildDispatch::new(
&client,
cache,
@ -2099,6 +2111,7 @@ pub(crate) async fn sync_environment(
config_setting,
config_settings_package,
build_isolation,
&extra_build_requires,
link_mode,
build_options,
&build_hasher,
@ -2164,6 +2177,7 @@ pub(crate) async fn update_environment(
spec: RequirementsSpecification,
modifications: Modifications,
build_constraints: Constraints,
extra_build_requires: uv_distribution::ExtraBuildRequires,
settings: &ResolverInstallerSettings,
network_settings: &NetworkSettings,
state: &SharedState,
@ -2194,6 +2208,7 @@ pub(crate) async fn update_environment(
link_mode,
no_build_isolation,
no_build_isolation_package,
extra_build_dependencies: _,
prerelease,
resolution,
sources,
@ -2283,7 +2298,7 @@ pub(crate) async fn update_environment(
.resolution_mode(*resolution)
.prerelease_mode(*prerelease)
.fork_strategy(*fork_strategy)
.exclude_newer(*exclude_newer)
.exclude_newer(exclude_newer.clone())
.index_strategy(*index_strategy)
.build_options(build_options.clone())
.build();
@ -2323,10 +2338,11 @@ pub(crate) async fn update_environment(
config_setting,
config_settings_package,
build_isolation,
&extra_build_requires,
*link_mode,
build_options,
&build_hasher,
*exclude_newer,
exclude_newer.clone(),
sources.clone(),
workspace_cache,
concurrency,
@ -2537,42 +2553,9 @@ pub(crate) fn script_specification(
return Ok(None);
};
// Determine the working directory for the script.
let script_dir = match &script {
Pep723ItemRef::Script(script) => std::path::absolute(&script.path)?
.parent()
.expect("script path has no parent")
.to_owned(),
Pep723ItemRef::Stdin(..) | Pep723ItemRef::Remote(..) => std::env::current_dir()?,
};
// Collect any `tool.uv.index` from the script.
let empty = Vec::default();
let script_indexes = if settings.sources.no_sources() {
&empty
} else {
script
.metadata()
.tool
.as_ref()
.and_then(|tool| tool.uv.as_ref())
.and_then(|uv| uv.top_level.index.as_deref())
.unwrap_or(&empty)
};
// Collect any `tool.uv.sources` from the script.
let empty = BTreeMap::default();
let script_sources = if settings.sources.no_sources() {
&empty
} else {
script
.metadata()
.tool
.as_ref()
.and_then(|tool| tool.uv.as_ref())
.and_then(|uv| uv.sources.as_ref())
.unwrap_or(&empty)
};
let script_dir = script.directory()?;
let script_indexes = script.indexes(&settings.sources);
let script_sources = script.sources(&settings.sources);
let requirements = dependencies
.iter()
@ -2636,6 +2619,51 @@ pub(crate) fn script_specification(
)))
}
/// Determine the extra build requires for a script.
#[allow(clippy::result_large_err)]
pub(crate) fn script_extra_build_requires(
script: Pep723ItemRef<'_>,
settings: &ResolverSettings,
) -> Result<uv_distribution::ExtraBuildRequires, ProjectError> {
let script_dir = script.directory()?;
let script_indexes = script.indexes(&settings.sources);
let script_sources = script.sources(&settings.sources);
// Collect any `tool.uv.extra-build-dependencies` from the script.
let empty = BTreeMap::default();
let script_extra_build_dependencies = script
.metadata()
.tool
.as_ref()
.and_then(|tool| tool.uv.as_ref())
.and_then(|uv| uv.extra_build_dependencies.as_ref())
.unwrap_or(&empty);
// Lower the extra build dependencies
let mut extra_build_dependencies = ExtraBuildDependencies::default();
for (name, requirements) in script_extra_build_dependencies {
let lowered_requirements: Vec<_> = requirements
.iter()
.cloned()
.flat_map(|requirement| {
LoweredRequirement::from_non_workspace_requirement(
requirement,
script_dir.as_ref(),
script_sources,
script_indexes,
&settings.index_locations,
)
.map_ok(|req| req.into_inner().into())
})
.collect::<Result<Vec<_>, _>>()?;
extra_build_dependencies.insert(name.clone(), lowered_requirements);
}
Ok(uv_distribution::ExtraBuildRequires::from_lowered(
extra_build_dependencies,
))
}
/// Warn if the user provides (e.g.) an `--index-url` in a requirements file.
fn warn_on_requirements_txt_setting(spec: &RequirementsSpecification, settings: &ResolverSettings) {
let RequirementsSpecification {

View File

@ -16,7 +16,7 @@ use uv_fs::Simplified;
use uv_normalize::{DEV_DEPENDENCIES, DefaultExtras, DefaultGroups};
use uv_pep508::PackageName;
use uv_python::{PythonDownloads, PythonPreference, PythonRequest};
use uv_scripts::{Pep723ItemRef, Pep723Metadata, Pep723Script};
use uv_scripts::{Pep723Metadata, Pep723Script};
use uv_settings::PythonInstallMirrors;
use uv_warnings::warn_user_once;
use uv_workspace::pyproject::DependencyType;
@ -261,7 +261,7 @@ pub(crate) async fn remove(
}
RemoveTarget::Script(script) => {
let interpreter = ScriptInterpreter::discover(
Pep723ItemRef::Script(&script),
(&script).into(),
python.as_deref().map(PythonRequest::parse),
&network_settings,
python_preference,

View File

@ -53,8 +53,8 @@ use crate::commands::project::lock_target::LockTarget;
use crate::commands::project::{
EnvironmentSpecification, PreferenceLocation, ProjectEnvironment, ProjectError,
ScriptEnvironment, ScriptInterpreter, UniversalState, WorkspacePython,
default_dependency_groups, script_specification, update_environment,
validate_project_requires_python,
default_dependency_groups, script_extra_build_requires, script_specification,
update_environment, validate_project_requires_python,
};
use crate::commands::reporters::PythonDownloadReporter;
use crate::commands::{ExitStatus, diagnostics, project};
@ -359,6 +359,8 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
// Install the script requirements, if necessary. Otherwise, use an isolated environment.
if let Some(spec) = script_specification((&script).into(), &settings.resolver)? {
let script_extra_build_requires =
script_extra_build_requires((&script).into(), &settings.resolver)?;
let environment = ScriptEnvironment::get_or_init(
(&script).into(),
python.as_deref().map(PythonRequest::parse),
@ -407,6 +409,7 @@ hint: If you are running a script with `{}` in the shebang, you may need to incl
spec,
modifications,
build_constraints.unwrap_or_default(),
script_extra_build_requires,
&settings,
&network_settings,
&sync_state,
@ -1803,8 +1806,15 @@ fn copy_entrypoint(
' '''
"#,
)
// Or an absolute path shebang
// Or, an absolute path shebang
.or_else(|| contents.strip_prefix(&format!("#!{}\n", previous_executable.display())))
// If the previous executable ends with `python3`, check for a shebang with `python` too
.or_else(|| {
previous_executable
.to_str()
.and_then(|path| path.strip_suffix("3"))
.and_then(|path| contents.strip_prefix(&format!("#!{path}\n")))
})
else {
// If it's not a Python shebang, we'll skip it
trace!(

View File

@ -14,7 +14,7 @@ use uv_client::{BaseClientBuilder, FlatIndexClient, RegistryClientBuilder};
use uv_configuration::{
Concurrency, Constraints, DependencyGroups, DependencyGroupsWithDefaults, DryRun, EditableMode,
ExtrasSpecification, ExtrasSpecificationWithDefaults, HashCheckingMode, InstallOptions,
Preview, PreviewFeatures, TargetTriple,
Preview, PreviewFeatures, TargetTriple, Upgrade,
};
use uv_dispatch::BuildDispatch;
use uv_distribution_types::{
@ -26,11 +26,11 @@ use uv_normalize::{DefaultExtras, DefaultGroups, PackageName};
use uv_pep508::{MarkerTree, VersionOrUrl};
use uv_pypi_types::{ParsedArchiveUrl, ParsedGitUrl, ParsedUrl};
use uv_python::{PythonDownloads, PythonEnvironment, PythonPreference, PythonRequest};
use uv_resolver::{FlatIndex, Installable, Lock};
use uv_scripts::{Pep723ItemRef, Pep723Script};
use uv_resolver::{FlatIndex, ForkStrategy, Installable, Lock, PrereleaseMode, ResolutionMode};
use uv_scripts::Pep723Script;
use uv_settings::PythonInstallMirrors;
use uv_types::{BuildIsolation, HashStrategy};
use uv_warnings::warn_user;
use uv_warnings::{warn_user, warn_user_once};
use uv_workspace::pyproject::Source;
use uv_workspace::{DiscoveryOptions, MemberDiscovery, VirtualProject, Workspace, WorkspaceCache};
@ -43,11 +43,14 @@ use crate::commands::project::lock::{LockMode, LockOperation, LockResult};
use crate::commands::project::lock_target::LockTarget;
use crate::commands::project::{
PlatformState, ProjectEnvironment, ProjectError, ScriptEnvironment, UniversalState,
default_dependency_groups, detect_conflicts, script_specification, update_environment,
default_dependency_groups, detect_conflicts, script_extra_build_requires, script_specification,
update_environment,
};
use crate::commands::{ExitStatus, diagnostics};
use crate::printer::Printer;
use crate::settings::{InstallerSettingsRef, NetworkSettings, ResolverInstallerSettings};
use crate::settings::{
InstallerSettingsRef, NetworkSettings, ResolverInstallerSettings, ResolverSettings,
};
/// Sync the project environment.
#[allow(clippy::fn_params_excessive_bools)]
@ -164,7 +167,7 @@ pub(crate) async fn sync(
),
SyncTarget::Script(script) => SyncEnvironment::Script(
ScriptEnvironment::get_or_init(
Pep723ItemRef::Script(script),
script.into(),
python.as_deref().map(PythonRequest::parse),
&network_settings,
python_preference,
@ -222,8 +225,9 @@ pub(crate) async fn sync(
}
// Parse the requirements from the script.
let spec = script_specification(Pep723ItemRef::Script(script), &settings.resolver)?
.unwrap_or_default();
let spec = script_specification(script.into(), &settings.resolver)?.unwrap_or_default();
let script_extra_build_requires =
script_extra_build_requires(script.into(), &settings.resolver)?;
// Parse the build constraints from the script.
let build_constraints = script
@ -248,6 +252,7 @@ pub(crate) async fn sync(
spec,
modifications,
build_constraints.unwrap_or_default(),
script_extra_build_requires,
&settings,
&network_settings,
&PlatformState::default(),
@ -579,6 +584,7 @@ pub(super) async fn do_sync(
config_settings_package,
no_build_isolation,
no_build_isolation_package,
extra_build_dependencies,
exclude_newer,
link_mode,
compile_bytecode,
@ -587,6 +593,52 @@ pub(super) async fn do_sync(
sources,
} = settings;
if !preview.is_enabled(PreviewFeatures::EXTRA_BUILD_DEPENDENCIES)
&& !extra_build_dependencies.is_empty()
{
warn_user_once!(
"The `extra-build-dependencies` option is experimental and may change without warning. Pass `--preview-features {}` to disable this warning.",
PreviewFeatures::EXTRA_BUILD_DEPENDENCIES
);
}
// Lower the extra build dependencies with source resolution
let extra_build_requires = match &target {
InstallTarget::Workspace { workspace, .. }
| InstallTarget::Project { workspace, .. }
| InstallTarget::NonProjectWorkspace { workspace, .. } => {
uv_distribution::ExtraBuildRequires::from_workspace(
extra_build_dependencies.clone(),
workspace,
index_locations,
&sources,
)?
}
InstallTarget::Script { script, .. } => {
// Try to get extra build dependencies from the script metadata
let resolver_settings = ResolverSettings {
build_options: build_options.clone(),
config_setting: config_setting.clone(),
config_settings_package: config_settings_package.clone(),
dependency_metadata: dependency_metadata.clone(),
exclude_newer: exclude_newer.clone(),
fork_strategy: ForkStrategy::default(),
index_locations: index_locations.clone(),
index_strategy,
keyring_provider,
link_mode,
no_build_isolation,
no_build_isolation_package: no_build_isolation_package.to_vec(),
extra_build_dependencies: extra_build_dependencies.clone(),
prerelease: PrereleaseMode::default(),
resolution: ResolutionMode::default(),
sources: sources.clone(),
upgrade: Upgrade::default(),
};
script_extra_build_requires((*script).into(), &resolver_settings)?
}
};
let client_builder = BaseClientBuilder::new()
.retries_from_env()?
.connectivity(network_settings.connectivity)
@ -715,11 +767,12 @@ pub(super) async fn do_sync(
config_setting,
config_settings_package,
build_isolation,
&extra_build_requires,
link_mode,
build_options,
&build_hasher,
exclude_newer,
sources,
exclude_newer.clone(),
sources.clone(),
workspace_cache.clone(),
concurrency,
preview,

View File

@ -13,7 +13,7 @@ use uv_normalize::DefaultGroups;
use uv_pep508::PackageName;
use uv_python::{PythonDownloads, PythonPreference, PythonRequest, PythonVersion};
use uv_resolver::{PackageMap, TreeDisplay};
use uv_scripts::{Pep723ItemRef, Pep723Script};
use uv_scripts::Pep723Script;
use uv_settings::PythonInstallMirrors;
use uv_workspace::{DiscoveryOptions, Workspace, WorkspaceCache};
@ -86,7 +86,7 @@ pub(crate) async fn tree(
} else {
Some(match target {
LockTarget::Script(script) => ScriptInterpreter::discover(
Pep723ItemRef::Script(script),
script.into(),
python.as_deref().map(PythonRequest::parse),
network_settings,
python_preference,
@ -203,6 +203,7 @@ pub(crate) async fn tree(
config_settings_package: _,
no_build_isolation: _,
no_build_isolation_package: _,
extra_build_dependencies: _,
exclude_newer: _,
link_mode: _,
upgrade: _,

View File

@ -1,11 +1,10 @@
use std::fmt::Write;
use std::iter;
use std::sync::Arc;
use std::time::Duration;
use anyhow::{Context, Result, bail};
use console::Term;
use owo_colors::OwoColorize;
use owo_colors::{AnsiColors, OwoColorize};
use tokio::sync::Semaphore;
use tracing::{debug, info};
use uv_auth::Credentials;
@ -17,7 +16,7 @@ use uv_publish::{
CheckUrlClient, TrustedPublishResult, check_trusted_publishing, files_for_publishing, upload,
};
use uv_redacted::DisplaySafeUrl;
use uv_warnings::warn_user_once;
use uv_warnings::{warn_user_once, write_error_chain};
use crate::commands::reporters::PublishReporter;
use crate::commands::{ExitStatus, human_readable_bytes};
@ -274,19 +273,15 @@ async fn gather_credentials(
fetching the trusted publishing token. If you don't want to use trusted \
publishing, you can ignore this error, but you need to provide credentials."
)?;
writeln!(
write_error_chain(
anyhow::Error::from(err)
.context("Trusted publishing failed")
.as_ref(),
printer.stderr(),
"{}: {err}",
"Trusted publishing error".red().bold()
"error",
AnsiColors::Red,
)?;
for source in iter::successors(std::error::Error::source(&err), |&err| err.source()) {
writeln!(
printer.stderr(),
" {}: {}",
"Caused by".red().bold(),
source.to_string().trim()
)?;
}
}
}

View File

@ -10,12 +10,13 @@ use futures::StreamExt;
use futures::stream::FuturesUnordered;
use indexmap::IndexSet;
use itertools::{Either, Itertools};
use owo_colors::OwoColorize;
use owo_colors::{AnsiColors, OwoColorize};
use rustc_hash::{FxHashMap, FxHashSet};
use tracing::{debug, trace};
use uv_configuration::{Preview, PreviewFeatures};
use uv_fs::Simplified;
use uv_platform::{Arch, Libc};
use uv_python::downloads::{
self, ArchRequest, DownloadResult, ManagedPythonDownload, PythonDownloadRequest,
};
@ -23,14 +24,13 @@ use uv_python::managed::{
ManagedPythonInstallation, ManagedPythonInstallations, PythonMinorVersionLink,
create_link_to_executable, python_executable_dir,
};
use uv_python::platform::{Arch, Libc};
use uv_python::{
PythonDownloads, PythonInstallationKey, PythonInstallationMinorVersionKey, PythonRequest,
PythonVersionFile, VersionFileDiscoveryOptions, VersionFilePreference, VersionRequest,
};
use uv_shell::Shell;
use uv_trampoline_builder::{Launcher, LauncherKind};
use uv_warnings::warn_user;
use uv_warnings::{warn_user, write_error_chain};
use crate::commands::python::{ChangeEvent, ChangeEventKind};
use crate::commands::reporters::PythonDownloadReporter;
@ -139,7 +139,7 @@ impl Changelog {
enum InstallErrorKind {
DownloadUnpack,
Bin,
#[cfg(windows)]
#[cfg_attr(not(windows), allow(dead_code))]
Registry,
}
@ -667,7 +667,6 @@ pub(crate) async fn install(
// to warn
let fatal = !errors.iter().all(|(kind, _, _)| match kind {
InstallErrorKind::Bin => bin.is_none(),
#[cfg(windows)]
InstallErrorKind::Registry => registry.is_none(),
InstallErrorKind::DownloadUnpack => false,
});
@ -676,40 +675,45 @@ pub(crate) async fn install(
.into_iter()
.sorted_unstable_by(|(_, key_a, _), (_, key_b, _)| key_a.cmp(key_b))
{
let (level, verb) = match kind {
InstallErrorKind::DownloadUnpack => ("error".red().bold().to_string(), "install"),
match kind {
InstallErrorKind::DownloadUnpack => {
write_error_chain(
err.context(format!("Failed to install {key}")).as_ref(),
printer.stderr(),
"error",
AnsiColors::Red,
)?;
}
InstallErrorKind::Bin => {
let level = match bin {
None => "warning".yellow().bold().to_string(),
let (level, color) = match bin {
None => ("warning", AnsiColors::Yellow),
Some(false) => continue,
Some(true) => "error".red().bold().to_string(),
Some(true) => ("error", AnsiColors::Red),
};
(level, "install executable for")
}
#[cfg(windows)]
InstallErrorKind::Registry => {
let level = match registry {
None => "warning".yellow().bold().to_string(),
Some(false) => continue,
Some(true) => "error".red().bold().to_string(),
};
(level, "install registry entry for")
}
};
writeln!(
printer.stderr(),
"{level}{} Failed to {verb} {}",
":".bold(),
key.green()
)?;
for err in err.chain() {
writeln!(
printer.stderr(),
" {}: {}",
"Caused by".red().bold(),
err.to_string().trim()
)?;
write_error_chain(
err.context(format!("Failed to install executable for {key}"))
.as_ref(),
printer.stderr(),
level,
color,
)?;
}
InstallErrorKind::Registry => {
let (level, color) = match registry {
None => ("warning", AnsiColors::Yellow),
Some(false) => continue,
Some(true) => ("error", AnsiColors::Red),
};
write_error_chain(
err.context(format!("Failed to create registry entry for {key}"))
.as_ref(),
printer.stderr(),
level,
color,
)?;
}
}
}

View File

@ -1,9 +1,12 @@
use anyhow::{Context, bail};
use itertools::Itertools;
use owo_colors::OwoColorize;
use std::collections::Bound;
use std::fmt::Write;
use std::{collections::BTreeSet, ffi::OsString};
use std::{
collections::{BTreeSet, Bound},
ffi::OsString,
fmt::Write,
path::Path,
};
use tracing::{debug, warn};
use uv_cache::Cache;
use uv_client::BaseClientBuilder;
@ -22,12 +25,12 @@ use uv_python::{
};
use uv_settings::{PythonInstallMirrors, ToolOptions};
use uv_shell::Shell;
use uv_tool::{InstalledTools, Tool, ToolEntrypoint, entrypoint_paths, tool_executable_dir};
use uv_warnings::warn_user;
use uv_tool::{InstalledTools, Tool, ToolEntrypoint, entrypoint_paths};
use uv_warnings::warn_user_once;
use crate::commands::pip;
use crate::commands::project::ProjectError;
use crate::commands::reporters::PythonDownloadReporter;
use crate::commands::{ExitStatus, pip};
use crate::printer::Printer;
/// Return all packages which contain an executable with the given name.
@ -169,8 +172,9 @@ pub(crate) async fn refine_interpreter(
pub(crate) fn finalize_tool_install(
environment: &PythonEnvironment,
name: &PackageName,
entrypoints: &[PackageName],
installed_tools: &InstalledTools,
options: ToolOptions,
options: &ToolOptions,
force: bool,
python: Option<PythonRequest>,
requirements: Vec<Requirement>,
@ -178,120 +182,152 @@ pub(crate) fn finalize_tool_install(
overrides: Vec<Requirement>,
build_constraints: Vec<Requirement>,
printer: Printer,
) -> anyhow::Result<ExitStatus> {
let site_packages = SitePackages::from_environment(environment)?;
let installed = site_packages.get_packages(name);
let Some(installed_dist) = installed.first().copied() else {
bail!("Expected at least one requirement")
};
// Find a suitable path to install into
let executable_directory = tool_executable_dir()?;
) -> anyhow::Result<()> {
let executable_directory = uv_tool::tool_executable_dir()?;
fs_err::create_dir_all(&executable_directory)
.context("Failed to create executable directory")?;
debug!(
"Installing tool executables into: {}",
executable_directory.user_display()
);
let entry_points = entrypoint_paths(
&site_packages,
installed_dist.name(),
installed_dist.version(),
)?;
// Determine the entry points targets. Use a sorted collection for deterministic output.
let target_entry_points = entry_points
let mut installed_entrypoints = Vec::new();
let site_packages = SitePackages::from_environment(environment)?;
let ordered_packages = entrypoints
// Install dependencies first
.iter()
.filter(|pkg| *pkg != name)
.collect::<BTreeSet<_>>()
// Then install the root package last
.into_iter()
.map(|(name, source_path)| {
let target_path = executable_directory.join(
source_path
.file_name()
.map(std::borrow::ToOwned::to_owned)
.unwrap_or_else(|| OsString::from(name.clone())),
);
(name, source_path, target_path)
})
.collect::<BTreeSet<_>>();
.chain(std::iter::once(name));
if target_entry_points.is_empty() {
writeln!(
printer.stdout(),
"No executables are provided by package `{from}`; removing tool",
from = name.cyan()
)?;
for package in ordered_packages {
if package == name {
debug!("Installing entrypoints for tool `{package}`");
} else {
debug!("Installing entrypoints for `{package}` as part of tool `{name}`");
}
hint_executable_from_dependency(name, &site_packages, printer)?;
let installed = site_packages.get_packages(package);
let dist = installed
.first()
.context("Expected at least one requirement")?;
let dist_entrypoints = entrypoint_paths(&site_packages, dist.name(), dist.version())?;
// Clean up the environment we just created.
installed_tools.remove_environment(name)?;
// Determine the entry points targets. Use a sorted collection for deterministic output.
let target_entrypoints = dist_entrypoints
.into_iter()
.map(|(name, source_path)| {
let target_path = executable_directory.join(
source_path
.file_name()
.map(std::borrow::ToOwned::to_owned)
.unwrap_or_else(|| OsString::from(name.clone())),
);
(name, source_path, target_path)
})
.collect::<BTreeSet<_>>();
return Ok(ExitStatus::Failure);
}
if target_entrypoints.is_empty() {
// If package is not the root package, suggest to install it as a dependency.
if package != name {
writeln!(
printer.stdout(),
"No executables are provided by package `{}`\n{}{} Use `--with {}` to include `{}` as a dependency without installing its executables.",
package.cyan(),
"hint".bold().cyan(),
":".bold(),
package.cyan(),
package.cyan(),
)?;
continue;
}
// Error if we're overwriting an existing entrypoint, unless the user passed `--force`.
if !force {
let mut existing_entry_points = target_entry_points
.iter()
.filter(|(_, _, target_path)| target_path.exists())
.peekable();
if existing_entry_points.peek().is_some() {
// Clean up the environment we just created
// For the root package, this is a fatal error
writeln!(
printer.stdout(),
"No executables are provided by package `{}`; removing tool",
package.cyan()
)?;
hint_executable_from_dependency(package, &site_packages, printer)?;
// Clean up the environment we just created.
installed_tools.remove_environment(name)?;
let existing_entry_points = existing_entry_points
// SAFETY: We know the target has a filename because we just constructed it above
.map(|(_, _, target)| target.file_name().unwrap().to_string_lossy())
.collect::<Vec<_>>();
let (s, exists) = if existing_entry_points.len() == 1 {
("", "exists")
} else {
("s", "exist")
};
bail!(
"Executable{s} already {exists}: {} (use `--force` to overwrite)",
existing_entry_points
.iter()
.map(|name| name.bold())
.join(", ")
)
return Err(anyhow::anyhow!(
"Failed to install entrypoints for `{}`",
package.cyan()
));
}
}
#[cfg(windows)]
let itself = std::env::current_exe().ok();
// Error if we're overwriting an existing entrypoint, unless the user passed `--force`.
if !force {
let mut existing_entrypoints = target_entrypoints
.iter()
.filter(|(_, _, target_path)| target_path.exists())
.peekable();
if existing_entrypoints.peek().is_some() {
// Clean up the environment we just created
installed_tools.remove_environment(name)?;
for (name, source_path, target_path) in &target_entry_points {
debug!("Installing executable: `{name}`");
#[cfg(unix)]
replace_symlink(source_path, target_path).context("Failed to install executable")?;
let existing_entrypoints = existing_entrypoints
// SAFETY: We know the target has a filename because we just constructed it above
.map(|(_, _, target)| target.file_name().unwrap().to_string_lossy())
.collect::<Vec<_>>();
let (s, exists) = if existing_entrypoints.len() == 1 {
("", "exists")
} else {
("s", "exist")
};
bail!(
"Executable{s} already {exists}: {} (use `--force` to overwrite)",
existing_entrypoints
.iter()
.map(|name| name.bold())
.join(", ")
)
}
}
#[cfg(windows)]
if itself.as_ref().is_some_and(|itself| {
std::path::absolute(target_path).is_ok_and(|target| *itself == target)
}) {
self_replace::self_replace(source_path).context("Failed to install entrypoint")?;
} else {
fs_err::copy(source_path, target_path).context("Failed to install entrypoint")?;
}
}
let itself = std::env::current_exe().ok();
let s = if target_entry_points.len() == 1 {
""
} else {
"s"
};
writeln!(
printer.stderr(),
"Installed {} executable{s}: {}",
target_entry_points.len(),
target_entry_points
.iter()
.map(|(name, _, _)| name.bold())
.join(", ")
)?;
let mut names = BTreeSet::new();
for (name, src, target) in target_entrypoints {
debug!("Installing executable: `{name}`");
#[cfg(unix)]
replace_symlink(src, &target).context("Failed to install executable")?;
#[cfg(windows)]
if itself.as_ref().is_some_and(|itself| {
std::path::absolute(&target).is_ok_and(|target| *itself == target)
}) {
self_replace::self_replace(src).context("Failed to install entrypoint")?;
} else {
fs_err::copy(src, &target).context("Failed to install entrypoint")?;
}
let tool_entry = ToolEntrypoint::new(&name, target, package.to_string());
names.insert(tool_entry.name.clone());
installed_entrypoints.push(tool_entry);
}
let s = if names.len() == 1 { "" } else { "s" };
let from_pkg = if name == package {
String::new()
} else {
format!(" from `{package}`")
};
writeln!(
printer.stderr(),
"Installed {} executable{s}{from_pkg}: {}",
names.len(),
names.iter().map(|name| name.bold()).join(", ")
)?;
}
debug!("Adding receipt for tool `{name}`");
let tool = Tool::new(
@ -300,45 +336,48 @@ pub(crate) fn finalize_tool_install(
overrides,
build_constraints,
python,
target_entry_points
.into_iter()
.map(|(name, _, target_path)| ToolEntrypoint::new(name, target_path)),
options,
installed_entrypoints,
options.clone(),
);
installed_tools.add_tool_receipt(name, tool)?;
warn_out_of_path(&executable_directory);
Ok(())
}
fn warn_out_of_path(executable_directory: &Path) {
// If the executable directory isn't on the user's PATH, warn.
if !Shell::contains_path(&executable_directory) {
if !Shell::contains_path(executable_directory) {
if let Some(shell) = Shell::from_env() {
if let Some(command) = shell.prepend_path(&executable_directory) {
if let Some(command) = shell.prepend_path(executable_directory) {
if shell.supports_update() {
warn_user!(
warn_user_once!(
"`{}` is not on your PATH. To use installed tools, run `{}` or `{}`.",
executable_directory.simplified_display().cyan(),
command.green(),
"uv tool update-shell".green()
);
} else {
warn_user!(
warn_user_once!(
"`{}` is not on your PATH. To use installed tools, run `{}`.",
executable_directory.simplified_display().cyan(),
command.green()
);
}
} else {
warn_user!(
warn_user_once!(
"`{}` is not on your PATH. To use installed tools, add the directory to your PATH.",
executable_directory.simplified_display().cyan(),
);
}
} else {
warn_user!(
warn_user_once!(
"`{}` is not on your PATH. To use installed tools, add the directory to your PATH.",
executable_directory.simplified_display().cyan(),
);
}
}
Ok(ExitStatus::Success)
}
/// Displays a hint if an executable matching the package name can be found in a dependency of the package.

Some files were not shown because too many files have changed in this diff Show More